/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2016 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "wasm/WasmInstance.h"

#include "mozilla/DebugOnly.h"

#include <algorithm>

#include "jit/AtomicOperations.h"
#include "jit/Disassemble.h"
#include "jit/InlinableNatives.h"
#include "jit/JitCommon.h"
#include "jit/JitRealm.h"
#include "jit/JitScript.h"
#include "js/ForOfIterator.h"
#include "util/StringBuffer.h"
#include "util/Text.h"
#include "vm/BigIntType.h"
#include "vm/PlainObject.h"  // js::PlainObject
#include "wasm/WasmBuiltins.h"
#include "wasm/WasmModule.h"
#include "wasm/WasmStubs.h"

#include "gc/StoreBuffer-inl.h"
#include "vm/ArrayBufferObject-inl.h"
#include "vm/JSObject-inl.h"

using namespace js;
using namespace js::jit;
using namespace js::wasm;
using mozilla::BitwiseCast;
using mozilla::CheckedInt;
using mozilla::DebugOnly;

using CheckedU32 = CheckedInt<uint32_t>;
class FuncTypeIdSet {
  typedef HashMap<const FuncType*, uint32_t, FuncTypeHashPolicy,
                  SystemAllocPolicy>
      Map;
  Map map_;

 public:
  ~FuncTypeIdSet() {
    MOZ_ASSERT_IF(!JSRuntime::hasLiveRuntimes(), map_.empty());
  }

  bool allocateFuncTypeId(JSContext* cx, const FuncType& funcType,
                          const void** funcTypeId) {
    Map::AddPtr p = map_.lookupForAdd(funcType);
    if (p) {
      MOZ_ASSERT(p->value() > 0);
      p->value()++;
      *funcTypeId = p->key();
      return true;
    }

    UniquePtr<FuncType> clone = MakeUnique<FuncType>();
    if (!clone || !clone->clone(funcType) || !map_.add(p, clone.get(), 1)) {
      ReportOutOfMemory(cx);
      return false;
    }

    *funcTypeId = clone.release();
    MOZ_ASSERT(!(uintptr_t(*funcTypeId) & FuncTypeIdDesc::ImmediateBit));
    return true;
  }

  void deallocateFuncTypeId(const FuncType& funcType, const void* funcTypeId) {
    Map::Ptr p = map_.lookup(funcType);
    MOZ_RELEASE_ASSERT(p && p->key() == funcTypeId && p->value() > 0);

    p->value()--;
    if (!p->value()) {
      js_delete(p->key());
      map_.remove(p);
    }
  }
};

ExclusiveData<FuncTypeIdSet> funcTypeIdSet(mutexid::WasmFuncTypeIdSet);

const void** Instance::addressOfFuncTypeId(
    const FuncTypeIdDesc& funcTypeId) const {
  return (const void**)(globalData() + funcTypeId.globalDataOffset());
}

FuncImportTls& Instance::funcImportTls(const FuncImport& fi) {
  return *(FuncImportTls*)(globalData() + fi.tlsDataOffset());
}

TableTls& Instance::tableTls(const TableDesc& td) const {
  return *(TableTls*)(globalData() + td.globalDataOffset);
}

class NoDebug {
 public:
  template <typename T>
  static void print(T v) {}
};

class DebugCodegenVal {
  template <typename T>
  static void print(const char* fmt, T v) {
    DebugCodegen(DebugChannel::Function, fmt, v);
  }

 public:
  static void print(int32_t v) { print(" i32(%d)", v); }
  static void print(int64_t v) { print(" i64(%" PRId64 ")", v); }
  static void print(float v) { print(" f32(%f)", v); }
  static void print(double v) { print(" f64(%lf)", v); }
  static void print(void* v) { print(" ptr(%p)", v); }
};

template <typename Debug = NoDebug>
static bool ToWebAssemblyValue_i32(JSContext* cx, HandleValue val, int32_t* loc,
                                   bool mustWrite64) {
  bool ok = ToInt32(cx, val, loc);
  if (ok && mustWrite64) {
#if defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    loc[1] = loc[0] >> 31;
#else
    loc[1] = 0;
#endif
  }
  Debug::print(*loc);
  return ok;
}
template <typename Debug = NoDebug>
static bool ToWebAssemblyValue_i64(JSContext* cx, HandleValue val, int64_t* loc,
                                   bool mustWrite64) {
  MOZ_ASSERT(mustWrite64);
  JS_TRY_VAR_OR_RETURN_FALSE(cx, *loc, ToBigInt64(cx, val));
  Debug::print(*loc);
  return true;
}
template <typename Debug = NoDebug>
static bool ToWebAssemblyValue_f32(JSContext* cx, HandleValue val, float* loc,
                                   bool mustWrite64) {
  bool ok = RoundFloat32(cx, val, loc);
  if (ok && mustWrite64) {
    loc[1] = 0.0;
  }
  Debug::print(*loc);
  return ok;
}
template <typename Debug = NoDebug>
static bool ToWebAssemblyValue_f64(JSContext* cx, HandleValue val, double* loc,
                                   bool mustWrite64) {
  MOZ_ASSERT(mustWrite64);
  bool ok = ToNumber(cx, val, loc);
  Debug::print(*loc);
  return ok;
}
template <typename Debug = NoDebug>
static bool ToWebAssemblyValue_anyref(JSContext* cx, HandleValue val,
                                      void** loc, bool mustWrite64) {
  RootedAnyRef result(cx, AnyRef::null());
  if (!BoxAnyRef(cx, val, &result)) {
    return false;
  }
  *loc = result.get().forCompiledCode();
#ifndef JS_64BIT
  if (mustWrite64) {
    loc[1] = nullptr;
  }
#endif
  Debug::print(*loc);
  return true;
}
template <typename Debug = NoDebug>
static bool ToWebAssemblyValue_funcref(JSContext* cx, HandleValue val,
                                       void** loc, bool mustWrite64) {
  RootedFunction fun(cx);
  if (!CheckFuncRefValue(cx, val, &fun)) {
    return false;
  }
  *loc = fun;
#ifndef JS_64BIT
  if (mustWrite64) {
    loc[1] = nullptr;
  }
#endif
  Debug::print(*loc);
  return true;
}
template <typename Debug = NoDebug>
static bool ToWebAssemblyValue_typeref(JSContext* cx, HandleValue val,
                                       void** loc, bool mustWrite64) {
  JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                           JSMSG_WASM_TYPEREF_FROM_JS);
  return false;
}

template <typename Debug = NoDebug>
static bool ToWebAssemblyValue(JSContext* cx, HandleValue val, ValType type,
                               void* loc, bool mustWrite64) {
  switch (type.kind()) {
    case ValType::I32:
      return ToWebAssemblyValue_i32<Debug>(cx, val, (int32_t*)loc, mustWrite64);
    case ValType::I64:
      return ToWebAssemblyValue_i64<Debug>(cx, val, (int64_t*)loc, mustWrite64);
    case ValType::F32:
      return ToWebAssemblyValue_f32<Debug>(cx, val, (float*)loc, mustWrite64);
    case ValType::F64:
      return ToWebAssemblyValue_f64<Debug>(cx, val, (double*)loc, mustWrite64);
    case ValType::V128:
      MOZ_CRASH("unexpected v128 in ToWebAssemblyValue");
    case ValType::Ref:
      switch (type.refTypeKind()) {
        case RefType::Func:
          return ToWebAssemblyValue_funcref<Debug>(cx, val, (void**)loc,
                                                   mustWrite64);
        case RefType::Any:
          return ToWebAssemblyValue_anyref<Debug>(cx, val, (void**)loc,
                                                  mustWrite64);
        case RefType::TypeIndex:
          return ToWebAssemblyValue_typeref<Debug>(cx, val, (void**)loc,
                                                   mustWrite64);
      }
  }
  MOZ_CRASH("unreachable");
}
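
// A minimal usage sketch of the conversion layer above (illustrative only;
// `cx` and `val` are assumed to be a live context and a rooted value).
// Converting a JS value into a 64-bit argument slot, as the exit stubs do:
//
//   uint64_t slot = 0;
//   ValType type(ValType::I32);
//   if (!ToWebAssemblyValue<DebugCodegenVal>(cx, val, type, &slot,
//                                            /* mustWrite64 = */ true)) {
//     return false;  // exception pending on cx
//   }
//   // The low 32 bits hold the coerced integer; the high bits were zeroed
//   // (or sign-extended on MIPS) so the whole slot is well-defined.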

template <typename Debug = NoDebug>
static bool ToJSValue_i32(JSContext* cx, int32_t src, MutableHandleValue dst) {
  dst.set(Int32Value(src));
  Debug::print(src);
  return true;
}
template <typename Debug = NoDebug>
static bool ToJSValue_i64(JSContext* cx, int64_t src, MutableHandleValue dst) {
  // If bi is manipulated other than test & storing, it would need
  // to be rooted here.
  BigInt* bi = BigInt::createFromInt64(cx, src);
  if (!bi) {
    return false;
  }
  dst.set(BigIntValue(bi));
  Debug::print(src);
  return true;
}
template <typename Debug = NoDebug>
static bool ToJSValue_f32(JSContext* cx, float src, MutableHandleValue dst) {
  dst.set(JS::CanonicalizedDoubleValue(src));
  Debug::print(src);
  return true;
}
template <typename Debug = NoDebug>
static bool ToJSValue_f64(JSContext* cx, double src, MutableHandleValue dst) {
  dst.set(JS::CanonicalizedDoubleValue(src));
  Debug::print(src);
  return true;
}
template <typename Debug = NoDebug>
static bool ToJSValue_funcref(JSContext* cx, void* src,
                              MutableHandleValue dst) {
  dst.set(UnboxFuncRef(FuncRef::fromCompiledCode(src)));
  Debug::print(src);
  return true;
}
template <typename Debug = NoDebug>
static bool ToJSValue_anyref(JSContext* cx, void* src, MutableHandleValue dst) {
  dst.set(UnboxAnyRef(AnyRef::fromCompiledCode(src)));
  Debug::print(src);
  return true;
}
template <typename Debug = NoDebug>
static bool ToJSValue_typeref(JSContext* cx, void* src,
                              MutableHandleValue dst) {
  JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                           JSMSG_WASM_TYPEREF_TO_JS);
  return false;
}

template <typename Debug = NoDebug>
static bool ToJSValue(JSContext* cx, const void* src, ValType type,
                      MutableHandleValue dst) {
  switch (type.kind()) {
    case ValType::I32:
      return ToJSValue_i32<Debug>(cx, *reinterpret_cast<const int32_t*>(src),
                                  dst);
    case ValType::I64:
      return ToJSValue_i64<Debug>(cx, *reinterpret_cast<const int64_t*>(src),
                                  dst);
    case ValType::F32:
      return ToJSValue_f32<Debug>(cx, *reinterpret_cast<const float*>(src),
                                  dst);
    case ValType::F64:
      return ToJSValue_f64<Debug>(cx, *reinterpret_cast<const double*>(src),
                                  dst);
    case ValType::V128:
      MOZ_CRASH("unhandled type in ToJSValue");
    case ValType::Ref:
      switch (type.refTypeKind()) {
        case RefType::Func:
          return ToJSValue_funcref<Debug>(
              cx, *reinterpret_cast<void* const*>(src), dst);
        case RefType::Any:
          return ToJSValue_anyref<Debug>(
              cx, *reinterpret_cast<void* const*>(src), dst);
        case RefType::TypeIndex:
          return ToJSValue_typeref<Debug>(
              cx, *reinterpret_cast<void* const*>(src), dst);
      }
  }
  MOZ_CRASH("unreachable");
}
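
// Sketch of the inverse direction (illustrative only; assumes `argv` points
// at raw 64-bit slots spilled by an exit stub). Reading a wasm i64 out of a
// slot and boxing it as a JS BigInt:
//
//   RootedValue jsval(cx);
//   if (!ToJSValue(cx, &argv[0], ValType(ValType::I64), &jsval)) {
//     return false;  // OOM while allocating the BigInt
//   }
//   // jsval now holds a BigIntValue numerically equal to the wasm i64.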

// TODO(1626251): Consolidate definitions into Iterable.h
static bool IterableToArray(JSContext* cx, HandleValue iterable,
                            MutableHandle<ArrayObject*> array) {
  JS::ForOfIterator iterator(cx);
  if (!iterator.init(iterable, JS::ForOfIterator::ThrowOnNonIterable)) {
    return false;
  }

  array.set(NewDenseEmptyArray(cx));
  if (!array) {
    return false;
  }

  RootedValue nextValue(cx);
  while (true) {
    bool done;
    if (!iterator.next(&nextValue, &done)) {
      return false;
    }
    if (done) {
      break;
    }

    if (!NewbornArrayPush(cx, array, nextValue)) {
      return false;
    }
  }
  return true;
}
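
// Usage sketch (illustrative only), assuming `rval` holds whatever value a
// JS import returned for a multi-result signature:
//
//   RootedArrayObject array(cx);
//   if (!IterableToArray(cx, rval, &array)) {
//     return false;  // non-iterable value, or iteration threw
//   }
//   // array is now dense; e.g. the iterable [1, 2n, 3.5] yields length 3
//   // with elements retrievable via getDenseElement(i).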

static bool UnpackResults(JSContext* cx, const ValTypeVector& resultTypes,
                          const Maybe<char*> stackResultsArea,
                          MutableHandleValue rval) {
  if (!stackResultsArea) {
    MOZ_ASSERT(resultTypes.length() <= 1);
    // Result is either one scalar value to unpack to a wasm value, or
    // an ignored value for a zero-valued function.
    return true;
  }

  MOZ_ASSERT(stackResultsArea.isSome());
  RootedArrayObject array(cx);
  if (!IterableToArray(cx, rval, &array)) {
    return false;
  }

  if (resultTypes.length() != array->length()) {
    UniqueChars expected(JS_smprintf("%zu", resultTypes.length()));
    UniqueChars got(JS_smprintf("%u", array->length()));

    JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                             JSMSG_WASM_WRONG_NUMBER_OF_VALUES, expected.get(),
                             got.get());
    return false;
  }

  DebugOnly<uint64_t> previousOffset = ~(uint64_t)0;

  ABIResultIter iter(ResultType::Vector(resultTypes));
  // The values are converted in the order they are pushed on the
  // abstract WebAssembly stack; switch to iterate in push order.
  while (!iter.done()) {
    iter.next();
  }
  DebugOnly<bool> seenRegisterResult = false;
  for (iter.switchToPrev(); !iter.done(); iter.prev()) {
    const ABIResult& result = iter.cur();
    MOZ_ASSERT(!seenRegisterResult);
    // Use rval as a scratch area to hold the extracted result.
    rval.set(array->getDenseElement(iter.index()));
    if (result.inRegister()) {
      // Currently, if a function type has results, there can be only
      // one register result.  If there is only one result, it is
      // returned as a scalar and not an iterable, so we don't get here.
      // If there are multiple results, we extract the register result
      // and leave `rval` set to the extracted result, to be converted
      // by the caller.  The register result follows any stack results,
      // so this preserves conversion order.
      seenRegisterResult = true;
      continue;
    }
    uint32_t result_size = result.size();
    MOZ_ASSERT(result_size == 4 || result_size == 8);
#ifdef DEBUG
    if (previousOffset == ~(uint64_t)0) {
      previousOffset = result.stackOffset();
    } else {
      MOZ_ASSERT(previousOffset - (uint64_t)result_size ==
                 (uint64_t)result.stackOffset());
      previousOffset -= result_size;
    }
#endif
    char* loc = stackResultsArea.value() + result.stackOffset();
    if (!ToWebAssemblyValue(cx, rval, result.type(), loc, result_size == 8)) {
      return false;
    }
  }

  return true;
}
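
// Worked example for the loop above (illustrative; the concrete layout comes
// from ABIResultIter): for a signature with one register result and two
// 8-byte stack results, the iteration visits the stack results in decreasing
// stackOffset order. The DEBUG check asserts each offset equals
// previousOffset - result_size, i.e. the stack-results area is packed with
// no gaps. Each stack result is converted into stackResultsArea, and the
// register result is left in rval for the caller to convert last.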

bool Instance::callImport(JSContext* cx, uint32_t funcImportIndex,
                          unsigned argc, const uint64_t* argv,
                          MutableHandleValue rval) {
  AssertRealmUnchanged aru(cx);

  Tier tier = code().bestTier();

  const FuncImport& fi = metadata(tier).funcImports[funcImportIndex];

  ArgTypeVector argTypes(fi.funcType());
  InvokeArgs args(cx);
  if (!args.init(cx, argTypes.lengthWithoutStackResults())) {
    return false;
  }

#ifdef ENABLE_WASM_SIMD
  if (fi.funcType().hasV128ArgOrRet()) {
    JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                             JSMSG_WASM_BAD_VAL_TYPE);
    return false;
  }
#endif

  MOZ_ASSERT(argTypes.lengthWithStackResults() == argc);
  Maybe<char*> stackResultPointer;
  for (size_t i = 0; i < argc; i++) {
    const void* rawArgLoc = &argv[i];
    if (argTypes.isSyntheticStackResultPointerArg(i)) {
      stackResultPointer = Some(*(char**)rawArgLoc);
      continue;
    }
    size_t naturalIndex = argTypes.naturalIndex(i);
    ValType type = fi.funcType().args()[naturalIndex];
    MutableHandleValue argValue = args[naturalIndex];
    if (!ToJSValue(cx, rawArgLoc, type, argValue)) {
      return false;
    }
  }

  FuncImportTls& import = funcImportTls(fi);
  RootedFunction importFun(cx, import.fun);
  MOZ_ASSERT(cx->realm() == importFun->realm());

  RootedValue fval(cx, ObjectValue(*importFun));
  RootedValue thisv(cx, UndefinedValue());
  if (!Call(cx, fval, thisv, args, rval)) {
    return false;
  }

  if (!UnpackResults(cx, fi.funcType().results(), stackResultPointer, rval)) {
    return false;
  }

  if (!JitOptions.enableWasmJitExit) {
    return true;
  }

  // The import may already have become optimized.
  for (auto t : code().tiers()) {
    void* jitExitCode = codeBase(t) + fi.jitExitCodeOffset();
    if (import.code == jitExitCode) {
      return true;
    }
  }

  void* jitExitCode = codeBase(tier) + fi.jitExitCodeOffset();

  // Test if the function is JIT compiled.
  if (!importFun->hasBytecode()) {
    return true;
  }

  JSScript* script = importFun->nonLazyScript();
  if (!script->hasJitScript()) {
    return true;
  }

#ifdef ENABLE_WASM_SIMD
  // Should have been guarded earlier
  MOZ_ASSERT(!fi.funcType().hasV128ArgOrRet());
#endif

  // Functions with unsupported reference types in signature don't have a jit
  // exit at the moment.
  if (fi.funcType().temporarilyUnsupportedReftypeForExit()) {
    return true;
  }

  // Functions that return multiple values don't have a jit exit at the moment.
  if (fi.funcType().temporarilyUnsupportedResultCountForJitExit()) {
    return true;
  }

  JitScript* jitScript = script->jitScript();

  // Ensure the argument types are included in the argument TypeSets stored in
  // the JitScript. This is necessary for Ion, because the import will use
  // the skip-arg-checks entry point. When the JitScript is discarded the
  // import is patched back.
  if (IsTypeInferenceEnabled()) {
    AutoSweepJitScript sweep(script);

    StackTypeSet* thisTypes = jitScript->thisTypes(sweep, script);
    if (!thisTypes->hasType(TypeSet::UndefinedType())) {
      return true;
    }

    const ValTypeVector& importArgs = fi.funcType().args();

    size_t numKnownArgs = std::min(importArgs.length(), importFun->nargs());
    for (uint32_t i = 0; i < numKnownArgs; i++) {
      StackTypeSet* argTypes = jitScript->argTypes(sweep, script, i);
      switch (importArgs[i].kind()) {
        case ValType::I32:
          if (!argTypes->hasType(TypeSet::Int32Type())) {
            return true;
          }
          break;
        case ValType::I64:
          if (!argTypes->hasType(TypeSet::BigIntType())) {
            return true;
          }
          break;
        case ValType::V128:
          MOZ_CRASH("Not needed per spec");
        case ValType::F32:
          if (!argTypes->hasType(TypeSet::DoubleType())) {
            return true;
          }
          break;
        case ValType::F64:
          if (!argTypes->hasType(TypeSet::DoubleType())) {
            return true;
          }
          break;
        case ValType::Ref:
          switch (importArgs[i].refTypeKind()) {
            case RefType::Any:
              // We don't know what type the value will be, so we can't really
              // check whether the callee will accept it.  It doesn't make much
              // sense to see if the callee accepts all of the types an AnyRef
              // might represent because most callees will not have been exposed
              // to all those types and so we'll never pass the test.  Instead,
              // we must use the callee's arg-type-checking entry point, and not
              // check anything here.  See FuncType::jitExitRequiresArgCheck().
              break;
            case RefType::Func:
              // We handle FuncRef as we do AnyRef: by checking the type
              // dynamically in the callee.  Code in the stubs layer must box up
              // the FuncRef as a Value.
              break;
            case RefType::TypeIndex:
              // Guarded by temporarilyUnsupportedReftypeForExit()
              MOZ_CRASH("case guarded above");
          }
          break;
      }
    }

    // These arguments will be filled with undefined at runtime by the
    // arguments rectifier: check that the imported function can handle
    // undefined there.
    for (uint32_t i = importArgs.length(); i < importFun->nargs(); i++) {
      StackTypeSet* argTypes = jitScript->argTypes(sweep, script, i);
      if (!argTypes->hasType(TypeSet::UndefinedType())) {
        return true;
      }
    }
  }

  // Let's optimize it!
  if (!jitScript->addDependentWasmImport(cx, *this, funcImportIndex)) {
    return false;
  }

  import.code = jitExitCode;
  import.jitScript = jitScript;
  return true;
}

/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_void(Instance* instance, int32_t funcImportIndex,
                          int32_t argc, uint64_t* argv) {
  JSContext* cx = TlsContext.get();
  RootedValue rval(cx);
  return instance->callImport(cx, funcImportIndex, argc, argv, &rval);
}

/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_i32(Instance* instance, int32_t funcImportIndex,
                         int32_t argc, uint64_t* argv) {
  JSContext* cx = TlsContext.get();
  RootedValue rval(cx);
  if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
    return false;
  }
  return ToWebAssemblyValue_i32(cx, rval, (int32_t*)argv, true);
}

/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_i64(Instance* instance, int32_t funcImportIndex,
                         int32_t argc, uint64_t* argv) {
  JSContext* cx = TlsContext.get();
  RootedValue rval(cx);
  if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
    return false;
  }
  return ToWebAssemblyValue_i64(cx, rval, (int64_t*)argv, true);
}

/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_v128(Instance* instance, int32_t funcImportIndex,
                          int32_t argc, uint64_t* argv) {
  JSContext* cx = TlsContext.get();
  JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                           JSMSG_WASM_BAD_VAL_TYPE);
  return false;
}

/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_f64(Instance* instance, int32_t funcImportIndex,
                         int32_t argc, uint64_t* argv) {
  JSContext* cx = TlsContext.get();
  RootedValue rval(cx);
  if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
    return false;
  }
  return ToWebAssemblyValue_f64(cx, rval, (double*)argv, true);
}

/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_anyref(Instance* instance, int32_t funcImportIndex,
                            int32_t argc, uint64_t* argv) {
  JSContext* cx = TlsContext.get();
  RootedValue rval(cx);
  if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
    return false;
  }
  static_assert(sizeof(argv[0]) >= sizeof(void*), "fits");
  return ToWebAssemblyValue_anyref(cx, rval, (void**)argv, true);
}

/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_funcref(Instance* instance, int32_t funcImportIndex,
                             int32_t argc, uint64_t* argv) {
  JSContext* cx = TlsContext.get();
  RootedValue rval(cx);
  if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
    return false;
  }
  return ToWebAssemblyValue_funcref(cx, rval, (void**)argv, true);
}
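
// Pattern note (a sketch, not normative): the callImport_* trampolines above
// are what wasm code reaches through FuncImportTls::code when the generic
// interpreter exit is in use. Each one writes the converted result back over
// argv[0], reusing the outgoing-argument area as the return slot:
//
//   uint64_t argv[8];  // hypothetical 8-slot argument area from a stub
//   if (!Instance::callImport_i32(instance, funcImportIndex, 8, argv)) {
//     // trap: an exception is already pending on the context
//   }
//   int32_t result = int32_t(argv[0]);  // written by ToWebAssemblyValue_i32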

/* static */ uint32_t Instance::memoryGrow_i32(Instance* instance,
                                               uint32_t delta) {
  MOZ_ASSERT(SASigMemoryGrow.failureMode == FailureMode::Infallible);
  MOZ_ASSERT(!instance->isAsmJS());

  JSContext* cx = TlsContext.get();
  RootedWasmMemoryObject memory(cx, instance->memory_);

  uint32_t ret = WasmMemoryObject::grow(memory, delta, cx);

  // If there has been a moving grow, this Instance should have been notified.
  MOZ_RELEASE_ASSERT(instance->tlsData()->memoryBase ==
                     instance->memory_->buffer().dataPointerEither());

  return ret;
}

/* static */ uint32_t Instance::memorySize_i32(Instance* instance) {
  MOZ_ASSERT(SASigMemorySize.failureMode == FailureMode::Infallible);

  // This invariant must hold when running Wasm code. Assert it here so we can
  // write tests for cross-realm calls.
  MOZ_ASSERT(TlsContext.get()->realm() == instance->realm());

  uint32_t byteLength = instance->memory()->volatileMemoryLength();
  MOZ_ASSERT(byteLength % wasm::PageSize == 0);
  return byteLength / wasm::PageSize;
}
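
// Worked arithmetic: wasm::PageSize is 64 KiB (65536 bytes), so an instance
// whose volatile byte length is 131072 reports memory.size == 2; a
// subsequent memory.grow(1) returns the old size in pages (2) on success or
// uint32_t(-1) on failure, as surfaced by memoryGrow_i32 above.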

template <typename T>
static int32_t PerformWait(Instance* instance, uint32_t byteOffset, T value,
                           int64_t timeout_ns) {
  JSContext* cx = TlsContext.get();

  if (byteOffset & (sizeof(T) - 1)) {
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                              JSMSG_WASM_UNALIGNED_ACCESS);
    return -1;
  }

  if (byteOffset + sizeof(T) > instance->memory()->volatileMemoryLength()) {
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                              JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  mozilla::Maybe<mozilla::TimeDuration> timeout;
  if (timeout_ns >= 0) {
    timeout = mozilla::Some(
        mozilla::TimeDuration::FromMicroseconds(timeout_ns / 1000));
  }

  switch (atomics_wait_impl(cx, instance->sharedMemoryBuffer(), byteOffset,
                            value, timeout)) {
    case FutexThread::WaitResult::OK:
      return 0;
    case FutexThread::WaitResult::NotEqual:
      return 1;
    case FutexThread::WaitResult::TimedOut:
      return 2;
    case FutexThread::WaitResult::Error:
      return -1;
    default:
      MOZ_CRASH();
  }
}
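
// The return codes mirror wasm's memory.atomic.wait* semantics (and the
// "ok" / "not-equal" / "timed-out" strings JS Atomics.wait reports):
//   0 -> woken by a notify;   1 -> *addr != expected value;
//   2 -> timeout elapsed;    -1 -> trap, error already reported on cx.
// For example, a call with timeout_ns = 1000000 blocks for at most 1 ms.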

/* static */ int32_t Instance::wait_i32(Instance* instance, uint32_t byteOffset,
                                        int32_t value, int64_t timeout_ns) {
  MOZ_ASSERT(SASigWaitI32.failureMode == FailureMode::FailOnNegI32);
  return PerformWait<int32_t>(instance, byteOffset, value, timeout_ns);
}

/* static */ int32_t Instance::wait_i64(Instance* instance, uint32_t byteOffset,
                                        int64_t value, int64_t timeout_ns) {
  MOZ_ASSERT(SASigWaitI64.failureMode == FailureMode::FailOnNegI32);
  return PerformWait<int64_t>(instance, byteOffset, value, timeout_ns);
}

/* static */ int32_t Instance::wake(Instance* instance, uint32_t byteOffset,
                                    int32_t count) {
  MOZ_ASSERT(SASigWake.failureMode == FailureMode::FailOnNegI32);

  JSContext* cx = TlsContext.get();

  // The alignment guard is not in the wasm spec as of 2017-11-02, but is
  // considered likely to appear, as 4-byte alignment is required for WAKE by
  // the spec's validation algorithm.

  if (byteOffset & 3) {
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                              JSMSG_WASM_UNALIGNED_ACCESS);
    return -1;
  }

  if (byteOffset >= instance->memory()->volatileMemoryLength()) {
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                              JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  int64_t woken = atomics_notify_impl(instance->sharedMemoryBuffer(),
                                      byteOffset, int64_t(count));

  if (woken > INT32_MAX) {
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                              JSMSG_WASM_WAKE_OVERFLOW);
    return -1;
  }

  return int32_t(woken);
}

template <typename T, typename F>
inline int32_t WasmMemoryCopy(T memBase, uint32_t memLen,
                              uint32_t dstByteOffset, uint32_t srcByteOffset,
                              uint32_t len, F memMove) {
  // Bounds check and deal with arithmetic overflow.
  uint64_t dstOffsetLimit = uint64_t(dstByteOffset) + uint64_t(len);
  uint64_t srcOffsetLimit = uint64_t(srcByteOffset) + uint64_t(len);

  if (dstOffsetLimit > memLen || srcOffsetLimit > memLen) {
    JSContext* cx = TlsContext.get();
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                              JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  memMove(memBase + dstByteOffset, memBase + srcByteOffset, size_t(len));
  return 0;
}
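
// Why the 64-bit sums above: in 32-bit arithmetic, dstByteOffset =
// 0xFFFFFFF0 with len = 0x20 would wrap to 0x10 and slip past a naive
// bounds check. Widened to uint64_t, the limit is 0x100000010, which
// exceeds any 32-bit memLen and is correctly rejected as out-of-bounds.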

/* static */ int32_t Instance::memCopy(Instance* instance,
                                       uint32_t dstByteOffset,
                                       uint32_t srcByteOffset, uint32_t len,
                                       uint8_t* memBase) {
  MOZ_ASSERT(SASigMemCopy.failureMode == FailureMode::FailOnNegI32);

  const WasmArrayRawBuffer* rawBuf = WasmArrayRawBuffer::fromDataPtr(memBase);
  uint32_t memLen = rawBuf->byteLength();

  return WasmMemoryCopy(memBase, memLen, dstByteOffset, srcByteOffset, len,
                        memmove);
}

/* static */ int32_t Instance::memCopyShared(Instance* instance,
                                             uint32_t dstByteOffset,
                                             uint32_t srcByteOffset,
                                             uint32_t len, uint8_t* memBase) {
  MOZ_ASSERT(SASigMemCopy.failureMode == FailureMode::FailOnNegI32);

  using RacyMemMove =
      void (*)(SharedMem<uint8_t*>, SharedMem<uint8_t*>, size_t);

  const SharedArrayRawBuffer* rawBuf =
      SharedArrayRawBuffer::fromDataPtr(memBase);
  uint32_t memLen = rawBuf->volatileByteLength();

  return WasmMemoryCopy<SharedMem<uint8_t*>, RacyMemMove>(
      SharedMem<uint8_t*>::shared(memBase), memLen, dstByteOffset,
      srcByteOffset, len, AtomicOperations::memmoveSafeWhenRacy);
}

/* static */ int32_t Instance::dataDrop(Instance* instance, uint32_t segIndex) {
  MOZ_ASSERT(SASigDataDrop.failureMode == FailureMode::FailOnNegI32);

  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
                     "ensured by validation");

  if (!instance->passiveDataSegments_[segIndex]) {
    return 0;
  }

  SharedDataSegment& segRefPtr = instance->passiveDataSegments_[segIndex];
  MOZ_RELEASE_ASSERT(!segRefPtr->active());

  // Drop this instance's reference to the DataSegment so it can be released.
  segRefPtr = nullptr;
  return 0;
}

template <typename T, typename F>
inline int32_t WasmMemoryFill(T memBase, uint32_t memLen, uint32_t byteOffset,
                              uint32_t value, uint32_t len, F memSet) {
  // Bounds check and deal with arithmetic overflow.
  uint64_t offsetLimit = uint64_t(byteOffset) + uint64_t(len);

  if (offsetLimit > memLen) {
    JSContext* cx = TlsContext.get();
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                              JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  // The required write direction is upward, but that is not currently
  // observable as there are no fences nor any read/write protect operation.
  memSet(memBase + byteOffset, int(value), size_t(len));
  return 0;
}

/* static */ int32_t Instance::memFill(Instance* instance, uint32_t byteOffset,
                                       uint32_t value, uint32_t len,
                                       uint8_t* memBase) {
  MOZ_ASSERT(SASigMemFill.failureMode == FailureMode::FailOnNegI32);

  const WasmArrayRawBuffer* rawBuf = WasmArrayRawBuffer::fromDataPtr(memBase);
  uint32_t memLen = rawBuf->byteLength();

  return WasmMemoryFill(memBase, memLen, byteOffset, value, len, memset);
}

/* static */ int32_t Instance::memFillShared(Instance* instance,
                                             uint32_t byteOffset,
                                             uint32_t value, uint32_t len,
                                             uint8_t* memBase) {
  MOZ_ASSERT(SASigMemFill.failureMode == FailureMode::FailOnNegI32);

  const SharedArrayRawBuffer* rawBuf =
      SharedArrayRawBuffer::fromDataPtr(memBase);
  uint32_t memLen = rawBuf->volatileByteLength();

  return WasmMemoryFill(SharedMem<uint8_t*>::shared(memBase), memLen,
                        byteOffset, value, len,
                        AtomicOperations::memsetSafeWhenRacy);
}

/* static */ int32_t Instance::memInit(Instance* instance, uint32_t dstOffset,
                                       uint32_t srcOffset, uint32_t len,
                                       uint32_t segIndex) {
  MOZ_ASSERT(SASigMemInit.failureMode == FailureMode::FailOnNegI32);

  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
                     "ensured by validation");

  if (!instance->passiveDataSegments_[segIndex]) {
    if (len == 0 && srcOffset == 0) {
      return 0;
    }

    JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                              JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  const DataSegment& seg = *instance->passiveDataSegments_[segIndex];
  MOZ_RELEASE_ASSERT(!seg.active());

  const uint32_t segLen = seg.bytes.length();

  WasmMemoryObject* mem = instance->memory();
  const uint32_t memLen = mem->volatileMemoryLength();

  // We are proposing to copy
  //
  //   seg.bytes.begin()[ srcOffset .. srcOffset + len - 1 ]
  // to
  //   memoryBase[ dstOffset .. dstOffset + len - 1 ]

  // Bounds check and deal with arithmetic overflow.
  uint64_t dstOffsetLimit = uint64_t(dstOffset) + uint64_t(len);
  uint64_t srcOffsetLimit = uint64_t(srcOffset) + uint64_t(len);

  if (dstOffsetLimit > memLen || srcOffsetLimit > segLen) {
    JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                              JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  // The required read/write direction is upward, but that is not currently
  // observable as there are no fences nor any read/write protect operation.
  SharedMem<uint8_t*> dataPtr = mem->buffer().dataPointerEither();
  if (mem->isShared()) {
    AtomicOperations::memcpySafeWhenRacy(
        dataPtr + dstOffset, (uint8_t*)seg.bytes.begin() + srcOffset, len);
  } else {
    uint8_t* rawBuf = dataPtr.unwrap(/*Unshared*/);
    memcpy(rawBuf + dstOffset, (const char*)seg.bytes.begin() + srcOffset, len);
  }
  return 0;
}

/* static */ int32_t Instance::tableCopy(Instance* instance, uint32_t dstOffset,
                                         uint32_t srcOffset, uint32_t len,
                                         uint32_t dstTableIndex,
                                         uint32_t srcTableIndex) {
  MOZ_ASSERT(SASigTableCopy.failureMode == FailureMode::FailOnNegI32);

  const SharedTable& srcTable = instance->tables()[srcTableIndex];
  uint32_t srcTableLen = srcTable->length();

  const SharedTable& dstTable = instance->tables()[dstTableIndex];
  uint32_t dstTableLen = dstTable->length();

  // Bounds check and deal with arithmetic overflow.
  uint64_t dstOffsetLimit = uint64_t(dstOffset) + len;
  uint64_t srcOffsetLimit = uint64_t(srcOffset) + len;

  if (dstOffsetLimit > dstTableLen || srcOffsetLimit > srcTableLen) {
    JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                              JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  bool isOOM = false;

  if (&srcTable == &dstTable && dstOffset > srcOffset) {
    for (uint32_t i = len; i > 0; i--) {
      if (!dstTable->copy(*srcTable, dstOffset + (i - 1),
                          srcOffset + (i - 1))) {
        isOOM = true;
        break;
      }
    }
  } else if (&srcTable == &dstTable && dstOffset == srcOffset) {
    // No-op
  } else {
    for (uint32_t i = 0; i < len; i++) {
      if (!dstTable->copy(*srcTable, dstOffset + i, srcOffset + i)) {
        isOOM = true;
        break;
      }
    }
  }

  if (isOOM) {
    return -1;
  }
  return 0;
}
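
// Overlap handling in a nutshell: copying within one table with
// dstOffset > srcOffset must run backwards, or the source is clobbered
// before it is read. E.g. with srcOffset = 0, dstOffset = 1, len = 3 over
// entries [A, B, C, _], a forward copy would produce [A, A, A, A] while the
// backward loop above correctly yields [A, A, B, C].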

/* static */ int32_t Instance::elemDrop(Instance* instance, uint32_t segIndex) {
  MOZ_ASSERT(SASigElemDrop.failureMode == FailureMode::FailOnNegI32);
1012 
1013   MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
1014                      "ensured by validation");
1015 
1016   if (!instance->passiveElemSegments_[segIndex]) {
1017     return 0;
1018   }
1019 
1020   SharedElemSegment& segRefPtr = instance->passiveElemSegments_[segIndex];
1021   MOZ_RELEASE_ASSERT(!segRefPtr->active());
1022 
1023   // Drop this instance's reference to the ElemSegment so it can be released.
1024   segRefPtr = nullptr;
1025   return 0;
1026 }
1027 
initElems(uint32_t tableIndex,const ElemSegment & seg,uint32_t dstOffset,uint32_t srcOffset,uint32_t len)1028 bool Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
1029                          uint32_t dstOffset, uint32_t srcOffset, uint32_t len) {
1030   Table& table = *tables_[tableIndex];
1031   MOZ_ASSERT(dstOffset <= table.length());
1032   MOZ_ASSERT(len <= table.length() - dstOffset);
1033 
1034   Tier tier = code().bestTier();
1035   const MetadataTier& metadataTier = metadata(tier);
1036   const FuncImportVector& funcImports = metadataTier.funcImports;
1037   const CodeRangeVector& codeRanges = metadataTier.codeRanges;
1038   const Uint32Vector& funcToCodeRange = metadataTier.funcToCodeRange;
1039   const Uint32Vector& elemFuncIndices = seg.elemFuncIndices;
1040   MOZ_ASSERT(srcOffset <= elemFuncIndices.length());
1041   MOZ_ASSERT(len <= elemFuncIndices.length() - srcOffset);
1042 
1043   uint8_t* codeBaseTier = codeBase(tier);
1044   for (uint32_t i = 0; i < len; i++) {
1045     uint32_t funcIndex = elemFuncIndices[srcOffset + i];
1046     if (funcIndex == NullFuncIndex) {
1047       table.setNull(dstOffset + i);
1048     } else if (!table.isFunction()) {
1049       // Note, fnref must be rooted if we do anything more than just store it.
1050       void* fnref = Instance::refFunc(this, funcIndex);
1051       if (fnref == AnyRef::invalid().forCompiledCode()) {
1052         return false;  // OOM, which has already been reported.
1053       }
1054       table.fillAnyRef(dstOffset + i, 1, AnyRef::fromCompiledCode(fnref));
1055     } else {
1056       if (funcIndex < funcImports.length()) {
1057         FuncImportTls& import = funcImportTls(funcImports[funcIndex]);
1058         JSFunction* fun = import.fun;
1059         if (IsWasmExportedFunction(fun)) {
1060           // This element is a wasm function imported from another
1061           // instance. To preserve the === function identity required by
1062           // the JS embedding spec, we must set the element to the
1063           // imported function's underlying CodeRange.funcTableEntry and
1064           // Instance so that future Table.get()s produce the same
1065           // function object as was imported.
1066           WasmInstanceObject* calleeInstanceObj =
1067               ExportedFunctionToInstanceObject(fun);
1068           Instance& calleeInstance = calleeInstanceObj->instance();
1069           Tier calleeTier = calleeInstance.code().bestTier();
1070           const CodeRange& calleeCodeRange =
1071               calleeInstanceObj->getExportedFunctionCodeRange(fun, calleeTier);
1072           void* code = calleeInstance.codeBase(calleeTier) +
1073                        calleeCodeRange.funcTableEntry();
1074           table.setFuncRef(dstOffset + i, code, &calleeInstance);
1075           continue;
1076         }
1077       }
1078       void* code = codeBaseTier +
1079                    codeRanges[funcToCodeRange[funcIndex]].funcTableEntry();
1080       table.setFuncRef(dstOffset + i, code, this);
1081     }
1082   }
1083   return true;
1084 }
1085 
tableInit(Instance * instance,uint32_t dstOffset,uint32_t srcOffset,uint32_t len,uint32_t segIndex,uint32_t tableIndex)1086 /* static */ int32_t Instance::tableInit(Instance* instance, uint32_t dstOffset,
1087                                          uint32_t srcOffset, uint32_t len,
1088                                          uint32_t segIndex,
1089                                          uint32_t tableIndex) {
1090   MOZ_ASSERT(SASigTableInit.failureMode == FailureMode::FailOnNegI32);
1091 
1092   MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
1093                      "ensured by validation");
1094 
1095   if (!instance->passiveElemSegments_[segIndex]) {
1096     if (len == 0 && srcOffset == 0) {
1097       return 0;
1098     }
1099 
1100     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
1101                               JSMSG_WASM_OUT_OF_BOUNDS);
1102     return -1;
1103   }
1104 
1105   const ElemSegment& seg = *instance->passiveElemSegments_[segIndex];
1106   MOZ_RELEASE_ASSERT(!seg.active());
1107   const uint32_t segLen = seg.length();
1108 
1109   const Table& table = *instance->tables()[tableIndex];
1110   const uint32_t tableLen = table.length();
1111 
1112   // We are proposing to copy
1113   //
1114   //   seg[ srcOffset .. srcOffset + len - 1 ]
1115   // to
1116   //   tableBase[ dstOffset .. dstOffset + len - 1 ]
1117 
1118   // Bounds check and deal with arithmetic overflow.
1119   uint64_t dstOffsetLimit = uint64_t(dstOffset) + uint64_t(len);
1120   uint64_t srcOffsetLimit = uint64_t(srcOffset) + uint64_t(len);
1121 
1122   if (dstOffsetLimit > tableLen || srcOffsetLimit > segLen) {
1123     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
1124                               JSMSG_WASM_OUT_OF_BOUNDS);
1125     return -1;
1126   }
1127 
1128   if (!instance->initElems(tableIndex, seg, dstOffset, srcOffset, len)) {
1129     return -1;  // OOM, which has already been reported.
1130   }
1131 
1132   return 0;
1133 }
1134 
tableFill(Instance * instance,uint32_t start,void * value,uint32_t len,uint32_t tableIndex)1135 /* static */ int32_t Instance::tableFill(Instance* instance, uint32_t start,
1136                                          void* value, uint32_t len,
1137                                          uint32_t tableIndex) {
1138   MOZ_ASSERT(SASigTableFill.failureMode == FailureMode::FailOnNegI32);
1139 
1140   JSContext* cx = TlsContext.get();
1141   Table& table = *instance->tables()[tableIndex];
1142 
1143   // Bounds check and deal with arithmetic overflow.
1144   uint64_t offsetLimit = uint64_t(start) + uint64_t(len);
1145 
1146   if (offsetLimit > table.length()) {
1147     JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
1148                               JSMSG_WASM_OUT_OF_BOUNDS);
1149     return -1;
1150   }
1151 
1152   switch (table.repr()) {
1153     case TableRepr::Ref:
1154       table.fillAnyRef(start, len, AnyRef::fromCompiledCode(value));
1155       break;
1156     case TableRepr::Func:
1157       MOZ_RELEASE_ASSERT(table.kind() == TableKind::FuncRef);
1158       table.fillFuncRef(start, len, FuncRef::fromCompiledCode(value), cx);
1159       break;
1160   }
1161 
1162   return 0;
1163 }
1164 
tableGet(Instance * instance,uint32_t index,uint32_t tableIndex)1165 /* static */ void* Instance::tableGet(Instance* instance, uint32_t index,
1166                                       uint32_t tableIndex) {
1167   MOZ_ASSERT(SASigTableGet.failureMode == FailureMode::FailOnInvalidRef);
1168 
1169   const Table& table = *instance->tables()[tableIndex];
1170   if (index >= table.length()) {
1171     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
1172                               JSMSG_WASM_TABLE_OUT_OF_BOUNDS);
1173     return AnyRef::invalid().forCompiledCode();
1174   }
1175 
1176   if (table.repr() == TableRepr::Ref) {
1177     return table.getAnyRef(index).forCompiledCode();
1178   }
1179 
1180   MOZ_RELEASE_ASSERT(table.kind() == TableKind::FuncRef);
1181 
1182   JSContext* cx = TlsContext.get();
1183   RootedFunction fun(cx);
1184   if (!table.getFuncRef(cx, index, &fun)) {
1185     return AnyRef::invalid().forCompiledCode();
1186   }
1187 
1188   return FuncRef::fromJSFunction(fun).forCompiledCode();
1189 }
1190 
tableGrow(Instance * instance,void * initValue,uint32_t delta,uint32_t tableIndex)1191 /* static */ uint32_t Instance::tableGrow(Instance* instance, void* initValue,
1192                                           uint32_t delta, uint32_t tableIndex) {
1193   MOZ_ASSERT(SASigTableGrow.failureMode == FailureMode::Infallible);
1194 
1195   RootedAnyRef ref(TlsContext.get(), AnyRef::fromCompiledCode(initValue));
1196   Table& table = *instance->tables()[tableIndex];
1197 
1198   uint32_t oldSize = table.grow(delta);
1199 
1200   if (oldSize != uint32_t(-1) && initValue != nullptr) {
1201     switch (table.repr()) {
1202       case TableRepr::Ref:
1203         table.fillAnyRef(oldSize, delta, ref);
1204         break;
1205       case TableRepr::Func:
1206         MOZ_RELEASE_ASSERT(table.kind() == TableKind::FuncRef);
1207         table.fillFuncRef(oldSize, delta, FuncRef::fromAnyRefUnchecked(ref),
1208                           TlsContext.get());
1209         break;
1210     }
1211   }
1212 
1213   return oldSize;
1214 }
1215 
tableSet(Instance * instance,uint32_t index,void * value,uint32_t tableIndex)1216 /* static */ int32_t Instance::tableSet(Instance* instance, uint32_t index,
1217                                         void* value, uint32_t tableIndex) {
1218   MOZ_ASSERT(SASigTableSet.failureMode == FailureMode::FailOnNegI32);
1219 
1220   Table& table = *instance->tables()[tableIndex];
1221   if (index >= table.length()) {
1222     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
1223                               JSMSG_WASM_TABLE_OUT_OF_BOUNDS);
1224     return -1;
1225   }
1226 
1227   switch (table.repr()) {
1228     case TableRepr::Ref:
1229       table.fillAnyRef(index, 1, AnyRef::fromCompiledCode(value));
1230       break;
1231     case TableRepr::Func:
1232       MOZ_RELEASE_ASSERT(table.kind() == TableKind::FuncRef);
1233       table.fillFuncRef(index, 1, FuncRef::fromCompiledCode(value),
1234                         TlsContext.get());
1235       break;
1236   }
1237 
1238   return 0;
1239 }
1240 
tableSize(Instance * instance,uint32_t tableIndex)1241 /* static */ uint32_t Instance::tableSize(Instance* instance,
1242                                           uint32_t tableIndex) {
1243   MOZ_ASSERT(SASigTableSize.failureMode == FailureMode::Infallible);
1244   Table& table = *instance->tables()[tableIndex];
1245   return table.length();
1246 }
1247 
refFunc(Instance * instance,uint32_t funcIndex)1248 /* static */ void* Instance::refFunc(Instance* instance, uint32_t funcIndex) {
1249   MOZ_ASSERT(SASigRefFunc.failureMode == FailureMode::FailOnInvalidRef);
1250   JSContext* cx = TlsContext.get();
1251 
1252   Tier tier = instance->code().bestTier();
1253   const MetadataTier& metadataTier = instance->metadata(tier);
1254   const FuncImportVector& funcImports = metadataTier.funcImports;
1255 
1256   // If this is an import, we need to recover the original function to maintain
1257   // reference equality between a re-exported function and 'ref.func'. The
1258   // identity of the imported function object is stable across tiers, which is
1259   // what we want.
1260   //
1261   // Use the imported function only if it is an exported function, otherwise
1262   // fall through to get a (possibly new) exported function.
1263   if (funcIndex < funcImports.length()) {
1264     FuncImportTls& import = instance->funcImportTls(funcImports[funcIndex]);
1265     if (IsWasmExportedFunction(import.fun)) {
1266       return FuncRef::fromJSFunction(import.fun).forCompiledCode();
1267     }
1268   }
1269 
1270   RootedFunction fun(cx);
1271   RootedWasmInstanceObject instanceObj(cx, instance->object());
1272   if (!WasmInstanceObject::getExportedFunction(cx, instanceObj, funcIndex,
1273                                                &fun)) {
1274     // Validation ensures that we always have a valid funcIndex, so we must
1275     // have OOM'ed
1276     ReportOutOfMemory(cx);
1277     return AnyRef::invalid().forCompiledCode();
1278   }
1279 
1280   return FuncRef::fromJSFunction(fun).forCompiledCode();
1281 }
1282 
preBarrierFiltering(Instance * instance,gc::Cell ** location)1283 /* static */ void Instance::preBarrierFiltering(Instance* instance,
1284                                                 gc::Cell** location) {
1285   MOZ_ASSERT(SASigPreBarrierFiltering.failureMode == FailureMode::Infallible);
1286   MOZ_ASSERT(location);
1287   JSObject::writeBarrierPre(*reinterpret_cast<JSObject**>(location));
1288 }
1289 
postBarrier(Instance * instance,gc::Cell ** location)1290 /* static */ void Instance::postBarrier(Instance* instance,
1291                                         gc::Cell** location) {
1292   MOZ_ASSERT(SASigPostBarrier.failureMode == FailureMode::Infallible);
1293   MOZ_ASSERT(location);
1294   TlsContext.get()->runtime()->gc.storeBuffer().putCell(
1295       reinterpret_cast<JSObject**>(location));
1296 }
1297 
postBarrierFiltering(Instance * instance,gc::Cell ** location)1298 /* static */ void Instance::postBarrierFiltering(Instance* instance,
1299                                                  gc::Cell** location) {
1300   MOZ_ASSERT(SASigPostBarrier.failureMode == FailureMode::Infallible);
1301   MOZ_ASSERT(location);
1302   if (*location == nullptr || !gc::IsInsideNursery(*location)) {
1303     return;
1304   }
1305   TlsContext.get()->runtime()->gc.storeBuffer().putCell(
1306       reinterpret_cast<JSObject**>(location));
1307 }
1308 
1309 // The typeIndex is an index into the structTypeDescrs_ table in the instance.
1310 // That table holds TypeDescr objects.
1311 //
1312 // When we fail to allocate we return a nullptr; the wasm side must check this
1313 // and propagate it as an error.
1314 
structNew(Instance * instance,uint32_t typeIndex)1315 /* static */ void* Instance::structNew(Instance* instance, uint32_t typeIndex) {
1316   MOZ_ASSERT(SASigStructNew.failureMode == FailureMode::FailOnNullPtr);
1317   JSContext* cx = TlsContext.get();
1318   Rooted<TypeDescr*> typeDescr(cx, instance->structTypeDescrs_[typeIndex]);
1319   return TypedObject::createZeroed(cx, typeDescr);
1320 }
1321 
structNarrow(Instance * instance,uint32_t mustUnboxAnyref,uint32_t outputTypeIndex,void * maybeNullPtr)1322 /* static */ void* Instance::structNarrow(Instance* instance,
1323                                           uint32_t mustUnboxAnyref,
1324                                           uint32_t outputTypeIndex,
1325                                           void* maybeNullPtr) {
1326   MOZ_ASSERT(SASigStructNarrow.failureMode == FailureMode::Infallible);
1327 
1328   JSContext* cx = TlsContext.get();
1329 
1330   Rooted<TypedObject*> obj(cx);
1331   Rooted<StructTypeDescr*> typeDescr(cx);
1332 
1333   if (maybeNullPtr == nullptr) {
1334     return maybeNullPtr;
1335   }
1336 
1337   void* nonnullPtr = maybeNullPtr;
1338   if (mustUnboxAnyref) {
1339     // TODO/AnyRef-boxing: With boxed immediates and strings, unboxing
1340     // AnyRef is not a no-op.
1341     ASSERT_ANYREF_IS_JSOBJECT;
1342 
1343     Rooted<NativeObject*> no(cx, static_cast<NativeObject*>(nonnullPtr));
1344     if (!no->is<TypedObject>()) {
1345       return nullptr;
1346     }
1347     obj = &no->as<TypedObject>();
1348     Rooted<TypeDescr*> td(cx, &obj->typeDescr());
1349     if (td->kind() != type::Struct) {
1350       return nullptr;
1351     }
1352     typeDescr = &td->as<StructTypeDescr>();
1353   } else {
1354     obj = static_cast<TypedObject*>(nonnullPtr);
1355     typeDescr = &obj->typeDescr().as<StructTypeDescr>();
1356   }
1357 
1358   // Optimization opportunity: instead of this loop we could perhaps load an
1359   // index from `typeDescr` and use that to index into the structTypes table
1360   // of the instance.  If the index is in bounds and the descriptor at that
1361   // index is the one we already have, then the index is valid and we can use
1362   // it for the prefix check.
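  // A sketch of that alternative, assuming a hypothetical subTypeIndex()
  // accessor on TypeDescr (no such accessor exists today):
  //
  //   uint32_t idx = typeDescr->subTypeIndex();
  //   if (idx < instance->structTypeDescrs_.length() &&
  //       instance->structTypeDescrs_[idx] == typeDescr) {
  //     found = idx;  // O(1), replacing the linear search below
  //   }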
1363 
1364   uint32_t found = UINT32_MAX;
1365   for (uint32_t i = 0; i < instance->structTypeDescrs_.length(); i++) {
1366     if (instance->structTypeDescrs_[i] == typeDescr) {
1367       found = i;
1368       break;
1369     }
1370   }
1371 
1372   if (found == UINT32_MAX) {
1373     return nullptr;
1374   }
1375 
1376   // Also asserted in the constructor; let's just be doubly sure.
1377 
1378   MOZ_ASSERT(instance->structTypeDescrs_.length() ==
1379              instance->structTypes().length());
1380 
1381   // Now we know that the object was created by the instance, and we know its
1382   // concrete type.  We need to check that its type is an extension of the
1383   // type of outputTypeIndex.
1384 
1385   if (!instance->structTypes()[found].hasPrefix(
1386           instance->structTypes()[outputTypeIndex])) {
1387     return nullptr;
1388   }
1389 
1390   return nonnullPtr;
1391 }
1392 
1393 // Note: dst must point into nonmovable storage that is not in the nursery;
1394 // this matters for the write barriers.  Furthermore, for pointer types the
1395 // current value of *dst must be null so that only a post-barrier is required.
1396 //
1397 // Regarding the destination not being in the nursery, there are two cases:
1398 // the written location is either in the global data section of the
1399 // WasmInstanceObject, or in the Cell of a WasmGlobalObject.
1400 //
1401 // - WasmInstanceObjects are always tenured and u.ref_ may point to a
1402 //   nursery object, so we need a post-barrier since the global data of an
1403 //   instance is effectively a field of the WasmInstanceObject.
1404 //
1405 // - WasmGlobalObjects are always tenured, and they have a Cell field, so a
1406 //   post-barrier may be needed for the same reason as above.
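// As a concrete illustration of the second case (a sketch; `cell` and
// `nurseryObj` are hypothetical):
//
//   *(JSObject**)cell = nurseryObj;  // tenured Cell -> nursery object
//
// After such a store the GC must learn about the new edge, or a minor GC
// could miss it; that is what JSObject::writeBarrierPost does in the Ref
// case of CopyValPostBarriered below.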
1407 
1408 void CopyValPostBarriered(uint8_t* dst, const Val& src) {
1409   switch (src.type().kind()) {
1410     case ValType::I32: {
1411       int32_t x = src.i32();
1412       memcpy(dst, &x, sizeof(x));
1413       break;
1414     }
1415     case ValType::I64: {
1416       int64_t x = src.i64();
1417       memcpy(dst, &x, sizeof(x));
1418       break;
1419     }
1420     case ValType::F32: {
1421       float x = src.f32();
1422       memcpy(dst, &x, sizeof(x));
1423       break;
1424     }
1425     case ValType::F64: {
1426       double x = src.f64();
1427       memcpy(dst, &x, sizeof(x));
1428       break;
1429     }
1430     case ValType::V128: {
1431       V128 x = src.v128();
1432       memcpy(dst, &x, sizeof(x));
1433       break;
1434     }
1435     case ValType::Ref: {
1436       // TODO/AnyRef-boxing: With boxed immediates and strings, the write
1437       // barrier is going to have to be more complicated.
1438       ASSERT_ANYREF_IS_JSOBJECT;
1439       MOZ_ASSERT(*(void**)dst == nullptr,
1440                  "should be null so no need for a pre-barrier");
1441       AnyRef x = src.ref();
1442       memcpy(dst, x.asJSObjectAddress(), sizeof(*x.asJSObjectAddress()));
1443       if (!x.isNull()) {
1444         JSObject::writeBarrierPost((JSObject**)dst, nullptr, x.asJSObject());
1445       }
1446       break;
1447     }
1448   }
1449 }
1450 
1451 Instance::Instance(JSContext* cx, Handle<WasmInstanceObject*> object,
1452                    SharedCode code, UniqueTlsData tlsDataIn,
1453                    HandleWasmMemoryObject memory, SharedTableVector&& tables,
1454                    StructTypeDescrVector&& structTypeDescrs,
1455                    UniqueDebugState maybeDebug)
1456     : realm_(cx->realm()),
1457       object_(object),
1458       jsJitArgsRectifier_(
1459           cx->runtime()->jitRuntime()->getArgumentsRectifier().value),
1460       jsJitExceptionHandler_(
1461           cx->runtime()->jitRuntime()->getExceptionTail().value),
1462       preBarrierCode_(
1463           cx->runtime()->jitRuntime()->preBarrier(MIRType::Object).value),
1464       code_(code),
1465       tlsData_(std::move(tlsDataIn)),
1466       memory_(memory),
1467       tables_(std::move(tables)),
1468       maybeDebug_(std::move(maybeDebug)),
1469       structTypeDescrs_(std::move(structTypeDescrs)) {}
1470 
1471 bool Instance::init(JSContext* cx, const JSFunctionVector& funcImports,
1472                     const ValVector& globalImportValues,
1473                     const WasmGlobalObjectVector& globalObjs,
1474                     const DataSegmentVector& dataSegments,
1475                     const ElemSegmentVector& elemSegments) {
1476   MOZ_ASSERT(!!maybeDebug_ == metadata().debugEnabled);
1477   MOZ_ASSERT(structTypeDescrs_.length() == structTypes().length());
1478 
1479 #ifdef DEBUG
1480   for (auto t : code_->tiers()) {
1481     MOZ_ASSERT(funcImports.length() == metadata(t).funcImports.length());
1482   }
1483 #endif
1484   MOZ_ASSERT(tables_.length() == metadata().tables.length());
1485 
1486   tlsData()->memoryBase =
1487       memory_ ? memory_->buffer().dataPointerEither().unwrap() : nullptr;
1488   tlsData()->boundsCheckLimit = memory_ ? memory_->boundsCheckLimit() : 0;
1489   tlsData()->instance = this;
1490   tlsData()->realm = realm_;
1491   tlsData()->cx = cx;
1492   tlsData()->valueBoxClass = &WasmValueBox::class_;
1493   tlsData()->resetInterrupt(cx);
1494   tlsData()->jumpTable = code_->tieringJumpTable();
1495   tlsData()->addressOfNeedsIncrementalBarrier =
1496       (uint8_t*)cx->compartment()->zone()->addressOfNeedsIncrementalBarrier();
1497 
1498   // Initialize function imports in the tls data
1499   Tier callerTier = code_->bestTier();
1500   for (size_t i = 0; i < metadata(callerTier).funcImports.length(); i++) {
1501     JSFunction* f = funcImports[i];
1502     const FuncImport& fi = metadata(callerTier).funcImports[i];
1503     FuncImportTls& import = funcImportTls(fi);
1504     import.fun = f;
1505     if (!isAsmJS() && IsWasmExportedFunction(f)) {
1506       WasmInstanceObject* calleeInstanceObj =
1507           ExportedFunctionToInstanceObject(f);
1508       Instance& calleeInstance = calleeInstanceObj->instance();
1509       Tier calleeTier = calleeInstance.code().bestTier();
1510       const CodeRange& codeRange =
1511           calleeInstanceObj->getExportedFunctionCodeRange(f, calleeTier);
1512       import.tls = calleeInstance.tlsData();
1513       import.realm = f->realm();
1514       import.code =
1515           calleeInstance.codeBase(calleeTier) + codeRange.funcNormalEntry();
1516       import.jitScript = nullptr;
1517     } else if (void* thunk = MaybeGetBuiltinThunk(f, fi.funcType())) {
1518       import.tls = tlsData();
1519       import.realm = f->realm();
1520       import.code = thunk;
1521       import.jitScript = nullptr;
1522     } else {
1523       import.tls = tlsData();
1524       import.realm = f->realm();
1525       import.code = codeBase(callerTier) + fi.interpExitCodeOffset();
1526       import.jitScript = nullptr;
1527     }
1528   }
1529 
1530   // Initialize tables in the tls data
1531   for (size_t i = 0; i < tables_.length(); i++) {
1532     const TableDesc& td = metadata().tables[i];
1533     TableTls& table = tableTls(td);
1534     table.length = tables_[i]->length();
1535     table.functionBase = tables_[i]->functionBase();
1536   }
1537 
1538   // Initialize globals in the tls data
1539   for (size_t i = 0; i < metadata().globals.length(); i++) {
1540     const GlobalDesc& global = metadata().globals[i];
1541 
1542     // Constants are baked into the code, never stored in the global area.
1543     if (global.isConstant()) {
1544       continue;
1545     }
1546 
1547     uint8_t* globalAddr = globalData() + global.offset();
1548     switch (global.kind()) {
1549       case GlobalKind::Import: {
1550         size_t imported = global.importIndex();
1551         if (global.isIndirect()) {
1552           *(void**)globalAddr = globalObjs[imported]->cell();
1553         } else {
1554           CopyValPostBarriered(globalAddr, globalImportValues[imported]);
1555         }
1556         break;
1557       }
1558       case GlobalKind::Variable: {
1559         const InitExpr& init = global.initExpr();
1560 
1561         RootedVal val(cx);
1562         switch (init.kind()) {
1563           case InitExpr::Kind::Constant: {
1564             val = Val(init.val());
1565             break;
1566           }
1567           case InitExpr::Kind::GetGlobal: {
1568             const GlobalDesc& imported = metadata().globals[init.globalIndex()];
1569 
1570             // Global-ref initializers cannot reference mutable globals, so
1571             // the source global should never be indirect.
1572             MOZ_ASSERT(!imported.isIndirect());
1573 
1574             val = globalImportValues[imported.importIndex()];
1575             break;
1576           }
1577           case InitExpr::Kind::RefFunc: {
1578             void* fnref = Instance::refFunc(this, init.refFuncIndex());
1579             if (fnref == AnyRef::invalid().forCompiledCode()) {
1580               return false;  // OOM, which has already been reported.
1581             }
1582             val =
1583                 Val(ValType(RefType::func()), FuncRef::fromCompiledCode(fnref));
1584             break;
1585           }
1586         }
1587 
1588         if (global.isIndirect()) {
1589           void* address = globalObjs[i]->cell();
1590           *(void**)globalAddr = address;
1591           CopyValPostBarriered((uint8_t*)address, val.get());
1592         } else {
1593           CopyValPostBarriered(globalAddr, val.get());
1594         }
1595         break;
1596       }
1597       case GlobalKind::Constant: {
1598         MOZ_CRASH("skipped at the top");
1599       }
1600     }
1601   }
1602 
1603   // Add observer if our memory base may grow
1604   if (memory_ && memory_->movingGrowable() &&
1605       !memory_->addMovingGrowObserver(cx, object_)) {
1606     return false;
1607   }
1608 
1609   // Add observers if our tables may grow
1610   for (const SharedTable& table : tables_) {
1611     if (table->movingGrowable() && !table->addMovingGrowObserver(cx, object_)) {
1612       return false;
1613     }
1614   }
1615 
1616   // Allocate in the global function type set for structural signature checks
1617   if (!metadata().funcTypeIds.empty()) {
1618     ExclusiveData<FuncTypeIdSet>::Guard lockedFuncTypeIdSet =
1619         funcTypeIdSet.lock();
1620 
1621     for (const FuncTypeWithId& funcType : metadata().funcTypeIds) {
1622       const void* funcTypeId;
1623       if (!lockedFuncTypeIdSet->allocateFuncTypeId(cx, funcType, &funcTypeId)) {
1624         return false;
1625       }
1626 
1627       *addressOfFuncTypeId(funcType.id) = funcTypeId;
1628     }
1629   }
1630 
1631   // Take references to the passive data segments
1632   if (!passiveDataSegments_.resize(dataSegments.length())) {
1633     return false;
1634   }
1635   for (size_t i = 0; i < dataSegments.length(); i++) {
1636     if (!dataSegments[i]->active()) {
1637       passiveDataSegments_[i] = dataSegments[i];
1638     }
1639   }
1640 
1641   // Take references to the passive element segments
1642   if (!passiveElemSegments_.resize(elemSegments.length())) {
1643     return false;
1644   }
1645   for (size_t i = 0; i < elemSegments.length(); i++) {
1646     if (elemSegments[i]->kind == ElemSegment::Kind::Passive) {
1647       passiveElemSegments_[i] = elemSegments[i];
1648     }
1649   }
1650 
1651   return true;
1652 }
1653 
1654 Instance::~Instance() {
1655   realm_->wasm.unregisterInstance(*this);
1656 
1657   const FuncImportVector& funcImports =
1658       metadata(code().stableTier()).funcImports;
1659 
1660   for (unsigned i = 0; i < funcImports.length(); i++) {
1661     FuncImportTls& import = funcImportTls(funcImports[i]);
1662     if (import.jitScript) {
1663       import.jitScript->removeDependentWasmImport(*this, i);
1664     }
1665   }
1666 
1667   if (!metadata().funcTypeIds.empty()) {
1668     ExclusiveData<FuncTypeIdSet>::Guard lockedFuncTypeIdSet =
1669         funcTypeIdSet.lock();
1670 
1671     for (const FuncTypeWithId& funcType : metadata().funcTypeIds) {
1672       if (const void* funcTypeId = *addressOfFuncTypeId(funcType.id)) {
1673         lockedFuncTypeIdSet->deallocateFuncTypeId(funcType, funcTypeId);
1674       }
1675     }
1676   }
1677 }
1678 
1679 size_t Instance::memoryMappedSize() const {
1680   return memory_->buffer().wasmMappedSize();
1681 }
1682 
1683 bool Instance::memoryAccessInGuardRegion(uint8_t* addr,
1684                                          unsigned numBytes) const {
1685   MOZ_ASSERT(numBytes > 0);
1686 
1687   if (!metadata().usesMemory()) {
1688     return false;
1689   }
1690 
1691   uint8_t* base = memoryBase().unwrap(/* comparison */);
1692   if (addr < base) {
1693     return false;
1694   }
1695 
1696   size_t lastByteOffset = addr - base + (numBytes - 1);
1697   return lastByteOffset >= memory()->volatileMemoryLength() &&
1698          lastByteOffset < memoryMappedSize();
1699 }
1700 
1701 bool Instance::memoryAccessInBounds(uint8_t* addr, unsigned numBytes) const {
1702   MOZ_ASSERT(numBytes > 0 && numBytes <= sizeof(double));
1703 
1704   if (!metadata().usesMemory()) {
1705     return false;
1706   }
1707 
1708   uint8_t* base = memoryBase().unwrap(/* comparison */);
1709   if (addr < base) {
1710     return false;
1711   }
1712 
1713   uint32_t length = memory()->volatileMemoryLength();
1714   if (addr >= base + length) {
1715     return false;
1716   }
1717 
1718   // The pointer points into the memory.  Now check for partial OOB.
1719   //
1720   // This calculation can't wrap around because the access is small and there
1721   // always is a guard page following the memory.
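  // Worked example, assuming a 64 KiB memory (length == 65536): for
  // addr == base + 65532 and numBytes == 8, lastByteOffset is
  // 65532 + 7 == 65539 >= length, so the access is partially out of
  // bounds even though addr itself points into the memory.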
1722   size_t lastByteOffset = addr - base + (numBytes - 1);
1723   if (lastByteOffset >= length) {
1724     return false;
1725   }
1726 
1727   return true;
1728 }
1729 
1730 void Instance::tracePrivate(JSTracer* trc) {
1731   // This method is only called from WasmInstanceObject, so TraceEdge is
1732   // called here only so that the pointer can be updated during a moving
1733   // GC.
1734   MOZ_ASSERT_IF(trc->isMarkingTracer(), gc::IsMarked(trc->runtime(), &object_));
1735   TraceEdge(trc, &object_, "wasm instance object");
1736 
1737   // OK to just do one tier here; though the tiers have different funcImports
1738   // tables, they share the tls object.
1739   for (const FuncImport& fi : metadata(code().stableTier()).funcImports) {
1740     TraceNullableEdge(trc, &funcImportTls(fi).fun, "wasm import");
1741   }
1742 
1743   for (const SharedTable& table : tables_) {
1744     table->trace(trc);
1745   }
1746 
1747   for (const GlobalDesc& global : code().metadata().globals) {
1748     // Indirect reference globals get traced by the owning WebAssembly.Global.
1749     if (!global.type().isReference() || global.isConstant() ||
1750         global.isIndirect()) {
1751       continue;
1752     }
1753     GCPtrObject* obj = (GCPtrObject*)(globalData() + global.offset());
1754     TraceNullableEdge(trc, obj, "wasm reference-typed global");
1755   }
1756 
1757   TraceNullableEdge(trc, &memory_, "wasm buffer");
1758   structTypeDescrs_.trace(trc);
1759 
1760   if (maybeDebug_) {
1761     maybeDebug_->trace(trc);
1762   }
1763 }
1764 
1765 void Instance::trace(JSTracer* trc) {
1766   // Technically, instead of having this method, the caller could use
1767   // Instance::object() to get the owning WasmInstanceObject to mark,
1768   // but this method is simpler and more efficient. The trace hook of
1769   // WasmInstanceObject will call Instance::tracePrivate at which point we
1770   // can mark the rest of the children.
1771   TraceEdge(trc, &object_, "wasm instance object");
1772 }
1773 
1774 uintptr_t Instance::traceFrame(JSTracer* trc, const wasm::WasmFrameIter& wfi,
1775                                uint8_t* nextPC,
1776                                uintptr_t highestByteVisitedInPrevFrame) {
1777   const StackMap* map = code().lookupStackMap(nextPC);
1778   if (!map) {
1779     return 0;
1780   }
1781 
1782   Frame* frame = wfi.frame();
1783 
1784   // |frame| points somewhere in the middle of the area described by |map|.
1785   // We have to calculate |scanStart|, the lowest address that is described by
1786   // |map|, by consulting |map->frameOffsetFromTop|.
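  // Worked example with made-up numbers (assuming 64-bit words): for |frame|
  // at 0x7fff0040, map->frameOffsetFromTop == 2, and map->numMappedWords ==
  // 6, we get numMappedBytes == 48 and scanStart == 0x7fff0040 + 2*8 - 48 ==
  // 0x7fff0020, i.e. the map describes the bytes [0x7fff0020, 0x7fff0050).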
1787 
1788   const size_t numMappedBytes = map->numMappedWords * sizeof(void*);
1789   const uintptr_t scanStart = uintptr_t(frame) +
1790                               (map->frameOffsetFromTop * sizeof(void*)) -
1791                               numMappedBytes;
1792   MOZ_ASSERT(0 == scanStart % sizeof(void*));
1793 
1794   // Do what we can to assert that, for consecutive wasm frames, their stack
1795   // maps also abut exactly.  This is a useful sanity check on the sizing of
1796   // stack maps.
1797   //
1798   // In debug builds, the stackmap construction machinery goes to considerable
1799   // efforts to ensure that the stackmaps for consecutive frames abut exactly.
1800   // This is so as to ensure there are no areas of stack inadvertently ignored
1801   // by a stackmap, nor covered by two stackmaps.  Hence any failure of this
1802   // assertion is serious and should be investigated.
1803   MOZ_ASSERT_IF(highestByteVisitedInPrevFrame != 0,
1804                 highestByteVisitedInPrevFrame + 1 == scanStart);
1805 
1806   uintptr_t* stackWords = (uintptr_t*)scanStart;
1807 
1808   // If we have some exit stub words, this means the map also covers an area
1809 // created by an exit stub, and so the highest word of that should be a
1810   // constant created by (code created by) GenerateTrapExit.
1811   MOZ_ASSERT_IF(
1812       map->numExitStubWords > 0,
1813       stackWords[map->numExitStubWords - 1 - TrapExitDummyValueOffsetFromTop] ==
1814           TrapExitDummyValue);
1815 
1816   // And actually hand them off to the GC.
1817   for (uint32_t i = 0; i < map->numMappedWords; i++) {
1818     if (map->getBit(i) == 0) {
1819       continue;
1820     }
1821 
1822     // TODO/AnyRef-boxing: With boxed immediates and strings, the value may
1823     // not be a traceable JSObject*.
1824     ASSERT_ANYREF_IS_JSOBJECT;
1825 
1826     // This assertion seems at least moderately effective in detecting
1827     // discrepancies or misalignments between the map and reality.
1828     MOZ_ASSERT(js::gc::IsCellPointerValidOrNull((const void*)stackWords[i]));
1829 
1830     if (stackWords[i]) {
1831       TraceRoot(trc, (JSObject**)&stackWords[i],
1832                 "Instance::traceWasmFrame: normal word");
1833     }
1834   }
1835 
1836   // Finally, deal with any GC-managed fields in the DebugFrame, if it is
1837   // present.
1838   if (map->hasDebugFrame) {
1839     DebugFrame* debugFrame = DebugFrame::from(frame);
1840     char* debugFrameP = (char*)debugFrame;
1841 
1842     // TODO/AnyRef-boxing: With boxed immediates and strings, the value may
1843     // not be a traceable JSObject*.
1844     ASSERT_ANYREF_IS_JSOBJECT;
1845 
1846     for (size_t i = 0; i < MaxRegisterResults; i++) {
1847       if (debugFrame->hasSpilledRegisterRefResult(i)) {
1848         char* resultRefP = debugFrameP + DebugFrame::offsetOfRegisterResult(i);
1849         TraceNullableRoot(
1850             trc, (JSObject**)resultRefP,
1851             "Instance::traceWasmFrame: DebugFrame::resultResults_");
1852       }
1853     }
1854 
1855     if (debugFrame->hasCachedReturnJSValue()) {
1856       char* cachedReturnJSValueP =
1857           debugFrameP + DebugFrame::offsetOfCachedReturnJSValue();
1858       TraceRoot(trc, (js::Value*)cachedReturnJSValueP,
1859                 "Instance::traceWasmFrame: DebugFrame::cachedReturnJSValue_");
1860     }
1861   }
1862 
1863   return scanStart + numMappedBytes - 1;
1864 }
1865 
1866 WasmMemoryObject* Instance::memory() const { return memory_; }
1867 
1868 SharedMem<uint8_t*> Instance::memoryBase() const {
1869   MOZ_ASSERT(metadata().usesMemory());
1870   MOZ_ASSERT(tlsData()->memoryBase == memory_->buffer().dataPointerEither());
1871   return memory_->buffer().dataPointerEither();
1872 }
1873 
1874 SharedArrayRawBuffer* Instance::sharedMemoryBuffer() const {
1875   MOZ_ASSERT(memory_->isShared());
1876   return memory_->sharedArrayRawBuffer();
1877 }
1878 
1879 WasmInstanceObject* Instance::objectUnbarriered() const {
1880   return object_.unbarrieredGet();
1881 }
1882 
1883 WasmInstanceObject* Instance::object() const { return object_; }
1884 
1885 static bool EnsureEntryStubs(const Instance& instance, uint32_t funcIndex,
1886                              const FuncExport** funcExport,
1887                              void** interpEntry) {
1888   Tier tier = instance.code().bestTier();
1889 
1890   size_t funcExportIndex;
1891   *funcExport =
1892       &instance.metadata(tier).lookupFuncExport(funcIndex, &funcExportIndex);
1893 
1894   const FuncExport& fe = **funcExport;
1895   if (fe.hasEagerStubs()) {
1896     *interpEntry = instance.codeBase(tier) + fe.eagerInterpEntryOffset();
1897     return true;
1898   }
1899 
1900   MOZ_ASSERT(!instance.isAsmJS(), "only wasm can lazily export functions");
1901 
1902   // If the best tier is Ion, life is simple: background compilation has
1903   // already completed and has been committed, so there's no risk of race
1904   // conditions here.
1905   //
1906   // If the best tier is Baseline, there could be a background compilation
1907   // happening at the same time. The background compilation will lock the
1908   // first tier lazy stubs first to stop new baseline stubs from being
1909   // generated, then the second tier stubs to generate them.
1910   //
1911   // - either we take the tier1 lazy stub lock before the background
1912   // compilation gets it, then we generate the lazy stub for tier1. When the
1913   // background thread gets the tier1 lazy stub lock, it will see it has a
1914   // lazy stub and will recompile it for tier2.
1915   // - or we don't take the lock here first. Background compilation won't
1916   // find a lazy stub for this function, thus won't generate it. So we'll do
1917   // it ourselves after taking the tier2 lock.
1918 
1919   auto stubs = instance.code(tier).lazyStubs().lock();
1920   *interpEntry = stubs->lookupInterpEntry(fe.funcIndex());
1921   if (*interpEntry) {
1922     return true;
1923   }
1924 
1925   // The best tier might have changed after we've taken the lock.
1926   Tier prevTier = tier;
1927   tier = instance.code().bestTier();
1928   const CodeTier& codeTier = instance.code(tier);
1929   if (tier == prevTier) {
1930     if (!stubs->createOne(funcExportIndex, codeTier)) {
1931       return false;
1932     }
1933 
1934     *interpEntry = stubs->lookupInterpEntry(fe.funcIndex());
1935     MOZ_ASSERT(*interpEntry);
1936     return true;
1937   }
1938 
1939   MOZ_RELEASE_ASSERT(prevTier == Tier::Baseline && tier == Tier::Optimized);
1940   auto stubs2 = instance.code(tier).lazyStubs().lock();
1941 
1942   // If it didn't have a stub in the first tier, background compilation
1943   // shouldn't have made one in the second tier.
1944   MOZ_ASSERT(!stubs2->hasStub(fe.funcIndex()));
1945 
1946   if (!stubs2->createOne(funcExportIndex, codeTier)) {
1947     return false;
1948   }
1949 
1950   *interpEntry = stubs2->lookupInterpEntry(fe.funcIndex());
1951   MOZ_ASSERT(*interpEntry);
1952   return true;
1953 }
1954 
1955 static bool GetInterpEntry(Instance& instance, uint32_t funcIndex,
1956                            CallArgs args, void** interpEntry,
1957                            const FuncType** funcType) {
1958   const FuncExport* funcExport;
1959   if (!EnsureEntryStubs(instance, funcIndex, &funcExport, interpEntry)) {
1960     return false;
1961   }
1962 
1963   // EnsureEntryStubs() has ensured jit-entry stubs have been created and
1964   // installed in funcIndex's JumpTable entry, so we can now set the
1965   // JSFunction's jit-entry. See WasmInstanceObject::getExportedFunction().
1966   if (!funcExport->hasEagerStubs() && funcExport->canHaveJitEntry()) {
1967     JSFunction& callee = args.callee().as<JSFunction>();
1968     MOZ_ASSERT(!callee.isAsmJSNative(), "asm.js only has eager stubs");
1969     if (!callee.isWasmWithJitEntry()) {
1970       callee.setWasmJitEntry(instance.code().getAddressOfJitEntry(funcIndex));
1971     }
1972   }
1973 
1974   *funcType = &funcExport->funcType();
1975   return true;
1976 }
1977 
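// In brief, the observable mapping implemented below: zero results become
// `undefined`, a single result becomes that value, and multiple results
// become a dense array of the values in push order. A sketch with
// hypothetical signatures:
//
//   () -> ()           ~~>  undefined
//   () -> (i32)        ~~>  a Number
//   () -> (i32, f64)   ~~>  an Array of two Numbers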
1978 bool wasm::ResultsToJSValue(JSContext* cx, ResultType type,
1979                             void* registerResultLoc,
1980                             Maybe<char*> stackResultsLoc,
1981                             MutableHandleValue rval) {
1982   if (type.empty()) {
1983     // No results: set to undefined, and we're done.
1984     rval.setUndefined();
1985     return true;
1986   }
1987 
1988   // If we added support for multiple register results, we'd need to establish a
1989   // convention for how to store them to memory in registerResultLoc.  For now
1990   // we can punt.
1991   static_assert(MaxRegisterResults == 1);
1992 
1993   // Stack results written to stackResultsLoc; register result written
1994   // to registerResultLoc.
1995 
1996   // First, convert the register return value, and prepare to iterate in
1997   // push order.  Note that if the register result is a reference type,
1998   // it may be unrooted, so ToJSValue_anyref must not GC in that case.
1999   ABIResultIter iter(type);
2000   DebugOnly<bool> usedRegisterResult = false;
2001   for (; !iter.done(); iter.next()) {
2002     if (iter.cur().inRegister()) {
2003       MOZ_ASSERT(!usedRegisterResult);
2004       if (!ToJSValue<DebugCodegenVal>(cx, registerResultLoc, iter.cur().type(),
2005                                       rval)) {
2006         return false;
2007       }
2008       usedRegisterResult = true;
2009     }
2010   }
2011   MOZ_ASSERT(usedRegisterResult);
2012 
2013   MOZ_ASSERT((stackResultsLoc.isSome()) == (iter.count() > 1));
2014   if (!stackResultsLoc) {
2015     // A single result: we're done.
2016     return true;
2017   }
2018 
2019   // Otherwise, collect results in an array, in push order.
2020   Rooted<ArrayObject*> array(cx, NewDenseEmptyArray(cx));
2021   if (!array) {
2022     return false;
2023   }
2024   RootedValue tmp(cx);
2025   for (iter.switchToPrev(); !iter.done(); iter.prev()) {
2026     const ABIResult& result = iter.cur();
2027     if (result.onStack()) {
2028       char* loc = stackResultsLoc.value() + result.stackOffset();
2029       if (!ToJSValue<DebugCodegenVal>(cx, loc, result.type(), &tmp)) {
2030         return false;
2031       }
2032       if (!NewbornArrayPush(cx, array, tmp)) {
2033         return false;
2034       }
2035     } else {
2036       if (!NewbornArrayPush(cx, array, rval)) {
2037         return false;
2038       }
2039     }
2040   }
2041   rval.set(ObjectValue(*array));
2042   return true;
2043 }
2044 
2045 class MOZ_RAII ReturnToJSResultCollector {
2046   class MOZ_RAII StackResultsRooter : public JS::CustomAutoRooter {
2047     ReturnToJSResultCollector& collector_;
2048 
2049    public:
2050     StackResultsRooter(JSContext* cx, ReturnToJSResultCollector& collector)
2051         : JS::CustomAutoRooter(cx), collector_(collector) {}
2052 
2053     void trace(JSTracer* trc) final {
2054       for (ABIResultIter iter(collector_.type_); !iter.done(); iter.next()) {
2055         const ABIResult& result = iter.cur();
2056         if (result.onStack() && result.type().isReference()) {
2057           char* loc = collector_.stackResultsArea_.get() + result.stackOffset();
2058           JSObject** refLoc = reinterpret_cast<JSObject**>(loc);
2059           TraceNullableRoot(trc, refLoc, "StackResultsRooter::trace");
2060         }
2061       }
2062     }
2063   };
2064   friend class StackResultsRooter;
2065 
2066   ResultType type_;
2067   UniquePtr<char[], JS::FreePolicy> stackResultsArea_;
2068   Maybe<StackResultsRooter> rooter_;
2069 
2070  public:
2071   explicit ReturnToJSResultCollector(const ResultType& type) : type_(type) {}
2072   bool init(JSContext* cx) {
2073     bool needRooter = false;
2074     ABIResultIter iter(type_);
2075     for (; !iter.done(); iter.next()) {
2076       const ABIResult& result = iter.cur();
2077       if (result.onStack() && result.type().isReference()) {
2078         needRooter = true;
2079       }
2080     }
2081     uint32_t areaBytes = iter.stackBytesConsumedSoFar();
2082     MOZ_ASSERT_IF(needRooter, areaBytes > 0);
2083     if (areaBytes > 0) {
2084       // It is necessary to zero storage for ref results, and it doesn't
2085       // hurt to do so for other POD results.
2086       stackResultsArea_ = cx->make_zeroed_pod_array<char>(areaBytes);
2087       if (!stackResultsArea_) {
2088         return false;
2089       }
2090       if (needRooter) {
2091         rooter_.emplace(cx, *this);
2092       }
2093     }
2094     return true;
2095   }
2096 
2097   void* stackResultsArea() {
2098     MOZ_ASSERT(stackResultsArea_);
2099     return stackResultsArea_.get();
2100   }
2101 
2102   bool collect(JSContext* cx, void* registerResultLoc,
2103                MutableHandleValue rval) {
2104     Maybe<char*> stackResultsLoc =
2105         stackResultsArea_ ? Some(stackResultsArea_.get()) : Nothing();
2106     return ResultsToJSValue(cx, type_, registerResultLoc, stackResultsLoc,
2107                             rval);
2108   }
2109 };
2110 
2111 bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
2112   if (memory_) {
2113     // If there has been a moving grow, this Instance should have been notified.
2114     MOZ_RELEASE_ASSERT(memory_->buffer().dataPointerEither() == memoryBase());
2115   }
2116 
2117   void* interpEntry;
2118   const FuncType* funcType;
2119   if (!GetInterpEntry(*this, funcIndex, args, &interpEntry, &funcType)) {
2120     return false;
2121   }
2122 
2123 #ifdef ENABLE_WASM_SIMD
2124   if (funcType->hasV128ArgOrRet()) {
2125     JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
2126                              JSMSG_WASM_BAD_VAL_TYPE);
2127     return false;
2128   }
2129 #endif
2130 
2131   ArgTypeVector argTypes(*funcType);
2132   ResultType resultType(ResultType::Vector(funcType->results()));
2133   ReturnToJSResultCollector results(resultType);
2134   if (!results.init(cx)) {
2135     return false;
2136   }
2137 
2138   // The calling convention for an external call into wasm is to pass an
2139   // array of 16-byte values where each value contains either a coerced int32
2140   // (in the low word) or a double value (in the low dword), with the
2141   // coercions specified by the wasm signature. The external entry point
2142   // unpacks this array into the system-ABI-specified registers and stack
2143   // memory and then calls into the internal entry point. The return value is
2144   // stored in the first element of the array (which, therefore, must have
2145   // length >= 1).
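  // For example (a sketch for a hypothetical signature (i32, f64) -> i32):
  //
  //   exportArgs[0]  <-  the i32 argument, coerced, in the low word
  //   exportArgs[1]  <-  the f64 argument, in the low dword
  //   ...call through the entry trampoline...
  //   exportArgs[0]  ->  the i32 return value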
2146   Vector<ExportArg, 8> exportArgs(cx);
2147   if (!exportArgs.resize(
2148           std::max<size_t>(1, argTypes.lengthWithStackResults()))) {
2149     return false;
2150   }
2151 
2152   ASSERT_ANYREF_IS_JSOBJECT;
2153   Rooted<GCVector<JSObject*, 8, SystemAllocPolicy>> refs(cx);
2154 
2155   DebugCodegen(DebugChannel::Function, "wasm-function[%d] arguments [",
2156                funcIndex);
2157   RootedValue v(cx);
2158   for (size_t i = 0; i < argTypes.lengthWithStackResults(); ++i) {
2159     void* rawArgLoc = &exportArgs[i];
2160     if (argTypes.isSyntheticStackResultPointerArg(i)) {
2161       *reinterpret_cast<void**>(rawArgLoc) = results.stackResultsArea();
2162       continue;
2163     }
2164     size_t naturalIdx = argTypes.naturalIndex(i);
2165     v = naturalIdx < args.length() ? args[naturalIdx] : UndefinedValue();
2166     ValType type = funcType->arg(naturalIdx);
2167     if (!ToWebAssemblyValue<DebugCodegenVal>(cx, v, type, rawArgLoc, true)) {
2168       return false;
2169     }
2170     if (type.isReference()) {
2171       void* ptr = *reinterpret_cast<void**>(rawArgLoc);
2172       // Store in rooted array until no more GC is possible.
2173       switch (type.refTypeKind()) {
2174         case RefType::Func: {
2175           RootedFunction ref(cx, FuncRef::fromCompiledCode(ptr).asJSFunction());
2176           if (!refs.emplaceBack(ref)) {
2177             return false;
2178           }
2179           break;
2180         }
2181         case RefType::Any: {
2182           RootedAnyRef ref(cx, AnyRef::fromCompiledCode(ptr));
2183           ASSERT_ANYREF_IS_JSOBJECT;
2184           if (!refs.emplaceBack(ref.get().asJSObject())) {
2185             return false;
2186           }
2187           break;
2188         }
2189         case RefType::TypeIndex:
2190           MOZ_CRASH("temporarily unsupported Ref type in callExport");
2191       }
2192       DebugCodegen(DebugChannel::Function, "/(#%d)", int(refs.length() - 1));
2193     }
2194   }
2195 
2196   // Copy over reference values from the rooted array, if any.
2197   if (refs.length() > 0) {
2198     DebugCodegen(DebugChannel::Function, "; ");
2199     size_t nextRef = 0;
2200     for (size_t i = 0; i < argTypes.lengthWithStackResults(); ++i) {
2201       if (argTypes.isSyntheticStackResultPointerArg(i)) {
2202         continue;
2203       }
2204       size_t naturalIdx = argTypes.naturalIndex(i);
2205       ValType type = funcType->arg(naturalIdx);
2206       if (type.isReference()) {
2207         void** rawArgLoc = (void**)&exportArgs[i];
2208         *rawArgLoc = refs[nextRef++];
2209         DebugCodegen(DebugChannel::Function, " ref(#%d) := %p ",
2210                      int(nextRef - 1), *rawArgLoc);
2211       }
2212     }
2213     refs.clear();
2214   }
2215 
2216   DebugCodegen(DebugChannel::Function, "]\n");
2217 
2218   {
2219     JitActivation activation(cx);
2220 
2221     // Call the per-exported-function trampoline created by GenerateEntry.
2222     auto funcPtr = JS_DATA_TO_FUNC_PTR(ExportFuncPtr, interpEntry);
2223     if (!CALL_GENERATED_2(funcPtr, exportArgs.begin(), tlsData())) {
2224       return false;
2225     }
2226   }
2227 
2228   if (isAsmJS() && args.isConstructing()) {
2229     // By spec, when a JS function is called as a constructor and this
2230     // function returns a primitive type, which is the case for all asm.js
2231     // exported functions, the returned value is discarded and an empty
2232     // object is returned instead.
2233     PlainObject* obj = NewBuiltinClassInstance<PlainObject>(cx);
2234     if (!obj) {
2235       return false;
2236     }
2237     args.rval().set(ObjectValue(*obj));
2238     return true;
2239   }
2240 
2241   // Note that we're not rooting the register result, if any; we depend
2242   // on ReturnToJSResultCollector::collect to root the value on our behalf,
2243   // before causing any GC.
2244   void* registerResultLoc = &exportArgs[0];
2245   DebugCodegen(DebugChannel::Function, "wasm-function[%d]; results [",
2246                funcIndex);
2247   if (!results.collect(cx, registerResultLoc, args.rval())) {
2248     return false;
2249   }
2250   DebugCodegen(DebugChannel::Function, "]\n");
2251 
2252   return true;
2253 }
2254 
2255 JSAtom* Instance::getFuncDisplayAtom(JSContext* cx, uint32_t funcIndex) const {
2256   // The "display name" of a function is primarily shown in Error.stack which
2257   // also includes location, so use getFuncNameBeforeLocation.
2258   UTF8Bytes name;
2259   if (!metadata().getFuncNameBeforeLocation(funcIndex, &name)) {
2260     return nullptr;
2261   }
2262 
2263   return AtomizeUTF8Chars(cx, name.begin(), name.length());
2264 }
2265 
2266 void Instance::ensureProfilingLabels(bool profilingEnabled) const {
2267   return code_->ensureProfilingLabels(profilingEnabled);
2268 }
2269 
2270 void Instance::onMovingGrowMemory() {
2271   MOZ_ASSERT(!isAsmJS());
2272   MOZ_ASSERT(!memory_->isShared());
2273 
2274   ArrayBufferObject& buffer = memory_->buffer().as<ArrayBufferObject>();
2275   tlsData()->memoryBase = buffer.dataPointer();
2276   tlsData()->boundsCheckLimit = memory_->boundsCheckLimit();
2277 }
2278 
2279 void Instance::onMovingGrowTable(const Table* theTable) {
2280   MOZ_ASSERT(!isAsmJS());
2281 
2282   // `theTable` has grown and we must update the cached data for it.
2283   // Importantly, we may have cached that data in more than one location:
2284   // once for each time the table was imported into this instance.
2285   //
2286   // When an instance is registered as an observer of a table it is only
2287   // registered once, regardless of how many times the table was imported.
2288   // Thus when a table is grown, onMovingGrowTable() is only invoked once for
2289   // the table.
2290   //
2291   // Ergo we must go through the entire list of tables in the instance here
2292   // and check for the table in all the cached-data slots; we can't exit after
2293   // the first hit.
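  // For instance (a sketch): if the same table was imported twice, then for
  // some i != j both tables_[i] and tables_[j] equal theTable, and the loop
  // below must refresh the TableTls data for both slots.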
2294 
2295   for (uint32_t i = 0; i < tables_.length(); i++) {
2296     if (tables_[i] == theTable) {
2297       TableTls& table = tableTls(metadata().tables[i]);
2298       table.length = tables_[i]->length();
2299       table.functionBase = tables_[i]->functionBase();
2300     }
2301   }
2302 }
2303 
2304 void Instance::deoptimizeImportExit(uint32_t funcImportIndex) {
2305   Tier t = code().bestTier();
2306   const FuncImport& fi = metadata(t).funcImports[funcImportIndex];
2307   FuncImportTls& import = funcImportTls(fi);
2308   import.code = codeBase(t) + fi.interpExitCodeOffset();
2309   import.jitScript = nullptr;
2310 }
2311 
2312 JSString* Instance::createDisplayURL(JSContext* cx) {
2313   // In the best case, we simply have a URL, from a streaming compilation of a
2314   // fetched Response.
2315 
2316   if (metadata().filenameIsURL) {
2317     return NewStringCopyZ<CanGC>(cx, metadata().filename.get());
2318   }
2319 
2320   // Otherwise, build the wasm module URL from the following parts:
2321   // - "wasm:" as the protocol;
2322   // - the URI-encoded filename from the metadata (if it can be encoded), plus ":";
2323   // - the 64-bit hash of the module bytes (as a hex dump).
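  // For example, a module compiled from "foo.wasm" with debugging enabled
  // might get a display URL like the following (hash value hypothetical):
  //
  //   wasm:foo.wasm:1f2e3d4c5b6a7988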
2324 
2325   JSStringBuilder result(cx);
2326   if (!result.append("wasm:")) {
2327     return nullptr;
2328   }
2329 
2330   if (const char* filename = metadata().filename.get()) {
2331     // EncodeURI fails due to invalid chars or OOM -- propagate the
2332     // failure only for OOM.
2333     JSString* filenamePrefix = EncodeURI(cx, filename, strlen(filename));
2334     if (!filenamePrefix) {
2335       if (cx->isThrowingOutOfMemory()) {
2336         return nullptr;
2337       }
2338 
2339       MOZ_ASSERT(!cx->isThrowingOverRecursed());
2340       cx->clearPendingException();
2341       return nullptr;
2342     }
2343 
2344     if (!result.append(filenamePrefix)) {
2345       return nullptr;
2346     }
2347   }
2348 
2349   if (metadata().debugEnabled) {
2350     if (!result.append(":")) {
2351       return nullptr;
2352     }
2353 
2354     const ModuleHash& hash = metadata().debugHash;
2355     for (size_t i = 0; i < sizeof(ModuleHash); i++) {
2356       char digit1 = hash[i] / 16, digit2 = hash[i] % 16;
2357       if (!result.append(
2358               (char)(digit1 < 10 ? digit1 + '0' : digit1 + 'a' - 10))) {
2359         return nullptr;
2360       }
2361       if (!result.append(
2362               (char)(digit2 < 10 ? digit2 + '0' : digit2 + 'a' - 10))) {
2363         return nullptr;
2364       }
2365     }
2366   }
2367 
2368   return result.finishString();
2369 }
2370 
2371 WasmBreakpointSite* Instance::getOrCreateBreakpointSite(JSContext* cx,
2372                                                         uint32_t offset) {
2373   MOZ_ASSERT(debugEnabled());
2374   return debug().getOrCreateBreakpointSite(cx, this, offset);
2375 }
2376 
2377 void Instance::destroyBreakpointSite(JSFreeOp* fop, uint32_t offset) {
2378   MOZ_ASSERT(debugEnabled());
2379   return debug().destroyBreakpointSite(fop, this, offset);
2380 }
2381 
2382 void Instance::disassembleExport(JSContext* cx, uint32_t funcIndex, Tier tier,
2383                                  PrintCallback callback) const {
2384   const MetadataTier& metadataTier = metadata(tier);
2385   const FuncExport& funcExport = metadataTier.lookupFuncExport(funcIndex);
2386   const CodeRange& range = metadataTier.codeRange(funcExport);
2387   const CodeTier& codeTier = code(tier);
2388   const ModuleSegment& segment = codeTier.segment();
2389 
2390   MOZ_ASSERT(range.begin() < segment.length());
2391   MOZ_ASSERT(range.end() < segment.length());
2392 
2393   uint8_t* functionCode = segment.base() + range.begin();
2394   jit::Disassemble(functionCode, range.end() - range.begin(), callback);
2395 }
2396 
2397 void Instance::addSizeOfMisc(MallocSizeOf mallocSizeOf,
2398                              Metadata::SeenSet* seenMetadata,
2399                              Code::SeenSet* seenCode,
2400                              Table::SeenSet* seenTables, size_t* code,
2401                              size_t* data) const {
2402   *data += mallocSizeOf(this);
2403   *data += mallocSizeOf(tlsData_.get());
2404   for (const SharedTable& table : tables_) {
2405     *data += table->sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenTables);
2406   }
2407 
2408   if (maybeDebug_) {
2409     maybeDebug_->addSizeOfMisc(mallocSizeOf, seenMetadata, seenCode, code,
2410                                data);
2411   }
2412 
2413   code_->addSizeOfMiscIfNotSeen(mallocSizeOf, seenMetadata, seenCode, code,
2414                                 data);
2415 }
2416