/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2015 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "wasm/WasmTypes.h"

#include "mozilla/FloatingPoint.h"

#include <algorithm>

#include "jsmath.h"
#include "jit/JitFrames.h"
#include "js/friend/ErrorMessages.h"  // JSMSG_*
#include "js/Printf.h"
#include "util/Memory.h"
#include "vm/ArrayBufferObject.h"
31 #include "vm/Warnings.h" // js:WarnNumberASCII
32 #include "wasm/TypedObject.h"
33 #include "wasm/WasmBaselineCompile.h"
34 #include "wasm/WasmInstance.h"
35 #include "wasm/WasmJS.h"
36 #include "wasm/WasmSerialize.h"
37 #include "wasm/WasmStubs.h"
38
39 #include "vm/JSObject-inl.h"
40 #include "vm/NativeObject-inl.h"
41
42 using namespace js;
43 using namespace js::jit;
44 using namespace js::wasm;
45
46 using mozilla::IsPowerOfTwo;
47 using mozilla::MakeEnumeratedRange;
48
49 // We have only tested huge memory on x64 and arm64.
50
51 #if defined(WASM_SUPPORTS_HUGE_MEMORY)
52 # if !(defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_ARM64))
53 # error "Not an expected configuration"
54 # endif
55 #endif
56
57 // All plausible targets must be able to do at least IEEE754 double
58 // loads/stores, hence the lower limit of 8. Some Intel processors support
59 // AVX-512 loads/stores, hence the upper limit of 64.
60 static_assert(MaxMemoryAccessSize >= 8, "MaxMemoryAccessSize too low");
61 static_assert(MaxMemoryAccessSize <= 64, "MaxMemoryAccessSize too high");
62 static_assert((MaxMemoryAccessSize & (MaxMemoryAccessSize - 1)) == 0,
63 "MaxMemoryAccessSize is not a power of two");
64
65 #if defined(WASM_SUPPORTS_HUGE_MEMORY)
// TODO: We want this static_assert back, but it requires MaxMemory32Bytes to
// be a constant or constexpr function, not a regular function as it is now.
//
// The assert is also present in WasmMemoryObject::isHuge and
// WasmMemoryObject::grow, so it's OK to comment out here for now.

// static_assert(MaxMemory32Bytes < HugeMappedSize(),
//               "Normal array buffer could be confused with huge memory");
#endif

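// WasmJSExceptionObject boxes an arbitrary JS value thrown through wasm so it
// can travel as an exnref; create() below stashes the value in the reserved
// VALUE_SLOT fixed slot.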
const JSClass WasmJSExceptionObject::class_ = {
    "WasmJSExnRefObject", JSCLASS_HAS_RESERVED_SLOTS(RESERVED_SLOTS)};

WasmJSExceptionObject* WasmJSExceptionObject::create(JSContext* cx,
                                                     MutableHandleValue value) {
  WasmJSExceptionObject* obj =
      NewObjectWithGivenProto<WasmJSExceptionObject>(cx, nullptr);

  if (!obj) {
    return nullptr;
  }

  obj->setFixedSlot(VALUE_SLOT, value);

  return obj;
}

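// An immediate type id packs an entire function signature into one 32-bit
// word. Reading from the least significant bit upward, the layout produced by
// TypeIdDesc::immediate() below is:
//
//   [1 tag bit][1 result-present bit][3 result-type bits, if present]
//   [4 argument-count bits][3 bits per argument type]
//
// With 32 total bits this leaves room for sMaxTypes = (32 - 1 - 1 - 4) / 3 = 8
// argument/result types; anything larger must use a global type id instead.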
using ImmediateType = uint32_t;  // for 32/64 consistency
static const unsigned sTotalBits = sizeof(ImmediateType) * 8;
static const unsigned sTagBits = 1;
static const unsigned sReturnBit = 1;
static const unsigned sLengthBits = 4;
static const unsigned sTypeBits = 3;
static const unsigned sMaxTypes =
    (sTotalBits - sTagBits - sReturnBit - sLengthBits) / sTypeBits;

static bool IsImmediateType(ValType vt) {
  switch (vt.kind()) {
    case ValType::I32:
    case ValType::I64:
    case ValType::F32:
    case ValType::F64:
    case ValType::V128:
      return true;
    case ValType::Ref:
      switch (vt.refTypeKind()) {
        case RefType::Func:
        case RefType::Extern:
        case RefType::Eq:
          return true;
        case RefType::TypeIndex:
          return false;
      }
      break;
    case ValType::Rtt:
      return false;
  }
  MOZ_CRASH("bad ValType");
}

static unsigned EncodeImmediateType(ValType vt) {
  static_assert(7 < (1 << sTypeBits), "fits");
  switch (vt.kind()) {
    case ValType::I32:
      return 0;
    case ValType::I64:
      return 1;
    case ValType::F32:
      return 2;
    case ValType::F64:
      return 3;
    case ValType::V128:
      return 4;
    case ValType::Ref:
      switch (vt.refTypeKind()) {
        case RefType::Func:
          return 5;
        case RefType::Extern:
          return 6;
        case RefType::Eq:
          return 7;
        case RefType::TypeIndex:
          break;
      }
      break;
    case ValType::Rtt:
      break;
  }
  MOZ_CRASH("bad ValType");
}

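// A type gets a global (per-instance) id when it cannot be represented in the
// packed immediate form: non-function types, signatures with more than one
// result or more than sMaxTypes entries, and any signature mentioning a
// non-immediate ValType.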
/* static */
bool TypeIdDesc::isGlobal(const TypeDef& type) {
  if (!type.isFuncType()) {
    return true;
  }
  const FuncType& funcType = type.funcType();
  const ValTypeVector& results = funcType.results();
  const ValTypeVector& args = funcType.args();
  if (results.length() + args.length() > sMaxTypes) {
    return true;
  }

  if (results.length() > 1) {
    return true;
  }

  for (ValType v : results) {
    if (!IsImmediateType(v)) {
      return true;
    }
  }

  for (ValType v : args) {
    if (!IsImmediateType(v)) {
      return true;
    }
  }

  return false;
}

/* static */
TypeIdDesc TypeIdDesc::global(const TypeDef& type, uint32_t globalDataOffset) {
  MOZ_ASSERT(isGlobal(type));
  return TypeIdDesc(TypeIdDescKind::Global, globalDataOffset);
}

static ImmediateType LengthToBits(uint32_t length) {
  static_assert(sMaxTypes <= ((1 << sLengthBits) - 1), "fits");
  MOZ_ASSERT(length <= sMaxTypes);
  return length;
}

/* static */
TypeIdDesc TypeIdDesc::immediate(const TypeDef& type) {
  const FuncType& funcType = type.funcType();

  ImmediateType immediate = ImmediateBit;
  uint32_t shift = sTagBits;

  if (funcType.results().length() > 0) {
    MOZ_ASSERT(funcType.results().length() == 1);
    immediate |= (1 << shift);
    shift += sReturnBit;

    immediate |= EncodeImmediateType(funcType.results()[0]) << shift;
    shift += sTypeBits;
  } else {
    shift += sReturnBit;
  }

  immediate |= LengthToBits(funcType.args().length()) << shift;
  shift += sLengthBits;

  for (ValType argType : funcType.args()) {
    immediate |= EncodeImmediateType(argType) << shift;
    shift += sTypeBits;
  }

  MOZ_ASSERT(shift <= sTotalBits);
  return TypeIdDesc(TypeIdDescKind::Immediate, immediate);
}
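
// Worked example (assuming ImmediateBit is bit 0, as sTagBits == 1 suggests):
// for the signature (i32) -> i64 the packing above produces
//   bit 0     = 1    (immediate tag)
//   bit 1     = 1    (has a result)
//   bits 2-4  = 001  (result type i64)
//   bits 5-8  = 0001 (one argument)
//   bits 9-11 = 000  (argument type i32)
// i.e. an immediate of 0x27.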

size_t TypeDefWithId::serializedSize() const {
  return TypeDef::serializedSize() + sizeof(TypeIdDesc);
}

uint8_t* TypeDefWithId::serialize(uint8_t* cursor) const {
  cursor = TypeDef::serialize(cursor);
  cursor = WriteBytes(cursor, &id, sizeof(id));
  return cursor;
}

const uint8_t* TypeDefWithId::deserialize(const uint8_t* cursor) {
  cursor = TypeDef::deserialize(cursor);
  cursor = ReadBytes(cursor, &id, sizeof(id));
  return cursor;
}

size_t TypeDefWithId::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
  return TypeDef::sizeOfExcludingThis(mallocSizeOf);
}

ArgTypeVector::ArgTypeVector(const FuncType& funcType)
    : args_(funcType.args()),
      hasStackResults_(ABIResultIter::HasStackResults(
          ResultType::Vector(funcType.results()))) {}

size_t Import::serializedSize() const {
  return module.serializedSize() + field.serializedSize() + sizeof(kind);
}

uint8_t* Import::serialize(uint8_t* cursor) const {
  cursor = module.serialize(cursor);
  cursor = field.serialize(cursor);
  cursor = WriteScalar<DefinitionKind>(cursor, kind);
  return cursor;
}

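// The && chains in the deserialize() methods below rely on each step yielding
// a null cursor on failure: once a step returns nullptr, the remaining reads
// short-circuit and the null cursor is returned to the caller.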
const uint8_t* Import::deserialize(const uint8_t* cursor) {
  (cursor = module.deserialize(cursor)) &&
      (cursor = field.deserialize(cursor)) &&
      (cursor = ReadScalar<DefinitionKind>(cursor, &kind));
  return cursor;
}

size_t Import::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
  return module.sizeOfExcludingThis(mallocSizeOf) +
         field.sizeOfExcludingThis(mallocSizeOf);
}

Export::Export(UniqueChars fieldName, uint32_t index, DefinitionKind kind)
    : fieldName_(std::move(fieldName)) {
  pod.kind_ = kind;
  pod.index_ = index;
}

Export::Export(UniqueChars fieldName, DefinitionKind kind)
    : fieldName_(std::move(fieldName)) {
  pod.kind_ = kind;
  pod.index_ = 0;
}

uint32_t Export::funcIndex() const {
  MOZ_ASSERT(pod.kind_ == DefinitionKind::Function);
  return pod.index_;
}

uint32_t Export::globalIndex() const {
  MOZ_ASSERT(pod.kind_ == DefinitionKind::Global);
  return pod.index_;
}

#ifdef ENABLE_WASM_EXCEPTIONS
uint32_t Export::eventIndex() const {
  MOZ_ASSERT(pod.kind_ == DefinitionKind::Event);
  return pod.index_;
}
#endif

uint32_t Export::tableIndex() const {
  MOZ_ASSERT(pod.kind_ == DefinitionKind::Table);
  return pod.index_;
}

size_t Export::serializedSize() const {
  return fieldName_.serializedSize() + sizeof(pod);
}

uint8_t* Export::serialize(uint8_t* cursor) const {
  cursor = fieldName_.serialize(cursor);
  cursor = WriteBytes(cursor, &pod, sizeof(pod));
  return cursor;
}

const uint8_t* Export::deserialize(const uint8_t* cursor) {
  (cursor = fieldName_.deserialize(cursor)) &&
      (cursor = ReadBytes(cursor, &pod, sizeof(pod)));
  return cursor;
}

size_t Export::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
  return fieldName_.sizeOfExcludingThis(mallocSizeOf);
}

size_t GlobalDesc::serializedSize() const {
  size_t size = sizeof(kind_);
  switch (kind_) {
    case GlobalKind::Import:
      size += initial_.serializedSize() + sizeof(offset_) + sizeof(isMutable_) +
              sizeof(isWasm_) + sizeof(isExport_) + sizeof(importIndex_);
      break;
    case GlobalKind::Variable:
      size += initial_.serializedSize() + sizeof(offset_) + sizeof(isMutable_) +
              sizeof(isWasm_) + sizeof(isExport_);
      break;
    case GlobalKind::Constant:
      size += initial_.serializedSize();
      break;
    default:
      MOZ_CRASH();
  }
  return size;
}

uint8_t* GlobalDesc::serialize(uint8_t* cursor) const {
  cursor = WriteBytes(cursor, &kind_, sizeof(kind_));
  switch (kind_) {
    case GlobalKind::Import:
      cursor = initial_.serialize(cursor);
      cursor = WriteBytes(cursor, &offset_, sizeof(offset_));
      cursor = WriteBytes(cursor, &isMutable_, sizeof(isMutable_));
      cursor = WriteBytes(cursor, &isWasm_, sizeof(isWasm_));
      cursor = WriteBytes(cursor, &isExport_, sizeof(isExport_));
      cursor = WriteBytes(cursor, &importIndex_, sizeof(importIndex_));
      break;
    case GlobalKind::Variable:
      cursor = initial_.serialize(cursor);
      cursor = WriteBytes(cursor, &offset_, sizeof(offset_));
      cursor = WriteBytes(cursor, &isMutable_, sizeof(isMutable_));
      cursor = WriteBytes(cursor, &isWasm_, sizeof(isWasm_));
      cursor = WriteBytes(cursor, &isExport_, sizeof(isExport_));
      break;
    case GlobalKind::Constant:
      cursor = initial_.serialize(cursor);
      break;
    default:
      MOZ_CRASH();
  }
  return cursor;
}

const uint8_t* GlobalDesc::deserialize(const uint8_t* cursor) {
  if (!(cursor = ReadBytes(cursor, &kind_, sizeof(kind_)))) {
    return nullptr;
  }
  switch (kind_) {
    case GlobalKind::Import:
      (cursor = initial_.deserialize(cursor)) &&
          (cursor = ReadBytes(cursor, &offset_, sizeof(offset_))) &&
          (cursor = ReadBytes(cursor, &isMutable_, sizeof(isMutable_))) &&
          (cursor = ReadBytes(cursor, &isWasm_, sizeof(isWasm_))) &&
          (cursor = ReadBytes(cursor, &isExport_, sizeof(isExport_))) &&
          (cursor = ReadBytes(cursor, &importIndex_, sizeof(importIndex_)));
      break;
    case GlobalKind::Variable:
      (cursor = initial_.deserialize(cursor)) &&
          (cursor = ReadBytes(cursor, &offset_, sizeof(offset_))) &&
          (cursor = ReadBytes(cursor, &isMutable_, sizeof(isMutable_))) &&
          (cursor = ReadBytes(cursor, &isWasm_, sizeof(isWasm_))) &&
          (cursor = ReadBytes(cursor, &isExport_, sizeof(isExport_)));
      break;
    case GlobalKind::Constant:
      cursor = initial_.deserialize(cursor);
      break;
    default:
      MOZ_CRASH();
  }
  return cursor;
}

size_t GlobalDesc::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
  return initial_.sizeOfExcludingThis(mallocSizeOf);
}

size_t ElemSegment::serializedSize() const {
  return sizeof(kind) + sizeof(tableIndex) + sizeof(elemType) +
         SerializedMaybeSize(offsetIfActive) +
         SerializedPodVectorSize(elemFuncIndices);
}

uint8_t* ElemSegment::serialize(uint8_t* cursor) const {
  cursor = WriteBytes(cursor, &kind, sizeof(kind));
  cursor = WriteBytes(cursor, &tableIndex, sizeof(tableIndex));
  cursor = WriteBytes(cursor, &elemType, sizeof(elemType));
  cursor = SerializeMaybe(cursor, offsetIfActive);
  cursor = SerializePodVector(cursor, elemFuncIndices);
  return cursor;
}

const uint8_t* ElemSegment::deserialize(const uint8_t* cursor) {
  (cursor = ReadBytes(cursor, &kind, sizeof(kind))) &&
      (cursor = ReadBytes(cursor, &tableIndex, sizeof(tableIndex))) &&
      (cursor = ReadBytes(cursor, &elemType, sizeof(elemType))) &&
      (cursor = DeserializeMaybe(cursor, &offsetIfActive)) &&
      (cursor = DeserializePodVector(cursor, &elemFuncIndices));
  return cursor;
}

size_t ElemSegment::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
  return SizeOfMaybeExcludingThis(offsetIfActive, mallocSizeOf) +
         elemFuncIndices.sizeOfExcludingThis(mallocSizeOf);
}

size_t DataSegment::serializedSize() const {
  return SerializedMaybeSize(offsetIfActive) + SerializedPodVectorSize(bytes);
}

uint8_t* DataSegment::serialize(uint8_t* cursor) const {
  cursor = SerializeMaybe(cursor, offsetIfActive);
  cursor = SerializePodVector(cursor, bytes);
  return cursor;
}

const uint8_t* DataSegment::deserialize(const uint8_t* cursor) {
  (cursor = DeserializeMaybe(cursor, &offsetIfActive)) &&
      (cursor = DeserializePodVector(cursor, &bytes));
  return cursor;
}

size_t DataSegment::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
  return SizeOfMaybeExcludingThis(offsetIfActive, mallocSizeOf) +
         bytes.sizeOfExcludingThis(mallocSizeOf);
}

size_t CustomSection::serializedSize() const {
  return SerializedPodVectorSize(name) +
         SerializedPodVectorSize(payload->bytes);
}

uint8_t* CustomSection::serialize(uint8_t* cursor) const {
  cursor = SerializePodVector(cursor, name);
  cursor = SerializePodVector(cursor, payload->bytes);
  return cursor;
}

const uint8_t* CustomSection::deserialize(const uint8_t* cursor) {
  cursor = DeserializePodVector(cursor, &name);
  if (!cursor) {
    return nullptr;
  }

  Bytes bytes;
  cursor = DeserializePodVector(cursor, &bytes);
  if (!cursor) {
    return nullptr;
  }
  payload = js_new<ShareableBytes>(std::move(bytes));
  if (!payload) {
    return nullptr;
  }

  return cursor;
}

size_t CustomSection::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
  return name.sizeOfExcludingThis(mallocSizeOf) + sizeof(*payload) +
         payload->sizeOfExcludingThis(mallocSizeOf);
}

// Heap length on ARM should fit in an ARM immediate. We approximate the set
// of valid ARM immediates with the predicate:
//   2^n for n in [16, 24)
// or
//   2^24 * n for n >= 1.
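// For example, 0x00010000 (2^16) and 0x03000000 (3 * 2^24) satisfy the
// predicate, while 0x00012000 does not.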
bool wasm::IsValidARMImmediate(uint32_t i) {
  bool valid = (IsPowerOfTwo(i) || (i & 0x00ffffff) == 0);

  MOZ_ASSERT_IF(valid, i % PageSize == 0);

  return valid;
}

uint64_t wasm::RoundUpToNextValidARMImmediate(uint64_t i) {
  MOZ_ASSERT(i <= HighestValidARMImmediate);
  static_assert(HighestValidARMImmediate == 0xff000000,
                "algorithm relies on specific constant");

  if (i <= 16 * 1024 * 1024) {
    i = i ? mozilla::RoundUpPow2(i) : 0;
  } else {
    i = (i + 0x00ffffff) & ~0x00ffffff;
  }

  MOZ_ASSERT(IsValidARMImmediate(i));

  return i;
}
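
// For instance, 0x00900000 (9 MiB) rounds up to the power of two 0x01000000,
// while 0x01234567 lands in the multiple-of-2^24 range and rounds up to
// 0x02000000.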

bool wasm::IsValidBoundsCheckImmediate(uint32_t i) {
#ifdef JS_CODEGEN_ARM
  return IsValidARMImmediate(i);
#else
  return true;
#endif
}

size_t wasm::ComputeMappedSize(wasm::Pages maxPages) {
  // TODO: memory64 maximum size may overflow size_t
  size_t maxSize = maxPages.byteLength();

  // It is the bounds-check limit, not the mapped size, that gets baked into
  // code. Thus round up the maxSize to the next valid immediate value
  // *before* adding in the guard page.

#ifdef JS_CODEGEN_ARM
  uint64_t boundsCheckLimit = RoundUpToNextValidARMImmediate(maxSize);
#else
  uint64_t boundsCheckLimit = maxSize;
#endif
  MOZ_ASSERT(IsValidBoundsCheckImmediate(boundsCheckLimit));

  MOZ_ASSERT(boundsCheckLimit % gc::SystemPageSize() == 0);
  MOZ_ASSERT(GuardSize % gc::SystemPageSize() == 0);
  return boundsCheckLimit + GuardSize;
}
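
// Example: on ARM a 17 MiB maximum rounds its bounds-check limit up to the
// next valid immediate, 0x02000000 (32 MiB), so the mapped region is
// 0x02000000 + GuardSize bytes; on other platforms the limit is the 17 MiB
// maximum itself.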

bool TrapSiteVectorArray::empty() const {
  for (Trap trap : MakeEnumeratedRange(Trap::Limit)) {
    if (!(*this)[trap].empty()) {
      return false;
    }
  }

  return true;
}

void TrapSiteVectorArray::clear() {
  for (Trap trap : MakeEnumeratedRange(Trap::Limit)) {
    (*this)[trap].clear();
  }
}

void TrapSiteVectorArray::swap(TrapSiteVectorArray& rhs) {
  for (Trap trap : MakeEnumeratedRange(Trap::Limit)) {
    (*this)[trap].swap(rhs[trap]);
  }
}

void TrapSiteVectorArray::shrinkStorageToFit() {
  for (Trap trap : MakeEnumeratedRange(Trap::Limit)) {
    (*this)[trap].shrinkStorageToFit();
  }
}

size_t TrapSiteVectorArray::serializedSize() const {
  size_t ret = 0;
  for (Trap trap : MakeEnumeratedRange(Trap::Limit)) {
    ret += SerializedPodVectorSize((*this)[trap]);
  }
  return ret;
}

uint8_t* TrapSiteVectorArray::serialize(uint8_t* cursor) const {
  for (Trap trap : MakeEnumeratedRange(Trap::Limit)) {
    cursor = SerializePodVector(cursor, (*this)[trap]);
  }
  return cursor;
}

const uint8_t* TrapSiteVectorArray::deserialize(const uint8_t* cursor) {
  for (Trap trap : MakeEnumeratedRange(Trap::Limit)) {
    cursor = DeserializePodVector(cursor, &(*this)[trap]);
    if (!cursor) {
      return nullptr;
    }
  }
  return cursor;
}

size_t TrapSiteVectorArray::sizeOfExcludingThis(
    MallocSizeOf mallocSizeOf) const {
  size_t ret = 0;
  for (Trap trap : MakeEnumeratedRange(Trap::Limit)) {
    ret += (*this)[trap].sizeOfExcludingThis(mallocSizeOf);
  }
  return ret;
}

CodeRange::CodeRange(Kind kind, Offsets offsets)
    : begin_(offsets.begin), ret_(0), end_(offsets.end), kind_(kind) {
  MOZ_ASSERT(begin_ <= end_);
  PodZero(&u);
#ifdef DEBUG
  switch (kind_) {
    case FarJumpIsland:
    case TrapExit:
    case Throw:
      break;
    default:
      MOZ_CRASH("should use more specific constructor");
  }
#endif
}

CodeRange::CodeRange(Kind kind, uint32_t funcIndex, Offsets offsets)
    : begin_(offsets.begin), ret_(0), end_(offsets.end), kind_(kind) {
  u.funcIndex_ = funcIndex;
  u.func.lineOrBytecode_ = 0;
  u.func.beginToUncheckedCallEntry_ = 0;
  u.func.beginToTierEntry_ = 0;
  MOZ_ASSERT(isEntry());
  MOZ_ASSERT(begin_ <= end_);
}

CodeRange::CodeRange(Kind kind, CallableOffsets offsets)
    : begin_(offsets.begin), ret_(offsets.ret), end_(offsets.end), kind_(kind) {
  MOZ_ASSERT(begin_ < ret_);
  MOZ_ASSERT(ret_ < end_);
  PodZero(&u);
#ifdef DEBUG
  switch (kind_) {
    case DebugTrap:
    case BuiltinThunk:
      break;
    default:
      MOZ_CRASH("should use more specific constructor");
  }
#endif
}

CodeRange::CodeRange(Kind kind, uint32_t funcIndex, CallableOffsets offsets)
    : begin_(offsets.begin), ret_(offsets.ret), end_(offsets.end), kind_(kind) {
  MOZ_ASSERT(isImportExit() && !isImportJitExit());
  MOZ_ASSERT(begin_ < ret_);
  MOZ_ASSERT(ret_ < end_);
  u.funcIndex_ = funcIndex;
  u.func.lineOrBytecode_ = 0;
  u.func.beginToUncheckedCallEntry_ = 0;
  u.func.beginToTierEntry_ = 0;
}

CodeRange::CodeRange(uint32_t funcIndex, JitExitOffsets offsets)
    : begin_(offsets.begin),
      ret_(offsets.ret),
      end_(offsets.end),
      kind_(ImportJitExit) {
  MOZ_ASSERT(isImportJitExit());
  MOZ_ASSERT(begin_ < ret_);
  MOZ_ASSERT(ret_ < end_);
  u.funcIndex_ = funcIndex;
  u.jitExit.beginToUntrustedFPStart_ = offsets.untrustedFPStart - begin_;
  u.jitExit.beginToUntrustedFPEnd_ = offsets.untrustedFPEnd - begin_;
  MOZ_ASSERT(jitExitUntrustedFPStart() == offsets.untrustedFPStart);
  MOZ_ASSERT(jitExitUntrustedFPEnd() == offsets.untrustedFPEnd);
}

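// Function code ranges additionally record two interior entry points relative
// to begin_: the unchecked call entry and the tier entry. Both deltas must
// fit in a byte, as the UINT8_MAX assertions below enforce.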
CodeRange::CodeRange(uint32_t funcIndex, uint32_t funcLineOrBytecode,
                     FuncOffsets offsets)
    : begin_(offsets.begin),
      ret_(offsets.ret),
      end_(offsets.end),
      kind_(Function) {
  MOZ_ASSERT(begin_ < ret_);
  MOZ_ASSERT(ret_ < end_);
  MOZ_ASSERT(offsets.uncheckedCallEntry - begin_ <= UINT8_MAX);
  MOZ_ASSERT(offsets.tierEntry - begin_ <= UINT8_MAX);
  u.funcIndex_ = funcIndex;
  u.func.lineOrBytecode_ = funcLineOrBytecode;
  u.func.beginToUncheckedCallEntry_ = offsets.uncheckedCallEntry - begin_;
  u.func.beginToTierEntry_ = offsets.tierEntry - begin_;
}

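// Binary-search a code offset to the code range containing it. The vector
// must be sorted by offset, as the name implies; nullptr is returned when no
// range matches the target.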
const CodeRange* wasm::LookupInSorted(const CodeRangeVector& codeRanges,
                                      CodeRange::OffsetInCode target) {
  size_t lowerBound = 0;
  size_t upperBound = codeRanges.length();

  size_t match;
  if (!BinarySearch(codeRanges, lowerBound, upperBound, target, &match)) {
    return nullptr;
  }

  return &codeRanges[match];
}

void wasm::Log(JSContext* cx, const char* fmt, ...) {
  MOZ_ASSERT(!cx->isExceptionPending());

  if (!cx->options().wasmVerbose()) {
    return;
  }

  va_list args;
  va_start(args, fmt);

  if (UniqueChars chars = JS_vsmprintf(fmt, args)) {
    WarnNumberASCII(cx, JSMSG_WASM_VERBOSE, chars.get());
    if (cx->isExceptionPending()) {
      cx->clearPendingException();
    }
  }

  va_end(args);
}

#ifdef WASM_CODEGEN_DEBUG
bool wasm::IsCodegenDebugEnabled(DebugChannel channel) {
  switch (channel) {
    case DebugChannel::Function:
      return JitOptions.enableWasmFuncCallSpew;
    case DebugChannel::Import:
      return JitOptions.enableWasmImportCallSpew;
  }
  return false;
}
#endif

void wasm::DebugCodegen(DebugChannel channel, const char* fmt, ...) {
#ifdef WASM_CODEGEN_DEBUG
  if (!IsCodegenDebugEnabled(channel)) {
    return;
  }
  va_list ap;
  va_start(ap, fmt);
  vfprintf(stderr, fmt, ap);
  va_end(ap);
#endif
}

#ifdef ENABLE_WASM_SIMD_WORMHOLE
static const int8_t WormholeTrigger[] = {31, 0, 30, 2, 29, 4, 28, 6,
                                         27, 8, 26, 10, 25, 12, 24};
static_assert(sizeof(WormholeTrigger) == 15);

static const int8_t WormholeSignatureBytes[16] = {0xD, 0xE, 0xA, 0xD, 0xD, 0x0,
                                                  0x0, 0xD, 0xC, 0xA, 0xF, 0xE,
                                                  0xB, 0xA, 0xB, 0xE};
static_assert(sizeof(WormholeSignatureBytes) == 16);

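// The trigger is matched byte-for-byte against the first 15 lanes of a
// shuffle mask; the signature constant spells out the magic nibbles
// DEADD00D / CAFEBABE so wormhole results are easy to recognize in memory.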
bool wasm::IsWormholeTrigger(const V128& shuffleMask) {
  return memcmp(shuffleMask.bytes, WormholeTrigger, sizeof(WormholeTrigger)) ==
         0;
}

jit::SimdConstant wasm::WormholeSignature() {
  return jit::SimdConstant::CreateX16(WormholeSignatureBytes);
}

#endif