1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 *
4 * Copyright 2016 Mozilla Foundation
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19 #include "wasm/WasmValidate.h"
20
21 #include "mozilla/CheckedInt.h"
22 #include "mozilla/Unused.h"
23 #include "mozilla/Utf8.h"
24
25 #include "builtin/TypedObject.h"
26 #include "jit/JitOptions.h"
27 #include "js/Printf.h"
28 #include "vm/JSContext.h"
29 #include "vm/Realm.h"
30 #include "wasm/WasmOpIter.h"
31
32 using namespace js;
33 using namespace js::jit;
34 using namespace js::wasm;
35
36 using mozilla::AsChars;
37 using mozilla::CheckedInt;
38 using mozilla::CheckedInt32;
39 using mozilla::IsUtf8;
40 using mozilla::MakeSpan;
41 using mozilla::Unused;
42
43 // Decoder implementation.
44
failf(const char * msg,...)45 bool Decoder::failf(const char* msg, ...) {
46 va_list ap;
47 va_start(ap, msg);
48 UniqueChars str(JS_vsmprintf(msg, ap));
49 va_end(ap);
50 if (!str) {
51 return false;
52 }
53
54 return fail(str.get());
55 }
56
warnf(const char * msg,...)57 void Decoder::warnf(const char* msg, ...) {
58 if (!warnings_) {
59 return;
60 }
61
62 va_list ap;
63 va_start(ap, msg);
64 UniqueChars str(JS_vsmprintf(msg, ap));
65 va_end(ap);
66 if (!str) {
67 return;
68 }
69
70 Unused << warnings_->append(std::move(str));
71 }
72
fail(size_t errorOffset,const char * msg)73 bool Decoder::fail(size_t errorOffset, const char* msg) {
74 MOZ_ASSERT(error_);
75 UniqueChars strWithOffset(JS_smprintf("at offset %zu: %s", errorOffset, msg));
76 if (!strWithOffset) {
77 return false;
78 }
79
80 *error_ = std::move(strWithOffset);
81 return false;
82 }
83
readSectionHeader(uint8_t * id,SectionRange * range)84 bool Decoder::readSectionHeader(uint8_t* id, SectionRange* range) {
85 if (!readFixedU8(id)) {
86 return false;
87 }
88
89 uint32_t size;
90 if (!readVarU32(&size)) {
91 return false;
92 }
93
94 range->start = currentOffset();
95 range->size = size;
96 return true;
97 }
98
// Position the decoder at the payload of the section 'id', skipping (and
// recording in 'env') any custom sections that precede it. Three outcomes:
// returns true with '*range' set when the section is found; returns true
// with '*range' still Nothing — and the decoder plus env->customSections
// fully rewound — when the section is absent; returns false only when the
// module bytes are malformed.
bool Decoder::startSection(SectionId id, ModuleEnvironment* env,
                           MaybeSectionRange* range, const char* sectionName) {
  MOZ_ASSERT(!*range);

  // Record state at beginning of section to allow rewinding to this point
  // if, after skipping through several custom sections, we don't find the
  // section 'id'.
  const uint8_t* const initialCur = cur_;
  const size_t initialCustomSectionsLength = env->customSections.length();

  // Maintain a pointer to the current section that gets updated as custom
  // sections are skipped.
  const uint8_t* currentSectionStart = cur_;

  // Only start a section with 'id', skipping any custom sections before it.

  uint8_t idValue;
  if (!readFixedU8(&idValue)) {
    goto rewind;
  }

  while (idValue != uint8_t(id)) {
    // A different non-custom section id means 'id' is absent entirely
    // (sections appear in a fixed order), so rewind rather than fail.
    if (idValue != uint8_t(SectionId::Custom)) {
      goto rewind;
    }

    // Rewind to the beginning of the current section since this is what
    // skipCustomSection() assumes.
    cur_ = currentSectionStart;
    if (!skipCustomSection(env)) {
      return false;
    }

    // Having successfully skipped a custom section, consider the next
    // section.
    currentSectionStart = cur_;
    if (!readFixedU8(&idValue)) {
      goto rewind;
    }
  }

  // Don't check the size since the range of bytes being decoded might not
  // contain the section body. (This is currently the case when streaming: the
  // code section header is decoded with the module environment bytes, the
  // body of the code section is streamed in separately.)

  uint32_t size;
  if (!readVarU32(&size)) {
    goto fail;
  }

  range->emplace();
  (*range)->start = currentOffset();
  (*range)->size = size;
  return true;

rewind:
  // Section absent: restore the decoder position and drop any custom-section
  // entries recorded while scanning. This is still a success.
  cur_ = initialCur;
  env->customSections.shrinkTo(initialCustomSectionsLength);
  return true;

fail:
  return failf("failed to start %s section", sectionName);
}
163
finishSection(const SectionRange & range,const char * sectionName)164 bool Decoder::finishSection(const SectionRange& range,
165 const char* sectionName) {
166 if (resilientMode_) {
167 return true;
168 }
169 if (range.size != currentOffset() - range.start) {
170 return failf("byte size mismatch in %s section", sectionName);
171 }
172 return true;
173 }
174
// Position the decoder just past the name of the custom section whose name
// matches 'expected'/'expectedLength' (or the first custom section when
// 'expected' is null), recording each custom section encountered in
// env->customSections. Returns true with '*range' set on a match; returns
// true with '*range' reset — and all state rewound — when no such section
// exists; returns false only on malformed bytes or OOM.
bool Decoder::startCustomSection(const char* expected, size_t expectedLength,
                                 ModuleEnvironment* env,
                                 MaybeSectionRange* range) {
  // Record state at beginning of section to allow rewinding to this point
  // if, after skipping through several custom sections, we don't find the
  // section 'id'.
  const uint8_t* const initialCur = cur_;
  const size_t initialCustomSectionsLength = env->customSections.length();

  while (true) {
    // Try to start a custom section. If we can't, rewind to the beginning
    // since we may have skipped several custom sections already looking for
    // 'expected'.
    if (!startSection(SectionId::Custom, env, range, "custom")) {
      return false;
    }
    if (!*range) {
      goto rewind;
    }

    // The payload must lie entirely within the bytes we have.
    if (bytesRemain() < (*range)->size) {
      goto fail;
    }

    CustomSectionEnv sec;
    if (!readVarU32(&sec.nameLength) || sec.nameLength > bytesRemain()) {
      goto fail;
    }

    sec.nameOffset = currentOffset();
    sec.payloadOffset = sec.nameOffset + sec.nameLength;

    // The name itself must not extend past the end of the section.
    uint32_t payloadEnd = (*range)->start + (*range)->size;
    if (sec.payloadOffset > payloadEnd) {
      goto fail;
    }

    sec.payloadLength = payloadEnd - sec.payloadOffset;

    // Now that we have a valid custom section, record its offsets in the
    // metadata which can be queried by the user via Module.customSections.
    // Note: after an entry is appended, it may be popped if this loop or
    // the loop in startSection needs to rewind.
    if (!env->customSections.append(sec)) {
      return false;
    }

    // If this is the expected custom section, we're done.
    if (!expected || (expectedLength == sec.nameLength &&
                      !memcmp(cur_, expected, sec.nameLength))) {
      cur_ += sec.nameLength;
      return true;
    }

    // Otherwise, blindly skip the custom section and keep looking.
    skipAndFinishCustomSection(**range);
    range->reset();
  }
  MOZ_CRASH("unreachable");

rewind:
  // No more custom sections before the next known section: restore the
  // decoder position and drop the entries recorded while scanning.
  cur_ = initialCur;
  env->customSections.shrinkTo(initialCustomSectionsLength);
  return true;

fail:
  return fail("failed to start custom section");
}
243
finishCustomSection(const char * name,const SectionRange & range)244 void Decoder::finishCustomSection(const char* name, const SectionRange& range) {
245 MOZ_ASSERT(cur_ >= beg_);
246 MOZ_ASSERT(cur_ <= end_);
247
248 if (error_ && *error_) {
249 warnf("in the '%s' custom section: %s", name, error_->get());
250 skipAndFinishCustomSection(range);
251 return;
252 }
253
254 uint32_t actualSize = currentOffset() - range.start;
255 if (range.size != actualSize) {
256 if (actualSize < range.size) {
257 warnf("in the '%s' custom section: %" PRIu32 " unconsumed bytes", name,
258 uint32_t(range.size - actualSize));
259 } else {
260 warnf("in the '%s' custom section: %" PRIu32
261 " bytes consumed past the end",
262 name, uint32_t(actualSize - range.size));
263 }
264 skipAndFinishCustomSection(range);
265 return;
266 }
267
268 // Nothing to do! (c.f. skipAndFinishCustomSection())
269 }
270
skipAndFinishCustomSection(const SectionRange & range)271 void Decoder::skipAndFinishCustomSection(const SectionRange& range) {
272 MOZ_ASSERT(cur_ >= beg_);
273 MOZ_ASSERT(cur_ <= end_);
274 cur_ = (beg_ + (range.start - offsetInModule_)) + range.size;
275 MOZ_ASSERT(cur_ <= end_);
276 clearError();
277 }
278
skipCustomSection(ModuleEnvironment * env)279 bool Decoder::skipCustomSection(ModuleEnvironment* env) {
280 MaybeSectionRange range;
281 if (!startCustomSection(nullptr, 0, env, &range)) {
282 return false;
283 }
284 if (!range) {
285 return fail("expected custom section");
286 }
287
288 skipAndFinishCustomSection(*range);
289 return true;
290 }
291
startNameSubsection(NameType nameType,Maybe<uint32_t> * endOffset)292 bool Decoder::startNameSubsection(NameType nameType,
293 Maybe<uint32_t>* endOffset) {
294 MOZ_ASSERT(!*endOffset);
295
296 const uint8_t* const initialPosition = cur_;
297
298 uint8_t nameTypeValue;
299 if (!readFixedU8(&nameTypeValue)) {
300 goto rewind;
301 }
302
303 if (nameTypeValue != uint8_t(nameType)) {
304 goto rewind;
305 }
306
307 uint32_t payloadLength;
308 if (!readVarU32(&payloadLength) || payloadLength > bytesRemain()) {
309 return fail("bad name subsection payload length");
310 }
311
312 *endOffset = Some(currentOffset() + payloadLength);
313 return true;
314
315 rewind:
316 cur_ = initialPosition;
317 return true;
318 }
319
finishNameSubsection(uint32_t expected)320 bool Decoder::finishNameSubsection(uint32_t expected) {
321 uint32_t actual = currentOffset();
322 if (expected != actual) {
323 return failf("bad name subsection length (expected: %" PRIu32
324 ", actual: %" PRIu32 ")",
325 expected, actual);
326 }
327
328 return true;
329 }
330
skipNameSubsection()331 bool Decoder::skipNameSubsection() {
332 uint8_t nameTypeValue;
333 if (!readFixedU8(&nameTypeValue)) {
334 return fail("unable to read name subsection id");
335 }
336
337 switch (nameTypeValue) {
338 case uint8_t(NameType::Module):
339 case uint8_t(NameType::Function):
340 return fail("out of order name subsections");
341 default:
342 break;
343 }
344
345 uint32_t payloadLength;
346 if (!readVarU32(&payloadLength) || !readBytes(payloadLength)) {
347 return fail("bad name subsection payload length");
348 }
349
350 return true;
351 }
352
353 // Misc helpers.
354
EncodeLocalEntries(Encoder & e,const ValTypeVector & locals)355 bool wasm::EncodeLocalEntries(Encoder& e, const ValTypeVector& locals) {
356 if (locals.length() > MaxLocals) {
357 return false;
358 }
359
360 uint32_t numLocalEntries = 0;
361 if (locals.length()) {
362 ValType prev = locals[0];
363 numLocalEntries++;
364 for (ValType t : locals) {
365 if (t != prev) {
366 numLocalEntries++;
367 prev = t;
368 }
369 }
370 }
371
372 if (!e.writeVarU32(numLocalEntries)) {
373 return false;
374 }
375
376 if (numLocalEntries) {
377 ValType prev = locals[0];
378 uint32_t count = 1;
379 for (uint32_t i = 1; i < locals.length(); i++, count++) {
380 if (prev != locals[i]) {
381 if (!e.writeVarU32(count)) {
382 return false;
383 }
384 if (!e.writeValType(prev)) {
385 return false;
386 }
387 prev = locals[i];
388 count = 0;
389 }
390 }
391 if (!e.writeVarU32(count)) {
392 return false;
393 }
394 if (!e.writeValType(prev)) {
395 return false;
396 }
397 }
398
399 return true;
400 }
401
DecodeLocalEntries(Decoder & d,const TypeDefVector & types,bool refTypesEnabled,bool gcTypesEnabled,ValTypeVector * locals)402 bool wasm::DecodeLocalEntries(Decoder& d, const TypeDefVector& types,
403 bool refTypesEnabled, bool gcTypesEnabled,
404 ValTypeVector* locals) {
405 uint32_t numLocalEntries;
406 if (!d.readVarU32(&numLocalEntries)) {
407 return d.fail("failed to read number of local entries");
408 }
409
410 for (uint32_t i = 0; i < numLocalEntries; i++) {
411 uint32_t count;
412 if (!d.readVarU32(&count)) {
413 return d.fail("failed to read local entry count");
414 }
415
416 if (MaxLocals - locals->length() < count) {
417 return d.fail("too many locals");
418 }
419
420 ValType type;
421 if (!d.readValType(types, refTypesEnabled, gcTypesEnabled, &type)) {
422 return false;
423 }
424
425 if (!locals->appendN(type, count)) {
426 return false;
427 }
428 }
429
430 return true;
431 }
432
DecodeValidatedLocalEntries(Decoder & d,ValTypeVector * locals)433 bool wasm::DecodeValidatedLocalEntries(Decoder& d, ValTypeVector* locals) {
434 uint32_t numLocalEntries;
435 MOZ_ALWAYS_TRUE(d.readVarU32(&numLocalEntries));
436
437 for (uint32_t i = 0; i < numLocalEntries; i++) {
438 uint32_t count = d.uncheckedReadVarU32();
439 MOZ_ASSERT(MaxLocals - locals->length() >= count);
440 if (!locals->appendN(d.uncheckedReadValType(), count)) {
441 return false;
442 }
443 }
444
445 return true;
446 }
447
448 // Function body validation.
449
// A vector that stores no values. Validation only needs to type-check
// operands, not track them, so the OpIter policy below uses this in place
// of a real value vector: every slot aliases the single dummy element.
class NothingVector {
  Nothing unused_;

 public:
  bool resize(size_t length) { return true; }
  Nothing& operator[](size_t) { return unused_; }
  Nothing& back() { return unused_; }
};
458
// OpIter policy for pure validation: values and control items carry no
// information beyond what the type checker tracks internally.
struct ValidatingPolicy {
  using Value = Nothing;
  using ValueVector = NothingVector;
  using ControlItem = Nothing;
};

using ValidatingOpIter = OpIter<ValidatingPolicy>;
466
DecodeFunctionBodyExprs(const ModuleEnvironment & env,uint32_t funcIndex,const ValTypeVector & locals,const uint8_t * bodyEnd,Decoder * d)467 static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
468 uint32_t funcIndex,
469 const ValTypeVector& locals,
470 const uint8_t* bodyEnd, Decoder* d) {
471 ValidatingOpIter iter(env, *d);
472
473 if (!iter.readFunctionStart(funcIndex)) {
474 return false;
475 }
476
477 #define CHECK(c) \
478 if (!(c)) return false; \
479 break
480
481 while (true) {
482 OpBytes op;
483 if (!iter.readOp(&op)) {
484 return false;
485 }
486
487 Nothing nothing;
488 NothingVector nothings;
489 ResultType unusedType;
490
491 switch (op.b0) {
492 case uint16_t(Op::End): {
493 LabelKind unusedKind;
494 if (!iter.readEnd(&unusedKind, &unusedType, ¬hings, ¬hings)) {
495 return false;
496 }
497 iter.popEnd();
498 if (iter.controlStackEmpty()) {
499 return iter.readFunctionEnd(bodyEnd);
500 }
501 break;
502 }
503 case uint16_t(Op::Nop):
504 CHECK(iter.readNop());
505 case uint16_t(Op::Drop):
506 CHECK(iter.readDrop());
507 case uint16_t(Op::Call): {
508 uint32_t unusedIndex;
509 NothingVector unusedArgs;
510 CHECK(iter.readCall(&unusedIndex, &unusedArgs));
511 }
512 case uint16_t(Op::CallIndirect): {
513 uint32_t unusedIndex, unusedIndex2;
514 NothingVector unusedArgs;
515 CHECK(iter.readCallIndirect(&unusedIndex, &unusedIndex2, ¬hing,
516 &unusedArgs));
517 }
518 case uint16_t(Op::I32Const): {
519 int32_t unused;
520 CHECK(iter.readI32Const(&unused));
521 }
522 case uint16_t(Op::I64Const): {
523 int64_t unused;
524 CHECK(iter.readI64Const(&unused));
525 }
526 case uint16_t(Op::F32Const): {
527 float unused;
528 CHECK(iter.readF32Const(&unused));
529 }
530 case uint16_t(Op::F64Const): {
531 double unused;
532 CHECK(iter.readF64Const(&unused));
533 }
534 case uint16_t(Op::GetLocal): {
535 uint32_t unused;
536 CHECK(iter.readGetLocal(locals, &unused));
537 }
538 case uint16_t(Op::SetLocal): {
539 uint32_t unused;
540 CHECK(iter.readSetLocal(locals, &unused, ¬hing));
541 }
542 case uint16_t(Op::TeeLocal): {
543 uint32_t unused;
544 CHECK(iter.readTeeLocal(locals, &unused, ¬hing));
545 }
546 case uint16_t(Op::GetGlobal): {
547 uint32_t unused;
548 CHECK(iter.readGetGlobal(&unused));
549 }
550 case uint16_t(Op::SetGlobal): {
551 uint32_t unused;
552 CHECK(iter.readSetGlobal(&unused, ¬hing));
553 }
554 #ifdef ENABLE_WASM_REFTYPES
555 case uint16_t(Op::TableGet): {
556 if (!env.refTypesEnabled()) {
557 return iter.unrecognizedOpcode(&op);
558 }
559 uint32_t unusedTableIndex;
560 CHECK(iter.readTableGet(&unusedTableIndex, ¬hing));
561 }
562 case uint16_t(Op::TableSet): {
563 if (!env.refTypesEnabled()) {
564 return iter.unrecognizedOpcode(&op);
565 }
566 uint32_t unusedTableIndex;
567 CHECK(iter.readTableSet(&unusedTableIndex, ¬hing, ¬hing));
568 }
569 #endif
570 case uint16_t(Op::SelectNumeric): {
571 StackType unused;
572 CHECK(iter.readSelect(/*typed*/ false, &unused, ¬hing, ¬hing,
573 ¬hing));
574 }
575 case uint16_t(Op::SelectTyped): {
576 if (!env.refTypesEnabled()) {
577 return iter.unrecognizedOpcode(&op);
578 }
579 StackType unused;
580 CHECK(iter.readSelect(/*typed*/ true, &unused, ¬hing, ¬hing,
581 ¬hing));
582 }
583 case uint16_t(Op::Block):
584 CHECK(iter.readBlock(&unusedType));
585 case uint16_t(Op::Loop):
586 CHECK(iter.readLoop(&unusedType));
587 case uint16_t(Op::If):
588 CHECK(iter.readIf(&unusedType, ¬hing));
589 case uint16_t(Op::Else):
590 CHECK(iter.readElse(&unusedType, &unusedType, ¬hings));
591 case uint16_t(Op::I32Clz):
592 case uint16_t(Op::I32Ctz):
593 case uint16_t(Op::I32Popcnt):
594 CHECK(iter.readUnary(ValType::I32, ¬hing));
595 case uint16_t(Op::I64Clz):
596 case uint16_t(Op::I64Ctz):
597 case uint16_t(Op::I64Popcnt):
598 CHECK(iter.readUnary(ValType::I64, ¬hing));
599 case uint16_t(Op::F32Abs):
600 case uint16_t(Op::F32Neg):
601 case uint16_t(Op::F32Ceil):
602 case uint16_t(Op::F32Floor):
603 case uint16_t(Op::F32Sqrt):
604 case uint16_t(Op::F32Trunc):
605 case uint16_t(Op::F32Nearest):
606 CHECK(iter.readUnary(ValType::F32, ¬hing));
607 case uint16_t(Op::F64Abs):
608 case uint16_t(Op::F64Neg):
609 case uint16_t(Op::F64Ceil):
610 case uint16_t(Op::F64Floor):
611 case uint16_t(Op::F64Sqrt):
612 case uint16_t(Op::F64Trunc):
613 case uint16_t(Op::F64Nearest):
614 CHECK(iter.readUnary(ValType::F64, ¬hing));
615 case uint16_t(Op::I32Add):
616 case uint16_t(Op::I32Sub):
617 case uint16_t(Op::I32Mul):
618 case uint16_t(Op::I32DivS):
619 case uint16_t(Op::I32DivU):
620 case uint16_t(Op::I32RemS):
621 case uint16_t(Op::I32RemU):
622 case uint16_t(Op::I32And):
623 case uint16_t(Op::I32Or):
624 case uint16_t(Op::I32Xor):
625 case uint16_t(Op::I32Shl):
626 case uint16_t(Op::I32ShrS):
627 case uint16_t(Op::I32ShrU):
628 case uint16_t(Op::I32Rotl):
629 case uint16_t(Op::I32Rotr):
630 CHECK(iter.readBinary(ValType::I32, ¬hing, ¬hing));
631 case uint16_t(Op::I64Add):
632 case uint16_t(Op::I64Sub):
633 case uint16_t(Op::I64Mul):
634 case uint16_t(Op::I64DivS):
635 case uint16_t(Op::I64DivU):
636 case uint16_t(Op::I64RemS):
637 case uint16_t(Op::I64RemU):
638 case uint16_t(Op::I64And):
639 case uint16_t(Op::I64Or):
640 case uint16_t(Op::I64Xor):
641 case uint16_t(Op::I64Shl):
642 case uint16_t(Op::I64ShrS):
643 case uint16_t(Op::I64ShrU):
644 case uint16_t(Op::I64Rotl):
645 case uint16_t(Op::I64Rotr):
646 CHECK(iter.readBinary(ValType::I64, ¬hing, ¬hing));
647 case uint16_t(Op::F32Add):
648 case uint16_t(Op::F32Sub):
649 case uint16_t(Op::F32Mul):
650 case uint16_t(Op::F32Div):
651 case uint16_t(Op::F32Min):
652 case uint16_t(Op::F32Max):
653 case uint16_t(Op::F32CopySign):
654 CHECK(iter.readBinary(ValType::F32, ¬hing, ¬hing));
655 case uint16_t(Op::F64Add):
656 case uint16_t(Op::F64Sub):
657 case uint16_t(Op::F64Mul):
658 case uint16_t(Op::F64Div):
659 case uint16_t(Op::F64Min):
660 case uint16_t(Op::F64Max):
661 case uint16_t(Op::F64CopySign):
662 CHECK(iter.readBinary(ValType::F64, ¬hing, ¬hing));
663 case uint16_t(Op::I32Eq):
664 case uint16_t(Op::I32Ne):
665 case uint16_t(Op::I32LtS):
666 case uint16_t(Op::I32LtU):
667 case uint16_t(Op::I32LeS):
668 case uint16_t(Op::I32LeU):
669 case uint16_t(Op::I32GtS):
670 case uint16_t(Op::I32GtU):
671 case uint16_t(Op::I32GeS):
672 case uint16_t(Op::I32GeU):
673 CHECK(iter.readComparison(ValType::I32, ¬hing, ¬hing));
674 case uint16_t(Op::I64Eq):
675 case uint16_t(Op::I64Ne):
676 case uint16_t(Op::I64LtS):
677 case uint16_t(Op::I64LtU):
678 case uint16_t(Op::I64LeS):
679 case uint16_t(Op::I64LeU):
680 case uint16_t(Op::I64GtS):
681 case uint16_t(Op::I64GtU):
682 case uint16_t(Op::I64GeS):
683 case uint16_t(Op::I64GeU):
684 CHECK(iter.readComparison(ValType::I64, ¬hing, ¬hing));
685 case uint16_t(Op::F32Eq):
686 case uint16_t(Op::F32Ne):
687 case uint16_t(Op::F32Lt):
688 case uint16_t(Op::F32Le):
689 case uint16_t(Op::F32Gt):
690 case uint16_t(Op::F32Ge):
691 CHECK(iter.readComparison(ValType::F32, ¬hing, ¬hing));
692 case uint16_t(Op::F64Eq):
693 case uint16_t(Op::F64Ne):
694 case uint16_t(Op::F64Lt):
695 case uint16_t(Op::F64Le):
696 case uint16_t(Op::F64Gt):
697 case uint16_t(Op::F64Ge):
698 CHECK(iter.readComparison(ValType::F64, ¬hing, ¬hing));
699 case uint16_t(Op::I32Eqz):
700 CHECK(iter.readConversion(ValType::I32, ValType::I32, ¬hing));
701 case uint16_t(Op::I64Eqz):
702 case uint16_t(Op::I32WrapI64):
703 CHECK(iter.readConversion(ValType::I64, ValType::I32, ¬hing));
704 case uint16_t(Op::I32TruncSF32):
705 case uint16_t(Op::I32TruncUF32):
706 case uint16_t(Op::I32ReinterpretF32):
707 CHECK(iter.readConversion(ValType::F32, ValType::I32, ¬hing));
708 case uint16_t(Op::I32TruncSF64):
709 case uint16_t(Op::I32TruncUF64):
710 CHECK(iter.readConversion(ValType::F64, ValType::I32, ¬hing));
711 case uint16_t(Op::I64ExtendSI32):
712 case uint16_t(Op::I64ExtendUI32):
713 CHECK(iter.readConversion(ValType::I32, ValType::I64, ¬hing));
714 case uint16_t(Op::I64TruncSF32):
715 case uint16_t(Op::I64TruncUF32):
716 CHECK(iter.readConversion(ValType::F32, ValType::I64, ¬hing));
717 case uint16_t(Op::I64TruncSF64):
718 case uint16_t(Op::I64TruncUF64):
719 case uint16_t(Op::I64ReinterpretF64):
720 CHECK(iter.readConversion(ValType::F64, ValType::I64, ¬hing));
721 case uint16_t(Op::F32ConvertSI32):
722 case uint16_t(Op::F32ConvertUI32):
723 case uint16_t(Op::F32ReinterpretI32):
724 CHECK(iter.readConversion(ValType::I32, ValType::F32, ¬hing));
725 case uint16_t(Op::F32ConvertSI64):
726 case uint16_t(Op::F32ConvertUI64):
727 CHECK(iter.readConversion(ValType::I64, ValType::F32, ¬hing));
728 case uint16_t(Op::F32DemoteF64):
729 CHECK(iter.readConversion(ValType::F64, ValType::F32, ¬hing));
730 case uint16_t(Op::F64ConvertSI32):
731 case uint16_t(Op::F64ConvertUI32):
732 CHECK(iter.readConversion(ValType::I32, ValType::F64, ¬hing));
733 case uint16_t(Op::F64ConvertSI64):
734 case uint16_t(Op::F64ConvertUI64):
735 case uint16_t(Op::F64ReinterpretI64):
736 CHECK(iter.readConversion(ValType::I64, ValType::F64, ¬hing));
737 case uint16_t(Op::F64PromoteF32):
738 CHECK(iter.readConversion(ValType::F32, ValType::F64, ¬hing));
739 case uint16_t(Op::I32Extend8S):
740 case uint16_t(Op::I32Extend16S):
741 CHECK(iter.readConversion(ValType::I32, ValType::I32, ¬hing));
742 case uint16_t(Op::I64Extend8S):
743 case uint16_t(Op::I64Extend16S):
744 case uint16_t(Op::I64Extend32S):
745 CHECK(iter.readConversion(ValType::I64, ValType::I64, ¬hing));
746 case uint16_t(Op::I32Load8S):
747 case uint16_t(Op::I32Load8U): {
748 LinearMemoryAddress<Nothing> addr;
749 CHECK(iter.readLoad(ValType::I32, 1, &addr));
750 }
751 case uint16_t(Op::I32Load16S):
752 case uint16_t(Op::I32Load16U): {
753 LinearMemoryAddress<Nothing> addr;
754 CHECK(iter.readLoad(ValType::I32, 2, &addr));
755 }
756 case uint16_t(Op::I32Load): {
757 LinearMemoryAddress<Nothing> addr;
758 CHECK(iter.readLoad(ValType::I32, 4, &addr));
759 }
760 case uint16_t(Op::I64Load8S):
761 case uint16_t(Op::I64Load8U): {
762 LinearMemoryAddress<Nothing> addr;
763 CHECK(iter.readLoad(ValType::I64, 1, &addr));
764 }
765 case uint16_t(Op::I64Load16S):
766 case uint16_t(Op::I64Load16U): {
767 LinearMemoryAddress<Nothing> addr;
768 CHECK(iter.readLoad(ValType::I64, 2, &addr));
769 }
770 case uint16_t(Op::I64Load32S):
771 case uint16_t(Op::I64Load32U): {
772 LinearMemoryAddress<Nothing> addr;
773 CHECK(iter.readLoad(ValType::I64, 4, &addr));
774 }
775 case uint16_t(Op::I64Load): {
776 LinearMemoryAddress<Nothing> addr;
777 CHECK(iter.readLoad(ValType::I64, 8, &addr));
778 }
779 case uint16_t(Op::F32Load): {
780 LinearMemoryAddress<Nothing> addr;
781 CHECK(iter.readLoad(ValType::F32, 4, &addr));
782 }
783 case uint16_t(Op::F64Load): {
784 LinearMemoryAddress<Nothing> addr;
785 CHECK(iter.readLoad(ValType::F64, 8, &addr));
786 }
787 case uint16_t(Op::I32Store8): {
788 LinearMemoryAddress<Nothing> addr;
789 CHECK(iter.readStore(ValType::I32, 1, &addr, ¬hing));
790 }
791 case uint16_t(Op::I32Store16): {
792 LinearMemoryAddress<Nothing> addr;
793 CHECK(iter.readStore(ValType::I32, 2, &addr, ¬hing));
794 }
795 case uint16_t(Op::I32Store): {
796 LinearMemoryAddress<Nothing> addr;
797 CHECK(iter.readStore(ValType::I32, 4, &addr, ¬hing));
798 }
799 case uint16_t(Op::I64Store8): {
800 LinearMemoryAddress<Nothing> addr;
801 CHECK(iter.readStore(ValType::I64, 1, &addr, ¬hing));
802 }
803 case uint16_t(Op::I64Store16): {
804 LinearMemoryAddress<Nothing> addr;
805 CHECK(iter.readStore(ValType::I64, 2, &addr, ¬hing));
806 }
807 case uint16_t(Op::I64Store32): {
808 LinearMemoryAddress<Nothing> addr;
809 CHECK(iter.readStore(ValType::I64, 4, &addr, ¬hing));
810 }
811 case uint16_t(Op::I64Store): {
812 LinearMemoryAddress<Nothing> addr;
813 CHECK(iter.readStore(ValType::I64, 8, &addr, ¬hing));
814 }
815 case uint16_t(Op::F32Store): {
816 LinearMemoryAddress<Nothing> addr;
817 CHECK(iter.readStore(ValType::F32, 4, &addr, ¬hing));
818 }
819 case uint16_t(Op::F64Store): {
820 LinearMemoryAddress<Nothing> addr;
821 CHECK(iter.readStore(ValType::F64, 8, &addr, ¬hing));
822 }
823 case uint16_t(Op::MemoryGrow):
824 CHECK(iter.readMemoryGrow(¬hing));
825 case uint16_t(Op::MemorySize):
826 CHECK(iter.readMemorySize());
827 case uint16_t(Op::Br): {
828 uint32_t unusedDepth;
829 CHECK(iter.readBr(&unusedDepth, &unusedType, ¬hings));
830 }
831 case uint16_t(Op::BrIf): {
832 uint32_t unusedDepth;
833 CHECK(iter.readBrIf(&unusedDepth, &unusedType, ¬hings, ¬hing));
834 }
835 case uint16_t(Op::BrTable): {
836 Uint32Vector unusedDepths;
837 uint32_t unusedDefault;
838 CHECK(iter.readBrTable(&unusedDepths, &unusedDefault, &unusedType,
839 ¬hings, ¬hing));
840 }
841 case uint16_t(Op::Return):
842 CHECK(iter.readReturn(¬hings));
843 case uint16_t(Op::Unreachable):
844 CHECK(iter.readUnreachable());
845 #ifdef ENABLE_WASM_GC
846 case uint16_t(Op::GcPrefix): {
847 switch (op.b1) {
848 case uint32_t(GcOp::StructNew): {
849 if (!env.gcTypesEnabled()) {
850 return iter.unrecognizedOpcode(&op);
851 }
852 uint32_t unusedUint;
853 NothingVector unusedArgs;
854 CHECK(iter.readStructNew(&unusedUint, &unusedArgs));
855 }
856 case uint32_t(GcOp::StructGet): {
857 if (!env.gcTypesEnabled()) {
858 return iter.unrecognizedOpcode(&op);
859 }
860 uint32_t unusedUint1, unusedUint2;
861 CHECK(iter.readStructGet(&unusedUint1, &unusedUint2, ¬hing));
862 }
863 case uint32_t(GcOp::StructSet): {
864 if (!env.gcTypesEnabled()) {
865 return iter.unrecognizedOpcode(&op);
866 }
867 uint32_t unusedUint1, unusedUint2;
868 CHECK(iter.readStructSet(&unusedUint1, &unusedUint2, ¬hing,
869 ¬hing));
870 }
871 case uint32_t(GcOp::StructNarrow): {
872 if (!env.gcTypesEnabled()) {
873 return iter.unrecognizedOpcode(&op);
874 }
875 ValType unusedTy, unusedTy2;
876 CHECK(iter.readStructNarrow(&unusedTy, &unusedTy2, ¬hing));
877 }
878 default:
879 return iter.unrecognizedOpcode(&op);
880 }
881 break;
882 }
883 #endif
884
885 #ifdef ENABLE_WASM_SIMD
886 case uint16_t(Op::SimdPrefix): {
887 if (!env.v128Enabled()) {
888 return iter.unrecognizedOpcode(&op);
889 }
890 uint32_t noIndex;
891 switch (op.b1) {
892 case uint32_t(SimdOp::I8x16ExtractLaneS):
893 case uint32_t(SimdOp::I8x16ExtractLaneU):
894 CHECK(iter.readExtractLane(ValType::I32, 16, &noIndex, ¬hing));
895 case uint32_t(SimdOp::I16x8ExtractLaneS):
896 case uint32_t(SimdOp::I16x8ExtractLaneU):
897 CHECK(iter.readExtractLane(ValType::I32, 8, &noIndex, ¬hing));
898 case uint32_t(SimdOp::I32x4ExtractLane):
899 CHECK(iter.readExtractLane(ValType::I32, 4, &noIndex, ¬hing));
900 case uint32_t(SimdOp::I64x2ExtractLane):
901 CHECK(iter.readExtractLane(ValType::I64, 2, &noIndex, ¬hing));
902 case uint32_t(SimdOp::F32x4ExtractLane):
903 CHECK(iter.readExtractLane(ValType::F32, 4, &noIndex, ¬hing));
904 case uint32_t(SimdOp::F64x2ExtractLane):
905 CHECK(iter.readExtractLane(ValType::F64, 2, &noIndex, ¬hing));
906
907 case uint32_t(SimdOp::I8x16Splat):
908 case uint32_t(SimdOp::I16x8Splat):
909 case uint32_t(SimdOp::I32x4Splat):
910 CHECK(iter.readConversion(ValType::I32, ValType::V128, ¬hing));
911 case uint32_t(SimdOp::I64x2Splat):
912 CHECK(iter.readConversion(ValType::I64, ValType::V128, ¬hing));
913 case uint32_t(SimdOp::F32x4Splat):
914 CHECK(iter.readConversion(ValType::F32, ValType::V128, ¬hing));
915 case uint32_t(SimdOp::F64x2Splat):
916 CHECK(iter.readConversion(ValType::F64, ValType::V128, ¬hing));
917
918 case uint32_t(SimdOp::I8x16AnyTrue):
919 case uint32_t(SimdOp::I8x16AllTrue):
920 case uint32_t(SimdOp::I16x8AnyTrue):
921 case uint32_t(SimdOp::I16x8AllTrue):
922 case uint32_t(SimdOp::I32x4AnyTrue):
923 case uint32_t(SimdOp::I32x4AllTrue):
924 CHECK(iter.readConversion(ValType::V128, ValType::I32, ¬hing));
925
926 case uint32_t(SimdOp::I8x16ReplaceLane):
927 CHECK(iter.readReplaceLane(ValType::I32, 16, &noIndex, ¬hing,
928 ¬hing));
929 case uint32_t(SimdOp::I16x8ReplaceLane):
930 CHECK(iter.readReplaceLane(ValType::I32, 8, &noIndex, ¬hing,
931 ¬hing));
932 case uint32_t(SimdOp::I32x4ReplaceLane):
933 CHECK(iter.readReplaceLane(ValType::I32, 4, &noIndex, ¬hing,
934 ¬hing));
935 case uint32_t(SimdOp::I64x2ReplaceLane):
936 CHECK(iter.readReplaceLane(ValType::I64, 2, &noIndex, ¬hing,
937 ¬hing));
938 case uint32_t(SimdOp::F32x4ReplaceLane):
939 CHECK(iter.readReplaceLane(ValType::F32, 4, &noIndex, ¬hing,
940 ¬hing));
941 case uint32_t(SimdOp::F64x2ReplaceLane):
942 CHECK(iter.readReplaceLane(ValType::F64, 2, &noIndex, ¬hing,
943 ¬hing));
944
945 case uint32_t(SimdOp::I8x16Eq):
946 case uint32_t(SimdOp::I8x16Ne):
947 case uint32_t(SimdOp::I8x16LtS):
948 case uint32_t(SimdOp::I8x16LtU):
949 case uint32_t(SimdOp::I8x16GtS):
950 case uint32_t(SimdOp::I8x16GtU):
951 case uint32_t(SimdOp::I8x16LeS):
952 case uint32_t(SimdOp::I8x16LeU):
953 case uint32_t(SimdOp::I8x16GeS):
954 case uint32_t(SimdOp::I8x16GeU):
955 case uint32_t(SimdOp::I16x8Eq):
956 case uint32_t(SimdOp::I16x8Ne):
957 case uint32_t(SimdOp::I16x8LtS):
958 case uint32_t(SimdOp::I16x8LtU):
959 case uint32_t(SimdOp::I16x8GtS):
960 case uint32_t(SimdOp::I16x8GtU):
961 case uint32_t(SimdOp::I16x8LeS):
962 case uint32_t(SimdOp::I16x8LeU):
963 case uint32_t(SimdOp::I16x8GeS):
964 case uint32_t(SimdOp::I16x8GeU):
965 case uint32_t(SimdOp::I32x4Eq):
966 case uint32_t(SimdOp::I32x4Ne):
967 case uint32_t(SimdOp::I32x4LtS):
968 case uint32_t(SimdOp::I32x4LtU):
969 case uint32_t(SimdOp::I32x4GtS):
970 case uint32_t(SimdOp::I32x4GtU):
971 case uint32_t(SimdOp::I32x4LeS):
972 case uint32_t(SimdOp::I32x4LeU):
973 case uint32_t(SimdOp::I32x4GeS):
974 case uint32_t(SimdOp::I32x4GeU):
975 case uint32_t(SimdOp::F32x4Eq):
976 case uint32_t(SimdOp::F32x4Ne):
977 case uint32_t(SimdOp::F32x4Lt):
978 case uint32_t(SimdOp::F32x4Gt):
979 case uint32_t(SimdOp::F32x4Le):
980 case uint32_t(SimdOp::F32x4Ge):
981 case uint32_t(SimdOp::F64x2Eq):
982 case uint32_t(SimdOp::F64x2Ne):
983 case uint32_t(SimdOp::F64x2Lt):
984 case uint32_t(SimdOp::F64x2Gt):
985 case uint32_t(SimdOp::F64x2Le):
986 case uint32_t(SimdOp::F64x2Ge):
987 case uint32_t(SimdOp::V128And):
988 case uint32_t(SimdOp::V128Or):
989 case uint32_t(SimdOp::V128Xor):
990 case uint32_t(SimdOp::V128AndNot):
991 case uint32_t(SimdOp::I8x16AvgrU):
992 case uint32_t(SimdOp::I16x8AvgrU):
993 case uint32_t(SimdOp::I8x16Add):
994 case uint32_t(SimdOp::I8x16AddSaturateS):
995 case uint32_t(SimdOp::I8x16AddSaturateU):
996 case uint32_t(SimdOp::I8x16Sub):
997 case uint32_t(SimdOp::I8x16SubSaturateS):
998 case uint32_t(SimdOp::I8x16SubSaturateU):
999 case uint32_t(SimdOp::I8x16MinS):
1000 case uint32_t(SimdOp::I8x16MinU):
1001 case uint32_t(SimdOp::I8x16MaxS):
1002 case uint32_t(SimdOp::I8x16MaxU):
1003 case uint32_t(SimdOp::I16x8Add):
1004 case uint32_t(SimdOp::I16x8AddSaturateS):
1005 case uint32_t(SimdOp::I16x8AddSaturateU):
1006 case uint32_t(SimdOp::I16x8Sub):
1007 case uint32_t(SimdOp::I16x8SubSaturateS):
1008 case uint32_t(SimdOp::I16x8SubSaturateU):
1009 case uint32_t(SimdOp::I16x8Mul):
1010 case uint32_t(SimdOp::I16x8MinS):
1011 case uint32_t(SimdOp::I16x8MinU):
1012 case uint32_t(SimdOp::I16x8MaxS):
1013 case uint32_t(SimdOp::I16x8MaxU):
1014 case uint32_t(SimdOp::I32x4Add):
1015 case uint32_t(SimdOp::I32x4Sub):
1016 case uint32_t(SimdOp::I32x4Mul):
1017 case uint32_t(SimdOp::I32x4MinS):
1018 case uint32_t(SimdOp::I32x4MinU):
1019 case uint32_t(SimdOp::I32x4MaxS):
1020 case uint32_t(SimdOp::I32x4MaxU):
1021 case uint32_t(SimdOp::I64x2Add):
1022 case uint32_t(SimdOp::I64x2Sub):
1023 case uint32_t(SimdOp::I64x2Mul):
1024 case uint32_t(SimdOp::F32x4Add):
1025 case uint32_t(SimdOp::F32x4Sub):
1026 case uint32_t(SimdOp::F32x4Mul):
1027 case uint32_t(SimdOp::F32x4Div):
1028 case uint32_t(SimdOp::F32x4Min):
1029 case uint32_t(SimdOp::F32x4Max):
1030 case uint32_t(SimdOp::F64x2Add):
1031 case uint32_t(SimdOp::F64x2Sub):
1032 case uint32_t(SimdOp::F64x2Mul):
1033 case uint32_t(SimdOp::F64x2Div):
1034 case uint32_t(SimdOp::F64x2Min):
1035 case uint32_t(SimdOp::F64x2Max):
1036 case uint32_t(SimdOp::I8x16NarrowSI16x8):
1037 case uint32_t(SimdOp::I8x16NarrowUI16x8):
1038 case uint32_t(SimdOp::I16x8NarrowSI32x4):
1039 case uint32_t(SimdOp::I16x8NarrowUI32x4):
1040 case uint32_t(SimdOp::V8x16Swizzle):
1041 CHECK(iter.readBinary(ValType::V128, ¬hing, ¬hing));
1042
1043 case uint32_t(SimdOp::I8x16Neg):
1044 case uint32_t(SimdOp::I16x8Neg):
1045 case uint32_t(SimdOp::I16x8WidenLowSI8x16):
1046 case uint32_t(SimdOp::I16x8WidenHighSI8x16):
1047 case uint32_t(SimdOp::I16x8WidenLowUI8x16):
1048 case uint32_t(SimdOp::I16x8WidenHighUI8x16):
1049 case uint32_t(SimdOp::I32x4Neg):
1050 case uint32_t(SimdOp::I32x4WidenLowSI16x8):
1051 case uint32_t(SimdOp::I32x4WidenHighSI16x8):
1052 case uint32_t(SimdOp::I32x4WidenLowUI16x8):
1053 case uint32_t(SimdOp::I32x4WidenHighUI16x8):
1054 case uint32_t(SimdOp::I32x4TruncSSatF32x4):
1055 case uint32_t(SimdOp::I32x4TruncUSatF32x4):
1056 case uint32_t(SimdOp::I64x2Neg):
1057 case uint32_t(SimdOp::F32x4Abs):
1058 case uint32_t(SimdOp::F32x4Neg):
1059 case uint32_t(SimdOp::F32x4Sqrt):
1060 case uint32_t(SimdOp::F32x4ConvertSI32x4):
1061 case uint32_t(SimdOp::F32x4ConvertUI32x4):
1062 case uint32_t(SimdOp::F64x2Abs):
1063 case uint32_t(SimdOp::F64x2Neg):
1064 case uint32_t(SimdOp::F64x2Sqrt):
1065 case uint32_t(SimdOp::V128Not):
1066 case uint32_t(SimdOp::I8x16Abs):
1067 case uint32_t(SimdOp::I16x8Abs):
1068 case uint32_t(SimdOp::I32x4Abs):
1069 CHECK(iter.readUnary(ValType::V128, ¬hing));
1070
1071 case uint32_t(SimdOp::I8x16Shl):
1072 case uint32_t(SimdOp::I8x16ShrS):
1073 case uint32_t(SimdOp::I8x16ShrU):
1074 case uint32_t(SimdOp::I16x8Shl):
1075 case uint32_t(SimdOp::I16x8ShrS):
1076 case uint32_t(SimdOp::I16x8ShrU):
1077 case uint32_t(SimdOp::I32x4Shl):
1078 case uint32_t(SimdOp::I32x4ShrS):
1079 case uint32_t(SimdOp::I32x4ShrU):
1080 case uint32_t(SimdOp::I64x2Shl):
1081 case uint32_t(SimdOp::I64x2ShrS):
1082 case uint32_t(SimdOp::I64x2ShrU):
1083 CHECK(iter.readVectorShift(¬hing, ¬hing));
1084
1085 case uint32_t(SimdOp::V128Bitselect):
1086 CHECK(iter.readVectorSelect(¬hing, ¬hing, ¬hing));
1087
1088 case uint32_t(SimdOp::V8x16Shuffle): {
1089 V128 mask;
1090 CHECK(iter.readVectorShuffle(¬hing, ¬hing, &mask));
1091 }
1092
1093 case uint32_t(SimdOp::V128Const): {
1094 V128 noVector;
1095 CHECK(iter.readV128Const(&noVector));
1096 }
1097
1098 case uint32_t(SimdOp::V128Load): {
1099 LinearMemoryAddress<Nothing> addr;
1100 CHECK(iter.readLoad(ValType::V128, 16, &addr));
1101 }
1102
1103 case uint32_t(SimdOp::V8x16LoadSplat): {
1104 LinearMemoryAddress<Nothing> addr;
1105 CHECK(iter.readLoadSplat(1, &addr));
1106 }
1107
1108 case uint32_t(SimdOp::V16x8LoadSplat): {
1109 LinearMemoryAddress<Nothing> addr;
1110 CHECK(iter.readLoadSplat(2, &addr));
1111 }
1112
1113 case uint32_t(SimdOp::V32x4LoadSplat): {
1114 LinearMemoryAddress<Nothing> addr;
1115 CHECK(iter.readLoadSplat(4, &addr));
1116 }
1117
1118 case uint32_t(SimdOp::V64x2LoadSplat): {
1119 LinearMemoryAddress<Nothing> addr;
1120 CHECK(iter.readLoadSplat(8, &addr));
1121 }
1122
1123 case uint32_t(SimdOp::I16x8LoadS8x8):
1124 case uint32_t(SimdOp::I16x8LoadU8x8): {
1125 LinearMemoryAddress<Nothing> addr;
1126 CHECK(iter.readLoadExtend(&addr));
1127 }
1128
1129 case uint32_t(SimdOp::I32x4LoadS16x4):
1130 case uint32_t(SimdOp::I32x4LoadU16x4): {
1131 LinearMemoryAddress<Nothing> addr;
1132 CHECK(iter.readLoadExtend(&addr));
1133 }
1134
1135 case uint32_t(SimdOp::I64x2LoadS32x2):
1136 case uint32_t(SimdOp::I64x2LoadU32x2): {
1137 LinearMemoryAddress<Nothing> addr;
1138 CHECK(iter.readLoadExtend(&addr));
1139 }
1140
1141 case uint32_t(SimdOp::V128Store): {
1142 LinearMemoryAddress<Nothing> addr;
1143 CHECK(iter.readStore(ValType::V128, 16, &addr, ¬hing));
1144 }
1145
1146 default:
1147 return iter.unrecognizedOpcode(&op);
1148 }
1149 break;
1150 }
1151 #endif // ENABLE_WASM_SIMD
1152
1153 case uint16_t(Op::MiscPrefix): {
1154 switch (op.b1) {
1155 case uint32_t(MiscOp::I32TruncSSatF32):
1156 case uint32_t(MiscOp::I32TruncUSatF32):
1157 CHECK(iter.readConversion(ValType::F32, ValType::I32, ¬hing));
1158 case uint32_t(MiscOp::I32TruncSSatF64):
1159 case uint32_t(MiscOp::I32TruncUSatF64):
1160 CHECK(iter.readConversion(ValType::F64, ValType::I32, ¬hing));
1161 case uint32_t(MiscOp::I64TruncSSatF32):
1162 case uint32_t(MiscOp::I64TruncUSatF32):
1163 CHECK(iter.readConversion(ValType::F32, ValType::I64, ¬hing));
1164 case uint32_t(MiscOp::I64TruncSSatF64):
1165 case uint32_t(MiscOp::I64TruncUSatF64):
1166 CHECK(iter.readConversion(ValType::F64, ValType::I64, ¬hing));
1167 case uint32_t(MiscOp::MemCopy): {
1168 #ifndef ENABLE_WASM_BULKMEM_OPS
1169 // Bulk memory must be available if shared memory is enabled.
1170 if (env.sharedMemoryEnabled == Shareable::False) {
1171 return iter.fail("bulk memory ops disabled");
1172 }
1173 #endif
1174 uint32_t unusedDestMemIndex;
1175 uint32_t unusedSrcMemIndex;
1176 CHECK(iter.readMemOrTableCopy(/*isMem=*/true, &unusedDestMemIndex,
1177 ¬hing, &unusedSrcMemIndex,
1178 ¬hing, ¬hing));
1179 }
1180 case uint32_t(MiscOp::DataDrop): {
1181 #ifndef ENABLE_WASM_BULKMEM_OPS
1182 // Bulk memory must be available if shared memory is enabled.
1183 if (env.sharedMemoryEnabled == Shareable::False) {
1184 return iter.fail("bulk memory ops disabled");
1185 }
1186 #endif
1187 uint32_t unusedSegIndex;
1188 CHECK(iter.readDataOrElemDrop(/*isData=*/true, &unusedSegIndex));
1189 }
1190 case uint32_t(MiscOp::MemFill):
1191 #ifndef ENABLE_WASM_BULKMEM_OPS
1192 // Bulk memory must be available if shared memory is enabled.
1193 if (env.sharedMemoryEnabled == Shareable::False) {
1194 return iter.fail("bulk memory ops disabled");
1195 }
1196 #endif
1197 CHECK(iter.readMemFill(¬hing, ¬hing, ¬hing));
1198 case uint32_t(MiscOp::MemInit): {
1199 #ifndef ENABLE_WASM_BULKMEM_OPS
1200 // Bulk memory must be available if shared memory is enabled.
1201 if (env.sharedMemoryEnabled == Shareable::False) {
1202 return iter.fail("bulk memory ops disabled");
1203 }
1204 #endif
1205 uint32_t unusedSegIndex;
1206 uint32_t unusedTableIndex;
1207 CHECK(iter.readMemOrTableInit(/*isMem=*/true, &unusedSegIndex,
1208 &unusedTableIndex, ¬hing, ¬hing,
1209 ¬hing));
1210 }
1211 case uint32_t(MiscOp::TableCopy): {
1212 #ifndef ENABLE_WASM_BULKMEM_OPS
1213 // Bulk memory must be available if shared memory is enabled.
1214 if (env.sharedMemoryEnabled == Shareable::False) {
1215 return iter.fail("bulk memory ops disabled");
1216 }
1217 #endif
1218 uint32_t unusedDestTableIndex;
1219 uint32_t unusedSrcTableIndex;
1220 CHECK(iter.readMemOrTableCopy(
1221 /*isMem=*/false, &unusedDestTableIndex, ¬hing,
1222 &unusedSrcTableIndex, ¬hing, ¬hing));
1223 }
1224 case uint32_t(MiscOp::ElemDrop): {
1225 #ifndef ENABLE_WASM_BULKMEM_OPS
1226 // Bulk memory must be available if shared memory is enabled.
1227 if (env.sharedMemoryEnabled == Shareable::False) {
1228 return iter.fail("bulk memory ops disabled");
1229 }
1230 #endif
1231 uint32_t unusedSegIndex;
1232 CHECK(iter.readDataOrElemDrop(/*isData=*/false, &unusedSegIndex));
1233 }
1234 case uint32_t(MiscOp::TableInit): {
1235 #ifndef ENABLE_WASM_BULKMEM_OPS
1236 // Bulk memory must be available if shared memory is enabled.
1237 if (env.sharedMemoryEnabled == Shareable::False) {
1238 return iter.fail("bulk memory ops disabled");
1239 }
1240 #endif
1241 uint32_t unusedSegIndex;
1242 uint32_t unusedTableIndex;
1243 CHECK(iter.readMemOrTableInit(/*isMem=*/false, &unusedSegIndex,
1244 &unusedTableIndex, ¬hing, ¬hing,
1245 ¬hing));
1246 }
1247 #ifdef ENABLE_WASM_REFTYPES
1248 case uint32_t(MiscOp::TableFill): {
1249 if (!env.refTypesEnabled()) {
1250 return iter.unrecognizedOpcode(&op);
1251 }
1252 uint32_t unusedTableIndex;
1253 CHECK(iter.readTableFill(&unusedTableIndex, ¬hing, ¬hing,
1254 ¬hing));
1255 }
1256 case uint32_t(MiscOp::TableGrow): {
1257 if (!env.refTypesEnabled()) {
1258 return iter.unrecognizedOpcode(&op);
1259 }
1260 uint32_t unusedTableIndex;
1261 CHECK(iter.readTableGrow(&unusedTableIndex, ¬hing, ¬hing));
1262 }
1263 case uint32_t(MiscOp::TableSize): {
1264 if (!env.refTypesEnabled()) {
1265 return iter.unrecognizedOpcode(&op);
1266 }
1267 uint32_t unusedTableIndex;
1268 CHECK(iter.readTableSize(&unusedTableIndex));
1269 }
1270 #endif
1271 default:
1272 return iter.unrecognizedOpcode(&op);
1273 }
1274 break;
1275 }
1276 #ifdef ENABLE_WASM_GC
1277 case uint16_t(Op::RefEq): {
1278 if (!env.gcTypesEnabled()) {
1279 return iter.unrecognizedOpcode(&op);
1280 }
1281 CHECK(iter.readComparison(RefType::any(), ¬hing, ¬hing));
1282 }
1283 #endif
1284 #ifdef ENABLE_WASM_REFTYPES
1285 case uint16_t(Op::RefFunc): {
1286 uint32_t unusedIndex;
1287 CHECK(iter.readRefFunc(&unusedIndex));
1288 }
1289 case uint16_t(Op::RefNull): {
1290 if (!env.refTypesEnabled()) {
1291 return iter.unrecognizedOpcode(&op);
1292 }
1293 CHECK(iter.readRefNull());
1294 }
1295 case uint16_t(Op::RefIsNull): {
1296 if (!env.refTypesEnabled()) {
1297 return iter.unrecognizedOpcode(&op);
1298 }
1299 Nothing nothing;
1300 CHECK(iter.readRefIsNull(¬hing));
1301 }
1302 #endif
1303 case uint16_t(Op::ThreadPrefix): {
1304 switch (op.b1) {
1305 case uint32_t(ThreadOp::Wake): {
1306 LinearMemoryAddress<Nothing> addr;
1307 CHECK(iter.readWake(&addr, ¬hing));
1308 }
1309 case uint32_t(ThreadOp::I32Wait): {
1310 LinearMemoryAddress<Nothing> addr;
1311 CHECK(iter.readWait(&addr, ValType::I32, 4, ¬hing, ¬hing));
1312 }
1313 case uint32_t(ThreadOp::I64Wait): {
1314 LinearMemoryAddress<Nothing> addr;
1315 CHECK(iter.readWait(&addr, ValType::I64, 8, ¬hing, ¬hing));
1316 }
1317 case uint32_t(ThreadOp::Fence): {
1318 CHECK(iter.readFence());
1319 }
1320 case uint32_t(ThreadOp::I32AtomicLoad): {
1321 LinearMemoryAddress<Nothing> addr;
1322 CHECK(iter.readAtomicLoad(&addr, ValType::I32, 4));
1323 }
1324 case uint32_t(ThreadOp::I64AtomicLoad): {
1325 LinearMemoryAddress<Nothing> addr;
1326 CHECK(iter.readAtomicLoad(&addr, ValType::I64, 8));
1327 }
1328 case uint32_t(ThreadOp::I32AtomicLoad8U): {
1329 LinearMemoryAddress<Nothing> addr;
1330 CHECK(iter.readAtomicLoad(&addr, ValType::I32, 1));
1331 }
1332 case uint32_t(ThreadOp::I32AtomicLoad16U): {
1333 LinearMemoryAddress<Nothing> addr;
1334 CHECK(iter.readAtomicLoad(&addr, ValType::I32, 2));
1335 }
1336 case uint32_t(ThreadOp::I64AtomicLoad8U): {
1337 LinearMemoryAddress<Nothing> addr;
1338 CHECK(iter.readAtomicLoad(&addr, ValType::I64, 1));
1339 }
1340 case uint32_t(ThreadOp::I64AtomicLoad16U): {
1341 LinearMemoryAddress<Nothing> addr;
1342 CHECK(iter.readAtomicLoad(&addr, ValType::I64, 2));
1343 }
1344 case uint32_t(ThreadOp::I64AtomicLoad32U): {
1345 LinearMemoryAddress<Nothing> addr;
1346 CHECK(iter.readAtomicLoad(&addr, ValType::I64, 4));
1347 }
1348 case uint32_t(ThreadOp::I32AtomicStore): {
1349 LinearMemoryAddress<Nothing> addr;
1350 CHECK(iter.readAtomicStore(&addr, ValType::I32, 4, ¬hing));
1351 }
1352 case uint32_t(ThreadOp::I64AtomicStore): {
1353 LinearMemoryAddress<Nothing> addr;
1354 CHECK(iter.readAtomicStore(&addr, ValType::I64, 8, ¬hing));
1355 }
1356 case uint32_t(ThreadOp::I32AtomicStore8U): {
1357 LinearMemoryAddress<Nothing> addr;
1358 CHECK(iter.readAtomicStore(&addr, ValType::I32, 1, ¬hing));
1359 }
1360 case uint32_t(ThreadOp::I32AtomicStore16U): {
1361 LinearMemoryAddress<Nothing> addr;
1362 CHECK(iter.readAtomicStore(&addr, ValType::I32, 2, ¬hing));
1363 }
1364 case uint32_t(ThreadOp::I64AtomicStore8U): {
1365 LinearMemoryAddress<Nothing> addr;
1366 CHECK(iter.readAtomicStore(&addr, ValType::I64, 1, ¬hing));
1367 }
1368 case uint32_t(ThreadOp::I64AtomicStore16U): {
1369 LinearMemoryAddress<Nothing> addr;
1370 CHECK(iter.readAtomicStore(&addr, ValType::I64, 2, ¬hing));
1371 }
1372 case uint32_t(ThreadOp::I64AtomicStore32U): {
1373 LinearMemoryAddress<Nothing> addr;
1374 CHECK(iter.readAtomicStore(&addr, ValType::I64, 4, ¬hing));
1375 }
1376 case uint32_t(ThreadOp::I32AtomicAdd):
1377 case uint32_t(ThreadOp::I32AtomicSub):
1378 case uint32_t(ThreadOp::I32AtomicAnd):
1379 case uint32_t(ThreadOp::I32AtomicOr):
1380 case uint32_t(ThreadOp::I32AtomicXor):
1381 case uint32_t(ThreadOp::I32AtomicXchg): {
1382 LinearMemoryAddress<Nothing> addr;
1383 CHECK(iter.readAtomicRMW(&addr, ValType::I32, 4, ¬hing));
1384 }
1385 case uint32_t(ThreadOp::I64AtomicAdd):
1386 case uint32_t(ThreadOp::I64AtomicSub):
1387 case uint32_t(ThreadOp::I64AtomicAnd):
1388 case uint32_t(ThreadOp::I64AtomicOr):
1389 case uint32_t(ThreadOp::I64AtomicXor):
1390 case uint32_t(ThreadOp::I64AtomicXchg): {
1391 LinearMemoryAddress<Nothing> addr;
1392 CHECK(iter.readAtomicRMW(&addr, ValType::I64, 8, ¬hing));
1393 }
1394 case uint32_t(ThreadOp::I32AtomicAdd8U):
1395 case uint32_t(ThreadOp::I32AtomicSub8U):
1396 case uint32_t(ThreadOp::I32AtomicAnd8U):
1397 case uint32_t(ThreadOp::I32AtomicOr8U):
1398 case uint32_t(ThreadOp::I32AtomicXor8U):
1399 case uint32_t(ThreadOp::I32AtomicXchg8U): {
1400 LinearMemoryAddress<Nothing> addr;
1401 CHECK(iter.readAtomicRMW(&addr, ValType::I32, 1, ¬hing));
1402 }
1403 case uint32_t(ThreadOp::I32AtomicAdd16U):
1404 case uint32_t(ThreadOp::I32AtomicSub16U):
1405 case uint32_t(ThreadOp::I32AtomicAnd16U):
1406 case uint32_t(ThreadOp::I32AtomicOr16U):
1407 case uint32_t(ThreadOp::I32AtomicXor16U):
1408 case uint32_t(ThreadOp::I32AtomicXchg16U): {
1409 LinearMemoryAddress<Nothing> addr;
1410 CHECK(iter.readAtomicRMW(&addr, ValType::I32, 2, ¬hing));
1411 }
1412 case uint32_t(ThreadOp::I64AtomicAdd8U):
1413 case uint32_t(ThreadOp::I64AtomicSub8U):
1414 case uint32_t(ThreadOp::I64AtomicAnd8U):
1415 case uint32_t(ThreadOp::I64AtomicOr8U):
1416 case uint32_t(ThreadOp::I64AtomicXor8U):
1417 case uint32_t(ThreadOp::I64AtomicXchg8U): {
1418 LinearMemoryAddress<Nothing> addr;
1419 CHECK(iter.readAtomicRMW(&addr, ValType::I64, 1, ¬hing));
1420 }
1421 case uint32_t(ThreadOp::I64AtomicAdd16U):
1422 case uint32_t(ThreadOp::I64AtomicSub16U):
1423 case uint32_t(ThreadOp::I64AtomicAnd16U):
1424 case uint32_t(ThreadOp::I64AtomicOr16U):
1425 case uint32_t(ThreadOp::I64AtomicXor16U):
1426 case uint32_t(ThreadOp::I64AtomicXchg16U): {
1427 LinearMemoryAddress<Nothing> addr;
1428 CHECK(iter.readAtomicRMW(&addr, ValType::I64, 2, ¬hing));
1429 }
1430 case uint32_t(ThreadOp::I64AtomicAdd32U):
1431 case uint32_t(ThreadOp::I64AtomicSub32U):
1432 case uint32_t(ThreadOp::I64AtomicAnd32U):
1433 case uint32_t(ThreadOp::I64AtomicOr32U):
1434 case uint32_t(ThreadOp::I64AtomicXor32U):
1435 case uint32_t(ThreadOp::I64AtomicXchg32U): {
1436 LinearMemoryAddress<Nothing> addr;
1437 CHECK(iter.readAtomicRMW(&addr, ValType::I64, 4, ¬hing));
1438 }
1439 case uint32_t(ThreadOp::I32AtomicCmpXchg): {
1440 LinearMemoryAddress<Nothing> addr;
1441 CHECK(iter.readAtomicCmpXchg(&addr, ValType::I32, 4, ¬hing,
1442 ¬hing));
1443 }
1444 case uint32_t(ThreadOp::I64AtomicCmpXchg): {
1445 LinearMemoryAddress<Nothing> addr;
1446 CHECK(iter.readAtomicCmpXchg(&addr, ValType::I64, 8, ¬hing,
1447 ¬hing));
1448 }
1449 case uint32_t(ThreadOp::I32AtomicCmpXchg8U): {
1450 LinearMemoryAddress<Nothing> addr;
1451 CHECK(iter.readAtomicCmpXchg(&addr, ValType::I32, 1, ¬hing,
1452 ¬hing));
1453 }
1454 case uint32_t(ThreadOp::I32AtomicCmpXchg16U): {
1455 LinearMemoryAddress<Nothing> addr;
1456 CHECK(iter.readAtomicCmpXchg(&addr, ValType::I32, 2, ¬hing,
1457 ¬hing));
1458 }
1459 case uint32_t(ThreadOp::I64AtomicCmpXchg8U): {
1460 LinearMemoryAddress<Nothing> addr;
1461 CHECK(iter.readAtomicCmpXchg(&addr, ValType::I64, 1, ¬hing,
1462 ¬hing));
1463 }
1464 case uint32_t(ThreadOp::I64AtomicCmpXchg16U): {
1465 LinearMemoryAddress<Nothing> addr;
1466 CHECK(iter.readAtomicCmpXchg(&addr, ValType::I64, 2, ¬hing,
1467 ¬hing));
1468 }
1469 case uint32_t(ThreadOp::I64AtomicCmpXchg32U): {
1470 LinearMemoryAddress<Nothing> addr;
1471 CHECK(iter.readAtomicCmpXchg(&addr, ValType::I64, 4, ¬hing,
1472 ¬hing));
1473 }
1474 default:
1475 return iter.unrecognizedOpcode(&op);
1476 }
1477 break;
1478 }
1479 case uint16_t(Op::MozPrefix):
1480 return iter.unrecognizedOpcode(&op);
1481 default:
1482 return iter.unrecognizedOpcode(&op);
1483 }
1484 }
1485
1486 MOZ_CRASH("unreachable");
1487
1488 #undef CHECK
1489 }
1490
ValidateFunctionBody(const ModuleEnvironment & env,uint32_t funcIndex,uint32_t bodySize,Decoder & d)1491 bool wasm::ValidateFunctionBody(const ModuleEnvironment& env,
1492 uint32_t funcIndex, uint32_t bodySize,
1493 Decoder& d) {
1494 ValTypeVector locals;
1495 if (!locals.appendAll(env.funcTypes[funcIndex]->args())) {
1496 return false;
1497 }
1498
1499 const uint8_t* bodyBegin = d.currentPosition();
1500
1501 if (!DecodeLocalEntries(d, env.types, env.refTypesEnabled(),
1502 env.gcTypesEnabled(), &locals)) {
1503 return false;
1504 }
1505
1506 if (!DecodeFunctionBodyExprs(env, funcIndex, locals, bodyBegin + bodySize,
1507 &d)) {
1508 return false;
1509 }
1510
1511 return true;
1512 }
1513
1514 // Section macros.
1515
DecodePreamble(Decoder & d)1516 static bool DecodePreamble(Decoder& d) {
1517 if (d.bytesRemain() > MaxModuleBytes) {
1518 return d.fail("module too big");
1519 }
1520
1521 uint32_t u32;
1522 if (!d.readFixedU32(&u32) || u32 != MagicNumber) {
1523 return d.fail("failed to match magic number");
1524 }
1525
1526 if (!d.readFixedU32(&u32) || u32 != EncodingVersion) {
1527 return d.failf("binary version 0x%" PRIx32
1528 " does not match expected version 0x%" PRIx32,
1529 u32, EncodingVersion);
1530 }
1531
1532 return true;
1533 }
1534
1535 enum class TypeState { None, Struct, ForwardStruct, Func };
1536
1537 typedef Vector<TypeState, 0, SystemAllocPolicy> TypeStateVector;
1538
ValidateTypeState(Decoder & d,TypeStateVector * typeState,ValType type)1539 static bool ValidateTypeState(Decoder& d, TypeStateVector* typeState,
1540 ValType type) {
1541 if (!type.isTypeIndex()) {
1542 return true;
1543 }
1544
1545 uint32_t refTypeIndex = type.refType().typeIndex();
1546 switch ((*typeState)[refTypeIndex]) {
1547 case TypeState::None:
1548 (*typeState)[refTypeIndex] = TypeState::ForwardStruct;
1549 break;
1550 case TypeState::Struct:
1551 case TypeState::ForwardStruct:
1552 break;
1553 case TypeState::Func:
1554 return d.fail("ref does not reference a struct type");
1555 }
1556 return true;
1557 }
1558
1559 #ifdef WASM_PRIVATE_REFTYPES
FuncTypeIsJSCompatible(Decoder & d,const FuncType & ft)1560 static bool FuncTypeIsJSCompatible(Decoder& d, const FuncType& ft) {
1561 if (ft.exposesTypeIndex()) {
1562 return d.fail("cannot expose indexed reference type");
1563 }
1564 return true;
1565 }
1566 #endif
1567
DecodeTypeVector(Decoder & d,ModuleEnvironment * env,TypeStateVector * typeState,uint32_t count,ValTypeVector * types)1568 static bool DecodeTypeVector(Decoder& d, ModuleEnvironment* env,
1569 TypeStateVector* typeState, uint32_t count,
1570 ValTypeVector* types) {
1571 if (!types->resize(count)) {
1572 return false;
1573 }
1574
1575 for (uint32_t i = 0; i < count; i++) {
1576 if (!d.readValType(env->types.length(), env->refTypesEnabled(),
1577 env->gcTypesEnabled(), &(*types)[i])) {
1578 return false;
1579 }
1580 if (!ValidateTypeState(d, typeState, (*types)[i])) {
1581 return false;
1582 }
1583 }
1584 return true;
1585 }
1586
DecodeFuncType(Decoder & d,ModuleEnvironment * env,TypeStateVector * typeState,uint32_t typeIndex)1587 static bool DecodeFuncType(Decoder& d, ModuleEnvironment* env,
1588 TypeStateVector* typeState, uint32_t typeIndex) {
1589 uint32_t numArgs;
1590 if (!d.readVarU32(&numArgs)) {
1591 return d.fail("bad number of function args");
1592 }
1593 if (numArgs > MaxParams) {
1594 return d.fail("too many arguments in signature");
1595 }
1596 ValTypeVector args;
1597 if (!DecodeTypeVector(d, env, typeState, numArgs, &args)) {
1598 return false;
1599 }
1600
1601 uint32_t numResults;
1602 if (!d.readVarU32(&numResults)) {
1603 return d.fail("bad number of function returns");
1604 }
1605 if (numResults > env->funcMaxResults()) {
1606 return d.fail("too many returns in signature");
1607 }
1608 ValTypeVector results;
1609 if (!DecodeTypeVector(d, env, typeState, numResults, &results)) {
1610 return false;
1611 }
1612
1613 if ((*typeState)[typeIndex] != TypeState::None) {
1614 return d.fail("function type entry referenced as struct");
1615 }
1616
1617 env->types[typeIndex] =
1618 TypeDef(FuncType(std::move(args), std::move(results)));
1619 (*typeState)[typeIndex] = TypeState::Func;
1620
1621 return true;
1622 }
1623
// Decode one struct-type entry from the type section: a field count
// followed by (valtype, flags) pairs.  On success the computed TypedObject
// layout is stored in env->types[typeIndex] and the entry is marked as a
// struct in |typeState|.
static bool DecodeStructType(Decoder& d, ModuleEnvironment* env,
                             TypeStateVector* typeState, uint32_t typeIndex) {
  if (!env->gcTypesEnabled()) {
    return d.fail("Structure types not enabled");
  }

  uint32_t numFields;
  if (!d.readVarU32(&numFields)) {
    return d.fail("Bad number of fields");
  }

  if (numFields > MaxStructFields) {
    return d.fail("too many fields in structure");
  }

  StructFieldVector fields;
  if (!fields.resize(numFields)) {
    return false;
  }

  // Lay out the fields as a TypedObject struct, accumulating each field's
  // byte offset.  Offsets are computed with CheckedInt32 so an oversized
  // struct is detected rather than silently wrapping.
  StructMetaTypeDescr::Layout layout;
  for (uint32_t i = 0; i < numFields; i++) {
    if (!d.readValType(env->types.length(), env->refTypesEnabled(),
                       env->gcTypesEnabled(), &fields[i].type)) {
      return false;
    }

    uint8_t flags;
    if (!d.readFixedU8(&flags)) {
      return d.fail("expected flag");
    }
    if ((flags & ~uint8_t(FieldFlags::AllowedMask)) != 0) {
      return d.fail("garbage flag bits");
    }
    fields[i].isMutable = flags & uint8_t(FieldFlags::Mutable);

    // A (ref T) field may forward-reference a not-yet-decoded struct
    // entry; record that constraint for later checking.
    if (!ValidateTypeState(d, typeState, fields[i].type)) {
      return false;
    }

    CheckedInt32 offset;
    switch (fields[i].type.kind()) {
      case ValType::I32:
        offset = layout.addScalar(Scalar::Int32);
        break;
      case ValType::I64:
        offset = layout.addScalar(Scalar::Int64);
        break;
      case ValType::F32:
        offset = layout.addScalar(Scalar::Float32);
        break;
      case ValType::F64:
        offset = layout.addScalar(Scalar::Float64);
        break;
      case ValType::V128:
        offset = layout.addScalar(Scalar::Simd128);
        break;
      case ValType::Ref:
        switch (fields[i].type.refTypeKind()) {
          case RefType::TypeIndex:
            // Indexed (ref T) fields are stored as typed-object references.
            offset = layout.addReference(ReferenceType::TYPE_OBJECT);
            break;
          case RefType::Func:
          case RefType::Any:
            // funcref/anyref fields are stored as boxed anyref values.
            offset = layout.addReference(ReferenceType::TYPE_WASM_ANYREF);
            break;
        }
        break;
    }
    if (!offset.isValid()) {
      return d.fail("Object too large");
    }

    fields[i].offset = offset.value();
  }

  CheckedInt32 totalSize = layout.close();
  if (!totalSize.isValid()) {
    return d.fail("Object too large");
  }

  // Small structs are stored inline in the TypedObject; their field
  // offsets must be rebased past the inline-object header.
  bool isInline = InlineTypedObject::canAccommodateSize(totalSize.value());
  uint32_t offsetBy = isInline ? InlineTypedObject::offsetOfDataStart() : 0;

  for (StructField& f : fields) {
    f.offset += offsetBy;
  }

  // The entry may have been forward-referenced as a struct, but must not
  // already be defined, nor have been referenced as a function.
  if ((*typeState)[typeIndex] != TypeState::None &&
      (*typeState)[typeIndex] != TypeState::ForwardStruct) {
    return d.fail("struct type entry referenced as function");
  }

  env->types[typeIndex] =
      TypeDef(StructType(std::move(fields), env->numStructTypes, isInline));
  (*typeState)[typeIndex] = TypeState::Struct;
  env->numStructTypes++;

  return true;
}
1724
DecodeTypeSection(Decoder & d,ModuleEnvironment * env)1725 static bool DecodeTypeSection(Decoder& d, ModuleEnvironment* env) {
1726 MaybeSectionRange range;
1727 if (!d.startSection(SectionId::Type, env, &range, "type")) {
1728 return false;
1729 }
1730 if (!range) {
1731 return true;
1732 }
1733
1734 uint32_t numTypes;
1735 if (!d.readVarU32(&numTypes)) {
1736 return d.fail("expected number of types");
1737 }
1738
1739 if (numTypes > MaxTypes) {
1740 return d.fail("too many types");
1741 }
1742
1743 if (!env->types.resize(numTypes)) {
1744 return false;
1745 }
1746
1747 TypeStateVector typeState;
1748 if (!typeState.appendN(TypeState::None, numTypes)) {
1749 return false;
1750 }
1751
1752 for (uint32_t typeIndex = 0; typeIndex < numTypes; typeIndex++) {
1753 uint8_t form;
1754 if (!d.readFixedU8(&form)) {
1755 return d.fail("expected type form");
1756 }
1757
1758 switch (form) {
1759 case uint8_t(TypeCode::Func):
1760 if (!DecodeFuncType(d, env, &typeState, typeIndex)) {
1761 return false;
1762 }
1763 break;
1764 case uint8_t(TypeCode::Struct):
1765 if (!DecodeStructType(d, env, &typeState, typeIndex)) {
1766 return false;
1767 }
1768 break;
1769 default:
1770 return d.fail("expected type form");
1771 }
1772 }
1773
1774 return d.finishSection(*range, "type");
1775 }
1776
DecodeName(Decoder & d)1777 static UniqueChars DecodeName(Decoder& d) {
1778 uint32_t numBytes;
1779 if (!d.readVarU32(&numBytes)) {
1780 return nullptr;
1781 }
1782
1783 if (numBytes > MaxStringBytes) {
1784 return nullptr;
1785 }
1786
1787 const uint8_t* bytes;
1788 if (!d.readBytes(numBytes, &bytes)) {
1789 return nullptr;
1790 }
1791
1792 if (!IsUtf8(AsChars(MakeSpan(bytes, numBytes)))) {
1793 return nullptr;
1794 }
1795
1796 UniqueChars name(js_pod_malloc<char>(numBytes + 1));
1797 if (!name) {
1798 return nullptr;
1799 }
1800
1801 memcpy(name.get(), bytes, numBytes);
1802 name[numBytes] = '\0';
1803
1804 return name;
1805 }
1806
DecodeSignatureIndex(Decoder & d,const TypeDefVector & types,uint32_t * funcTypeIndex)1807 static bool DecodeSignatureIndex(Decoder& d, const TypeDefVector& types,
1808 uint32_t* funcTypeIndex) {
1809 if (!d.readVarU32(funcTypeIndex)) {
1810 return d.fail("expected signature index");
1811 }
1812
1813 if (*funcTypeIndex >= types.length()) {
1814 return d.fail("signature index out of range");
1815 }
1816
1817 const TypeDef& def = types[*funcTypeIndex];
1818
1819 if (!def.isFuncType()) {
1820 return d.fail("signature index references non-signature");
1821 }
1822
1823 return true;
1824 }
1825
DecodeLimits(Decoder & d,Limits * limits,Shareable allowShared=Shareable::False)1826 static bool DecodeLimits(Decoder& d, Limits* limits,
1827 Shareable allowShared = Shareable::False) {
1828 uint8_t flags;
1829 if (!d.readFixedU8(&flags)) {
1830 return d.fail("expected flags");
1831 }
1832
1833 uint8_t mask = allowShared == Shareable::True
1834 ? uint8_t(MemoryMasks::AllowShared)
1835 : uint8_t(MemoryMasks::AllowUnshared);
1836
1837 if (flags & ~uint8_t(mask)) {
1838 return d.failf("unexpected bits set in flags: %" PRIu32,
1839 uint32_t(flags & ~uint8_t(mask)));
1840 }
1841
1842 if (!d.readVarU32(&limits->initial)) {
1843 return d.fail("expected initial length");
1844 }
1845
1846 if (flags & uint8_t(MemoryTableFlags::HasMaximum)) {
1847 uint32_t maximum;
1848 if (!d.readVarU32(&maximum)) {
1849 return d.fail("expected maximum length");
1850 }
1851
1852 if (limits->initial > maximum) {
1853 return d.failf(
1854 "memory size minimum must not be greater than maximum; "
1855 "maximum length %" PRIu32 " is less than initial length %" PRIu32,
1856 maximum, limits->initial);
1857 }
1858
1859 limits->maximum.emplace(maximum);
1860 }
1861
1862 limits->shared = Shareable::False;
1863
1864 if (allowShared == Shareable::True) {
1865 if ((flags & uint8_t(MemoryTableFlags::IsShared)) &&
1866 !(flags & uint8_t(MemoryTableFlags::HasMaximum))) {
1867 return d.fail("maximum length required for shared memory");
1868 }
1869
1870 limits->shared = (flags & uint8_t(MemoryTableFlags::IsShared))
1871 ? Shareable::True
1872 : Shareable::False;
1873 }
1874
1875 return true;
1876 }
1877
// Decode a table's element type and size limits, and append the resulting
// descriptor to |tables|.  Fails if the element type is unsupported, the
// limits are out of range, or too many tables have been declared.
static bool DecodeTableTypeAndLimits(Decoder& d, bool refTypesEnabled,
                                     TableDescVector* tables) {
  uint8_t elementType;
  if (!d.readFixedU8(&elementType)) {
    return d.fail("expected table element type");
  }

  TableKind tableKind;
  if (elementType == uint8_t(TypeCode::FuncRef)) {
    tableKind = TableKind::FuncRef;
#ifdef ENABLE_WASM_REFTYPES
  } else if (elementType == uint8_t(TypeCode::AnyRef)) {
    // anyref tables are gated on the reference-types feature flag.
    if (!refTypesEnabled) {
      return d.fail("expected 'funcref' element type");
    }
    tableKind = TableKind::AnyRef;
#endif
  } else {
#ifdef ENABLE_WASM_REFTYPES
    return d.fail("expected reference element type");
#else
    return d.fail("expected 'funcref' element type");
#endif
  }

  Limits limits;
  if (!DecodeLimits(d, &limits)) {
    return false;
  }

  // If there's a maximum, check it is in range. The check to exclude
  // initial > maximum is carried out by the DecodeLimits call above, so
  // we don't repeat it here.
  // NOTE(review): initial is bounded by MaxTableInitialLength but maximum
  // by MaxTableLength — presumably intentional (growth allowed past the
  // initial cap); confirm against the limits constants' definitions.
  if (limits.initial > MaxTableInitialLength ||
      ((limits.maximum.isSome() && limits.maximum.value() > MaxTableLength))) {
    return d.fail("too many table elements");
  }

  if (tables->length() >= MaxTables) {
    return d.fail("too many tables");
  }

  return tables->emplaceBack(tableKind, limits);
}
1922
// Check that a global of type |type| may be imported from or exported to
// JS.  All numeric/vector types are allowed; reference types are allowed
// except that indexed (ref T) types are rejected when private reftypes are
// in force.
static bool GlobalIsJSCompatible(Decoder& d, ValType type) {
  switch (type.kind()) {
    case ValType::I32:
    case ValType::F32:
    case ValType::F64:
    case ValType::I64:
    case ValType::V128:
      break;
    case ValType::Ref:
      switch (type.refTypeKind()) {
        case RefType::Func:
        case RefType::Any:
          break;
        case RefType::TypeIndex:
#ifdef WASM_PRIVATE_REFTYPES
          // Indexed reference types carry module-private structure and
          // must not cross the JS boundary in this configuration.
          return d.fail("cannot expose indexed reference type");
#else
          break;
#endif
        default:
          return d.fail("unexpected variable type in global import/export");
      }
      break;
    default:
      return d.fail("unexpected variable type in global import/export");
  }

  return true;
}
1952
DecodeGlobalType(Decoder & d,const TypeDefVector & types,bool refTypesEnabled,bool gcTypesEnabled,ValType * type,bool * isMutable)1953 static bool DecodeGlobalType(Decoder& d, const TypeDefVector& types,
1954 bool refTypesEnabled, bool gcTypesEnabled,
1955 ValType* type, bool* isMutable) {
1956 if (!d.readValType(types, refTypesEnabled, gcTypesEnabled, type)) {
1957 return d.fail("expected global type");
1958 }
1959
1960 uint8_t flags;
1961 if (!d.readFixedU8(&flags)) {
1962 return d.fail("expected global flags");
1963 }
1964
1965 if (flags & ~uint8_t(GlobalTypeImmediate::AllowedMask)) {
1966 return d.fail("unexpected bits set in global flags");
1967 }
1968
1969 *isMutable = flags & uint8_t(GlobalTypeImmediate::IsMutable);
1970 return true;
1971 }
1972
// Convert |memory|'s limits from units of wasm pages to units of bytes, in
// place.  The initial size cannot overflow (asserted below via the page
// bound); an overflowing maximum is clamped to UINT32_MAX, which is not
// observable since growth fails beyond INT32_MAX anyway.
void wasm::ConvertMemoryPagesToBytes(Limits* memory) {
  CheckedInt<uint32_t> initialBytes = memory->initial;
  initialBytes *= PageSize;

  static_assert(MaxMemoryInitialPages < UINT16_MAX,
                "multiplying by PageSize can't overflow");
  MOZ_ASSERT(initialBytes.isValid(), "can't overflow by above assertion");

  memory->initial = initialBytes.value();

  // No declared maximum: nothing more to convert.
  if (!memory->maximum) {
    return;
  }

  MOZ_ASSERT(*memory->maximum <= MaxMemoryMaximumPages);

  CheckedInt<uint32_t> maximumBytes = *memory->maximum;
  maximumBytes *= PageSize;

  // Clamp the maximum memory value to UINT32_MAX; it's not semantically
  // visible since growing will fail for values greater than INT32_MAX.
  memory->maximum =
      Some(maximumBytes.isValid() ? maximumBytes.value() : UINT32_MAX);

  MOZ_ASSERT(memory->initial <= *memory->maximum);
}
1999
DecodeMemoryLimits(Decoder & d,ModuleEnvironment * env)2000 static bool DecodeMemoryLimits(Decoder& d, ModuleEnvironment* env) {
2001 if (env->usesMemory()) {
2002 return d.fail("already have default memory");
2003 }
2004
2005 Limits memory;
2006 if (!DecodeLimits(d, &memory, Shareable::True)) {
2007 return false;
2008 }
2009
2010 if (memory.initial > MaxMemoryInitialPages) {
2011 return d.fail("initial memory size too big");
2012 }
2013
2014 if (memory.maximum && *memory.maximum > MaxMemoryMaximumPages) {
2015 return d.fail("maximum memory size too big");
2016 }
2017
2018 ConvertMemoryPagesToBytes(&memory);
2019
2020 if (memory.shared == Shareable::True &&
2021 env->sharedMemoryEnabled == Shareable::False) {
2022 return d.fail("shared memory is disabled");
2023 }
2024
2025 env->memoryUsage = memory.shared == Shareable::True ? MemoryUsage::Shared
2026 : MemoryUsage::Unshared;
2027 env->minMemoryLength = memory.initial;
2028 env->maxMemoryLength = memory.maximum;
2029 return true;
2030 }
2031
// Decodes one entry of the import section: module name, field name, the
// import kind byte, and the kind-specific descriptor. The imported entity is
// appended to the matching vector in `env` (funcTypes, tables, memory, or
// globals) and the import record itself to env->imports.
static bool DecodeImport(Decoder& d, ModuleEnvironment* env) {
  UniqueChars moduleName = DecodeName(d);
  if (!moduleName) {
    return d.fail("expected valid import module name");
  }

  UniqueChars funcName = DecodeName(d);
  if (!funcName) {
    return d.fail("expected valid import func name");
  }

  uint8_t rawImportKind;
  if (!d.readFixedU8(&rawImportKind)) {
    return d.fail("failed to read import kind");
  }

  DefinitionKind importKind = DefinitionKind(rawImportKind);

  switch (importKind) {
    case DefinitionKind::Function: {
      uint32_t funcTypeIndex;
      if (!DecodeSignatureIndex(d, env->types, &funcTypeIndex)) {
        return false;
      }
#ifdef WASM_PRIVATE_REFTYPES
      // Module-private (indexed) reference types must not be exposed
      // through an imported function's signature.
      if (!FuncTypeIsJSCompatible(d, env->types[funcTypeIndex].funcType())) {
        return false;
      }
#endif
      if (!env->funcTypes.append(&env->types[funcTypeIndex].funcType())) {
        return false;
      }
      // Checked after the append so the count includes this import.
      if (env->funcTypes.length() > MaxFuncs) {
        return d.fail("too many functions");
      }
      break;
    }
    case DefinitionKind::Table: {
      if (!DecodeTableTypeAndLimits(d, env->refTypesEnabled(), &env->tables)) {
        return false;
      }
      // Imported tables are externally visible, which matters for
      // WASM_PRIVATE_REFTYPES checks in the element section.
      env->tables.back().importedOrExported = true;
      break;
    }
    case DefinitionKind::Memory: {
      if (!DecodeMemoryLimits(d, env)) {
        return false;
      }
      break;
    }
    case DefinitionKind::Global: {
      ValType type;
      bool isMutable;
      if (!DecodeGlobalType(d, env->types, env->refTypesEnabled(),
                            env->gcTypesEnabled(), &type, &isMutable)) {
        return false;
      }
      // Imported globals must have a type representable in JS.
      if (!GlobalIsJSCompatible(d, type)) {
        return false;
      }
      if (!env->globals.append(
              GlobalDesc(type, isMutable, env->globals.length()))) {
        return false;
      }
      if (env->globals.length() > MaxGlobals) {
        return d.fail("too many globals");
      }
      break;
    }
    default:
      return d.fail("unsupported import kind");
  }

  return env->imports.emplaceBack(std::move(moduleName), std::move(funcName),
                                  importKind);
}
2108
DecodeImportSection(Decoder & d,ModuleEnvironment * env)2109 static bool DecodeImportSection(Decoder& d, ModuleEnvironment* env) {
2110 MaybeSectionRange range;
2111 if (!d.startSection(SectionId::Import, env, &range, "import")) {
2112 return false;
2113 }
2114 if (!range) {
2115 return true;
2116 }
2117
2118 uint32_t numImports;
2119 if (!d.readVarU32(&numImports)) {
2120 return d.fail("failed to read number of imports");
2121 }
2122
2123 if (numImports > MaxImports) {
2124 return d.fail("too many imports");
2125 }
2126
2127 for (uint32_t i = 0; i < numImports; i++) {
2128 if (!DecodeImport(d, env)) {
2129 return false;
2130 }
2131 }
2132
2133 if (!d.finishSection(*range, "import")) {
2134 return false;
2135 }
2136
2137 // The global data offsets will be filled in by ModuleGenerator::init.
2138 if (!env->funcImportGlobalDataOffsets.resize(env->funcTypes.length())) {
2139 return false;
2140 }
2141
2142 return true;
2143 }
2144
DecodeFunctionSection(Decoder & d,ModuleEnvironment * env)2145 static bool DecodeFunctionSection(Decoder& d, ModuleEnvironment* env) {
2146 MaybeSectionRange range;
2147 if (!d.startSection(SectionId::Function, env, &range, "function")) {
2148 return false;
2149 }
2150 if (!range) {
2151 return true;
2152 }
2153
2154 uint32_t numDefs;
2155 if (!d.readVarU32(&numDefs)) {
2156 return d.fail("expected number of function definitions");
2157 }
2158
2159 CheckedInt<uint32_t> numFuncs = env->funcTypes.length();
2160 numFuncs += numDefs;
2161 if (!numFuncs.isValid() || numFuncs.value() > MaxFuncs) {
2162 return d.fail("too many functions");
2163 }
2164
2165 if (!env->funcTypes.reserve(numFuncs.value())) {
2166 return false;
2167 }
2168
2169 for (uint32_t i = 0; i < numDefs; i++) {
2170 uint32_t funcTypeIndex;
2171 if (!DecodeSignatureIndex(d, env->types, &funcTypeIndex)) {
2172 return false;
2173 }
2174 env->funcTypes.infallibleAppend(&env->types[funcTypeIndex].funcType());
2175 }
2176
2177 return d.finishSection(*range, "function");
2178 }
2179
DecodeTableSection(Decoder & d,ModuleEnvironment * env)2180 static bool DecodeTableSection(Decoder& d, ModuleEnvironment* env) {
2181 MaybeSectionRange range;
2182 if (!d.startSection(SectionId::Table, env, &range, "table")) {
2183 return false;
2184 }
2185 if (!range) {
2186 return true;
2187 }
2188
2189 uint32_t numTables;
2190 if (!d.readVarU32(&numTables)) {
2191 return d.fail("failed to read number of tables");
2192 }
2193
2194 for (uint32_t i = 0; i < numTables; ++i) {
2195 if (!DecodeTableTypeAndLimits(d, env->refTypesEnabled(), &env->tables)) {
2196 return false;
2197 }
2198 }
2199
2200 return d.finishSection(*range, "table");
2201 }
2202
DecodeMemorySection(Decoder & d,ModuleEnvironment * env)2203 static bool DecodeMemorySection(Decoder& d, ModuleEnvironment* env) {
2204 MaybeSectionRange range;
2205 if (!d.startSection(SectionId::Memory, env, &range, "memory")) {
2206 return false;
2207 }
2208 if (!range) {
2209 return true;
2210 }
2211
2212 uint32_t numMemories;
2213 if (!d.readVarU32(&numMemories)) {
2214 return d.fail("failed to read number of memories");
2215 }
2216
2217 if (numMemories > 1) {
2218 return d.fail("the number of memories must be at most one");
2219 }
2220
2221 for (uint32_t i = 0; i < numMemories; ++i) {
2222 if (!DecodeMemoryLimits(d, env)) {
2223 return false;
2224 }
2225 }
2226
2227 return d.finishSection(*range, "memory");
2228 }
2229
// Decodes a constant initializer expression (used by globals, element
// segments, and data segments) into *init and checks that its type matches
// `expected`. Accepted forms: numeric/v128 constants, ref.null, ref.func,
// and global.get of an immutable imported global. The expression must be
// terminated by an End opcode.
static bool DecodeInitializerExpression(Decoder& d, ModuleEnvironment* env,
                                        ValType expected, InitExpr* init) {
  OpBytes op;
  if (!d.readOp(&op)) {
    return d.fail("failed to read initializer type");
  }

  switch (op.b0) {
    case uint16_t(Op::I32Const): {
      int32_t i32;
      if (!d.readVarS32(&i32)) {
        return d.fail("failed to read initializer i32 expression");
      }
      *init = InitExpr::fromConstant(LitVal(uint32_t(i32)));
      break;
    }
    case uint16_t(Op::I64Const): {
      int64_t i64;
      if (!d.readVarS64(&i64)) {
        return d.fail("failed to read initializer i64 expression");
      }
      *init = InitExpr::fromConstant(LitVal(uint64_t(i64)));
      break;
    }
    case uint16_t(Op::F32Const): {
      float f32;
      if (!d.readFixedF32(&f32)) {
        return d.fail("failed to read initializer f32 expression");
      }
      *init = InitExpr::fromConstant(LitVal(f32));
      break;
    }
    case uint16_t(Op::F64Const): {
      double f64;
      if (!d.readFixedF64(&f64)) {
        return d.fail("failed to read initializer f64 expression");
      }
      *init = InitExpr::fromConstant(LitVal(f64));
      break;
    }
#ifdef ENABLE_WASM_SIMD
    case uint16_t(Op::SimdPrefix): {
      // The only SIMD opcode allowed in an initializer is v128.const.
      if (op.b1 != uint32_t(SimdOp::V128Const)) {
        return d.fail("unexpected initializer expression");
      }
      V128 v128;
      if (!d.readFixedV128(&v128)) {
        return d.fail("failed to read initializer v128 expression");
      }
      *init = InitExpr::fromConstant(LitVal(v128));
      break;
    }
#endif
#ifdef ENABLE_WASM_REFTYPES
    case uint16_t(Op::RefNull): {
      MOZ_ASSERT_IF(env->isStructType(expected), env->gcTypesEnabled());
      RefType initType;
      if (!d.readRefType(env->types, env->gcTypesEnabled(), &initType)) {
        return false;
      }
      // The null's declared type must be a reference subtype of the
      // expected type.
      if (!expected.isReference() ||
          !env->isRefSubtypeOf(ValType(initType), ValType(expected))) {
        return d.fail(
            "type mismatch: initializer type and expected type don't match");
      }
      // Record the null at the expected type, not the declared one.
      *init = InitExpr::fromConstant(LitVal(expected, AnyRef::null()));
      break;
    }
    case uint16_t(Op::RefFunc): {
      // ref.func initializers are only valid where a funcref is expected.
      if (!expected.isReference() || expected.refType() != RefType::func()) {
        return d.fail(
            "type mismatch: initializer type and expected type don't match");
      }
      uint32_t i;
      if (!d.readVarU32(&i)) {
        return d.fail(
            "failed to read ref.func index in initializer expression");
      }
      if (i >= env->numFuncs()) {
        return d.fail("function index out of range in initializer expression");
      }
      // Mark the function as a legitimate ref.func target.
      env->validForRefFunc.setBit(i);
      *init = InitExpr::fromRefFunc(i);
      break;
    }
#endif
    case uint16_t(Op::GetGlobal): {
      uint32_t i;
      const GlobalDescVector& globals = env->globals;
      if (!d.readVarU32(&i)) {
        return d.fail(
            "failed to read global.get index in initializer expression");
      }
      if (i >= globals.length()) {
        return d.fail("global index out of range in initializer expression");
      }
      // Only immutable imported globals may appear in initializers.
      if (!globals[i].isImport() || globals[i].isMutable()) {
        return d.fail(
            "initializer expression must reference a global immutable import");
      }
      if (expected.isReference()) {
        // For reference types, require reference-ness on both sides, gate
        // struct types on the gc-types feature, and check subtyping.
        bool fail = false;
        if (!globals[i].type().isReference()) {
          fail = true;
        } else if ((env->isStructType(expected) ||
                    env->isStructType(globals[i].type())) &&
                   !env->gcTypesEnabled()) {
          fail = true;
        } else if (!env->isRefSubtypeOf(globals[i].type(), expected)) {
          fail = true;
        }
        if (fail) {
          return d.fail(
              "type mismatch: initializer type and expected type don't match");
        }
        *init = InitExpr::fromGetGlobal(i, expected);
      } else {
        *init = InitExpr::fromGetGlobal(i, globals[i].type());
      }
      break;
    }
    default: {
      return d.fail("unexpected initializer expression");
    }
  }

  // Final exact-type check, covering the non-reference constant cases.
  if (expected != init->type()) {
    return d.fail(
        "type mismatch: initializer type and expected type don't match");
  }

  // Every initializer expression ends with an End opcode.
  OpBytes end;
  if (!d.readOp(&end) || end.b0 != uint16_t(Op::End)) {
    return d.fail("failed to read end of initializer expression");
  }

  return true;
}
2368
DecodeGlobalSection(Decoder & d,ModuleEnvironment * env)2369 static bool DecodeGlobalSection(Decoder& d, ModuleEnvironment* env) {
2370 MaybeSectionRange range;
2371 if (!d.startSection(SectionId::Global, env, &range, "global")) {
2372 return false;
2373 }
2374 if (!range) {
2375 return true;
2376 }
2377
2378 uint32_t numDefs;
2379 if (!d.readVarU32(&numDefs)) {
2380 return d.fail("expected number of globals");
2381 }
2382
2383 CheckedInt<uint32_t> numGlobals = env->globals.length();
2384 numGlobals += numDefs;
2385 if (!numGlobals.isValid() || numGlobals.value() > MaxGlobals) {
2386 return d.fail("too many globals");
2387 }
2388
2389 if (!env->globals.reserve(numGlobals.value())) {
2390 return false;
2391 }
2392
2393 for (uint32_t i = 0; i < numDefs; i++) {
2394 ValType type;
2395 bool isMutable;
2396 if (!DecodeGlobalType(d, env->types, env->refTypesEnabled(),
2397 env->gcTypesEnabled(), &type, &isMutable)) {
2398 return false;
2399 }
2400
2401 InitExpr initializer;
2402 if (!DecodeInitializerExpression(d, env, type, &initializer)) {
2403 return false;
2404 }
2405
2406 env->globals.infallibleAppend(GlobalDesc(initializer, isMutable));
2407 }
2408
2409 return d.finishSection(*range, "global");
2410 }
2411
2412 typedef HashSet<const char*, mozilla::CStringHasher, SystemAllocPolicy>
2413 CStringSet;
2414
DecodeExportName(Decoder & d,CStringSet * dupSet)2415 static UniqueChars DecodeExportName(Decoder& d, CStringSet* dupSet) {
2416 UniqueChars exportName = DecodeName(d);
2417 if (!exportName) {
2418 d.fail("expected valid export name");
2419 return nullptr;
2420 }
2421
2422 CStringSet::AddPtr p = dupSet->lookupForAdd(exportName.get());
2423 if (p) {
2424 d.fail("duplicate export");
2425 return nullptr;
2426 }
2427
2428 if (!dupSet->add(p, exportName.get())) {
2429 return nullptr;
2430 }
2431
2432 return exportName;
2433 }
2434
// Decodes one export entry: a deduplicated export name, the export kind, and
// the index of the exported entity. Validates the index, applies kind-
// specific side effects on `env`, and records the export in env->exports.
static bool DecodeExport(Decoder& d, ModuleEnvironment* env,
                         CStringSet* dupSet) {
  UniqueChars fieldName = DecodeExportName(d, dupSet);
  if (!fieldName) {
    return false;
  }

  uint8_t exportKind;
  if (!d.readFixedU8(&exportKind)) {
    return d.fail("failed to read export kind");
  }

  switch (DefinitionKind(exportKind)) {
    case DefinitionKind::Function: {
      uint32_t funcIndex;
      if (!d.readVarU32(&funcIndex)) {
        return d.fail("expected function index");
      }

      if (funcIndex >= env->numFuncs()) {
        return d.fail("exported function index out of bounds");
      }
#ifdef WASM_PRIVATE_REFTYPES
      // Exported functions must not expose indexed reference types.
      if (!FuncTypeIsJSCompatible(d, *env->funcTypes[funcIndex])) {
        return false;
      }
#endif

      // Exported functions are legitimate ref.func targets.
      env->validForRefFunc.setBit(funcIndex);
      return env->exports.emplaceBack(std::move(fieldName), funcIndex,
                                      DefinitionKind::Function);
    }
    case DefinitionKind::Table: {
      uint32_t tableIndex;
      if (!d.readVarU32(&tableIndex)) {
        return d.fail("expected table index");
      }

      if (tableIndex >= env->tables.length()) {
        return d.fail("exported table index out of bounds");
      }
      // Mark the table externally visible (relevant to element-segment
      // checks under WASM_PRIVATE_REFTYPES).
      env->tables[tableIndex].importedOrExported = true;
      return env->exports.emplaceBack(std::move(fieldName), tableIndex,
                                      DefinitionKind::Table);
    }
    case DefinitionKind::Memory: {
      uint32_t memoryIndex;
      if (!d.readVarU32(&memoryIndex)) {
        return d.fail("expected memory index");
      }

      // Only one memory (index 0) may exist, and it must have been declared.
      if (memoryIndex > 0 || !env->usesMemory()) {
        return d.fail("exported memory index out of bounds");
      }

      return env->exports.emplaceBack(std::move(fieldName),
                                      DefinitionKind::Memory);
    }
    case DefinitionKind::Global: {
      uint32_t globalIndex;
      if (!d.readVarU32(&globalIndex)) {
        return d.fail("expected global index");
      }

      if (globalIndex >= env->globals.length()) {
        return d.fail("exported global index out of bounds");
      }

      GlobalDesc* global = &env->globals[globalIndex];
      global->setIsExport();
      // Exported globals must have a type representable in JS.
      if (!GlobalIsJSCompatible(d, global->type())) {
        return false;
      }

      return env->exports.emplaceBack(std::move(fieldName), globalIndex,
                                      DefinitionKind::Global);
    }
    default:
      return d.fail("unexpected export kind");
  }

  // Every switch case returns, so this point is never reached.
  MOZ_CRASH("unreachable");
}
2518
DecodeExportSection(Decoder & d,ModuleEnvironment * env)2519 static bool DecodeExportSection(Decoder& d, ModuleEnvironment* env) {
2520 MaybeSectionRange range;
2521 if (!d.startSection(SectionId::Export, env, &range, "export")) {
2522 return false;
2523 }
2524 if (!range) {
2525 return true;
2526 }
2527
2528 CStringSet dupSet;
2529
2530 uint32_t numExports;
2531 if (!d.readVarU32(&numExports)) {
2532 return d.fail("failed to read number of exports");
2533 }
2534
2535 if (numExports > MaxExports) {
2536 return d.fail("too many exports");
2537 }
2538
2539 for (uint32_t i = 0; i < numExports; i++) {
2540 if (!DecodeExport(d, env, &dupSet)) {
2541 return false;
2542 }
2543 }
2544
2545 return d.finishSection(*range, "export");
2546 }
2547
DecodeStartSection(Decoder & d,ModuleEnvironment * env)2548 static bool DecodeStartSection(Decoder& d, ModuleEnvironment* env) {
2549 MaybeSectionRange range;
2550 if (!d.startSection(SectionId::Start, env, &range, "start")) {
2551 return false;
2552 }
2553 if (!range) {
2554 return true;
2555 }
2556
2557 uint32_t funcIndex;
2558 if (!d.readVarU32(&funcIndex)) {
2559 return d.fail("failed to read start func index");
2560 }
2561
2562 if (funcIndex >= env->numFuncs()) {
2563 return d.fail("unknown start function");
2564 }
2565
2566 const FuncType& funcType = *env->funcTypes[funcIndex];
2567 if (funcType.results().length() > 0) {
2568 return d.fail("start function must not return anything");
2569 }
2570
2571 if (funcType.args().length()) {
2572 return d.fail("start function must be nullary");
2573 }
2574
2575 env->startFuncIndex = Some(funcIndex);
2576
2577 return d.finishSection(*range, "start");
2578 }
2579
NormalizeElemSegmentKind(ElemSegmentKind decodedKind)2580 static inline ElemSegment::Kind NormalizeElemSegmentKind(
2581 ElemSegmentKind decodedKind) {
2582 switch (decodedKind) {
2583 case ElemSegmentKind::Active:
2584 case ElemSegmentKind::ActiveWithTableIndex: {
2585 return ElemSegment::Kind::Active;
2586 }
2587 case ElemSegmentKind::Passive: {
2588 return ElemSegment::Kind::Passive;
2589 }
2590 case ElemSegmentKind::Declared: {
2591 return ElemSegment::Kind::Declared;
2592 }
2593 }
2594 MOZ_CRASH("unexpected elem segment kind");
2595 }
2596
// Decodes the element section. Each segment's flags select the kind
// (active/passive/declared, with optional explicit table index) and the
// payload representation (element expressions vs. plain function indices).
// The segment's element type is checked against the constraints of its kind
// and against the target table's element type, and all function indices are
// range-checked and marked valid for ref.func.
static bool DecodeElemSection(Decoder& d, ModuleEnvironment* env) {
  MaybeSectionRange range;
  if (!d.startSection(SectionId::Elem, env, &range, "elem")) {
    return false;
  }
  if (!range) {
    return true;
  }

  uint32_t numSegments;
  if (!d.readVarU32(&numSegments)) {
    return d.fail("failed to read number of elem segments");
  }

  if (numSegments > MaxElemSegments) {
    return d.fail("too many elem segments");
  }

  if (!env->elemSegments.reserve(numSegments)) {
    return false;
  }

  for (uint32_t i = 0; i < numSegments; i++) {
    uint32_t segmentFlags;
    if (!d.readVarU32(&segmentFlags)) {
      return d.fail("expected elem segment flags field");
    }

    // construct() rejects flag combinations not defined by the spec.
    Maybe<ElemSegmentFlags> flags = ElemSegmentFlags::construct(segmentFlags);
    if (!flags) {
      return d.fail("invalid elem segment flags field");
    }

    MutableElemSegment seg = js_new<ElemSegment>();
    if (!seg) {
      return false;
    }

    ElemSegmentKind kind = flags->kind();
    seg->kind = NormalizeElemSegmentKind(kind);

    if (kind == ElemSegmentKind::Active ||
        kind == ElemSegmentKind::ActiveWithTableIndex) {
      if (env->tables.length() == 0) {
        return d.fail("active elem segment requires a table");
      }

      // A plain Active segment implicitly targets table 0.
      uint32_t tableIndex = 0;
      if (kind == ElemSegmentKind::ActiveWithTableIndex &&
          !d.readVarU32(&tableIndex)) {
        return d.fail("expected table index");
      }
      if (tableIndex >= env->tables.length()) {
        return d.fail("table index out of range for element segment");
      }
      seg->tableIndex = tableIndex;

      // Active segments carry an i32 offset initializer.
      InitExpr offset;
      if (!DecodeInitializerExpression(d, env, ValType::I32, &offset)) {
        return false;
      }
      seg->offsetIfActive.emplace(offset);
    } else {
      // Too many bugs result from keeping this value zero. For passive
      // or declared segments, there really is no table index, and we should
      // never touch the field.
      MOZ_ASSERT(kind == ElemSegmentKind::Passive ||
                 kind == ElemSegmentKind::Declared);
      seg->tableIndex = (uint32_t)-1;
    }

    ElemSegmentPayload payload = flags->payload();
    ValType elemType;

    // `ActiveWithTableIndex`, `Declared`, and `Passive` element segments
    // encode the type or definition kind of the payload. `Active` element
    // segments are restricted to MVP behavior, which assumes only function
    // indices.
    if (kind == ElemSegmentKind::Active) {
      elemType = RefType::func();
    } else {
      uint8_t form;
      if (!d.readFixedU8(&form)) {
        return d.fail("expected type or extern kind");
      }

      switch (payload) {
        case ElemSegmentPayload::ElemExpression: {
          switch (form) {
            case uint8_t(TypeCode::FuncRef):
              elemType = RefType::func();
              break;
            case uint8_t(TypeCode::AnyRef):
              elemType = RefType::any();
              break;
            default:
              return d.fail(
                  "segments with element expressions can only contain "
                  "references");
          }
          break;
        }
        case ElemSegmentPayload::ExternIndex: {
          if (form != uint8_t(DefinitionKind::Function)) {
            return d.fail(
                "segments with extern indices can only contain function "
                "references");
          }
          elemType = RefType::func();
        }
      }
    }

    // Check constraints on the element type.
    switch (kind) {
      case ElemSegmentKind::Declared: {
        if (!(elemType.isReference() &&
              env->isRefSubtypeOf(elemType, RefType::func()))) {
          return d.fail(
              "declared segment's element type must be subtype of funcref");
        }
        break;
      }
      case ElemSegmentKind::Active:
      case ElemSegmentKind::ActiveWithTableIndex: {
        ValType tblElemType = ToElemValType(env->tables[seg->tableIndex].kind);
        if (!(elemType == tblElemType ||
              (elemType.isReference() && tblElemType.isReference() &&
               env->isRefSubtypeOf(elemType, tblElemType)))) {
          return d.fail(
              "segment's element type must be subtype of table's element type");
        }
        break;
      }
      case ElemSegmentKind::Passive: {
        // By construction, above.
        MOZ_ASSERT(elemType.isReference());
        break;
      }
    }
    seg->elementType = elemType;

    uint32_t numElems;
    if (!d.readVarU32(&numElems)) {
      return d.fail("expected segment size");
    }

    if (numElems > MaxTableInitialLength) {
      return d.fail("too many table elements");
    }

    if (!seg->elemFuncIndices.reserve(numElems)) {
      return false;
    }

#ifdef WASM_PRIVATE_REFTYPES
    // We assume that passive or declared segments may be applied to external
    // tables. We can do slightly better: if there are no external tables in
    // the module then we don't need to worry about passive or declared
    // segments either. But this is a temporary restriction.
    bool exportedTable = kind == ElemSegmentKind::Passive ||
                         kind == ElemSegmentKind::Declared ||
                         env->tables[seg->tableIndex].importedOrExported;
#endif

    // For passive segments we should use DecodeInitializerExpression() but we
    // don't really want to generalize that function yet, so instead read the
    // required Ref.Func and End here.

    for (uint32_t i = 0; i < numElems; i++) {
      bool needIndex = true;

      if (payload == ElemSegmentPayload::ElemExpression) {
        OpBytes op;
        if (!d.readOp(&op)) {
          return d.fail("failed to read initializer operation");
        }

        RefType initType = RefType::any();
        switch (op.b0) {
          case uint16_t(Op::RefFunc):
            initType = RefType::func();
            break;
          case uint16_t(Op::RefNull):
            if (!d.readRefType(env->types, env->gcTypesEnabled(), &initType)) {
              return false;
            }
            // ref.null carries no function index.
            needIndex = false;
            break;
          default:
            return d.fail("failed to read initializer operation");
        }
        if (!env->isRefSubtypeOf(ValType(initType), ValType(elemType))) {
          return d.fail("initializer type must be subtype of element type");
        }
      }

      // ref.null entries are represented as NullFuncIndex.
      uint32_t funcIndex = NullFuncIndex;
      if (needIndex) {
        if (!d.readVarU32(&funcIndex)) {
          return d.fail("failed to read element function index");
        }
        if (funcIndex >= env->numFuncs()) {
          return d.fail("table element out of range");
        }
#ifdef WASM_PRIVATE_REFTYPES
        if (exportedTable &&
            !FuncTypeIsJSCompatible(d, *env->funcTypes[funcIndex])) {
          return false;
        }
#endif
      }

      if (payload == ElemSegmentPayload::ElemExpression) {
        OpBytes end;
        if (!d.readOp(&end) || end.b0 != uint16_t(Op::End)) {
          return d.fail("failed to read end of initializer expression");
        }
      }

      seg->elemFuncIndices.infallibleAppend(funcIndex);
      if (funcIndex != NullFuncIndex) {
        // Functions referenced from element segments become valid ref.func
        // targets.
        env->validForRefFunc.setBit(funcIndex);
      }
    }

    env->elemSegments.infallibleAppend(std::move(seg));
  }

  return d.finishSection(*range, "elem");
}
2827
DecodeDataCountSection(Decoder & d,ModuleEnvironment * env)2828 static bool DecodeDataCountSection(Decoder& d, ModuleEnvironment* env) {
2829 MaybeSectionRange range;
2830 if (!d.startSection(SectionId::DataCount, env, &range, "datacount")) {
2831 return false;
2832 }
2833 if (!range) {
2834 return true;
2835 }
2836
2837 #ifndef ENABLE_WASM_BULKMEM_OPS
2838 // Bulk memory must be available if shared memory is enabled.
2839 if (env->sharedMemoryEnabled == Shareable::False) {
2840 return d.fail("bulk memory ops disabled");
2841 }
2842 #endif
2843
2844 uint32_t dataCount;
2845 if (!d.readVarU32(&dataCount)) {
2846 return d.fail("expected data segment count");
2847 }
2848
2849 env->dataCount.emplace(dataCount);
2850
2851 return d.finishSection(*range, "datacount");
2852 }
2853
StartsCodeSection(const uint8_t * begin,const uint8_t * end,SectionRange * codeSection)2854 bool wasm::StartsCodeSection(const uint8_t* begin, const uint8_t* end,
2855 SectionRange* codeSection) {
2856 UniqueChars unused;
2857 Decoder d(begin, end, 0, &unused);
2858
2859 if (!DecodePreamble(d)) {
2860 return false;
2861 }
2862
2863 while (!d.done()) {
2864 uint8_t id;
2865 SectionRange range;
2866 if (!d.readSectionHeader(&id, &range)) {
2867 return false;
2868 }
2869
2870 if (id == uint8_t(SectionId::Code)) {
2871 *codeSection = range;
2872 return true;
2873 }
2874
2875 if (!d.readBytes(range.size)) {
2876 return false;
2877 }
2878 }
2879
2880 return false;
2881 }
2882
DecodeModuleEnvironment(Decoder & d,ModuleEnvironment * env)2883 bool wasm::DecodeModuleEnvironment(Decoder& d, ModuleEnvironment* env) {
2884 if (!DecodePreamble(d)) {
2885 return false;
2886 }
2887
2888 env->compilerEnv->computeParameters(d);
2889
2890 if (!DecodeTypeSection(d, env)) {
2891 return false;
2892 }
2893
2894 if (!DecodeImportSection(d, env)) {
2895 return false;
2896 }
2897
2898 if (!DecodeFunctionSection(d, env)) {
2899 return false;
2900 }
2901
2902 if (!DecodeTableSection(d, env)) {
2903 return false;
2904 }
2905
2906 if (!DecodeMemorySection(d, env)) {
2907 return false;
2908 }
2909
2910 if (!DecodeGlobalSection(d, env)) {
2911 return false;
2912 }
2913
2914 if (!DecodeExportSection(d, env)) {
2915 return false;
2916 }
2917
2918 if (!DecodeStartSection(d, env)) {
2919 return false;
2920 }
2921
2922 if (!DecodeElemSection(d, env)) {
2923 return false;
2924 }
2925
2926 if (!DecodeDataCountSection(d, env)) {
2927 return false;
2928 }
2929
2930 if (!d.startSection(SectionId::Code, env, &env->codeSection, "code")) {
2931 return false;
2932 }
2933
2934 if (env->codeSection && env->codeSection->size > MaxCodeSectionBytes) {
2935 return d.fail("code section too big");
2936 }
2937
2938 return true;
2939 }
2940
DecodeFunctionBody(Decoder & d,const ModuleEnvironment & env,uint32_t funcIndex)2941 static bool DecodeFunctionBody(Decoder& d, const ModuleEnvironment& env,
2942 uint32_t funcIndex) {
2943 uint32_t bodySize;
2944 if (!d.readVarU32(&bodySize)) {
2945 return d.fail("expected number of function body bytes");
2946 }
2947
2948 if (bodySize > MaxFunctionBytes) {
2949 return d.fail("function body too big");
2950 }
2951
2952 if (d.bytesRemain() < bodySize) {
2953 return d.fail("function body length too big");
2954 }
2955
2956 if (!ValidateFunctionBody(env, funcIndex, bodySize, d)) {
2957 return false;
2958 }
2959
2960 return true;
2961 }
2962
DecodeCodeSection(Decoder & d,ModuleEnvironment * env)2963 static bool DecodeCodeSection(Decoder& d, ModuleEnvironment* env) {
2964 if (!env->codeSection) {
2965 if (env->numFuncDefs() != 0) {
2966 return d.fail("expected code section");
2967 }
2968 return true;
2969 }
2970
2971 uint32_t numFuncDefs;
2972 if (!d.readVarU32(&numFuncDefs)) {
2973 return d.fail("expected function body count");
2974 }
2975
2976 if (numFuncDefs != env->numFuncDefs()) {
2977 return d.fail(
2978 "function body count does not match function signature count");
2979 }
2980
2981 for (uint32_t funcDefIndex = 0; funcDefIndex < numFuncDefs; funcDefIndex++) {
2982 if (!DecodeFunctionBody(d, *env, env->numFuncImports() + funcDefIndex)) {
2983 return false;
2984 }
2985 }
2986
2987 return d.finishSection(*env->codeSection, "code");
2988 }
2989
// Decodes the data section. Each segment's kind, optional memory index,
// optional active-offset initializer, and payload size are validated; only
// the segment's metadata (offset into the bytecode, not the bytes
// themselves) is appended to env->dataSegments. The count must agree with
// any preceding DataCount section.
static bool DecodeDataSection(Decoder& d, ModuleEnvironment* env) {
  MaybeSectionRange range;
  if (!d.startSection(SectionId::Data, env, &range, "data")) {
    return false;
  }
  if (!range) {
    // No data section: valid only if DataCount (when present) declared zero.
    if (env->dataCount.isSome() && *env->dataCount > 0) {
      return d.fail("number of data segments does not match declared count");
    }
    return true;
  }

  uint32_t numSegments;
  if (!d.readVarU32(&numSegments)) {
    return d.fail("failed to read number of data segments");
  }

  if (numSegments > MaxDataSegments) {
    return d.fail("too many data segments");
  }

  if (env->dataCount.isSome() && numSegments != *env->dataCount) {
    return d.fail("number of data segments does not match declared count");
  }

  for (uint32_t i = 0; i < numSegments; i++) {
    uint32_t initializerKindVal;
    if (!d.readVarU32(&initializerKindVal)) {
      return d.fail("expected data initializer-kind field");
    }

    // Validate the raw value before converting it to the enum.
    switch (initializerKindVal) {
      case uint32_t(DataSegmentKind::Active):
      case uint32_t(DataSegmentKind::Passive):
      case uint32_t(DataSegmentKind::ActiveWithMemoryIndex):
        break;
      default:
        return d.fail("invalid data initializer-kind field");
    }

    DataSegmentKind initializerKind = DataSegmentKind(initializerKindVal);

    if (initializerKind != DataSegmentKind::Passive && !env->usesMemory()) {
      return d.fail("active data segment requires a memory section");
    }

    uint32_t memIndex = 0;
    if (initializerKind == DataSegmentKind::ActiveWithMemoryIndex) {
      if (!d.readVarU32(&memIndex)) {
        return d.fail("expected memory index");
      }
      // Only a single memory is supported, so the index must be zero.
      if (memIndex > 0) {
        return d.fail("memory index must be zero");
      }
    }

    DataSegmentEnv seg;
    if (initializerKind == DataSegmentKind::Active ||
        initializerKind == DataSegmentKind::ActiveWithMemoryIndex) {
      // Active segments carry an i32 offset initializer.
      InitExpr segOffset;
      if (!DecodeInitializerExpression(d, env, ValType::I32, &segOffset)) {
        return false;
      }
      seg.offsetIfActive.emplace(segOffset);
    }

    if (!d.readVarU32(&seg.length)) {
      return d.fail("expected segment size");
    }

    if (seg.length > MaxMemoryInitialPages * PageSize) {
      return d.fail("segment size too big");
    }

    // Record where the payload lives; the bytes are skipped, not copied.
    seg.bytecodeOffset = d.currentOffset();

    if (!d.readBytes(seg.length)) {
      return d.fail("data segment shorter than declared");
    }

    if (!env->dataSegments.append(seg)) {
      return false;
    }
  }

  return d.finishSection(*range, "data");
}
3077
DecodeModuleNameSubsection(Decoder & d,const CustomSectionEnv & nameSection,ModuleEnvironment * env)3078 static bool DecodeModuleNameSubsection(Decoder& d,
3079 const CustomSectionEnv& nameSection,
3080 ModuleEnvironment* env) {
3081 Maybe<uint32_t> endOffset;
3082 if (!d.startNameSubsection(NameType::Module, &endOffset)) {
3083 return false;
3084 }
3085 if (!endOffset) {
3086 return true;
3087 }
3088
3089 Name moduleName;
3090 if (!d.readVarU32(&moduleName.length)) {
3091 return d.fail("failed to read module name length");
3092 }
3093
3094 MOZ_ASSERT(d.currentOffset() >= nameSection.payloadOffset);
3095 moduleName.offsetInNamePayload =
3096 d.currentOffset() - nameSection.payloadOffset;
3097
3098 const uint8_t* bytes;
3099 if (!d.readBytes(moduleName.length, &bytes)) {
3100 return d.fail("failed to read module name bytes");
3101 }
3102
3103 env->moduleName.emplace(moduleName);
3104
3105 return d.finishNameSubsection(*endOffset);
3106 }
3107
DecodeFunctionNameSubsection(Decoder & d,const CustomSectionEnv & nameSection,ModuleEnvironment * env)3108 static bool DecodeFunctionNameSubsection(Decoder& d,
3109 const CustomSectionEnv& nameSection,
3110 ModuleEnvironment* env) {
3111 Maybe<uint32_t> endOffset;
3112 if (!d.startNameSubsection(NameType::Function, &endOffset)) {
3113 return false;
3114 }
3115 if (!endOffset) {
3116 return true;
3117 }
3118
3119 uint32_t nameCount = 0;
3120 if (!d.readVarU32(&nameCount) || nameCount > MaxFuncs) {
3121 return d.fail("bad function name count");
3122 }
3123
3124 NameVector funcNames;
3125
3126 for (uint32_t i = 0; i < nameCount; ++i) {
3127 uint32_t funcIndex = 0;
3128 if (!d.readVarU32(&funcIndex)) {
3129 return d.fail("unable to read function index");
3130 }
3131
3132 // Names must refer to real functions and be given in ascending order.
3133 if (funcIndex >= env->numFuncs() || funcIndex < funcNames.length()) {
3134 return d.fail("invalid function index");
3135 }
3136
3137 Name funcName;
3138 if (!d.readVarU32(&funcName.length) || funcName.length > MaxStringLength) {
3139 return d.fail("unable to read function name length");
3140 }
3141
3142 if (!funcName.length) {
3143 continue;
3144 }
3145
3146 if (!funcNames.resize(funcIndex + 1)) {
3147 return false;
3148 }
3149
3150 MOZ_ASSERT(d.currentOffset() >= nameSection.payloadOffset);
3151 funcName.offsetInNamePayload =
3152 d.currentOffset() - nameSection.payloadOffset;
3153
3154 if (!d.readBytes(funcName.length)) {
3155 return d.fail("unable to read function name bytes");
3156 }
3157
3158 funcNames[funcIndex] = funcName;
3159 }
3160
3161 if (!d.finishNameSubsection(*endOffset)) {
3162 return false;
3163 }
3164
3165 // To encourage fully valid function names subsections; only save names if
3166 // the entire subsection decoded correctly.
3167 env->funcNames = std::move(funcNames);
3168 return true;
3169 }
3170
DecodeNameSection(Decoder & d,ModuleEnvironment * env)3171 static bool DecodeNameSection(Decoder& d, ModuleEnvironment* env) {
3172 MaybeSectionRange range;
3173 if (!d.startCustomSection(NameSectionName, env, &range)) {
3174 return false;
3175 }
3176 if (!range) {
3177 return true;
3178 }
3179
3180 env->nameCustomSectionIndex = Some(env->customSections.length() - 1);
3181 const CustomSectionEnv& nameSection = env->customSections.back();
3182
3183 // Once started, custom sections do not report validation errors.
3184
3185 if (!DecodeModuleNameSubsection(d, nameSection, env)) {
3186 goto finish;
3187 }
3188
3189 if (!DecodeFunctionNameSubsection(d, nameSection, env)) {
3190 goto finish;
3191 }
3192
3193 while (d.currentOffset() < range->end()) {
3194 if (!d.skipNameSubsection()) {
3195 goto finish;
3196 }
3197 }
3198
3199 finish:
3200 d.finishCustomSection(NameSectionName, *range);
3201 return true;
3202 }
3203
DecodeModuleTail(Decoder & d,ModuleEnvironment * env)3204 bool wasm::DecodeModuleTail(Decoder& d, ModuleEnvironment* env) {
3205 if (!DecodeDataSection(d, env)) {
3206 return false;
3207 }
3208
3209 if (!DecodeNameSection(d, env)) {
3210 return false;
3211 }
3212
3213 while (!d.done()) {
3214 if (!d.skipCustomSection(env)) {
3215 if (d.resilientMode()) {
3216 d.clearError();
3217 return true;
3218 }
3219 return false;
3220 }
3221 }
3222
3223 return true;
3224 }
3225
3226 // Validate algorithm.
3227
Validate(JSContext * cx,const ShareableBytes & bytecode,UniqueChars * error)3228 bool wasm::Validate(JSContext* cx, const ShareableBytes& bytecode,
3229 UniqueChars* error) {
3230 Decoder d(bytecode.bytes, 0, error);
3231
3232 bool gcTypesConfigured = GcTypesAvailable(cx);
3233 bool refTypesConfigured = ReftypesAvailable(cx);
3234 bool multiValueConfigured = MultiValuesAvailable(cx);
3235 bool hugeMemory = false;
3236 bool v128Configured = SimdAvailable(cx);
3237
3238 CompilerEnvironment compilerEnv(
3239 CompileMode::Once, Tier::Optimized, OptimizedBackend::Ion,
3240 DebugEnabled::False, multiValueConfigured, refTypesConfigured,
3241 gcTypesConfigured, hugeMemory, v128Configured);
3242 ModuleEnvironment env(
3243 &compilerEnv,
3244 cx->realm()->creationOptions().getSharedMemoryAndAtomicsEnabled()
3245 ? Shareable::True
3246 : Shareable::False);
3247 if (!DecodeModuleEnvironment(d, &env)) {
3248 return false;
3249 }
3250
3251 if (!DecodeCodeSection(d, &env)) {
3252 return false;
3253 }
3254
3255 if (!DecodeModuleTail(d, &env)) {
3256 return false;
3257 }
3258
3259 MOZ_ASSERT(!*error, "unreported error in decoding");
3260 return true;
3261 }
3262