1 //! Functions defined locally within a wasm module.
2 
3 mod context;
4 mod emit;
5 
6 use self::context::ValidationContext;
7 use crate::emit::IdsToIndices;
8 use crate::encode::Encoder;
9 use crate::ir::*;
10 use crate::map::{IdHashMap, IdHashSet};
11 use crate::parse::IndicesToIds;
12 use crate::{Data, DataId, FunctionBuilder, FunctionId, MemoryId, Module, Result, TypeId, ValType};
13 use std::collections::BTreeMap;
14 use wasmparser::{FuncValidator, Operator, ValidatorResources};
15 
16 /// A function defined locally within the wasm module.
17 #[derive(Debug)]
18 pub struct LocalFunction {
19     /// All of this function's instructions, contained in the arena.
20     builder: FunctionBuilder,
21 
22     /// Arguments to this function, and the locals that they're assigned to.
23     pub args: Vec<LocalId>,
24     //
25     // TODO: provenance: (InstrSeqId, usize) -> offset in code section of the
26     // original instruction. This will be necessary for preserving debug info.
27 }
28 
29 impl LocalFunction {
30     /// Creates a new definition of a local function from its components.
    pub(crate) fn new(args: Vec<LocalId>, builder: FunctionBuilder) -> LocalFunction {
32         LocalFunction { args, builder }
33     }
34 
35     /// Construct a new `LocalFunction`.
36     ///
37     /// Validates the given function body and constructs the `Instr` IR at the
38     /// same time.
    pub(crate) fn parse(
40         module: &Module,
41         indices: &IndicesToIds,
42         id: FunctionId,
43         ty: TypeId,
44         args: Vec<LocalId>,
45         mut body: wasmparser::BinaryReader<'_>,
46         on_instr_pos: Option<&(dyn Fn(&usize) -> InstrLocId + Sync + Send + 'static)>,
47         mut validator: FuncValidator<ValidatorResources>,
48     ) -> Result<LocalFunction> {
49         let mut func = LocalFunction {
50             builder: FunctionBuilder::without_entry(ty),
51             args,
52         };
53 
54         let result: Vec<_> = module.types.get(ty).results().iter().cloned().collect();
55         let result = result.into_boxed_slice();
56 
57         let controls = &mut context::ControlStack::new();
58 
59         let mut ctx = ValidationContext::new(module, indices, id, &mut func, controls);
60 
61         let ty = module.types.find_for_function_entry(&result).expect(
62             "the function entry type should have already been created before parsing the body",
63         );
64         let entry = ctx.push_control_with_ty(BlockKind::FunctionEntry, ty);
65         ctx.func.builder.entry = Some(entry);
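        // Decode the body one operator at a time: record its original byte
        // offset (for `InstrLocId` tracking), run it through the validator,
        // and then translate it into walrus IR via `append_instruction`.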
66         while !body.eof() {
67             let pos = body.original_position();
68             let inst = body.read_operator()?;
69             let loc = if let Some(ref on_instr_pos) = on_instr_pos {
70                 on_instr_pos(&pos)
71             } else {
72                 InstrLocId::new(pos as u32)
73             };
74             validator.op(pos, &inst)?;
75             append_instruction(&mut ctx, inst, loc);
76         }
77         validator.finish(body.original_position())?;
78 
79         debug_assert!(ctx.controls.is_empty());
80 
81         Ok(func)
82     }
83 
84     /// Get this function's type.
85     #[inline]
    pub fn ty(&self) -> TypeId {
87         self.builder.ty
88     }
89 
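    /// Add a new instruction sequence (block) to this function, returning its
    /// id in the underlying arena.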
    pub(crate) fn add_block(
91         &mut self,
92         make_block: impl FnOnce(InstrSeqId) -> InstrSeq,
93     ) -> InstrSeqId {
94         self.builder.arena.alloc_with_id(make_block)
95     }
96 
97     /// Get the id of this function's entry block.
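    ///
    /// The entry block holds the function's top-level instruction sequence,
    /// i.e. the body that starts executing when the function is called.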
    pub fn entry_block(&self) -> InstrSeqId {
99         self.builder.entry.unwrap()
100     }
101 
102     /// Get the block associated with the given id.
    pub fn block(&self, id: InstrSeqId) -> &InstrSeq {
104         &self.builder.arena[id]
105     }
106 
    /// Get a mutable reference to the block associated with the given id.
    pub fn block_mut(&mut self, id: InstrSeqId) -> &mut InstrSeq {
109         &mut self.builder.arena[id]
110     }
111 
112     /// Get access to a `FunctionBuilder` to continue adding instructions to
113     /// this function.
    pub fn builder(&self) -> &FunctionBuilder {
115         &self.builder
116     }
117 
118     /// Get access to a `FunctionBuilder` to continue adding instructions to
119     /// this function.
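    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doc test), assuming a mutable
    /// `walrus::Module` named `module` and a `FunctionId` named `func_id`
    /// that refers to a local function; the builder method shown is one of
    /// the usual `InstrSeqBuilder` conveniences:
    ///
    /// ```ignore
    /// use walrus::FunctionKind;
    ///
    /// if let FunctionKind::Local(local) = &mut module.funcs.get_mut(func_id).kind {
    ///     // Append an `unreachable` to the end of the function body.
    ///     local.builder_mut().func_body().unreachable();
    /// }
    /// ```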
    pub fn builder_mut(&mut self) -> &mut FunctionBuilder {
121         &mut self.builder
122     }
123 
124     /// Get the size of this function, in number of instructions.
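    ///
    /// The size counts instructions in every nested block, not just the entry
    /// sequence.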
    pub fn size(&self) -> u64 {
126         let mut v = SizeVisitor::default();
127         dfs_in_order(&mut v, self, self.entry_block());
128         return v.size;
129 
130         #[derive(Default)]
131         struct SizeVisitor {
132             size: u64,
133         }
134 
135         impl<'instr> Visitor<'instr> for SizeVisitor {
136             fn start_instr_seq(&mut self, seq: &'instr InstrSeq) {
137                 self.size += seq.len() as u64;
138             }
139         }
140     }
141 
    /// Is this function's body composed only of [constant
    /// instructions](https://webassembly.github.io/spec/core/valid/instructions.html#constant-instructions)?
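    ///
    /// For example, a body consisting of a single `i32.const` is constant,
    /// while a body containing a `local.get` or a call is not.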
    pub fn is_const(&self) -> bool {
145         self.block(self.entry_block())
146             .instrs
147             .iter()
148             .all(|(e, _)| e.is_const())
149     }
150 
151     /// Collect the set of data segments that are used in this function via
152     /// `memory.init` or `data.drop` instructions.
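    ///
    /// # Example
    ///
    /// A hedged usage sketch, assuming `local` is a `&LocalFunction` obtained
    /// from an already-parsed module:
    ///
    /// ```ignore
    /// let live = local.used_data_segments();
    /// // Segments absent from `live` (and unused elsewhere) are candidates
    /// // for garbage collection.
    /// println!("{} data segments referenced by this function", live.len());
    /// ```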
    pub fn used_data_segments(&self) -> IdHashSet<Data> {
154         let mut visitor = DataSegmentsVisitor::default();
155         dfs_in_order(&mut visitor, self, self.entry_block());
156         return visitor.segments;
157 
158         #[derive(Default)]
159         struct DataSegmentsVisitor {
160             segments: IdHashSet<Data>,
161         }
162 
163         impl<'a> Visitor<'a> for DataSegmentsVisitor {
164             fn visit_data_id(&mut self, id: &DataId) {
165                 self.segments.insert(*id);
166             }
167         }
168     }
169 
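    /// Collect the set of locals that are referenced anywhere in this
    /// function's body.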
    fn used_locals(&self) -> IdHashSet<Local> {
171         let mut locals = Used::default();
172         dfs_in_order(&mut locals, self, self.entry_block());
173         return locals.locals;
174 
175         #[derive(Default)]
176         struct Used {
177             locals: IdHashSet<Local>,
178         }
179 
180         impl<'a> Visitor<'a> for Used {
181             fn visit_local_id(&mut self, id: &LocalId) {
182                 self.locals.insert(*id);
183             }
184         }
185     }
186 
187     /// Emit this function's compact locals declarations.
    pub(crate) fn emit_locals(
189         &self,
190         module: &Module,
191         encoder: &mut Encoder,
192     ) -> (IdHashSet<Local>, IdHashMap<Local, u32>) {
193         let used_set = self.used_locals();
194         let mut used_locals = used_set.iter().cloned().collect::<Vec<_>>();
        // Sort to ensure we assign local indexes deterministically. All
        // entries are distinct, so we can use the faster unstable sort.
197         used_locals.sort_unstable();
198 
        // NB: use a `BTreeMap` so that local types are emitted in a
        // deterministic order.
201         let mut ty_to_locals = BTreeMap::new();
202         let args = self.args.iter().cloned().collect::<IdHashSet<_>>();
203 
204         // Partition all locals by their type as we'll create at most one entry
205         // for each type. Skip all arguments to the function because they're
206         // handled separately.
207         for local in used_locals.iter() {
208             if !args.contains(local) {
209                 let ty = module.locals.get(*local).ty();
210                 ty_to_locals.entry(ty).or_insert_with(Vec::new).push(*local);
211             }
212         }
213 
214         let mut local_map = IdHashMap::default();
215         local_map.reserve(used_locals.len());
216 
217         // Allocate an index to all the function arguments, as these are all
218         // unconditionally used and are implicit locals in wasm.
219         let mut idx = 0;
220         for &arg in self.args.iter() {
221             local_map.insert(arg, idx);
222             idx += 1;
223         }
224 
225         // Assign an index to all remaining locals
226         for (_, locals) in ty_to_locals.iter() {
227             for l in locals {
228                 local_map.insert(*l, idx);
229                 idx += 1;
230             }
231         }
232 
233         // Use our type map to emit a compact representation of all locals now
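        // For example, if the non-argument locals are two `i32`s and one
        // `i64`, we emit two entries: a count of 2 with type `i32`, and a
        // count of 1 with type `i64`.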
234         encoder.usize(ty_to_locals.len());
235         for (ty, locals) in ty_to_locals.iter() {
236             encoder.usize(locals.len());
237             ty.emit(encoder);
238         }
239 
240         (used_set, local_map)
241     }
242 
243     /// Emit this function's instruction sequence.
    pub(crate) fn emit_instructions(
245         &self,
246         indices: &IdsToIndices,
247         local_indices: &IdHashMap<Local, u32>,
248         dst: &mut Encoder,
249         map: Option<&mut Vec<(InstrLocId, usize)>>,
250     ) {
251         emit::run(self, indices, local_indices, dst, map)
252     }
253 }
254 
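/// Resolve the result types of a block from its `TypeOrFuncType` annotation.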
fn block_result_tys(
256     ctx: &ValidationContext,
257     ty: wasmparser::TypeOrFuncType,
258 ) -> Result<Box<[ValType]>> {
259     match ty {
260         wasmparser::TypeOrFuncType::Type(ty) => ValType::from_wasmparser_type(ty).map(Into::into),
261         wasmparser::TypeOrFuncType::FuncType(idx) => {
262             let ty = ctx.indices.get_type(idx)?;
263             Ok(ctx.module.types.results(ty).into())
264         }
265     }
266 }
267 
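/// Resolve the parameter types of a block from its `TypeOrFuncType`
/// annotation. Blocks annotated with a plain value type have no parameters.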
fn block_param_tys(
269     ctx: &ValidationContext,
270     ty: wasmparser::TypeOrFuncType,
271 ) -> Result<Box<[ValType]>> {
272     match ty {
273         wasmparser::TypeOrFuncType::Type(_) => Ok([][..].into()),
274         wasmparser::TypeOrFuncType::FuncType(idx) => {
275             let ty = ctx.indices.get_type(idx)?;
276             Ok(ctx.module.types.params(ty).into())
277         }
278     }
279 }
280 
fn append_instruction<'context>(
282     ctx: &'context mut ValidationContext,
283     inst: Operator,
284     loc: InstrLocId,
285 ) {
    // NB: there is a lot of `unwrap()` in this function because the `Operator`
    // has already been validated by the caller above, so all of these lookups
    // should succeed.
289     use crate::ir::ExtendedLoad::*;
290 
291     log::trace!("validate instruction: {:?}", inst);
292 
293     let const_ = |ctx: &mut ValidationContext, value| {
294         ctx.alloc_instr(Const { value }, loc);
295     };
296 
297     let unop = |ctx: &mut ValidationContext, op| {
298         ctx.alloc_instr(Unop { op }, loc);
299     };
300     let binop = |ctx: &mut ValidationContext, op| {
301         ctx.alloc_instr(Binop { op }, loc);
302     };
303 
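    // Convert a `wasmparser::MemoryImmediate` into walrus' `(MemoryId, MemArg)`
    // pair. Note that wasmparser encodes the alignment as a base-2 exponent,
    // while `MemArg::align` stores the alignment in bytes, hence the shift.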
304     let mem_arg =
305         |ctx: &mut ValidationContext, arg: &wasmparser::MemoryImmediate| -> (MemoryId, MemArg) {
306             (
307                 ctx.indices.get_memory(arg.memory).unwrap(),
308                 MemArg {
309                     align: 1 << (arg.align as i32),
310                     offset: arg.offset,
311                 },
312             )
313         };
314 
315     let load = |ctx: &mut ValidationContext, arg, kind| {
316         let (memory, arg) = mem_arg(ctx, &arg);
317         ctx.alloc_instr(Load { arg, kind, memory }, loc);
318     };
319 
320     let store = |ctx: &mut ValidationContext, arg, kind| {
321         let (memory, arg) = mem_arg(ctx, &arg);
322         ctx.alloc_instr(Store { arg, kind, memory }, loc);
323     };
324 
325     let atomicrmw = |ctx: &mut ValidationContext, arg, op, width| {
326         let (memory, arg) = mem_arg(ctx, &arg);
327         ctx.alloc_instr(
328             AtomicRmw {
329                 arg,
330                 memory,
331                 op,
332                 width,
333             },
334             loc,
335         );
336     };
337 
338     let cmpxchg = |ctx: &mut ValidationContext, arg, width| {
339         let (memory, arg) = mem_arg(ctx, &arg);
340         ctx.alloc_instr(Cmpxchg { arg, memory, width }, loc);
341     };
342 
343     let load_simd = |ctx: &mut ValidationContext, arg, kind| {
344         let (memory, arg) = mem_arg(ctx, &arg);
345         ctx.alloc_instr(LoadSimd { memory, arg, kind }, loc);
346     };
347     match inst {
348         Operator::Call { function_index } => {
349             let func = ctx.indices.get_func(function_index).unwrap();
350             ctx.alloc_instr(Call { func }, loc);
351         }
352         Operator::CallIndirect { index, table_index } => {
353             let type_id = ctx.indices.get_type(index).unwrap();
354             let table = ctx.indices.get_table(table_index).unwrap();
355             ctx.alloc_instr(CallIndirect { table, ty: type_id }, loc);
356         }
357         Operator::LocalGet { local_index } => {
358             let local = ctx.indices.get_local(ctx.func_id, local_index).unwrap();
359             ctx.alloc_instr(LocalGet { local }, loc);
360         }
361         Operator::LocalSet { local_index } => {
362             let local = ctx.indices.get_local(ctx.func_id, local_index).unwrap();
363             ctx.alloc_instr(LocalSet { local }, loc);
364         }
365         Operator::LocalTee { local_index } => {
366             let local = ctx.indices.get_local(ctx.func_id, local_index).unwrap();
367             ctx.alloc_instr(LocalTee { local }, loc);
368         }
369         Operator::GlobalGet { global_index } => {
370             let global = ctx.indices.get_global(global_index).unwrap();
371             ctx.alloc_instr(GlobalGet { global }, loc);
372         }
373         Operator::GlobalSet { global_index } => {
374             let global = ctx.indices.get_global(global_index).unwrap();
375             ctx.alloc_instr(GlobalSet { global }, loc);
376         }
377         Operator::I32Const { value } => const_(ctx, Value::I32(value)),
378         Operator::I64Const { value } => const_(ctx, Value::I64(value)),
379         Operator::F32Const { value } => const_(ctx, Value::F32(f32::from_bits(value.bits()))),
380         Operator::F64Const { value } => const_(ctx, Value::F64(f64::from_bits(value.bits()))),
381         Operator::V128Const { value } => {
382             let val = crate::init_expr::v128_to_u128(&value);
383             const_(ctx, Value::V128(val))
384         }
385         Operator::I32Eqz => unop(ctx, UnaryOp::I32Eqz),
386         Operator::I32Eq => binop(ctx, BinaryOp::I32Eq),
387         Operator::I32Ne => binop(ctx, BinaryOp::I32Ne),
388         Operator::I32LtS => binop(ctx, BinaryOp::I32LtS),
389         Operator::I32LtU => binop(ctx, BinaryOp::I32LtU),
390         Operator::I32GtS => binop(ctx, BinaryOp::I32GtS),
391         Operator::I32GtU => binop(ctx, BinaryOp::I32GtU),
392         Operator::I32LeS => binop(ctx, BinaryOp::I32LeS),
393         Operator::I32LeU => binop(ctx, BinaryOp::I32LeU),
394         Operator::I32GeS => binop(ctx, BinaryOp::I32GeS),
395         Operator::I32GeU => binop(ctx, BinaryOp::I32GeU),
396 
397         Operator::I64Eqz => unop(ctx, UnaryOp::I64Eqz),
398         Operator::I64Eq => binop(ctx, BinaryOp::I64Eq),
399         Operator::I64Ne => binop(ctx, BinaryOp::I64Ne),
400         Operator::I64LtS => binop(ctx, BinaryOp::I64LtS),
401         Operator::I64LtU => binop(ctx, BinaryOp::I64LtU),
402         Operator::I64GtS => binop(ctx, BinaryOp::I64GtS),
403         Operator::I64GtU => binop(ctx, BinaryOp::I64GtU),
404         Operator::I64LeS => binop(ctx, BinaryOp::I64LeS),
405         Operator::I64LeU => binop(ctx, BinaryOp::I64LeU),
406         Operator::I64GeS => binop(ctx, BinaryOp::I64GeS),
407         Operator::I64GeU => binop(ctx, BinaryOp::I64GeU),
408 
409         Operator::F32Eq => binop(ctx, BinaryOp::F32Eq),
410         Operator::F32Ne => binop(ctx, BinaryOp::F32Ne),
411         Operator::F32Lt => binop(ctx, BinaryOp::F32Lt),
412         Operator::F32Gt => binop(ctx, BinaryOp::F32Gt),
413         Operator::F32Le => binop(ctx, BinaryOp::F32Le),
414         Operator::F32Ge => binop(ctx, BinaryOp::F32Ge),
415 
416         Operator::F64Eq => binop(ctx, BinaryOp::F64Eq),
417         Operator::F64Ne => binop(ctx, BinaryOp::F64Ne),
418         Operator::F64Lt => binop(ctx, BinaryOp::F64Lt),
419         Operator::F64Gt => binop(ctx, BinaryOp::F64Gt),
420         Operator::F64Le => binop(ctx, BinaryOp::F64Le),
421         Operator::F64Ge => binop(ctx, BinaryOp::F64Ge),
422 
423         Operator::I32Clz => unop(ctx, UnaryOp::I32Clz),
424         Operator::I32Ctz => unop(ctx, UnaryOp::I32Ctz),
425         Operator::I32Popcnt => unop(ctx, UnaryOp::I32Popcnt),
426         Operator::I32Add => binop(ctx, BinaryOp::I32Add),
427         Operator::I32Sub => binop(ctx, BinaryOp::I32Sub),
428         Operator::I32Mul => binop(ctx, BinaryOp::I32Mul),
429         Operator::I32DivS => binop(ctx, BinaryOp::I32DivS),
430         Operator::I32DivU => binop(ctx, BinaryOp::I32DivU),
431         Operator::I32RemS => binop(ctx, BinaryOp::I32RemS),
432         Operator::I32RemU => binop(ctx, BinaryOp::I32RemU),
433         Operator::I32And => binop(ctx, BinaryOp::I32And),
434         Operator::I32Or => binop(ctx, BinaryOp::I32Or),
435         Operator::I32Xor => binop(ctx, BinaryOp::I32Xor),
436         Operator::I32Shl => binop(ctx, BinaryOp::I32Shl),
437         Operator::I32ShrS => binop(ctx, BinaryOp::I32ShrS),
438         Operator::I32ShrU => binop(ctx, BinaryOp::I32ShrU),
439         Operator::I32Rotl => binop(ctx, BinaryOp::I32Rotl),
440         Operator::I32Rotr => binop(ctx, BinaryOp::I32Rotr),
441 
442         Operator::I64Clz => unop(ctx, UnaryOp::I64Clz),
443         Operator::I64Ctz => unop(ctx, UnaryOp::I64Ctz),
444         Operator::I64Popcnt => unop(ctx, UnaryOp::I64Popcnt),
445         Operator::I64Add => binop(ctx, BinaryOp::I64Add),
446         Operator::I64Sub => binop(ctx, BinaryOp::I64Sub),
447         Operator::I64Mul => binop(ctx, BinaryOp::I64Mul),
448         Operator::I64DivS => binop(ctx, BinaryOp::I64DivS),
449         Operator::I64DivU => binop(ctx, BinaryOp::I64DivU),
450         Operator::I64RemS => binop(ctx, BinaryOp::I64RemS),
451         Operator::I64RemU => binop(ctx, BinaryOp::I64RemU),
452         Operator::I64And => binop(ctx, BinaryOp::I64And),
453         Operator::I64Or => binop(ctx, BinaryOp::I64Or),
454         Operator::I64Xor => binop(ctx, BinaryOp::I64Xor),
455         Operator::I64Shl => binop(ctx, BinaryOp::I64Shl),
456         Operator::I64ShrS => binop(ctx, BinaryOp::I64ShrS),
457         Operator::I64ShrU => binop(ctx, BinaryOp::I64ShrU),
458         Operator::I64Rotl => binop(ctx, BinaryOp::I64Rotl),
459         Operator::I64Rotr => binop(ctx, BinaryOp::I64Rotr),
460 
461         Operator::F32Abs => unop(ctx, UnaryOp::F32Abs),
462         Operator::F32Neg => unop(ctx, UnaryOp::F32Neg),
463         Operator::F32Ceil => unop(ctx, UnaryOp::F32Ceil),
464         Operator::F32Floor => unop(ctx, UnaryOp::F32Floor),
465         Operator::F32Trunc => unop(ctx, UnaryOp::F32Trunc),
466         Operator::F32Nearest => unop(ctx, UnaryOp::F32Nearest),
467         Operator::F32Sqrt => unop(ctx, UnaryOp::F32Sqrt),
468         Operator::F32Add => binop(ctx, BinaryOp::F32Add),
469         Operator::F32Sub => binop(ctx, BinaryOp::F32Sub),
470         Operator::F32Mul => binop(ctx, BinaryOp::F32Mul),
471         Operator::F32Div => binop(ctx, BinaryOp::F32Div),
472         Operator::F32Min => binop(ctx, BinaryOp::F32Min),
473         Operator::F32Max => binop(ctx, BinaryOp::F32Max),
474         Operator::F32Copysign => binop(ctx, BinaryOp::F32Copysign),
475 
476         Operator::F64Abs => unop(ctx, UnaryOp::F64Abs),
477         Operator::F64Neg => unop(ctx, UnaryOp::F64Neg),
478         Operator::F64Ceil => unop(ctx, UnaryOp::F64Ceil),
479         Operator::F64Floor => unop(ctx, UnaryOp::F64Floor),
480         Operator::F64Trunc => unop(ctx, UnaryOp::F64Trunc),
481         Operator::F64Nearest => unop(ctx, UnaryOp::F64Nearest),
482         Operator::F64Sqrt => unop(ctx, UnaryOp::F64Sqrt),
483         Operator::F64Add => binop(ctx, BinaryOp::F64Add),
484         Operator::F64Sub => binop(ctx, BinaryOp::F64Sub),
485         Operator::F64Mul => binop(ctx, BinaryOp::F64Mul),
486         Operator::F64Div => binop(ctx, BinaryOp::F64Div),
487         Operator::F64Min => binop(ctx, BinaryOp::F64Min),
488         Operator::F64Max => binop(ctx, BinaryOp::F64Max),
489         Operator::F64Copysign => binop(ctx, BinaryOp::F64Copysign),
490 
491         Operator::I32WrapI64 => unop(ctx, UnaryOp::I32WrapI64),
492         Operator::I32TruncF32S => unop(ctx, UnaryOp::I32TruncSF32),
493         Operator::I32TruncF32U => unop(ctx, UnaryOp::I32TruncUF32),
494         Operator::I32TruncF64S => unop(ctx, UnaryOp::I32TruncSF64),
495         Operator::I32TruncF64U => unop(ctx, UnaryOp::I32TruncUF64),
496 
497         Operator::I64ExtendI32S => unop(ctx, UnaryOp::I64ExtendSI32),
498         Operator::I64ExtendI32U => unop(ctx, UnaryOp::I64ExtendUI32),
499         Operator::I64TruncF32S => unop(ctx, UnaryOp::I64TruncSF32),
500         Operator::I64TruncF32U => unop(ctx, UnaryOp::I64TruncUF32),
501         Operator::I64TruncF64S => unop(ctx, UnaryOp::I64TruncSF64),
502         Operator::I64TruncF64U => unop(ctx, UnaryOp::I64TruncUF64),
503 
504         Operator::F32ConvertI32S => unop(ctx, UnaryOp::F32ConvertSI32),
505         Operator::F32ConvertI32U => unop(ctx, UnaryOp::F32ConvertUI32),
506         Operator::F32ConvertI64S => unop(ctx, UnaryOp::F32ConvertSI64),
507         Operator::F32ConvertI64U => unop(ctx, UnaryOp::F32ConvertUI64),
508         Operator::F32DemoteF64 => unop(ctx, UnaryOp::F32DemoteF64),
509 
510         Operator::F64ConvertI32S => unop(ctx, UnaryOp::F64ConvertSI32),
511         Operator::F64ConvertI32U => unop(ctx, UnaryOp::F64ConvertUI32),
512         Operator::F64ConvertI64S => unop(ctx, UnaryOp::F64ConvertSI64),
513         Operator::F64ConvertI64U => unop(ctx, UnaryOp::F64ConvertUI64),
514         Operator::F64PromoteF32 => unop(ctx, UnaryOp::F64PromoteF32),
515 
516         Operator::I32ReinterpretF32 => unop(ctx, UnaryOp::I32ReinterpretF32),
517         Operator::I64ReinterpretF64 => unop(ctx, UnaryOp::I64ReinterpretF64),
518         Operator::F32ReinterpretI32 => unop(ctx, UnaryOp::F32ReinterpretI32),
519         Operator::F64ReinterpretI64 => unop(ctx, UnaryOp::F64ReinterpretI64),
520 
521         Operator::I32Extend8S => unop(ctx, UnaryOp::I32Extend8S),
522         Operator::I32Extend16S => unop(ctx, UnaryOp::I32Extend16S),
523         Operator::I64Extend8S => unop(ctx, UnaryOp::I64Extend8S),
524         Operator::I64Extend16S => unop(ctx, UnaryOp::I64Extend16S),
525         Operator::I64Extend32S => unop(ctx, UnaryOp::I64Extend32S),
526 
527         Operator::Drop => ctx.alloc_instr(Drop {}, loc),
528         Operator::Select => ctx.alloc_instr(Select { ty: None }, loc),
529         Operator::TypedSelect { ty } => {
530             let ty = ValType::parse(&ty).unwrap();
531             ctx.alloc_instr(Select { ty: Some(ty) }, loc);
532         }
533         Operator::Return => {
534             ctx.alloc_instr(Return {}, loc);
535             ctx.unreachable();
536         }
537         Operator::Unreachable => {
538             ctx.alloc_instr(Unreachable {}, loc);
539             ctx.unreachable();
540         }
541         Operator::Block { ty } => {
542             let param_tys = block_param_tys(ctx, ty).unwrap();
543             let result_tys = block_result_tys(ctx, ty).unwrap();
544             let seq = ctx
545                 .push_control(BlockKind::Block, param_tys, result_tys)
546                 .unwrap();
547             ctx.alloc_instr_in_control(1, Block { seq }, loc).unwrap();
548         }
549         Operator::Loop { ty } => {
550             let result_tys = block_result_tys(ctx, ty).unwrap();
551             let param_tys = block_param_tys(ctx, ty).unwrap();
552             let seq = ctx
553                 .push_control(BlockKind::Loop, param_tys, result_tys)
554                 .unwrap();
555             ctx.alloc_instr_in_control(1, Loop { seq }, loc).unwrap();
556         }
557         Operator::If { ty } => {
558             let result_tys = block_result_tys(ctx, ty).unwrap();
559             let param_tys = block_param_tys(ctx, ty).unwrap();
560 
561             let consequent = ctx
562                 .push_control(BlockKind::If, param_tys, result_tys)
563                 .unwrap();
564             ctx.if_else.push(context::IfElseState {
565                 consequent,
566                 alternative: None,
567             });
568         }
569         Operator::End => {
570             let (frame, _block) = ctx.pop_control().unwrap();
571 
572             // If we just finished an if/else block then the actual
573             // instruction which produces the value will be an `IfElse` node,
574             // not the block itself. Do some postprocessing here to create
575             // such a node.
576             match frame.kind {
577                 BlockKind::If | BlockKind::Else => {
578                     let context::IfElseState {
579                         consequent,
580                         alternative,
581                     } = ctx.if_else.pop().unwrap();
582 
583                     let alternative = match alternative {
584                         Some(alt) => {
585                             debug_assert_eq!(frame.kind, BlockKind::Else);
586                             alt
587                         }
588                         None => {
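                            // There was no explicit `else`, so synthesize an
                            // empty alternative block with matching types so
                            // the `IfElse` node always has both arms.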
589                             debug_assert_eq!(frame.kind, BlockKind::If);
590                             let alternative = ctx
591                                 .push_control(
592                                     BlockKind::Else,
593                                     frame.start_types.clone(),
594                                     frame.end_types.clone(),
595                                 )
596                                 .unwrap();
597                             ctx.pop_control().unwrap();
598                             alternative
599                         }
600                     };
601 
602                     ctx.alloc_instr(
603                         IfElse {
604                             consequent,
605                             alternative,
606                         },
607                         loc,
608                     );
609                 }
610                 _ => {}
611             }
612         }
613         Operator::Else => {
614             let (frame, _consequent) = ctx.pop_control().unwrap();
615             // An `else` instruction is only valid immediately inside an if/else
616             // block which is denoted by the `IfElse` block kind.
617             match frame.kind {
618                 BlockKind::If => {}
619                 _ => panic!("`else` without a leading `if`"),
620             }
621 
            // We still need to parse the alternative branch, so allocate its
            // block here.
624             let alternative = ctx
625                 .push_control(BlockKind::Else, frame.start_types, frame.end_types)
626                 .unwrap();
627             let last = ctx.if_else.last_mut().unwrap();
628             if last.alternative.is_some() {
629                 panic!("`else` without a leading `if`")
630             }
631             last.alternative = Some(alternative);
632         }
633         Operator::Br { relative_depth } => {
634             let n = relative_depth as usize;
635             let block = ctx.control(n).unwrap().block;
636             ctx.alloc_instr(Br { block }, loc);
637             ctx.unreachable();
638         }
639         Operator::BrIf { relative_depth } => {
640             let n = relative_depth as usize;
641             let block = ctx.control(n).unwrap().block;
642             ctx.alloc_instr(BrIf { block }, loc);
643         }
644 
645         Operator::BrTable { table } => {
646             let mut blocks = Vec::with_capacity(table.len());
647             let mut default = None;
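            // `targets()` yields every label in the table, with the default
            // target flagged; split it out from the explicit branch targets.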
648             for pair in table.targets() {
649                 let (target, is_default) = pair.unwrap();
650                 let control = ctx.control(target as usize).unwrap();
651                 if is_default {
652                     default = Some(control.block);
653                 } else {
654                     blocks.push(control.block);
655                 }
656             }
657             ctx.alloc_instr(
658                 BrTable {
659                     blocks: blocks.into(),
660                     default: default.unwrap(),
661                 },
662                 loc,
663             );
664             ctx.unreachable();
665         }
666 
667         Operator::MemorySize { mem, .. } => {
668             let memory = ctx.indices.get_memory(mem).unwrap();
669             ctx.alloc_instr(MemorySize { memory }, loc);
670         }
671         Operator::MemoryGrow { mem, .. } => {
672             let memory = ctx.indices.get_memory(mem).unwrap();
673             ctx.alloc_instr(MemoryGrow { memory }, loc);
674         }
675         Operator::MemoryInit { segment, mem } => {
676             let memory = ctx.indices.get_memory(mem).unwrap();
677             let data = ctx.indices.get_data(segment).unwrap();
678             ctx.alloc_instr(MemoryInit { memory, data }, loc);
679         }
680         Operator::DataDrop { segment } => {
681             let data = ctx.indices.get_data(segment).unwrap();
682             ctx.alloc_instr(DataDrop { data }, loc);
683         }
684         Operator::MemoryCopy { src, dst } => {
685             let src = ctx.indices.get_memory(src).unwrap();
686             let dst = ctx.indices.get_memory(dst).unwrap();
687             ctx.alloc_instr(MemoryCopy { src, dst }, loc);
688         }
689         Operator::MemoryFill { mem } => {
690             let memory = ctx.indices.get_memory(mem).unwrap();
691             ctx.alloc_instr(MemoryFill { memory }, loc);
692         }
693 
694         Operator::Nop => {}
695 
696         Operator::I32Load { memarg } => load(ctx, memarg, LoadKind::I32 { atomic: false }),
697         Operator::I64Load { memarg } => load(ctx, memarg, LoadKind::I64 { atomic: false }),
698         Operator::F32Load { memarg } => load(ctx, memarg, LoadKind::F32),
699         Operator::F64Load { memarg } => load(ctx, memarg, LoadKind::F64),
700         Operator::V128Load { memarg } => load(ctx, memarg, LoadKind::V128),
701         Operator::I32Load8S { memarg } => load(ctx, memarg, LoadKind::I32_8 { kind: SignExtend }),
702         Operator::I32Load8U { memarg } => load(ctx, memarg, LoadKind::I32_8 { kind: ZeroExtend }),
703         Operator::I32Load16S { memarg } => load(ctx, memarg, LoadKind::I32_16 { kind: SignExtend }),
704         Operator::I32Load16U { memarg } => load(ctx, memarg, LoadKind::I32_16 { kind: ZeroExtend }),
705         Operator::I64Load8S { memarg } => load(ctx, memarg, LoadKind::I64_8 { kind: SignExtend }),
706         Operator::I64Load8U { memarg } => load(ctx, memarg, LoadKind::I64_8 { kind: ZeroExtend }),
707         Operator::I64Load16S { memarg } => load(ctx, memarg, LoadKind::I64_16 { kind: SignExtend }),
708         Operator::I64Load16U { memarg } => load(ctx, memarg, LoadKind::I64_16 { kind: ZeroExtend }),
709         Operator::I64Load32S { memarg } => load(ctx, memarg, LoadKind::I64_32 { kind: SignExtend }),
710         Operator::I64Load32U { memarg } => load(ctx, memarg, LoadKind::I64_32 { kind: ZeroExtend }),
711 
712         Operator::I32Store { memarg } => store(ctx, memarg, StoreKind::I32 { atomic: false }),
713         Operator::I64Store { memarg } => store(ctx, memarg, StoreKind::I64 { atomic: false }),
714         Operator::F32Store { memarg } => store(ctx, memarg, StoreKind::F32),
715         Operator::F64Store { memarg } => store(ctx, memarg, StoreKind::F64),
716         Operator::V128Store { memarg } => store(ctx, memarg, StoreKind::V128),
717         Operator::I32Store8 { memarg } => store(ctx, memarg, StoreKind::I32_8 { atomic: false }),
718         Operator::I32Store16 { memarg } => store(ctx, memarg, StoreKind::I32_16 { atomic: false }),
719         Operator::I64Store8 { memarg } => store(ctx, memarg, StoreKind::I64_8 { atomic: false }),
720         Operator::I64Store16 { memarg } => store(ctx, memarg, StoreKind::I64_16 { atomic: false }),
721         Operator::I64Store32 { memarg } => store(ctx, memarg, StoreKind::I64_32 { atomic: false }),
722 
723         Operator::AtomicFence { flags: _ } => ctx.alloc_instr(AtomicFence {}, loc),
724 
725         Operator::I32AtomicLoad { memarg } => load(ctx, memarg, LoadKind::I32 { atomic: true }),
726         Operator::I64AtomicLoad { memarg } => load(ctx, memarg, LoadKind::I64 { atomic: true }),
727         Operator::I32AtomicLoad8U { memarg } => load(
728             ctx,
729             memarg,
730             LoadKind::I32_8 {
731                 kind: ZeroExtendAtomic,
732             },
733         ),
734         Operator::I32AtomicLoad16U { memarg } => load(
735             ctx,
736             memarg,
737             LoadKind::I32_16 {
738                 kind: ZeroExtendAtomic,
739             },
740         ),
741         Operator::I64AtomicLoad8U { memarg } => load(
742             ctx,
743             memarg,
744             LoadKind::I64_8 {
745                 kind: ZeroExtendAtomic,
746             },
747         ),
748         Operator::I64AtomicLoad16U { memarg } => load(
749             ctx,
750             memarg,
751             LoadKind::I64_16 {
752                 kind: ZeroExtendAtomic,
753             },
754         ),
755         Operator::I64AtomicLoad32U { memarg } => load(
756             ctx,
757             memarg,
758             LoadKind::I64_32 {
759                 kind: ZeroExtendAtomic,
760             },
761         ),
762 
763         Operator::I32AtomicStore { memarg } => store(ctx, memarg, StoreKind::I32 { atomic: true }),
764         Operator::I64AtomicStore { memarg } => store(ctx, memarg, StoreKind::I64 { atomic: true }),
765         Operator::I32AtomicStore8 { memarg } => {
766             store(ctx, memarg, StoreKind::I32_8 { atomic: true })
767         }
768         Operator::I32AtomicStore16 { memarg } => {
769             store(ctx, memarg, StoreKind::I32_16 { atomic: true })
770         }
771         Operator::I64AtomicStore8 { memarg } => {
772             store(ctx, memarg, StoreKind::I64_8 { atomic: true })
773         }
774         Operator::I64AtomicStore16 { memarg } => {
775             store(ctx, memarg, StoreKind::I64_16 { atomic: true })
776         }
777         Operator::I64AtomicStore32 { memarg } => {
778             store(ctx, memarg, StoreKind::I64_32 { atomic: true })
779         }
780 
781         Operator::I32AtomicRmwAdd { memarg } => {
782             atomicrmw(ctx, memarg, AtomicOp::Add, AtomicWidth::I32);
783         }
784         Operator::I64AtomicRmwAdd { memarg } => {
785             atomicrmw(ctx, memarg, AtomicOp::Add, AtomicWidth::I64);
786         }
787         Operator::I32AtomicRmw8AddU { memarg } => {
788             atomicrmw(ctx, memarg, AtomicOp::Add, AtomicWidth::I32_8);
789         }
790         Operator::I32AtomicRmw16AddU { memarg } => {
791             atomicrmw(ctx, memarg, AtomicOp::Add, AtomicWidth::I32_16);
792         }
793         Operator::I64AtomicRmw8AddU { memarg } => {
794             atomicrmw(ctx, memarg, AtomicOp::Add, AtomicWidth::I64_8);
795         }
796         Operator::I64AtomicRmw16AddU { memarg } => {
797             atomicrmw(ctx, memarg, AtomicOp::Add, AtomicWidth::I64_16);
798         }
799         Operator::I64AtomicRmw32AddU { memarg } => {
800             atomicrmw(ctx, memarg, AtomicOp::Add, AtomicWidth::I64_32);
801         }
802 
803         Operator::I32AtomicRmwSub { memarg } => {
804             atomicrmw(ctx, memarg, AtomicOp::Sub, AtomicWidth::I32);
805         }
806         Operator::I64AtomicRmwSub { memarg } => {
807             atomicrmw(ctx, memarg, AtomicOp::Sub, AtomicWidth::I64);
808         }
809         Operator::I32AtomicRmw8SubU { memarg } => {
810             atomicrmw(ctx, memarg, AtomicOp::Sub, AtomicWidth::I32_8);
811         }
812         Operator::I32AtomicRmw16SubU { memarg } => {
813             atomicrmw(ctx, memarg, AtomicOp::Sub, AtomicWidth::I32_16);
814         }
815         Operator::I64AtomicRmw8SubU { memarg } => {
816             atomicrmw(ctx, memarg, AtomicOp::Sub, AtomicWidth::I64_8);
817         }
818         Operator::I64AtomicRmw16SubU { memarg } => {
819             atomicrmw(ctx, memarg, AtomicOp::Sub, AtomicWidth::I64_16);
820         }
821         Operator::I64AtomicRmw32SubU { memarg } => {
822             atomicrmw(ctx, memarg, AtomicOp::Sub, AtomicWidth::I64_32);
823         }
824 
825         Operator::I32AtomicRmwAnd { memarg } => {
826             atomicrmw(ctx, memarg, AtomicOp::And, AtomicWidth::I32);
827         }
828         Operator::I64AtomicRmwAnd { memarg } => {
829             atomicrmw(ctx, memarg, AtomicOp::And, AtomicWidth::I64);
830         }
831         Operator::I32AtomicRmw8AndU { memarg } => {
832             atomicrmw(ctx, memarg, AtomicOp::And, AtomicWidth::I32_8);
833         }
834         Operator::I32AtomicRmw16AndU { memarg } => {
835             atomicrmw(ctx, memarg, AtomicOp::And, AtomicWidth::I32_16);
836         }
837         Operator::I64AtomicRmw8AndU { memarg } => {
838             atomicrmw(ctx, memarg, AtomicOp::And, AtomicWidth::I64_8);
839         }
840         Operator::I64AtomicRmw16AndU { memarg } => {
841             atomicrmw(ctx, memarg, AtomicOp::And, AtomicWidth::I64_16);
842         }
843         Operator::I64AtomicRmw32AndU { memarg } => {
844             atomicrmw(ctx, memarg, AtomicOp::And, AtomicWidth::I64_32);
845         }
846 
847         Operator::I32AtomicRmwOr { memarg } => {
848             atomicrmw(ctx, memarg, AtomicOp::Or, AtomicWidth::I32);
849         }
850         Operator::I64AtomicRmwOr { memarg } => {
851             atomicrmw(ctx, memarg, AtomicOp::Or, AtomicWidth::I64);
852         }
853         Operator::I32AtomicRmw8OrU { memarg } => {
854             atomicrmw(ctx, memarg, AtomicOp::Or, AtomicWidth::I32_8);
855         }
856         Operator::I32AtomicRmw16OrU { memarg } => {
857             atomicrmw(ctx, memarg, AtomicOp::Or, AtomicWidth::I32_16);
858         }
859         Operator::I64AtomicRmw8OrU { memarg } => {
860             atomicrmw(ctx, memarg, AtomicOp::Or, AtomicWidth::I64_8);
861         }
862         Operator::I64AtomicRmw16OrU { memarg } => {
863             atomicrmw(ctx, memarg, AtomicOp::Or, AtomicWidth::I64_16);
864         }
865         Operator::I64AtomicRmw32OrU { memarg } => {
866             atomicrmw(ctx, memarg, AtomicOp::Or, AtomicWidth::I64_32);
867         }
868 
869         Operator::I32AtomicRmwXor { memarg } => {
870             atomicrmw(ctx, memarg, AtomicOp::Xor, AtomicWidth::I32);
871         }
872         Operator::I64AtomicRmwXor { memarg } => {
873             atomicrmw(ctx, memarg, AtomicOp::Xor, AtomicWidth::I64);
874         }
875         Operator::I32AtomicRmw8XorU { memarg } => {
876             atomicrmw(ctx, memarg, AtomicOp::Xor, AtomicWidth::I32_8);
877         }
878         Operator::I32AtomicRmw16XorU { memarg } => {
879             atomicrmw(ctx, memarg, AtomicOp::Xor, AtomicWidth::I32_16);
880         }
881         Operator::I64AtomicRmw8XorU { memarg } => {
882             atomicrmw(ctx, memarg, AtomicOp::Xor, AtomicWidth::I64_8);
883         }
884         Operator::I64AtomicRmw16XorU { memarg } => {
885             atomicrmw(ctx, memarg, AtomicOp::Xor, AtomicWidth::I64_16);
886         }
887         Operator::I64AtomicRmw32XorU { memarg } => {
888             atomicrmw(ctx, memarg, AtomicOp::Xor, AtomicWidth::I64_32);
889         }
890 
891         Operator::I32AtomicRmwXchg { memarg } => {
892             atomicrmw(ctx, memarg, AtomicOp::Xchg, AtomicWidth::I32);
893         }
894         Operator::I64AtomicRmwXchg { memarg } => {
895             atomicrmw(ctx, memarg, AtomicOp::Xchg, AtomicWidth::I64);
896         }
897         Operator::I32AtomicRmw8XchgU { memarg } => {
898             atomicrmw(ctx, memarg, AtomicOp::Xchg, AtomicWidth::I32_8);
899         }
900         Operator::I32AtomicRmw16XchgU { memarg } => {
901             atomicrmw(ctx, memarg, AtomicOp::Xchg, AtomicWidth::I32_16);
902         }
903         Operator::I64AtomicRmw8XchgU { memarg } => {
904             atomicrmw(ctx, memarg, AtomicOp::Xchg, AtomicWidth::I64_8);
905         }
906         Operator::I64AtomicRmw16XchgU { memarg } => {
907             atomicrmw(ctx, memarg, AtomicOp::Xchg, AtomicWidth::I64_16);
908         }
909         Operator::I64AtomicRmw32XchgU { memarg } => {
910             atomicrmw(ctx, memarg, AtomicOp::Xchg, AtomicWidth::I64_32);
911         }
912 
913         Operator::I32AtomicRmwCmpxchg { memarg } => {
914             cmpxchg(ctx, memarg, AtomicWidth::I32);
915         }
916         Operator::I64AtomicRmwCmpxchg { memarg } => {
917             cmpxchg(ctx, memarg, AtomicWidth::I64);
918         }
919         Operator::I32AtomicRmw8CmpxchgU { memarg } => {
920             cmpxchg(ctx, memarg, AtomicWidth::I32_8);
921         }
922         Operator::I32AtomicRmw16CmpxchgU { memarg } => {
923             cmpxchg(ctx, memarg, AtomicWidth::I32_16);
924         }
925         Operator::I64AtomicRmw8CmpxchgU { memarg } => {
926             cmpxchg(ctx, memarg, AtomicWidth::I64_8);
927         }
928         Operator::I64AtomicRmw16CmpxchgU { memarg } => {
929             cmpxchg(ctx, memarg, AtomicWidth::I64_16);
930         }
931         Operator::I64AtomicRmw32CmpxchgU { memarg } => {
932             cmpxchg(ctx, memarg, AtomicWidth::I64_32);
933         }
934         Operator::MemoryAtomicNotify { ref memarg } => {
935             let (memory, arg) = mem_arg(ctx, memarg);
936             ctx.alloc_instr(AtomicNotify { memory, arg }, loc);
937         }
938         Operator::MemoryAtomicWait32 { ref memarg }
939         | Operator::MemoryAtomicWait64 { ref memarg } => {
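            // Both wait variants share the same IR node; `sixty_four` records
            // whether this is the 64-bit flavor.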
940             let sixty_four = match inst {
941                 Operator::MemoryAtomicWait32 { .. } => false,
942                 _ => true,
943             };
944             let (memory, arg) = mem_arg(ctx, memarg);
945             ctx.alloc_instr(
946                 AtomicWait {
947                     sixty_four,
948                     memory,
949                     arg,
950                 },
951                 loc,
952             );
953         }
954 
955         Operator::TableGet { table } => {
956             let table = ctx.indices.get_table(table).unwrap();
957             ctx.alloc_instr(TableGet { table }, loc);
958         }
959         Operator::TableSet { table } => {
960             let table = ctx.indices.get_table(table).unwrap();
961             ctx.alloc_instr(TableSet { table }, loc);
962         }
963         Operator::TableGrow { table } => {
964             let table = ctx.indices.get_table(table).unwrap();
965             ctx.alloc_instr(TableGrow { table }, loc);
966         }
967         Operator::TableSize { table } => {
968             let table = ctx.indices.get_table(table).unwrap();
969             ctx.alloc_instr(TableSize { table }, loc);
970         }
971         Operator::TableFill { table } => {
972             let table = ctx.indices.get_table(table).unwrap();
973             ctx.alloc_instr(TableFill { table }, loc);
974         }
975         Operator::RefNull { ty } => {
976             let ty = ValType::parse(&ty).unwrap();
977             ctx.alloc_instr(RefNull { ty }, loc);
978         }
979         Operator::RefIsNull => {
980             ctx.alloc_instr(RefIsNull {}, loc);
981         }
982         Operator::RefFunc { function_index } => {
983             let func = ctx.indices.get_func(function_index).unwrap();
984             ctx.alloc_instr(RefFunc { func }, loc);
985         }
986 
987         Operator::I8x16Swizzle => {
988             ctx.alloc_instr(I8x16Swizzle {}, loc);
989         }
990 
991         Operator::I8x16Shuffle { lanes } => {
992             ctx.alloc_instr(I8x16Shuffle { indices: lanes }, loc);
993         }
994 
995         Operator::I8x16Splat => unop(ctx, UnaryOp::I8x16Splat),
996         Operator::I8x16ExtractLaneS { lane: idx } => unop(ctx, UnaryOp::I8x16ExtractLaneS { idx }),
997         Operator::I8x16ExtractLaneU { lane: idx } => unop(ctx, UnaryOp::I8x16ExtractLaneU { idx }),
998         Operator::I8x16ReplaceLane { lane: idx } => binop(ctx, BinaryOp::I8x16ReplaceLane { idx }),
999         Operator::I16x8Splat => unop(ctx, UnaryOp::I16x8Splat),
1000         Operator::I16x8ExtractLaneS { lane: idx } => unop(ctx, UnaryOp::I16x8ExtractLaneS { idx }),
1001         Operator::I16x8ExtractLaneU { lane: idx } => unop(ctx, UnaryOp::I16x8ExtractLaneU { idx }),
1002         Operator::I16x8ReplaceLane { lane: idx } => binop(ctx, BinaryOp::I16x8ReplaceLane { idx }),
1003         Operator::I32x4Splat => unop(ctx, UnaryOp::I32x4Splat),
1004         Operator::I32x4ExtractLane { lane: idx } => unop(ctx, UnaryOp::I32x4ExtractLane { idx }),
1005         Operator::I32x4ReplaceLane { lane: idx } => binop(ctx, BinaryOp::I32x4ReplaceLane { idx }),
1006         Operator::I64x2Splat => unop(ctx, UnaryOp::I64x2Splat),
1007         Operator::I64x2ExtractLane { lane: idx } => unop(ctx, UnaryOp::I64x2ExtractLane { idx }),
1008         Operator::I64x2ReplaceLane { lane: idx } => binop(ctx, BinaryOp::I64x2ReplaceLane { idx }),
1009         Operator::F32x4Splat => unop(ctx, UnaryOp::F32x4Splat),
1010         Operator::F32x4ExtractLane { lane: idx } => unop(ctx, UnaryOp::F32x4ExtractLane { idx }),
1011         Operator::F32x4ReplaceLane { lane: idx } => binop(ctx, BinaryOp::F32x4ReplaceLane { idx }),
1012         Operator::F64x2Splat => unop(ctx, UnaryOp::F64x2Splat),
1013         Operator::F64x2ExtractLane { lane: idx } => unop(ctx, UnaryOp::F64x2ExtractLane { idx }),
1014         Operator::F64x2ReplaceLane { lane: idx } => binop(ctx, BinaryOp::F64x2ReplaceLane { idx }),
1015 
1016         Operator::I8x16Eq => binop(ctx, BinaryOp::I8x16Eq),
1017         Operator::I8x16Ne => binop(ctx, BinaryOp::I8x16Ne),
1018         Operator::I8x16LtS => binop(ctx, BinaryOp::I8x16LtS),
1019         Operator::I8x16LtU => binop(ctx, BinaryOp::I8x16LtU),
1020         Operator::I8x16GtS => binop(ctx, BinaryOp::I8x16GtS),
1021         Operator::I8x16GtU => binop(ctx, BinaryOp::I8x16GtU),
1022         Operator::I8x16LeS => binop(ctx, BinaryOp::I8x16LeS),
1023         Operator::I8x16LeU => binop(ctx, BinaryOp::I8x16LeU),
1024         Operator::I8x16GeS => binop(ctx, BinaryOp::I8x16GeS),
1025         Operator::I8x16GeU => binop(ctx, BinaryOp::I8x16GeU),
1026         Operator::I16x8Eq => binop(ctx, BinaryOp::I16x8Eq),
1027         Operator::I16x8Ne => binop(ctx, BinaryOp::I16x8Ne),
1028         Operator::I16x8LtS => binop(ctx, BinaryOp::I16x8LtS),
1029         Operator::I16x8LtU => binop(ctx, BinaryOp::I16x8LtU),
1030         Operator::I16x8GtS => binop(ctx, BinaryOp::I16x8GtS),
1031         Operator::I16x8GtU => binop(ctx, BinaryOp::I16x8GtU),
1032         Operator::I16x8LeS => binop(ctx, BinaryOp::I16x8LeS),
1033         Operator::I16x8LeU => binop(ctx, BinaryOp::I16x8LeU),
1034         Operator::I16x8GeS => binop(ctx, BinaryOp::I16x8GeS),
1035         Operator::I16x8GeU => binop(ctx, BinaryOp::I16x8GeU),
1036         Operator::I32x4Eq => binop(ctx, BinaryOp::I32x4Eq),
1037         Operator::I32x4Ne => binop(ctx, BinaryOp::I32x4Ne),
1038         Operator::I32x4LtS => binop(ctx, BinaryOp::I32x4LtS),
1039         Operator::I32x4LtU => binop(ctx, BinaryOp::I32x4LtU),
1040         Operator::I32x4GtS => binop(ctx, BinaryOp::I32x4GtS),
1041         Operator::I32x4GtU => binop(ctx, BinaryOp::I32x4GtU),
1042         Operator::I32x4LeS => binop(ctx, BinaryOp::I32x4LeS),
1043         Operator::I32x4LeU => binop(ctx, BinaryOp::I32x4LeU),
1044         Operator::I32x4GeS => binop(ctx, BinaryOp::I32x4GeS),
1045         Operator::I32x4GeU => binop(ctx, BinaryOp::I32x4GeU),
1046         Operator::I64x2Eq => binop(ctx, BinaryOp::I64x2Eq),
1047         Operator::I64x2Ne => binop(ctx, BinaryOp::I64x2Ne),
1048         Operator::I64x2LtS => binop(ctx, BinaryOp::I64x2LtS),
1049         Operator::I64x2GtS => binop(ctx, BinaryOp::I64x2GtS),
1050         Operator::I64x2LeS => binop(ctx, BinaryOp::I64x2LeS),
1051         Operator::I64x2GeS => binop(ctx, BinaryOp::I64x2GeS),
1052         Operator::F32x4Eq => binop(ctx, BinaryOp::F32x4Eq),
1053         Operator::F32x4Ne => binop(ctx, BinaryOp::F32x4Ne),
1054         Operator::F32x4Lt => binop(ctx, BinaryOp::F32x4Lt),
1055         Operator::F32x4Gt => binop(ctx, BinaryOp::F32x4Gt),
1056         Operator::F32x4Le => binop(ctx, BinaryOp::F32x4Le),
1057         Operator::F32x4Ge => binop(ctx, BinaryOp::F32x4Ge),
1058         Operator::F64x2Eq => binop(ctx, BinaryOp::F64x2Eq),
1059         Operator::F64x2Ne => binop(ctx, BinaryOp::F64x2Ne),
1060         Operator::F64x2Lt => binop(ctx, BinaryOp::F64x2Lt),
1061         Operator::F64x2Gt => binop(ctx, BinaryOp::F64x2Gt),
1062         Operator::F64x2Le => binop(ctx, BinaryOp::F64x2Le),
1063         Operator::F64x2Ge => binop(ctx, BinaryOp::F64x2Ge),
1064 
1065         Operator::V128Not => unop(ctx, UnaryOp::V128Not),
1066         Operator::V128AnyTrue => unop(ctx, UnaryOp::V128AnyTrue),
1067         Operator::V128And => binop(ctx, BinaryOp::V128And),
1068         Operator::V128AndNot => binop(ctx, BinaryOp::V128AndNot),
1069         Operator::V128Or => binop(ctx, BinaryOp::V128Or),
1070         Operator::V128Xor => binop(ctx, BinaryOp::V128Xor),
1071 
1072         Operator::V128Bitselect => ctx.alloc_instr(V128Bitselect {}, loc),
1073 
1074         Operator::I8x16Abs => unop(ctx, UnaryOp::I8x16Abs),
1075         Operator::I8x16Popcnt => unop(ctx, UnaryOp::I8x16Popcnt),
1076         Operator::I8x16Neg => unop(ctx, UnaryOp::I8x16Neg),
1077         Operator::I8x16AllTrue => unop(ctx, UnaryOp::I8x16AllTrue),
1078         Operator::I8x16Shl => binop(ctx, BinaryOp::I8x16Shl),
1079         Operator::I8x16ShrS => binop(ctx, BinaryOp::I8x16ShrS),
1080         Operator::I8x16ShrU => binop(ctx, BinaryOp::I8x16ShrU),
1081         Operator::I8x16Add => binop(ctx, BinaryOp::I8x16Add),
1082         Operator::I8x16AddSatS => binop(ctx, BinaryOp::I8x16AddSatS),
1083         Operator::I8x16AddSatU => binop(ctx, BinaryOp::I8x16AddSatU),
1084         Operator::I8x16Sub => binop(ctx, BinaryOp::I8x16Sub),
1085         Operator::I8x16SubSatS => binop(ctx, BinaryOp::I8x16SubSatS),
1086         Operator::I8x16SubSatU => binop(ctx, BinaryOp::I8x16SubSatU),
1087 
1088         Operator::I16x8Abs => unop(ctx, UnaryOp::I16x8Abs),
1089         Operator::I16x8Neg => unop(ctx, UnaryOp::I16x8Neg),
1090         Operator::I16x8AllTrue => unop(ctx, UnaryOp::I16x8AllTrue),
1091         Operator::I16x8Shl => binop(ctx, BinaryOp::I16x8Shl),
1092         Operator::I16x8ShrS => binop(ctx, BinaryOp::I16x8ShrS),
1093         Operator::I16x8ShrU => binop(ctx, BinaryOp::I16x8ShrU),
1094         Operator::I16x8Add => binop(ctx, BinaryOp::I16x8Add),
1095         Operator::I16x8AddSatS => binop(ctx, BinaryOp::I16x8AddSatS),
1096         Operator::I16x8AddSatU => binop(ctx, BinaryOp::I16x8AddSatU),
1097         Operator::I16x8Sub => binop(ctx, BinaryOp::I16x8Sub),
1098         Operator::I16x8SubSatS => binop(ctx, BinaryOp::I16x8SubSatS),
1099         Operator::I16x8SubSatU => binop(ctx, BinaryOp::I16x8SubSatU),
1100         Operator::I16x8Mul => binop(ctx, BinaryOp::I16x8Mul),
1101 
1102         Operator::I32x4Abs => unop(ctx, UnaryOp::I32x4Abs),
1103         Operator::I32x4Neg => unop(ctx, UnaryOp::I32x4Neg),
1104         Operator::I32x4AllTrue => unop(ctx, UnaryOp::I32x4AllTrue),
1105         Operator::I32x4Shl => binop(ctx, BinaryOp::I32x4Shl),
1106         Operator::I32x4ShrS => binop(ctx, BinaryOp::I32x4ShrS),
1107         Operator::I32x4ShrU => binop(ctx, BinaryOp::I32x4ShrU),
1108         Operator::I32x4Add => binop(ctx, BinaryOp::I32x4Add),
1109         Operator::I32x4Sub => binop(ctx, BinaryOp::I32x4Sub),
1110         Operator::I32x4Mul => binop(ctx, BinaryOp::I32x4Mul),
1111 
1112         Operator::I64x2Abs => unop(ctx, UnaryOp::I64x2Abs),
1113         Operator::I64x2AllTrue => unop(ctx, UnaryOp::I64x2AllTrue),
1114         Operator::I64x2Neg => unop(ctx, UnaryOp::I64x2Neg),
1115         Operator::I64x2Shl => binop(ctx, BinaryOp::I64x2Shl),
1116         Operator::I64x2ShrS => binop(ctx, BinaryOp::I64x2ShrS),
1117         Operator::I64x2ShrU => binop(ctx, BinaryOp::I64x2ShrU),
1118         Operator::I64x2Add => binop(ctx, BinaryOp::I64x2Add),
1119         Operator::I64x2Sub => binop(ctx, BinaryOp::I64x2Sub),
1120         Operator::I64x2Mul => binop(ctx, BinaryOp::I64x2Mul),
1121 
1122         Operator::F32x4Abs => unop(ctx, UnaryOp::F32x4Abs),
1123         Operator::F32x4Neg => unop(ctx, UnaryOp::F32x4Neg),
1124         Operator::F32x4Sqrt => unop(ctx, UnaryOp::F32x4Sqrt),
1125         Operator::F32x4Add => binop(ctx, BinaryOp::F32x4Add),
1126         Operator::F32x4Sub => binop(ctx, BinaryOp::F32x4Sub),
1127         Operator::F32x4Mul => binop(ctx, BinaryOp::F32x4Mul),
1128         Operator::F32x4Div => binop(ctx, BinaryOp::F32x4Div),
1129         Operator::F32x4Min => binop(ctx, BinaryOp::F32x4Min),
1130         Operator::F32x4Max => binop(ctx, BinaryOp::F32x4Max),
1131         Operator::F32x4Ceil => unop(ctx, UnaryOp::F32x4Ceil),
1132         Operator::F32x4Floor => unop(ctx, UnaryOp::F32x4Floor),
1133         Operator::F32x4Trunc => unop(ctx, UnaryOp::F32x4Trunc),
1134         Operator::F32x4Nearest => unop(ctx, UnaryOp::F32x4Nearest),
1135         Operator::F32x4PMin => binop(ctx, BinaryOp::F32x4PMin),
1136         Operator::F32x4PMax => binop(ctx, BinaryOp::F32x4PMax),
1137 
1138         Operator::I16x8ExtAddPairwiseI8x16S => unop(ctx, UnaryOp::I16x8ExtAddPairwiseI8x16S),
1139         Operator::I16x8ExtAddPairwiseI8x16U => unop(ctx, UnaryOp::I16x8ExtAddPairwiseI8x16U),
1140         Operator::I32x4ExtAddPairwiseI16x8S => unop(ctx, UnaryOp::I32x4ExtAddPairwiseI16x8S),
1141         Operator::I32x4ExtAddPairwiseI16x8U => unop(ctx, UnaryOp::I32x4ExtAddPairwiseI16x8U),
1142         Operator::I64x2ExtendLowI32x4S => unop(ctx, UnaryOp::I64x2ExtendLowI32x4S),
1143         Operator::I64x2ExtendHighI32x4S => unop(ctx, UnaryOp::I64x2ExtendHighI32x4S),
1144         Operator::I64x2ExtendLowI32x4U => unop(ctx, UnaryOp::I64x2ExtendLowI32x4U),
1145         Operator::I64x2ExtendHighI32x4U => unop(ctx, UnaryOp::I64x2ExtendHighI32x4U),
1146         Operator::I32x4TruncSatF64x2SZero => unop(ctx, UnaryOp::I32x4TruncSatF64x2SZero),
1147         Operator::I32x4TruncSatF64x2UZero => unop(ctx, UnaryOp::I32x4TruncSatF64x2UZero),
1148         Operator::F64x2ConvertLowI32x4S => unop(ctx, UnaryOp::F64x2ConvertLowI32x4S),
1149         Operator::F64x2ConvertLowI32x4U => unop(ctx, UnaryOp::F64x2ConvertLowI32x4U),
1150         Operator::F32x4DemoteF64x2Zero => unop(ctx, UnaryOp::F32x4DemoteF64x2Zero),
1151         Operator::F64x2PromoteLowF32x4 => unop(ctx, UnaryOp::F64x2PromoteLowF32x4),
1152 
        Operator::I16x8Q15MulrSatS => binop(ctx, BinaryOp::I16x8Q15MulrSatS),
        Operator::I16x8ExtMulLowI8x16S => binop(ctx, BinaryOp::I16x8ExtMulLowI8x16S),
        Operator::I16x8ExtMulHighI8x16S => binop(ctx, BinaryOp::I16x8ExtMulHighI8x16S),
        Operator::I16x8ExtMulLowI8x16U => binop(ctx, BinaryOp::I16x8ExtMulLowI8x16U),
        Operator::I16x8ExtMulHighI8x16U => binop(ctx, BinaryOp::I16x8ExtMulHighI8x16U),
        Operator::I32x4ExtMulLowI16x8S => binop(ctx, BinaryOp::I32x4ExtMulLowI16x8S),
        Operator::I32x4ExtMulHighI16x8S => binop(ctx, BinaryOp::I32x4ExtMulHighI16x8S),
        Operator::I32x4ExtMulLowI16x8U => binop(ctx, BinaryOp::I32x4ExtMulLowI16x8U),
        Operator::I32x4ExtMulHighI16x8U => binop(ctx, BinaryOp::I32x4ExtMulHighI16x8U),
        Operator::I64x2ExtMulLowI32x4S => binop(ctx, BinaryOp::I64x2ExtMulLowI32x4S),
        Operator::I64x2ExtMulHighI32x4S => binop(ctx, BinaryOp::I64x2ExtMulHighI32x4S),
        Operator::I64x2ExtMulLowI32x4U => binop(ctx, BinaryOp::I64x2ExtMulLowI32x4U),
        Operator::I64x2ExtMulHighI32x4U => binop(ctx, BinaryOp::I64x2ExtMulHighI32x4U),

        Operator::F64x2Abs => unop(ctx, UnaryOp::F64x2Abs),
        Operator::F64x2Neg => unop(ctx, UnaryOp::F64x2Neg),
        Operator::F64x2Sqrt => unop(ctx, UnaryOp::F64x2Sqrt),
        Operator::F64x2Add => binop(ctx, BinaryOp::F64x2Add),
        Operator::F64x2Sub => binop(ctx, BinaryOp::F64x2Sub),
        Operator::F64x2Mul => binop(ctx, BinaryOp::F64x2Mul),
        Operator::F64x2Div => binop(ctx, BinaryOp::F64x2Div),
        Operator::F64x2Min => binop(ctx, BinaryOp::F64x2Min),
        Operator::F64x2Max => binop(ctx, BinaryOp::F64x2Max),
        Operator::F64x2Ceil => unop(ctx, UnaryOp::F64x2Ceil),
        Operator::F64x2Floor => unop(ctx, UnaryOp::F64x2Floor),
        Operator::F64x2Trunc => unop(ctx, UnaryOp::F64x2Trunc),
        Operator::F64x2Nearest => unop(ctx, UnaryOp::F64x2Nearest),
        Operator::F64x2PMin => binop(ctx, BinaryOp::F64x2PMin),
        Operator::F64x2PMax => binop(ctx, BinaryOp::F64x2PMax),

        Operator::I32x4TruncSatF32x4S => unop(ctx, UnaryOp::I32x4TruncSatF32x4S),
        Operator::I32x4TruncSatF32x4U => unop(ctx, UnaryOp::I32x4TruncSatF32x4U),
        Operator::F32x4ConvertI32x4S => unop(ctx, UnaryOp::F32x4ConvertI32x4S),
        Operator::F32x4ConvertI32x4U => unop(ctx, UnaryOp::F32x4ConvertI32x4U),

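        // Scalar saturating (nontrapping) float-to-int truncations. Note that the
        // `UnaryOp` names place the sign before `Sat`, e.g. wasmparser's
        // `I32TruncSatF32S` maps to `I32TruncSSatF32`.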
        Operator::I32TruncSatF32S => unop(ctx, UnaryOp::I32TruncSSatF32),
        Operator::I32TruncSatF32U => unop(ctx, UnaryOp::I32TruncUSatF32),
        Operator::I32TruncSatF64S => unop(ctx, UnaryOp::I32TruncSSatF64),
        Operator::I32TruncSatF64U => unop(ctx, UnaryOp::I32TruncUSatF64),
        Operator::I64TruncSatF32S => unop(ctx, UnaryOp::I64TruncSSatF32),
        Operator::I64TruncSatF32U => unop(ctx, UnaryOp::I64TruncUSatF32),
        Operator::I64TruncSatF64S => unop(ctx, UnaryOp::I64TruncSSatF64),
        Operator::I64TruncSatF64U => unop(ctx, UnaryOp::I64TruncUSatF64),

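        // SIMD loads that either splat the loaded value across every lane, or
        // zero-extend a 32/64-bit value into the low lanes of a v128.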
        Operator::V128Load8Splat { memarg } => load_simd(ctx, memarg, LoadSimdKind::Splat8),
        Operator::V128Load16Splat { memarg } => load_simd(ctx, memarg, LoadSimdKind::Splat16),
        Operator::V128Load32Splat { memarg } => load_simd(ctx, memarg, LoadSimdKind::Splat32),
        Operator::V128Load64Splat { memarg } => load_simd(ctx, memarg, LoadSimdKind::Splat64),
        Operator::V128Load32Zero { memarg } => load_simd(ctx, memarg, LoadSimdKind::V128Load32Zero),
        Operator::V128Load64Zero { memarg } => load_simd(ctx, memarg, LoadSimdKind::V128Load64Zero),

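        // Per-lane loads and stores. The store variants are deliberately routed
        // through `load_simd` as well; they are distinguished only by their
        // `LoadSimdKind` variant.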
        Operator::V128Load8Lane { memarg, lane } => {
            load_simd(ctx, memarg, LoadSimdKind::V128Load8Lane(lane))
        }
        Operator::V128Load16Lane { memarg, lane } => {
            load_simd(ctx, memarg, LoadSimdKind::V128Load16Lane(lane))
        }
        Operator::V128Load32Lane { memarg, lane } => {
            load_simd(ctx, memarg, LoadSimdKind::V128Load32Lane(lane))
        }
        Operator::V128Load64Lane { memarg, lane } => {
            load_simd(ctx, memarg, LoadSimdKind::V128Load64Lane(lane))
        }
        Operator::V128Store8Lane { memarg, lane } => {
            load_simd(ctx, memarg, LoadSimdKind::V128Store8Lane(lane))
        }
        Operator::V128Store16Lane { memarg, lane } => {
            load_simd(ctx, memarg, LoadSimdKind::V128Store16Lane(lane))
        }
        Operator::V128Store32Lane { memarg, lane } => {
            load_simd(ctx, memarg, LoadSimdKind::V128Store32Lane(lane))
        }
        Operator::V128Store64Lane { memarg, lane } => {
            load_simd(ctx, memarg, LoadSimdKind::V128Store64Lane(lane))
        }
        Operator::I8x16NarrowI16x8S => binop(ctx, BinaryOp::I8x16NarrowI16x8S),
        Operator::I8x16NarrowI16x8U => binop(ctx, BinaryOp::I8x16NarrowI16x8U),
        Operator::I16x8NarrowI32x4S => binop(ctx, BinaryOp::I16x8NarrowI32x4S),
        Operator::I16x8NarrowI32x4U => binop(ctx, BinaryOp::I16x8NarrowI32x4U),
        Operator::I16x8ExtendLowI8x16S => unop(ctx, UnaryOp::I16x8WidenLowI8x16S),
        Operator::I16x8ExtendLowI8x16U => unop(ctx, UnaryOp::I16x8WidenLowI8x16U),
        Operator::I16x8ExtendHighI8x16S => unop(ctx, UnaryOp::I16x8WidenHighI8x16S),
        Operator::I16x8ExtendHighI8x16U => unop(ctx, UnaryOp::I16x8WidenHighI8x16U),
        Operator::I32x4ExtendLowI16x8S => unop(ctx, UnaryOp::I32x4WidenLowI16x8S),
        Operator::I32x4ExtendLowI16x8U => unop(ctx, UnaryOp::I32x4WidenLowI16x8U),
        Operator::I32x4ExtendHighI16x8S => unop(ctx, UnaryOp::I32x4WidenHighI16x8S),
        Operator::I32x4ExtendHighI16x8U => unop(ctx, UnaryOp::I32x4WidenHighI16x8U),
        Operator::V128Load8x8S { memarg } => load_simd(ctx, memarg, LoadSimdKind::V128Load8x8S),
        Operator::V128Load8x8U { memarg } => load_simd(ctx, memarg, LoadSimdKind::V128Load8x8U),
        Operator::V128Load16x4S { memarg } => load_simd(ctx, memarg, LoadSimdKind::V128Load16x4S),
        Operator::V128Load16x4U { memarg } => load_simd(ctx, memarg, LoadSimdKind::V128Load16x4U),
        Operator::V128Load32x2S { memarg } => load_simd(ctx, memarg, LoadSimdKind::V128Load32x2S),
        Operator::V128Load32x2U { memarg } => load_simd(ctx, memarg, LoadSimdKind::V128Load32x2U),
        Operator::I8x16RoundingAverageU => binop(ctx, BinaryOp::I8x16RoundingAverageU),
        Operator::I16x8RoundingAverageU => binop(ctx, BinaryOp::I16x8RoundingAverageU),

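        // Lane-wise signed/unsigned integer minimum and maximum.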
        Operator::I8x16MinS => binop(ctx, BinaryOp::I8x16MinS),
        Operator::I8x16MinU => binop(ctx, BinaryOp::I8x16MinU),
        Operator::I8x16MaxS => binop(ctx, BinaryOp::I8x16MaxS),
        Operator::I8x16MaxU => binop(ctx, BinaryOp::I8x16MaxU),
        Operator::I16x8MinS => binop(ctx, BinaryOp::I16x8MinS),
        Operator::I16x8MinU => binop(ctx, BinaryOp::I16x8MinU),
        Operator::I16x8MaxS => binop(ctx, BinaryOp::I16x8MaxS),
        Operator::I16x8MaxU => binop(ctx, BinaryOp::I16x8MaxU),
        Operator::I32x4MinS => binop(ctx, BinaryOp::I32x4MinS),
        Operator::I32x4MinU => binop(ctx, BinaryOp::I32x4MinU),
        Operator::I32x4MaxS => binop(ctx, BinaryOp::I32x4MaxS),
        Operator::I32x4MaxU => binop(ctx, BinaryOp::I32x4MaxU),

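        // Collect the high bit of each lane into a scalar bitmask.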
        Operator::I8x16Bitmask => unop(ctx, UnaryOp::I8x16Bitmask),
        Operator::I16x8Bitmask => unop(ctx, UnaryOp::I16x8Bitmask),
        Operator::I32x4Bitmask => unop(ctx, UnaryOp::I32x4Bitmask),
        Operator::I64x2Bitmask => unop(ctx, UnaryOp::I64x2Bitmask),

        Operator::I32x4DotI16x8S => binop(ctx, BinaryOp::I32x4DotI16x8S),

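        // Bulk table operations. The raw wasm indices carried by the operator are
        // resolved to their corresponding IDs through the parse-time index map
        // before the instruction is allocated into the builder.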
        Operator::TableCopy {
            src_table,
            dst_table,
        } => {
            let src = ctx.indices.get_table(src_table).unwrap();
            let dst = ctx.indices.get_table(dst_table).unwrap();
            ctx.alloc_instr(TableCopy { src, dst }, loc);
        }

        Operator::TableInit { segment, table } => {
            let elem = ctx.indices.get_element(segment).unwrap();
            let table = ctx.indices.get_table(table).unwrap();
            ctx.alloc_instr(TableInit { elem, table }, loc);
        }

        Operator::ElemDrop { segment } => {
            let elem = ctx.indices.get_element(segment).unwrap();
            ctx.alloc_instr(ElemDrop { elem }, loc);
        }

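        // Tail calls and the exception-handling operators are not modeled in the
        // IR yet, so encountering any of them aborts parsing with a panic.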
        Operator::ReturnCall { .. }
        | Operator::ReturnCallIndirect { .. }
        | Operator::Try { ty: _ }
        | Operator::Catch { index: _ }
        | Operator::Throw { index: _ }
        | Operator::Rethrow { relative_depth: _ }
        | Operator::Unwind
        | Operator::Delegate { relative_depth: _ }
        | Operator::CatchAll => {
            unimplemented!("not supported")
        }
    }
}