winch_codegen/
visitor.rs

//! This module is the central place for machine code emission.
//! It defines an implementation of wasmparser's `Visitor` trait
//! for `CodeGen`, with one visitor method per op-code, each of
//! which validates and dispatches to the corresponding
//! machine code emitter.
6
7use crate::abi::RetArea;
8use crate::codegen::{
9    control_index, AtomicWaitKind, Callee, CodeGen, CodeGenError, ControlStackFrame, Emission,
10    FnCall,
11};
12use crate::masm::{
13    DivKind, Extend, ExtractLaneKind, FloatCmpKind, IntCmpKind, LoadKind, MacroAssembler,
14    MemMoveDirection, MulWideKind, OperandSize, RegImm, RemKind, ReplaceLaneKind, RmwOp,
15    RoundingMode, SPOffset, ShiftKind, Signed, SplatKind, SplatLoadKind, StoreKind, TruncKind,
16    V128AbsKind, V128AddKind, V128ConvertKind, V128ExtAddKind, V128ExtMulKind, V128ExtendKind,
17    V128LoadExtendKind, V128MaxKind, V128MinKind, V128MulKind, V128NarrowKind, V128NegKind,
18    V128SubKind, V128TruncKind, VectorCompareKind, VectorEqualityKind, Zero,
19};
20
21use crate::reg::{writable, Reg};
22use crate::stack::{TypedReg, Val};
23use anyhow::{anyhow, bail, ensure, Result};
24use regalloc2::RegClass;
25use smallvec::{smallvec, SmallVec};
26use wasmparser::{
27    BlockType, BrTable, Ieee32, Ieee64, MemArg, VisitOperator, VisitSimdOperator, V128,
28};
29use wasmtime_cranelift::TRAP_INDIRECT_CALL_TO_NULL;
30use wasmtime_environ::{
31    FuncIndex, GlobalIndex, MemoryIndex, TableIndex, TypeIndex, WasmHeapType, WasmValType,
32    FUNCREF_INIT_BIT,
33};
34
/// A macro to define unsupported WebAssembly operators.
///
/// This macro calls itself recursively:
/// 1. It expands to nothing when matching a supported operator.
/// 2. It defines a visitor function that returns an "unimplemented
///    wasm instruction" error when matching an unsupported operator.
// Entry rule: invoked with wasmparser's full operator list. For each
// operator it re-invokes itself with the `emit` selector so that one of
// the per-operator arms below decides whether a default visitor stub is
// generated or nothing at all.
macro_rules! def_unsupported {
    ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident $ann:tt)*) => {
        $(
            def_unsupported!(
                emit
                    $op

                // Default visitor stub: discards every argument (to avoid
                // unused-variable warnings) and returns an "unimplemented
                // wasm instruction" error.
                fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
                    $($(let _ = $arg;)*)?

                    Err(anyhow!(CodeGenError::unimplemented_wasm_instruction()))
                }
            );
        )*
    };

    // Operators listed below have a hand-written visitor implementation,
    // so their arms expand to nothing and no default stub is emitted.
    (emit I32Const $($rest:tt)*) => {};
    (emit I64Const $($rest:tt)*) => {};
    (emit F32Const $($rest:tt)*) => {};
    (emit F64Const $($rest:tt)*) => {};
    (emit V128Const $($rest:tt)*) => {};
    (emit F32Add $($rest:tt)*) => {};
    (emit F64Add $($rest:tt)*) => {};
    (emit F32Sub $($rest:tt)*) => {};
    (emit F64Sub $($rest:tt)*) => {};
    (emit F32Mul $($rest:tt)*) => {};
    (emit F64Mul $($rest:tt)*) => {};
    (emit F32Div $($rest:tt)*) => {};
    (emit F64Div $($rest:tt)*) => {};
    (emit F32Min $($rest:tt)*) => {};
    (emit F64Min $($rest:tt)*) => {};
    (emit F32Max $($rest:tt)*) => {};
    (emit F64Max $($rest:tt)*) => {};
    (emit F32Copysign $($rest:tt)*) => {};
    (emit F64Copysign $($rest:tt)*) => {};
    (emit F32Abs $($rest:tt)*) => {};
    (emit F64Abs $($rest:tt)*) => {};
    (emit F32Neg $($rest:tt)*) => {};
    (emit F64Neg $($rest:tt)*) => {};
    (emit F32Floor $($rest:tt)*) => {};
    (emit F64Floor $($rest:tt)*) => {};
    (emit F32Ceil $($rest:tt)*) => {};
    (emit F64Ceil $($rest:tt)*) => {};
    (emit F32Nearest $($rest:tt)*) => {};
    (emit F64Nearest $($rest:tt)*) => {};
    (emit F32Trunc $($rest:tt)*) => {};
    (emit F64Trunc $($rest:tt)*) => {};
    (emit F32Sqrt $($rest:tt)*) => {};
    (emit F64Sqrt $($rest:tt)*) => {};
    (emit F32Eq $($rest:tt)*) => {};
    (emit F64Eq $($rest:tt)*) => {};
    (emit F32Ne $($rest:tt)*) => {};
    (emit F64Ne $($rest:tt)*) => {};
    (emit F32Lt $($rest:tt)*) => {};
    (emit F64Lt $($rest:tt)*) => {};
    (emit F32Gt $($rest:tt)*) => {};
    (emit F64Gt $($rest:tt)*) => {};
    (emit F32Le $($rest:tt)*) => {};
    (emit F64Le $($rest:tt)*) => {};
    (emit F32Ge $($rest:tt)*) => {};
    (emit F64Ge $($rest:tt)*) => {};
    (emit F32ConvertI32S $($rest:tt)*) => {};
    (emit F32ConvertI32U $($rest:tt)*) => {};
    (emit F32ConvertI64S $($rest:tt)*) => {};
    (emit F32ConvertI64U $($rest:tt)*) => {};
    (emit F64ConvertI32S $($rest:tt)*) => {};
    (emit F64ConvertI32U $($rest:tt)*) => {};
    (emit F64ConvertI64S $($rest:tt)*) => {};
    (emit F64ConvertI64U $($rest:tt)*) => {};
    (emit F32ReinterpretI32 $($rest:tt)*) => {};
    (emit F64ReinterpretI64 $($rest:tt)*) => {};
    (emit F32DemoteF64 $($rest:tt)*) => {};
    (emit F64PromoteF32 $($rest:tt)*) => {};
    (emit I32Add $($rest:tt)*) => {};
    (emit I64Add $($rest:tt)*) => {};
    (emit I32Sub $($rest:tt)*) => {};
    (emit I32Mul $($rest:tt)*) => {};
    (emit I32DivS $($rest:tt)*) => {};
    (emit I32DivU $($rest:tt)*) => {};
    (emit I64DivS $($rest:tt)*) => {};
    (emit I64DivU $($rest:tt)*) => {};
    (emit I64RemU $($rest:tt)*) => {};
    (emit I64RemS $($rest:tt)*) => {};
    (emit I32RemU $($rest:tt)*) => {};
    (emit I32RemS $($rest:tt)*) => {};
    (emit I64Mul $($rest:tt)*) => {};
    (emit I64Sub $($rest:tt)*) => {};
    (emit I32Eq $($rest:tt)*) => {};
    (emit I64Eq $($rest:tt)*) => {};
    (emit I32Ne $($rest:tt)*) => {};
    (emit I64Ne $($rest:tt)*) => {};
    (emit I32LtS $($rest:tt)*) => {};
    (emit I64LtS $($rest:tt)*) => {};
    (emit I32LtU $($rest:tt)*) => {};
    (emit I64LtU $($rest:tt)*) => {};
    (emit I32LeS $($rest:tt)*) => {};
    (emit I64LeS $($rest:tt)*) => {};
    (emit I32LeU $($rest:tt)*) => {};
    (emit I64LeU $($rest:tt)*) => {};
    (emit I32GtS $($rest:tt)*) => {};
    (emit I64GtS $($rest:tt)*) => {};
    (emit I32GtU $($rest:tt)*) => {};
    (emit I64GtU $($rest:tt)*) => {};
    (emit I32GeS $($rest:tt)*) => {};
    (emit I64GeS $($rest:tt)*) => {};
    (emit I32GeU $($rest:tt)*) => {};
    (emit I64GeU $($rest:tt)*) => {};
    (emit I32Eqz $($rest:tt)*) => {};
    (emit I64Eqz $($rest:tt)*) => {};
    (emit I32And $($rest:tt)*) => {};
    (emit I64And $($rest:tt)*) => {};
    (emit I32Or $($rest:tt)*) => {};
    (emit I64Or $($rest:tt)*) => {};
    (emit I32Xor $($rest:tt)*) => {};
    (emit I64Xor $($rest:tt)*) => {};
    (emit I32Shl $($rest:tt)*) => {};
    (emit I64Shl $($rest:tt)*) => {};
    (emit I32ShrS $($rest:tt)*) => {};
    (emit I64ShrS $($rest:tt)*) => {};
    (emit I32ShrU $($rest:tt)*) => {};
    (emit I64ShrU $($rest:tt)*) => {};
    (emit I32Rotl $($rest:tt)*) => {};
    (emit I64Rotl $($rest:tt)*) => {};
    (emit I32Rotr $($rest:tt)*) => {};
    (emit I64Rotr $($rest:tt)*) => {};
    (emit I32Clz $($rest:tt)*) => {};
    (emit I64Clz $($rest:tt)*) => {};
    (emit I32Ctz $($rest:tt)*) => {};
    (emit I64Ctz $($rest:tt)*) => {};
    (emit I32Popcnt $($rest:tt)*) => {};
    (emit I64Popcnt $($rest:tt)*) => {};
    (emit I32WrapI64 $($rest:tt)*) => {};
    (emit I64ExtendI32S $($rest:tt)*) => {};
    (emit I64ExtendI32U $($rest:tt)*) => {};
    (emit I32Extend8S $($rest:tt)*) => {};
    (emit I32Extend16S $($rest:tt)*) => {};
    (emit I64Extend8S $($rest:tt)*) => {};
    (emit I64Extend16S $($rest:tt)*) => {};
    (emit I64Extend32S $($rest:tt)*) => {};
    (emit I32TruncF32S $($rest:tt)*) => {};
    (emit I32TruncF32U $($rest:tt)*) => {};
    (emit I32TruncF64S $($rest:tt)*) => {};
    (emit I32TruncF64U $($rest:tt)*) => {};
    (emit I64TruncF32S $($rest:tt)*) => {};
    (emit I64TruncF32U $($rest:tt)*) => {};
    (emit I64TruncF64S $($rest:tt)*) => {};
    (emit I64TruncF64U $($rest:tt)*) => {};
    (emit I32ReinterpretF32 $($rest:tt)*) => {};
    (emit I64ReinterpretF64 $($rest:tt)*) => {};
    (emit LocalGet $($rest:tt)*) => {};
    (emit LocalSet $($rest:tt)*) => {};
    (emit Call $($rest:tt)*) => {};
    (emit End $($rest:tt)*) => {};
    (emit Nop $($rest:tt)*) => {};
    (emit If $($rest:tt)*) => {};
    (emit Else $($rest:tt)*) => {};
    (emit Block $($rest:tt)*) => {};
    (emit Loop $($rest:tt)*) => {};
    (emit Br $($rest:tt)*) => {};
    (emit BrIf $($rest:tt)*) => {};
    (emit Return $($rest:tt)*) => {};
    (emit Unreachable $($rest:tt)*) => {};
    (emit LocalTee $($rest:tt)*) => {};
    (emit GlobalGet $($rest:tt)*) => {};
    (emit GlobalSet $($rest:tt)*) => {};
    (emit Select $($rest:tt)*) => {};
    (emit Drop $($rest:tt)*) => {};
    (emit BrTable $($rest:tt)*) => {};
    (emit CallIndirect $($rest:tt)*) => {};
    (emit TableInit $($rest:tt)*) => {};
    (emit TableCopy $($rest:tt)*) => {};
    (emit TableGet $($rest:tt)*) => {};
    (emit TableSet $($rest:tt)*) => {};
    (emit TableGrow $($rest:tt)*) => {};
    (emit TableSize $($rest:tt)*) => {};
    (emit TableFill $($rest:tt)*) => {};
    (emit ElemDrop $($rest:tt)*) => {};
    (emit MemoryInit $($rest:tt)*) => {};
    (emit MemoryCopy $($rest:tt)*) => {};
    (emit DataDrop $($rest:tt)*) => {};
    (emit MemoryFill $($rest:tt)*) => {};
    (emit MemorySize $($rest:tt)*) => {};
    (emit MemoryGrow $($rest:tt)*) => {};
    (emit I32Load $($rest:tt)*) => {};
    (emit I32Load8S $($rest:tt)*) => {};
    (emit I32Load8U $($rest:tt)*) => {};
    (emit I32Load16S $($rest:tt)*) => {};
    (emit I32Load16U $($rest:tt)*) => {};
    (emit I64Load8S $($rest:tt)*) => {};
    (emit I64Load8U $($rest:tt)*) => {};
    (emit I64Load16S $($rest:tt)*) => {};
    (emit I64Load16U $($rest:tt)*) => {};
    (emit I64Load32S $($rest:tt)*) => {};
    (emit I64Load32U $($rest:tt)*) => {};
    (emit I64Load $($rest:tt)*) => {};
    (emit I32Store $($rest:tt)*) => {};
    (emit I32Store8 $($rest:tt)*) => {};
    (emit I32Store16 $($rest:tt)*) => {};
    (emit I64Store $($rest:tt)*) => {};
    (emit I64Store8 $($rest:tt)*) => {};
    (emit I64Store16 $($rest:tt)*) => {};
    (emit I64Store32 $($rest:tt)*) => {};
    (emit F32Load $($rest:tt)*) => {};
    (emit F32Store $($rest:tt)*) => {};
    (emit F64Load $($rest:tt)*) => {};
    (emit F64Store $($rest:tt)*) => {};
    (emit I32TruncSatF32S $($rest:tt)*) => {};
    (emit I32TruncSatF32U $($rest:tt)*) => {};
    (emit I32TruncSatF64S $($rest:tt)*) => {};
    (emit I32TruncSatF64U $($rest:tt)*) => {};
    (emit I64TruncSatF32S $($rest:tt)*) => {};
    (emit I64TruncSatF32U $($rest:tt)*) => {};
    (emit I64TruncSatF64S $($rest:tt)*) => {};
    (emit I64TruncSatF64U $($rest:tt)*) => {};
    (emit V128Load $($rest:tt)*) => {};
    (emit V128Store $($rest:tt)*) => {};
    (emit I64Add128 $($rest:tt)*) => {};
    (emit I64Sub128 $($rest:tt)*) => {};
    (emit I64MulWideS $($rest:tt)*) => {};
    (emit I64MulWideU $($rest:tt)*) => {};
    (emit I32AtomicLoad8U $($rest:tt)*) => {};
    (emit I32AtomicLoad16U $($rest:tt)*) => {};
    (emit I32AtomicLoad $($rest:tt)*) => {};
    (emit I64AtomicLoad8U $($rest:tt)*) => {};
    (emit I64AtomicLoad16U $($rest:tt)*) => {};
    (emit I64AtomicLoad32U $($rest:tt)*) => {};
    (emit I64AtomicLoad $($rest:tt)*) => {};
    (emit V128Load8x8S $($rest:tt)*) => {};
    (emit V128Load8x8U $($rest:tt)*) => {};
    (emit V128Load16x4S $($rest:tt)*) => {};
    (emit V128Load16x4U $($rest:tt)*) => {};
    (emit V128Load32x2S $($rest:tt)*) => {};
    (emit V128Load32x2U $($rest:tt)*) => {};
    (emit V128Load8Splat $($rest:tt)*) => {};
    (emit V128Load16Splat $($rest:tt)*) => {};
    (emit V128Load32Splat $($rest:tt)*) => {};
    (emit V128Load64Splat $($rest:tt)*) => {};
    (emit I8x16Splat $($rest:tt)*) => {};
    (emit I16x8Splat $($rest:tt)*) => {};
    (emit I32x4Splat $($rest:tt)*) => {};
    (emit I64x2Splat $($rest:tt)*) => {};
    (emit F32x4Splat $($rest:tt)*) => {};
    (emit F64x2Splat $($rest:tt)*) => {};
    (emit I32AtomicStore8 $($rest:tt)*) => {};
    (emit I32AtomicStore16 $($rest:tt)*) => {};
    (emit I32AtomicStore $($rest:tt)*) => {};
    (emit I64AtomicStore8 $($rest:tt)*) => {};
    (emit I64AtomicStore16 $($rest:tt)*) => {};
    (emit I64AtomicStore32 $($rest:tt)*) => {};
    (emit I64AtomicStore $($rest:tt)*) => {};
    (emit I32AtomicRmw8AddU $($rest:tt)*) => {};
    (emit I32AtomicRmw16AddU $($rest:tt)*) => {};
    (emit I32AtomicRmwAdd $($rest:tt)*) => {};
    (emit I64AtomicRmw8AddU $($rest:tt)*) => {};
    (emit I64AtomicRmw16AddU $($rest:tt)*) => {};
    (emit I64AtomicRmw32AddU $($rest:tt)*) => {};
    (emit I64AtomicRmwAdd $($rest:tt)*) => {};
    (emit I8x16Shuffle $($rest:tt)*) => {};
    (emit I8x16Swizzle $($rest:tt)*) => {};
    (emit I32AtomicRmw8SubU $($rest:tt)*) => {};
    (emit I32AtomicRmw16SubU $($rest:tt)*) => {};
    (emit I32AtomicRmwSub $($rest:tt)*) => {};
    (emit I64AtomicRmw8SubU $($rest:tt)*) => {};
    (emit I64AtomicRmw16SubU $($rest:tt)*) => {};
    (emit I64AtomicRmw32SubU $($rest:tt)*) => {};
    (emit I64AtomicRmwSub $($rest:tt)*) => {};
    (emit I32AtomicRmw8XchgU $($rest:tt)*) => {};
    (emit I32AtomicRmw16XchgU $($rest:tt)*) => {};
    (emit I32AtomicRmwXchg $($rest:tt)*) => {};
    (emit I64AtomicRmw8XchgU $($rest:tt)*) => {};
    (emit I64AtomicRmw16XchgU $($rest:tt)*) => {};
    (emit I64AtomicRmw32XchgU $($rest:tt)*) => {};
    (emit I64AtomicRmwXchg $($rest:tt)*) => {};
    (emit I8x16ExtractLaneS $($rest:tt)*) => {};
    (emit I8x16ExtractLaneU $($rest:tt)*) => {};
    (emit I16x8ExtractLaneS $($rest:tt)*) => {};
    (emit I16x8ExtractLaneU $($rest:tt)*) => {};
    (emit I32x4ExtractLane $($rest:tt)*) => {};
    (emit I64x2ExtractLane $($rest:tt)*) => {};
    (emit F32x4ExtractLane $($rest:tt)*) => {};
    (emit F64x2ExtractLane $($rest:tt)*) => {};
    (emit I32AtomicRmw8AndU $($rest:tt)*) => {};
    (emit I32AtomicRmw16AndU $($rest:tt)*) => {};
    (emit I32AtomicRmwAnd $($rest:tt)*) => {};
    (emit I64AtomicRmw8AndU $($rest:tt)*) => {};
    (emit I64AtomicRmw16AndU $($rest:tt)*) => {};
    (emit I64AtomicRmw32AndU $($rest:tt)*) => {};
    (emit I64AtomicRmwAnd $($rest:tt)*) => {};
    (emit I32AtomicRmw8OrU $($rest:tt)*) => {};
    (emit I32AtomicRmw16OrU $($rest:tt)*) => {};
    (emit I32AtomicRmwOr $($rest:tt)*) => {};
    (emit I64AtomicRmw8OrU $($rest:tt)*) => {};
    (emit I64AtomicRmw16OrU $($rest:tt)*) => {};
    (emit I64AtomicRmw32OrU $($rest:tt)*) => {};
    (emit I64AtomicRmwOr $($rest:tt)*) => {};
    (emit I32AtomicRmw8XorU $($rest:tt)*) => {};
    (emit I32AtomicRmw16XorU $($rest:tt)*) => {};
    (emit I32AtomicRmwXor $($rest:tt)*) => {};
    (emit I64AtomicRmw8XorU $($rest:tt)*) => {};
    (emit I64AtomicRmw16XorU $($rest:tt)*) => {};
    (emit I64AtomicRmw32XorU $($rest:tt)*) => {};
    (emit I64AtomicRmwXor $($rest:tt)*) => {};
    (emit I8x16ReplaceLane $($rest:tt)*) => {};
    (emit I16x8ReplaceLane $($rest:tt)*) => {};
    (emit I32x4ReplaceLane $($rest:tt)*) => {};
    (emit I64x2ReplaceLane $($rest:tt)*) => {};
    (emit F32x4ReplaceLane $($rest:tt)*) => {};
    (emit F64x2ReplaceLane $($rest:tt)*) => {};
    (emit I32AtomicRmw8CmpxchgU $($rest:tt)*) => {};
    (emit I32AtomicRmw16CmpxchgU $($rest:tt)*) => {};
    (emit I32AtomicRmwCmpxchg $($rest:tt)*) => {};
    (emit I64AtomicRmw8CmpxchgU $($rest:tt)*) => {};
    (emit I64AtomicRmw16CmpxchgU $($rest:tt)*) => {};
    (emit I64AtomicRmw32CmpxchgU $($rest:tt)*) => {};
    (emit I64AtomicRmwCmpxchg $($rest:tt)*) => {};
    (emit I8x16Eq $($rest:tt)*) => {};
    (emit I16x8Eq $($rest:tt)*) => {};
    (emit I32x4Eq $($rest:tt)*) => {};
    (emit I64x2Eq $($rest:tt)*) => {};
    (emit F32x4Eq $($rest:tt)*) => {};
    (emit F64x2Eq $($rest:tt)*) => {};
    (emit I8x16Ne $($rest:tt)*) => {};
    (emit I16x8Ne $($rest:tt)*) => {};
    (emit I32x4Ne $($rest:tt)*) => {};
    (emit I64x2Ne $($rest:tt)*) => {};
    (emit F32x4Ne $($rest:tt)*) => {};
    (emit F64x2Ne $($rest:tt)*) => {};
    (emit I8x16LtS $($rest:tt)*) => {};
    (emit I8x16LtU $($rest:tt)*) => {};
    (emit I16x8LtS $($rest:tt)*) => {};
    (emit I16x8LtU $($rest:tt)*) => {};
    (emit I32x4LtS $($rest:tt)*) => {};
    (emit I32x4LtU $($rest:tt)*) => {};
    (emit I64x2LtS $($rest:tt)*) => {};
    (emit F32x4Lt $($rest:tt)*) => {};
    (emit F64x2Lt $($rest:tt)*) => {};
    (emit I8x16LeS $($rest:tt)*) => {};
    (emit I8x16LeU $($rest:tt)*) => {};
    (emit I16x8LeS $($rest:tt)*) => {};
    (emit I16x8LeU $($rest:tt)*) => {};
    (emit I32x4LeS $($rest:tt)*) => {};
    (emit I32x4LeU $($rest:tt)*) => {};
    (emit I64x2LeS $($rest:tt)*) => {};
    (emit F32x4Le $($rest:tt)*) => {};
    (emit F64x2Le $($rest:tt)*) => {};
    (emit I8x16GtS $($rest:tt)*) => {};
    (emit I8x16GtU $($rest:tt)*) => {};
    (emit I16x8GtS $($rest:tt)*) => {};
    (emit I16x8GtU $($rest:tt)*) => {};
    (emit I32x4GtS $($rest:tt)*) => {};
    (emit I32x4GtU $($rest:tt)*) => {};
    (emit I64x2GtS $($rest:tt)*) => {};
    (emit F32x4Gt $($rest:tt)*) => {};
    (emit F64x2Gt $($rest:tt)*) => {};
    (emit I8x16GeS $($rest:tt)*) => {};
    (emit I8x16GeU $($rest:tt)*) => {};
    (emit I16x8GeS $($rest:tt)*) => {};
    (emit I16x8GeU $($rest:tt)*) => {};
    (emit I32x4GeS $($rest:tt)*) => {};
    (emit I32x4GeU $($rest:tt)*) => {};
    (emit I64x2GeS $($rest:tt)*) => {};
    (emit F32x4Ge $($rest:tt)*) => {};
    (emit F64x2Ge $($rest:tt)*) => {};
    (emit MemoryAtomicWait32 $($rest:tt)*) => {};
    (emit MemoryAtomicWait64 $($rest:tt)*) => {};
    (emit MemoryAtomicNotify $($rest:tt)*) => {};
    (emit AtomicFence $($rest:tt)*) => {};
    (emit V128Not $($rest:tt)*) => {};
    (emit V128And $($rest:tt)*) => {};
    (emit V128AndNot $($rest:tt)*) => {};
    (emit V128Or $($rest:tt)*) => {};
    (emit V128Xor $($rest:tt)*) => {};
    (emit V128Bitselect $($rest:tt)*) => {};
    (emit V128AnyTrue $($rest:tt)*) => {};
    (emit V128Load8Lane $($rest:tt)*) => {};
    (emit V128Load16Lane $($rest:tt)*) => {};
    (emit V128Load32Lane $($rest:tt)*) => {};
    (emit V128Load64Lane $($rest:tt)*) => {};
    (emit V128Store8Lane $($rest:tt)*) => {};
    (emit V128Store16Lane $($rest:tt)*) => {};
    (emit V128Store32Lane $($rest:tt)*) => {};
    (emit V128Store64Lane $($rest:tt)*) => {};
    (emit F32x4ConvertI32x4S $($rest:tt)*) => {};
    (emit F32x4ConvertI32x4U $($rest:tt)*) => {};
    (emit F64x2ConvertLowI32x4S $($rest:tt)*) => {};
    (emit F64x2ConvertLowI32x4U $($rest:tt)*) => {};
    (emit I8x16NarrowI16x8S $($rest:tt)*) => {};
    (emit I8x16NarrowI16x8U $($rest:tt)*) => {};
    (emit I16x8NarrowI32x4S $($rest:tt)*) => {};
    (emit I16x8NarrowI32x4U $($rest:tt)*) => {};
    (emit F32x4DemoteF64x2Zero $($rest:tt)*) => {};
    (emit F64x2PromoteLowF32x4 $($rest:tt)*) => {};
    (emit I16x8ExtendLowI8x16S $($rest:tt)*) => {};
    (emit I16x8ExtendHighI8x16S $($rest:tt)*) => {};
    (emit I16x8ExtendLowI8x16U $($rest:tt)*) => {};
    (emit I16x8ExtendHighI8x16U $($rest:tt)*) => {};
    (emit I32x4ExtendLowI16x8S $($rest:tt)*) => {};
    (emit I32x4ExtendHighI16x8S $($rest:tt)*) => {};
    (emit I32x4ExtendLowI16x8U $($rest:tt)*) => {};
    (emit I32x4ExtendHighI16x8U $($rest:tt)*) => {};
    (emit I64x2ExtendLowI32x4S $($rest:tt)*) => {};
    (emit I64x2ExtendHighI32x4S $($rest:tt)*) => {};
    (emit I64x2ExtendLowI32x4U $($rest:tt)*) => {};
    (emit I64x2ExtendHighI32x4U $($rest:tt)*) => {};
    (emit I8x16Add $($rest:tt)*) => {};
    (emit I16x8Add $($rest:tt)*) => {};
    (emit I32x4Add $($rest:tt)*) => {};
    (emit I64x2Add $($rest:tt)*) => {};
    (emit I8x16Sub $($rest:tt)*) => {};
    (emit I16x8Sub $($rest:tt)*) => {};
    (emit I32x4Sub $($rest:tt)*) => {};
    (emit I64x2Sub $($rest:tt)*) => {};
    (emit I16x8Mul $($rest:tt)*) => {};
    (emit I32x4Mul $($rest:tt)*) => {};
    (emit I64x2Mul $($rest:tt)*) => {};
    (emit I8x16AddSatS $($rest:tt)*) => {};
    (emit I16x8AddSatS $($rest:tt)*) => {};
    (emit I8x16AddSatU $($rest:tt)*) => {};
    (emit I16x8AddSatU $($rest:tt)*) => {};
    (emit I8x16SubSatS $($rest:tt)*) => {};
    (emit I16x8SubSatS $($rest:tt)*) => {};
    (emit I8x16SubSatU $($rest:tt)*) => {};
    (emit I16x8SubSatU $($rest:tt)*) => {};
    (emit I8x16Abs $($rest:tt)*) => {};
    (emit I16x8Abs $($rest:tt)*) => {};
    (emit I32x4Abs $($rest:tt)*) => {};
    (emit I64x2Abs $($rest:tt)*) => {};
    (emit F32x4Abs $($rest:tt)*) => {};
    (emit F64x2Abs $($rest:tt)*) => {};
    (emit I8x16Neg $($rest:tt)*) => {};
    (emit I16x8Neg $($rest:tt)*) => {};
    (emit I32x4Neg $($rest:tt)*) => {};
    (emit I64x2Neg $($rest:tt)*) => {};
    (emit I8x16Shl $($rest:tt)*) => {};
    (emit I16x8Shl $($rest:tt)*) => {};
    (emit I32x4Shl $($rest:tt)*) => {};
    (emit I64x2Shl $($rest:tt)*) => {};
    (emit I8x16ShrU $($rest:tt)*) => {};
    (emit I16x8ShrU $($rest:tt)*) => {};
    (emit I32x4ShrU $($rest:tt)*) => {};
    (emit I64x2ShrU $($rest:tt)*) => {};
    (emit I8x16ShrS $($rest:tt)*) => {};
    (emit I16x8ShrS $($rest:tt)*) => {};
    (emit I32x4ShrS $($rest:tt)*) => {};
    (emit I64x2ShrS $($rest:tt)*) => {};
    (emit I16x8Q15MulrSatS $($rest:tt)*) => {};
    (emit I8x16AllTrue $($rest:tt)*) => {};
    (emit I16x8AllTrue $($rest:tt)*) => {};
    (emit I32x4AllTrue $($rest:tt)*) => {};
    (emit I64x2AllTrue $($rest:tt)*) => {};
    (emit I8x16Bitmask $($rest:tt)*) => {};
    (emit I16x8Bitmask $($rest:tt)*) => {};
    (emit I32x4Bitmask $($rest:tt)*) => {};
    (emit I64x2Bitmask $($rest:tt)*) => {};
    (emit I32x4TruncSatF32x4S $($rest:tt)*) => {};
    (emit I32x4TruncSatF32x4U $($rest:tt)*) => {};
    (emit I32x4TruncSatF64x2SZero $($rest:tt)*) => {};
    (emit I32x4TruncSatF64x2UZero $($rest:tt)*) => {};
    (emit I8x16MinU $($rest:tt)*) => {};
    (emit I16x8MinU $($rest:tt)*) => {};
    (emit I32x4MinU $($rest:tt)*) => {};
    (emit I8x16MinS $($rest:tt)*) => {};
    (emit I16x8MinS $($rest:tt)*) => {};
    (emit I32x4MinS $($rest:tt)*) => {};
    (emit I8x16MaxU $($rest:tt)*) => {};
    (emit I16x8MaxU $($rest:tt)*) => {};
    (emit I32x4MaxU $($rest:tt)*) => {};
    (emit I8x16MaxS $($rest:tt)*) => {};
    (emit I16x8MaxS $($rest:tt)*) => {};
    (emit I32x4MaxS $($rest:tt)*) => {};
    (emit I16x8ExtMulLowI8x16S $($rest:tt)*) => {};
    (emit I32x4ExtMulLowI16x8S $($rest:tt)*) => {};
    (emit I64x2ExtMulLowI32x4S $($rest:tt)*) => {};
    (emit I16x8ExtMulHighI8x16S $($rest:tt)*) => {};
    (emit I32x4ExtMulHighI16x8S $($rest:tt)*) => {};
    (emit I64x2ExtMulHighI32x4S $($rest:tt)*) => {};
    (emit I16x8ExtMulLowI8x16U $($rest:tt)*) => {};
    (emit I32x4ExtMulLowI16x8U $($rest:tt)*) => {};
    (emit I64x2ExtMulLowI32x4U $($rest:tt)*) => {};
    (emit I16x8ExtMulHighI8x16U $($rest:tt)*) => {};
    (emit I32x4ExtMulHighI16x8U $($rest:tt)*) => {};
    (emit I64x2ExtMulHighI32x4U $($rest:tt)*) => {};
    (emit I16x8ExtAddPairwiseI8x16U $($rest:tt)*) => {};
    (emit I16x8ExtAddPairwiseI8x16S $($rest:tt)*) => {};
    (emit I32x4ExtAddPairwiseI16x8U $($rest:tt)*) => {};
    (emit I32x4ExtAddPairwiseI16x8S $($rest:tt)*) => {};
    (emit I32x4DotI16x8S $($rest:tt)*) => {};
    (emit I8x16Popcnt $($rest:tt)*) => {};
    (emit I8x16AvgrU $($rest:tt)*) => {};
    (emit I16x8AvgrU $($rest:tt)*) => {};
    (emit F32x4Add $($rest:tt)*) => {};
    (emit F64x2Add $($rest:tt)*) => {};
    (emit F32x4Sub $($rest:tt)*) => {};
    (emit F64x2Sub $($rest:tt)*) => {};
    (emit F32x4Mul $($rest:tt)*) => {};
    (emit F64x2Mul $($rest:tt)*) => {};
    (emit F32x4Div $($rest:tt)*) => {};
    (emit F64x2Div $($rest:tt)*) => {};
    (emit F32x4Neg $($rest:tt)*) => {};
    (emit F64x2Neg $($rest:tt)*) => {};
    (emit F32x4Sqrt $($rest:tt)*) => {};
    (emit F64x2Sqrt $($rest:tt)*) => {};
    (emit F32x4Ceil $($rest:tt)*) => {};
    (emit F64x2Ceil $($rest:tt)*) => {};
    (emit F32x4Floor $($rest:tt)*) => {};
    (emit F64x2Floor $($rest:tt)*) => {};
    (emit F32x4Nearest $($rest:tt)*) => {};
    (emit F64x2Nearest $($rest:tt)*) => {};
    (emit F32x4Trunc $($rest:tt)*) => {};
    (emit F64x2Trunc $($rest:tt)*) => {};
    (emit V128Load32Zero $($rest:tt)*) => {};
    (emit V128Load64Zero $($rest:tt)*) => {};
    (emit F32x4PMin $($rest:tt)*) => {};
    (emit F64x2PMin $($rest:tt)*) => {};
    (emit F32x4PMax $($rest:tt)*) => {};
    (emit F64x2PMax $($rest:tt)*) => {};
    (emit F32x4Min $($rest:tt)*) => {};
    (emit F64x2Min $($rest:tt)*) => {};
    (emit F32x4Max $($rest:tt)*) => {};
    (emit F64x2Max $($rest:tt)*) => {};

    // Fallthrough: any operator not explicitly listed above expands to the
    // tokens captured in `$rest`, i.e. the default error-returning visitor
    // stub built by the entry rule.
    (emit $unsupported:tt $($rest:tt)*) => {$($rest)*};
}
564
565impl<'a, 'translation, 'data, M> VisitOperator<'a> for CodeGen<'a, 'translation, 'data, M, Emission>
566where
567    M: MacroAssembler,
568{
569    type Output = Result<()>;
570
571    fn visit_i32_const(&mut self, val: i32) -> Self::Output {
572        self.context.stack.push(Val::i32(val));
573
574        Ok(())
575    }
576
577    fn visit_i64_const(&mut self, val: i64) -> Self::Output {
578        self.context.stack.push(Val::i64(val));
579        Ok(())
580    }
581
582    fn visit_f32_const(&mut self, val: Ieee32) -> Self::Output {
583        self.context.stack.push(Val::f32(val));
584        Ok(())
585    }
586
587    fn visit_f64_const(&mut self, val: Ieee64) -> Self::Output {
588        self.context.stack.push(Val::f64(val));
589        Ok(())
590    }
591
592    fn visit_f32_add(&mut self) -> Self::Output {
593        self.context.binop(
594            self.masm,
595            OperandSize::S32,
596            &mut |masm: &mut M, dst, src, size| {
597                masm.float_add(writable!(dst), dst, src, size)?;
598                Ok(TypedReg::f32(dst))
599            },
600        )
601    }
602
603    fn visit_f64_add(&mut self) -> Self::Output {
604        self.context.binop(
605            self.masm,
606            OperandSize::S64,
607            &mut |masm: &mut M, dst, src, size| {
608                masm.float_add(writable!(dst), dst, src, size)?;
609                Ok(TypedReg::f64(dst))
610            },
611        )
612    }
613
614    fn visit_f32_sub(&mut self) -> Self::Output {
615        self.context.binop(
616            self.masm,
617            OperandSize::S32,
618            &mut |masm: &mut M, dst, src, size| {
619                masm.float_sub(writable!(dst), dst, src, size)?;
620                Ok(TypedReg::f32(dst))
621            },
622        )
623    }
624
625    fn visit_f64_sub(&mut self) -> Self::Output {
626        self.context.binop(
627            self.masm,
628            OperandSize::S64,
629            &mut |masm: &mut M, dst, src, size| {
630                masm.float_sub(writable!(dst), dst, src, size)?;
631                Ok(TypedReg::f64(dst))
632            },
633        )
634    }
635
636    fn visit_f32_mul(&mut self) -> Self::Output {
637        self.context.binop(
638            self.masm,
639            OperandSize::S32,
640            &mut |masm: &mut M, dst, src, size| {
641                masm.float_mul(writable!(dst), dst, src, size)?;
642                Ok(TypedReg::f32(dst))
643            },
644        )
645    }
646
647    fn visit_f64_mul(&mut self) -> Self::Output {
648        self.context.binop(
649            self.masm,
650            OperandSize::S64,
651            &mut |masm: &mut M, dst, src, size| {
652                masm.float_mul(writable!(dst), dst, src, size)?;
653                Ok(TypedReg::f64(dst))
654            },
655        )
656    }
657
658    fn visit_f32_div(&mut self) -> Self::Output {
659        self.context.binop(
660            self.masm,
661            OperandSize::S32,
662            &mut |masm: &mut M, dst, src, size| {
663                masm.float_div(writable!(dst), dst, src, size)?;
664                Ok(TypedReg::f32(dst))
665            },
666        )
667    }
668
669    fn visit_f64_div(&mut self) -> Self::Output {
670        self.context.binop(
671            self.masm,
672            OperandSize::S64,
673            &mut |masm: &mut M, dst, src, size| {
674                masm.float_div(writable!(dst), dst, src, size)?;
675                Ok(TypedReg::f64(dst))
676            },
677        )
678    }
679
680    fn visit_f32_min(&mut self) -> Self::Output {
681        self.context.binop(
682            self.masm,
683            OperandSize::S32,
684            &mut |masm: &mut M, dst, src, size| {
685                masm.float_min(writable!(dst), dst, src, size)?;
686                Ok(TypedReg::f32(dst))
687            },
688        )
689    }
690
691    fn visit_f64_min(&mut self) -> Self::Output {
692        self.context.binop(
693            self.masm,
694            OperandSize::S64,
695            &mut |masm: &mut M, dst, src, size| {
696                masm.float_min(writable!(dst), dst, src, size)?;
697                Ok(TypedReg::f64(dst))
698            },
699        )
700    }
701
702    fn visit_f32_max(&mut self) -> Self::Output {
703        self.context.binop(
704            self.masm,
705            OperandSize::S32,
706            &mut |masm: &mut M, dst, src, size| {
707                masm.float_max(writable!(dst), dst, src, size)?;
708                Ok(TypedReg::f32(dst))
709            },
710        )
711    }
712
713    fn visit_f64_max(&mut self) -> Self::Output {
714        self.context.binop(
715            self.masm,
716            OperandSize::S64,
717            &mut |masm: &mut M, dst, src, size| {
718                masm.float_max(writable!(dst), dst, src, size)?;
719                Ok(TypedReg::f64(dst))
720            },
721        )
722    }
723
724    fn visit_f32_copysign(&mut self) -> Self::Output {
725        self.context.binop(
726            self.masm,
727            OperandSize::S32,
728            &mut |masm: &mut M, dst, src, size| {
729                masm.float_copysign(writable!(dst), dst, src, size)?;
730                Ok(TypedReg::f32(dst))
731            },
732        )
733    }
734
735    fn visit_f64_copysign(&mut self) -> Self::Output {
736        self.context.binop(
737            self.masm,
738            OperandSize::S64,
739            &mut |masm: &mut M, dst, src, size| {
740                masm.float_copysign(writable!(dst), dst, src, size)?;
741                Ok(TypedReg::f64(dst))
742            },
743        )
744    }
745
746    fn visit_f32_abs(&mut self) -> Self::Output {
747        self.context.unop(self.masm, |masm, reg| {
748            masm.float_abs(writable!(reg), OperandSize::S32)?;
749            Ok(TypedReg::f32(reg))
750        })
751    }
752
753    fn visit_f64_abs(&mut self) -> Self::Output {
754        self.context.unop(self.masm, |masm, reg| {
755            masm.float_abs(writable!(reg), OperandSize::S64)?;
756            Ok(TypedReg::f64(reg))
757        })
758    }
759
760    fn visit_f32_neg(&mut self) -> Self::Output {
761        self.context.unop(self.masm, |masm, reg| {
762            masm.float_neg(writable!(reg), OperandSize::S32)?;
763            Ok(TypedReg::f32(reg))
764        })
765    }
766
767    fn visit_f64_neg(&mut self) -> Self::Output {
768        self.context.unop(self.masm, |masm, reg| {
769            masm.float_neg(writable!(reg), OperandSize::S64)?;
770            Ok(TypedReg::f64(reg))
771        })
772    }
773
774    fn visit_f32_floor(&mut self) -> Self::Output {
775        self.masm.float_round(
776            RoundingMode::Down,
777            &mut self.env,
778            &mut self.context,
779            OperandSize::S32,
780            |env, cx, masm| {
781                let builtin = env.builtins.floor_f32::<M::ABI>()?;
782                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
783            },
784        )
785    }
786
787    fn visit_f64_floor(&mut self) -> Self::Output {
788        self.masm.float_round(
789            RoundingMode::Down,
790            &mut self.env,
791            &mut self.context,
792            OperandSize::S64,
793            |env, cx, masm| {
794                let builtin = env.builtins.floor_f64::<M::ABI>()?;
795                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
796            },
797        )
798    }
799
800    fn visit_f32_ceil(&mut self) -> Self::Output {
801        self.masm.float_round(
802            RoundingMode::Up,
803            &mut self.env,
804            &mut self.context,
805            OperandSize::S32,
806            |env, cx, masm| {
807                let builtin = env.builtins.ceil_f32::<M::ABI>()?;
808                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
809            },
810        )
811    }
812
813    fn visit_f64_ceil(&mut self) -> Self::Output {
814        self.masm.float_round(
815            RoundingMode::Up,
816            &mut self.env,
817            &mut self.context,
818            OperandSize::S64,
819            |env, cx, masm| {
820                let builtin = env.builtins.ceil_f64::<M::ABI>()?;
821                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
822            },
823        )
824    }
825
826    fn visit_f32_nearest(&mut self) -> Self::Output {
827        self.masm.float_round(
828            RoundingMode::Nearest,
829            &mut self.env,
830            &mut self.context,
831            OperandSize::S32,
832            |env, cx, masm| {
833                let builtin = env.builtins.nearest_f32::<M::ABI>()?;
834                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
835            },
836        )
837    }
838
839    fn visit_f64_nearest(&mut self) -> Self::Output {
840        self.masm.float_round(
841            RoundingMode::Nearest,
842            &mut self.env,
843            &mut self.context,
844            OperandSize::S64,
845            |env, cx, masm| {
846                let builtin = env.builtins.nearest_f64::<M::ABI>()?;
847                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
848            },
849        )
850    }
851
852    fn visit_f32_trunc(&mut self) -> Self::Output {
853        self.masm.float_round(
854            RoundingMode::Zero,
855            &mut self.env,
856            &mut self.context,
857            OperandSize::S32,
858            |env, cx, masm| {
859                let builtin = env.builtins.trunc_f32::<M::ABI>()?;
860                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
861            },
862        )
863    }
864
865    fn visit_f64_trunc(&mut self) -> Self::Output {
866        self.masm.float_round(
867            RoundingMode::Zero,
868            &mut self.env,
869            &mut self.context,
870            OperandSize::S64,
871            |env, cx, masm| {
872                let builtin = env.builtins.trunc_f64::<M::ABI>()?;
873                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
874            },
875        )
876    }
877
878    fn visit_f32_sqrt(&mut self) -> Self::Output {
879        self.context.unop(self.masm, |masm, reg| {
880            masm.float_sqrt(writable!(reg), reg, OperandSize::S32)?;
881            Ok(TypedReg::f32(reg))
882        })
883    }
884
885    fn visit_f64_sqrt(&mut self) -> Self::Output {
886        self.context.unop(self.masm, |masm, reg| {
887            masm.float_sqrt(writable!(reg), reg, OperandSize::S64)?;
888            Ok(TypedReg::f64(reg))
889        })
890    }
891
892    fn visit_f32_eq(&mut self) -> Self::Output {
893        self.context.float_cmp_op(
894            self.masm,
895            OperandSize::S32,
896            &mut |masm: &mut M, dst, src1, src2, size| {
897                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Eq, size)
898            },
899        )
900    }
901
902    fn visit_f64_eq(&mut self) -> Self::Output {
903        self.context.float_cmp_op(
904            self.masm,
905            OperandSize::S64,
906            &mut |masm: &mut M, dst, src1, src2, size| {
907                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Eq, size)
908            },
909        )
910    }
911
912    fn visit_f32_ne(&mut self) -> Self::Output {
913        self.context.float_cmp_op(
914            self.masm,
915            OperandSize::S32,
916            &mut |masm: &mut M, dst, src1, src2, size| {
917                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Ne, size)
918            },
919        )
920    }
921
922    fn visit_f64_ne(&mut self) -> Self::Output {
923        self.context.float_cmp_op(
924            self.masm,
925            OperandSize::S64,
926            &mut |masm: &mut M, dst, src1, src2, size| {
927                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Ne, size)
928            },
929        )
930    }
931
932    fn visit_f32_lt(&mut self) -> Self::Output {
933        self.context.float_cmp_op(
934            self.masm,
935            OperandSize::S32,
936            &mut |masm: &mut M, dst, src1, src2, size| {
937                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Lt, size)
938            },
939        )
940    }
941
942    fn visit_f64_lt(&mut self) -> Self::Output {
943        self.context.float_cmp_op(
944            self.masm,
945            OperandSize::S64,
946            &mut |masm: &mut M, dst, src1, src2, size| {
947                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Lt, size)
948            },
949        )
950    }
951
952    fn visit_f32_gt(&mut self) -> Self::Output {
953        self.context.float_cmp_op(
954            self.masm,
955            OperandSize::S32,
956            &mut |masm: &mut M, dst, src1, src2, size| {
957                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Gt, size)
958            },
959        )
960    }
961
962    fn visit_f64_gt(&mut self) -> Self::Output {
963        self.context.float_cmp_op(
964            self.masm,
965            OperandSize::S64,
966            &mut |masm: &mut M, dst, src1, src2, size| {
967                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Gt, size)
968            },
969        )
970    }
971
972    fn visit_f32_le(&mut self) -> Self::Output {
973        self.context.float_cmp_op(
974            self.masm,
975            OperandSize::S32,
976            &mut |masm: &mut M, dst, src1, src2, size| {
977                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Le, size)
978            },
979        )
980    }
981
982    fn visit_f64_le(&mut self) -> Self::Output {
983        self.context.float_cmp_op(
984            self.masm,
985            OperandSize::S64,
986            &mut |masm: &mut M, dst, src1, src2, size| {
987                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Le, size)
988            },
989        )
990    }
991
992    fn visit_f32_ge(&mut self) -> Self::Output {
993        self.context.float_cmp_op(
994            self.masm,
995            OperandSize::S32,
996            &mut |masm: &mut M, dst, src1, src2, size| {
997                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Ge, size)
998            },
999        )
1000    }
1001
1002    fn visit_f64_ge(&mut self) -> Self::Output {
1003        self.context.float_cmp_op(
1004            self.masm,
1005            OperandSize::S64,
1006            &mut |masm: &mut M, dst, src1, src2, size| {
1007                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Ge, size)
1008            },
1009        )
1010    }
1011
1012    fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
1013        self.context
1014            .convert_op(self.masm, WasmValType::F32, |masm, dst, src, dst_size| {
1015                masm.signed_convert(writable!(dst), src, OperandSize::S32, dst_size)
1016            })
1017    }
1018
1019    fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
1020        self.context.convert_op_with_tmp_reg(
1021            self.masm,
1022            WasmValType::F32,
1023            RegClass::Int,
1024            |masm, dst, src, tmp_gpr, dst_size| {
1025                masm.unsigned_convert(writable!(dst), src, tmp_gpr, OperandSize::S32, dst_size)
1026            },
1027        )
1028    }
1029
1030    fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
1031        self.context
1032            .convert_op(self.masm, WasmValType::F32, |masm, dst, src, dst_size| {
1033                masm.signed_convert(writable!(dst), src, OperandSize::S64, dst_size)
1034            })
1035    }
1036
1037    fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
1038        self.context.convert_op_with_tmp_reg(
1039            self.masm,
1040            WasmValType::F32,
1041            RegClass::Int,
1042            |masm, dst, src, tmp_gpr, dst_size| {
1043                masm.unsigned_convert(writable!(dst), src, tmp_gpr, OperandSize::S64, dst_size)
1044            },
1045        )
1046    }
1047
1048    fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
1049        self.context
1050            .convert_op(self.masm, WasmValType::F64, |masm, dst, src, dst_size| {
1051                masm.signed_convert(writable!(dst), src, OperandSize::S32, dst_size)
1052            })
1053    }
1054
1055    fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
1056        self.context.convert_op_with_tmp_reg(
1057            self.masm,
1058            WasmValType::F64,
1059            RegClass::Int,
1060            |masm, dst, src, tmp_gpr, dst_size| {
1061                masm.unsigned_convert(writable!(dst), src, tmp_gpr, OperandSize::S32, dst_size)
1062            },
1063        )
1064    }
1065
1066    fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
1067        self.context
1068            .convert_op(self.masm, WasmValType::F64, |masm, dst, src, dst_size| {
1069                masm.signed_convert(writable!(dst), src, OperandSize::S64, dst_size)
1070            })
1071    }
1072
1073    fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
1074        self.context.convert_op_with_tmp_reg(
1075            self.masm,
1076            WasmValType::F64,
1077            RegClass::Int,
1078            |masm, dst, src, tmp_gpr, dst_size| {
1079                masm.unsigned_convert(writable!(dst), src, tmp_gpr, OperandSize::S64, dst_size)
1080            },
1081        )
1082    }
1083
1084    fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
1085        self.context
1086            .convert_op(self.masm, WasmValType::F32, |masm, dst, src, size| {
1087                masm.reinterpret_int_as_float(writable!(dst), src.into(), size)
1088            })
1089    }
1090
1091    fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
1092        self.context
1093            .convert_op(self.masm, WasmValType::F64, |masm, dst, src, size| {
1094                masm.reinterpret_int_as_float(writable!(dst), src.into(), size)
1095            })
1096    }
1097
1098    fn visit_f32_demote_f64(&mut self) -> Self::Output {
1099        self.context.unop(self.masm, |masm, reg| {
1100            masm.demote(writable!(reg), reg)?;
1101            Ok(TypedReg::f32(reg))
1102        })
1103    }
1104
1105    fn visit_f64_promote_f32(&mut self) -> Self::Output {
1106        self.context.unop(self.masm, |masm, reg| {
1107            masm.promote(writable!(reg), reg)?;
1108            Ok(TypedReg::f64(reg))
1109        })
1110    }
1111
1112    fn visit_i32_add(&mut self) -> Self::Output {
1113        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1114            masm.add(writable!(dst), dst, src, size)?;
1115            Ok(TypedReg::i32(dst))
1116        })
1117    }
1118
1119    fn visit_i64_add(&mut self) -> Self::Output {
1120        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1121            masm.add(writable!(dst), dst, src, size)?;
1122            Ok(TypedReg::i64(dst))
1123        })
1124    }
1125
1126    fn visit_i32_sub(&mut self) -> Self::Output {
1127        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1128            masm.sub(writable!(dst), dst, src, size)?;
1129            Ok(TypedReg::i32(dst))
1130        })
1131    }
1132
1133    fn visit_i64_sub(&mut self) -> Self::Output {
1134        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1135            masm.sub(writable!(dst), dst, src, size)?;
1136            Ok(TypedReg::i64(dst))
1137        })
1138    }
1139
1140    fn visit_i32_mul(&mut self) -> Self::Output {
1141        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1142            masm.mul(writable!(dst), dst, src, size)?;
1143            Ok(TypedReg::i32(dst))
1144        })
1145    }
1146
1147    fn visit_i64_mul(&mut self) -> Self::Output {
1148        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1149            masm.mul(writable!(dst), dst, src, size)?;
1150            Ok(TypedReg::i64(dst))
1151        })
1152    }
1153
1154    fn visit_i32_div_s(&mut self) -> Self::Output {
1155        use DivKind::*;
1156        use OperandSize::*;
1157
1158        self.masm.div(&mut self.context, Signed, S32)
1159    }
1160
1161    fn visit_i32_div_u(&mut self) -> Self::Output {
1162        use DivKind::*;
1163        use OperandSize::*;
1164
1165        self.masm.div(&mut self.context, Unsigned, S32)
1166    }
1167
1168    fn visit_i64_div_s(&mut self) -> Self::Output {
1169        use DivKind::*;
1170        use OperandSize::*;
1171
1172        self.masm.div(&mut self.context, Signed, S64)
1173    }
1174
1175    fn visit_i64_div_u(&mut self) -> Self::Output {
1176        use DivKind::*;
1177        use OperandSize::*;
1178
1179        self.masm.div(&mut self.context, Unsigned, S64)
1180    }
1181
1182    fn visit_i32_rem_s(&mut self) -> Self::Output {
1183        use OperandSize::*;
1184        use RemKind::*;
1185
1186        self.masm.rem(&mut self.context, Signed, S32)
1187    }
1188
1189    fn visit_i32_rem_u(&mut self) -> Self::Output {
1190        use OperandSize::*;
1191        use RemKind::*;
1192
1193        self.masm.rem(&mut self.context, Unsigned, S32)
1194    }
1195
1196    fn visit_i64_rem_s(&mut self) -> Self::Output {
1197        use OperandSize::*;
1198        use RemKind::*;
1199
1200        self.masm.rem(&mut self.context, Signed, S64)
1201    }
1202
1203    fn visit_i64_rem_u(&mut self) -> Self::Output {
1204        use OperandSize::*;
1205        use RemKind::*;
1206
1207        self.masm.rem(&mut self.context, Unsigned, S64)
1208    }
1209
    fn visit_i32_eq(&mut self) -> Self::Output {
        // `i32.eq`: equality comparison.
        self.cmp_i32s(IntCmpKind::Eq)
    }
1213
    fn visit_i64_eq(&mut self) -> Self::Output {
        // `i64.eq`: equality comparison.
        self.cmp_i64s(IntCmpKind::Eq)
    }
1217
    fn visit_i32_ne(&mut self) -> Self::Output {
        // `i32.ne`: inequality comparison.
        self.cmp_i32s(IntCmpKind::Ne)
    }
1221
    fn visit_i64_ne(&mut self) -> Self::Output {
        // `i64.ne`: inequality comparison.
        self.cmp_i64s(IntCmpKind::Ne)
    }
1225
    fn visit_i32_lt_s(&mut self) -> Self::Output {
        // `i32.lt_s`: signed less-than comparison.
        self.cmp_i32s(IntCmpKind::LtS)
    }
1229
    fn visit_i64_lt_s(&mut self) -> Self::Output {
        // `i64.lt_s`: signed less-than comparison.
        self.cmp_i64s(IntCmpKind::LtS)
    }
1233
    fn visit_i32_lt_u(&mut self) -> Self::Output {
        // `i32.lt_u`: unsigned less-than comparison.
        self.cmp_i32s(IntCmpKind::LtU)
    }
1237
    fn visit_i64_lt_u(&mut self) -> Self::Output {
        // `i64.lt_u`: unsigned less-than comparison.
        self.cmp_i64s(IntCmpKind::LtU)
    }
1241
    fn visit_i32_le_s(&mut self) -> Self::Output {
        // `i32.le_s`: signed less-than-or-equal comparison.
        self.cmp_i32s(IntCmpKind::LeS)
    }
1245
    fn visit_i64_le_s(&mut self) -> Self::Output {
        // `i64.le_s`: signed less-than-or-equal comparison.
        self.cmp_i64s(IntCmpKind::LeS)
    }
1249
    fn visit_i32_le_u(&mut self) -> Self::Output {
        // `i32.le_u`: unsigned less-than-or-equal comparison.
        self.cmp_i32s(IntCmpKind::LeU)
    }
1253
    fn visit_i64_le_u(&mut self) -> Self::Output {
        // `i64.le_u`: unsigned less-than-or-equal comparison.
        self.cmp_i64s(IntCmpKind::LeU)
    }
1257
    fn visit_i32_gt_s(&mut self) -> Self::Output {
        // `i32.gt_s`: signed greater-than comparison.
        self.cmp_i32s(IntCmpKind::GtS)
    }
1261
    fn visit_i64_gt_s(&mut self) -> Self::Output {
        // `i64.gt_s`: signed greater-than comparison.
        self.cmp_i64s(IntCmpKind::GtS)
    }
1265
    fn visit_i32_gt_u(&mut self) -> Self::Output {
        // `i32.gt_u`: unsigned greater-than comparison.
        self.cmp_i32s(IntCmpKind::GtU)
    }
1269
    fn visit_i64_gt_u(&mut self) -> Self::Output {
        // `i64.gt_u`: unsigned greater-than comparison.
        self.cmp_i64s(IntCmpKind::GtU)
    }
1273
    fn visit_i32_ge_s(&mut self) -> Self::Output {
        // `i32.ge_s`: signed greater-than-or-equal comparison.
        self.cmp_i32s(IntCmpKind::GeS)
    }
1277
    fn visit_i64_ge_s(&mut self) -> Self::Output {
        // `i64.ge_s`: signed greater-than-or-equal comparison.
        self.cmp_i64s(IntCmpKind::GeS)
    }
1281
    fn visit_i32_ge_u(&mut self) -> Self::Output {
        // `i32.ge_u`: unsigned greater-than-or-equal comparison.
        self.cmp_i32s(IntCmpKind::GeU)
    }
1285
    fn visit_i64_ge_u(&mut self) -> Self::Output {
        // `i64.ge_u`: unsigned greater-than-or-equal comparison.
        self.cmp_i64s(IntCmpKind::GeU)
    }
1289
1290    fn visit_i32_eqz(&mut self) -> Self::Output {
1291        use OperandSize::*;
1292
1293        self.context.unop(self.masm, |masm, reg| {
1294            masm.cmp_with_set(writable!(reg.into()), RegImm::i32(0), IntCmpKind::Eq, S32)?;
1295            Ok(TypedReg::i32(reg))
1296        })
1297    }
1298
1299    fn visit_i64_eqz(&mut self) -> Self::Output {
1300        use OperandSize::*;
1301
1302        self.context.unop(self.masm, |masm, reg| {
1303            masm.cmp_with_set(writable!(reg.into()), RegImm::i64(0), IntCmpKind::Eq, S64)?;
1304            Ok(TypedReg::i32(reg)) // Return value for `i64.eqz` is an `i32`.
1305        })
1306    }
1307
1308    fn visit_i32_clz(&mut self) -> Self::Output {
1309        use OperandSize::*;
1310
1311        self.context.unop(self.masm, |masm, reg| {
1312            masm.clz(writable!(reg), reg, S32)?;
1313            Ok(TypedReg::i32(reg))
1314        })
1315    }
1316
1317    fn visit_i64_clz(&mut self) -> Self::Output {
1318        use OperandSize::*;
1319
1320        self.context.unop(self.masm, |masm, reg| {
1321            masm.clz(writable!(reg), reg, S64)?;
1322            Ok(TypedReg::i64(reg))
1323        })
1324    }
1325
1326    fn visit_i32_ctz(&mut self) -> Self::Output {
1327        use OperandSize::*;
1328
1329        self.context.unop(self.masm, |masm, reg| {
1330            masm.ctz(writable!(reg), reg, S32)?;
1331            Ok(TypedReg::i32(reg))
1332        })
1333    }
1334
1335    fn visit_i64_ctz(&mut self) -> Self::Output {
1336        use OperandSize::*;
1337
1338        self.context.unop(self.masm, |masm, reg| {
1339            masm.ctz(writable!(reg), reg, S64)?;
1340            Ok(TypedReg::i64(reg))
1341        })
1342    }
1343
1344    fn visit_i32_and(&mut self) -> Self::Output {
1345        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1346            masm.and(writable!(dst), dst, src, size)?;
1347            Ok(TypedReg::i32(dst))
1348        })
1349    }
1350
1351    fn visit_i64_and(&mut self) -> Self::Output {
1352        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1353            masm.and(writable!(dst), dst, src, size)?;
1354            Ok(TypedReg::i64(dst))
1355        })
1356    }
1357
1358    fn visit_i32_or(&mut self) -> Self::Output {
1359        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1360            masm.or(writable!(dst), dst, src, size)?;
1361            Ok(TypedReg::i32(dst))
1362        })
1363    }
1364
1365    fn visit_i64_or(&mut self) -> Self::Output {
1366        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1367            masm.or(writable!(dst), dst, src, size)?;
1368            Ok(TypedReg::i64(dst))
1369        })
1370    }
1371
1372    fn visit_i32_xor(&mut self) -> Self::Output {
1373        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1374            masm.xor(writable!(dst), dst, src, size)?;
1375            Ok(TypedReg::i32(dst))
1376        })
1377    }
1378
1379    fn visit_i64_xor(&mut self) -> Self::Output {
1380        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1381            masm.xor(writable!(dst), dst, src, size)?;
1382            Ok(TypedReg::i64(dst))
1383        })
1384    }
1385
1386    fn visit_i32_shl(&mut self) -> Self::Output {
1387        use ShiftKind::*;
1388
1389        self.context.i32_shift(self.masm, Shl)
1390    }
1391
1392    fn visit_i64_shl(&mut self) -> Self::Output {
1393        use ShiftKind::*;
1394
1395        self.context.i64_shift(self.masm, Shl)
1396    }
1397
1398    fn visit_i32_shr_s(&mut self) -> Self::Output {
1399        use ShiftKind::*;
1400
1401        self.context.i32_shift(self.masm, ShrS)
1402    }
1403
1404    fn visit_i64_shr_s(&mut self) -> Self::Output {
1405        use ShiftKind::*;
1406
1407        self.context.i64_shift(self.masm, ShrS)
1408    }
1409
1410    fn visit_i32_shr_u(&mut self) -> Self::Output {
1411        use ShiftKind::*;
1412
1413        self.context.i32_shift(self.masm, ShrU)
1414    }
1415
1416    fn visit_i64_shr_u(&mut self) -> Self::Output {
1417        use ShiftKind::*;
1418
1419        self.context.i64_shift(self.masm, ShrU)
1420    }
1421
1422    fn visit_i32_rotl(&mut self) -> Self::Output {
1423        use ShiftKind::*;
1424
1425        self.context.i32_shift(self.masm, Rotl)
1426    }
1427
1428    fn visit_i64_rotl(&mut self) -> Self::Output {
1429        use ShiftKind::*;
1430
1431        self.context.i64_shift(self.masm, Rotl)
1432    }
1433
1434    fn visit_i32_rotr(&mut self) -> Self::Output {
1435        use ShiftKind::*;
1436
1437        self.context.i32_shift(self.masm, Rotr)
1438    }
1439
1440    fn visit_i64_rotr(&mut self) -> Self::Output {
1441        use ShiftKind::*;
1442
1443        self.context.i64_shift(self.masm, Rotr)
1444    }
1445
    fn visit_end(&mut self) -> Self::Output {
        // `end`: close the innermost control frame.
        if !self.context.reachable {
            // In unreachable code, skip code emission but still update the
            // control-stack bookkeeping.
            self.handle_unreachable_end()
        } else {
            let mut control = self.pop_control_frame()?;
            control.emit_end(self.masm, &mut self.context)
        }
    }
1454
1455    fn visit_i32_popcnt(&mut self) -> Self::Output {
1456        use OperandSize::*;
1457        self.masm.popcnt(&mut self.context, S32)
1458    }
1459
1460    fn visit_i64_popcnt(&mut self) -> Self::Output {
1461        use OperandSize::*;
1462
1463        self.masm.popcnt(&mut self.context, S64)
1464    }
1465
1466    fn visit_i32_wrap_i64(&mut self) -> Self::Output {
1467        self.context.unop(self.masm, |masm, reg| {
1468            masm.wrap(writable!(reg), reg)?;
1469            Ok(TypedReg::i32(reg))
1470        })
1471    }
1472
1473    fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
1474        self.context.unop(self.masm, |masm, reg| {
1475            masm.extend(writable!(reg), reg, Extend::<Signed>::I64Extend32.into())?;
1476            Ok(TypedReg::i64(reg))
1477        })
1478    }
1479
1480    fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
1481        self.context.unop(self.masm, |masm, reg| {
1482            masm.extend(writable!(reg), reg, Extend::<Zero>::I64Extend32.into())?;
1483            Ok(TypedReg::i64(reg))
1484        })
1485    }
1486
1487    fn visit_i32_extend8_s(&mut self) -> Self::Output {
1488        self.context.unop(self.masm, |masm, reg| {
1489            masm.extend(writable!(reg), reg, Extend::<Signed>::I32Extend8.into())?;
1490            Ok(TypedReg::i32(reg))
1491        })
1492    }
1493
1494    fn visit_i32_extend16_s(&mut self) -> Self::Output {
1495        self.context.unop(self.masm, |masm, reg| {
1496            masm.extend(writable!(reg), reg, Extend::<Signed>::I32Extend16.into())?;
1497            Ok(TypedReg::i32(reg))
1498        })
1499    }
1500
1501    fn visit_i64_extend8_s(&mut self) -> Self::Output {
1502        self.context.unop(self.masm, |masm, reg| {
1503            masm.extend(writable!(reg), reg, Extend::<Signed>::I64Extend8.into())?;
1504            Ok(TypedReg::i64(reg))
1505        })
1506    }
1507
1508    fn visit_i64_extend16_s(&mut self) -> Self::Output {
1509        self.context.unop(self.masm, |masm, reg| {
1510            masm.extend(writable!(reg), reg, Extend::<Signed>::I64Extend16.into())?;
1511            Ok(TypedReg::i64(reg))
1512        })
1513    }
1514
1515    fn visit_i64_extend32_s(&mut self) -> Self::Output {
1516        self.context.unop(self.masm, |masm, reg| {
1517            masm.extend(writable!(reg), reg, Extend::<Signed>::I64Extend32.into())?;
1518            Ok(TypedReg::i64(reg))
1519        })
1520    }
1521
1522    fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
1523        use OperandSize::*;
1524
1525        self.context
1526            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, dst_size| {
1527                masm.signed_truncate(writable!(dst), src, S32, dst_size, TruncKind::Unchecked)
1528            })
1529    }
1530
1531    fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
1532        use OperandSize::*;
1533
1534        self.masm
1535            .unsigned_truncate(&mut self.context, S32, S32, TruncKind::Unchecked)
1536    }
1537
1538    fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
1539        use OperandSize::*;
1540
1541        self.context
1542            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, dst_size| {
1543                masm.signed_truncate(writable!(dst), src, S64, dst_size, TruncKind::Unchecked)
1544            })
1545    }
1546
1547    fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
1548        use OperandSize::*;
1549        self.masm
1550            .unsigned_truncate(&mut self.context, S64, S32, TruncKind::Unchecked)
1551    }
1552
1553    fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
1554        use OperandSize::*;
1555
1556        self.context
1557            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, dst_size| {
1558                masm.signed_truncate(writable!(dst), src, S32, dst_size, TruncKind::Unchecked)
1559            })
1560    }
1561
1562    fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
1563        use OperandSize::*;
1564
1565        self.masm
1566            .unsigned_truncate(&mut self.context, S32, S64, TruncKind::Unchecked)
1567    }
1568
1569    fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
1570        use OperandSize::*;
1571
1572        self.context
1573            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, dst_size| {
1574                masm.signed_truncate(writable!(dst), src, S64, dst_size, TruncKind::Unchecked)
1575            })
1576    }
1577
1578    fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
1579        use OperandSize::*;
1580
1581        self.masm
1582            .unsigned_truncate(&mut self.context, S64, S64, TruncKind::Unchecked)
1583    }
1584
1585    fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
1586        self.context
1587            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, size| {
1588                masm.reinterpret_float_as_int(writable!(dst), src.into(), size)
1589            })
1590    }
1591
1592    fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
1593        self.context
1594            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, size| {
1595                masm.reinterpret_float_as_int(writable!(dst), src.into(), size)
1596            })
1597    }
1598
    fn visit_local_get(&mut self, index: u32) -> Self::Output {
        use WasmValType::*;
        // `local.get` emits no machine code here: a lazy reference to the
        // local's slot is pushed onto the value stack instead.
        let context = &mut self.context;
        let slot = context.frame.get_wasm_local(index);
        match slot.ty {
            I32 | I64 | F32 | F64 | V128 => context.stack.push(Val::local(index, slot.ty)),
            Ref(rt) => match rt.heap_type {
                // Only function references are supported; any other
                // reference type is rejected.
                WasmHeapType::Func => context.stack.push(Val::local(index, slot.ty)),
                _ => bail!(CodeGenError::unsupported_wasm_type()),
            },
        }

        Ok(())
    }
1613
1614    fn visit_local_set(&mut self, index: u32) -> Self::Output {
1615        let src = self.emit_set_local(index)?;
1616        self.context.free_reg(src);
1617        Ok(())
1618    }
1619
1620    fn visit_call(&mut self, index: u32) -> Self::Output {
1621        let callee = self.env.callee_from_index(FuncIndex::from_u32(index));
1622        FnCall::emit::<M>(&mut self.env, self.masm, &mut self.context, callee)?;
1623        Ok(())
1624    }
1625
    fn visit_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        // Spill now because `emit_lazy_init_funcref` and the `FnCall::emit`
        // invocations will both trigger spills since they both call functions.
        // However, the machine instructions for the spill emitted by
        // `emit_lazy_init_funcref` will be jumped over if the funcref was
        // previously initialized which may result in the machine stack
        // becoming unbalanced.
        self.context.spill(self.masm)?;

        let type_index = TypeIndex::from_u32(type_index);
        let table_index = TableIndex::from_u32(table_index);

        self.emit_lazy_init_funcref(table_index)?;

        // Perform the indirect call.
        // This code assumes that [`Self::emit_lazy_init_funcref`] will
        // push the funcref to the value stack.
        let funcref_ptr = self
            .context
            .stack
            .peek()
            .map(|v| v.unwrap_reg())
            .ok_or_else(|| CodeGenError::missing_values_in_stack())?;
        // Trap if the funcref is null.
        self.masm
            .trapz(funcref_ptr.into(), TRAP_INDIRECT_CALL_TO_NULL)?;
        // Verify that the callee's signature matches `type_index`.
        self.emit_typecheck_funcref(funcref_ptr.into(), type_index)?;

        let callee = self.env.funcref(type_index);
        FnCall::emit::<M>(&mut self.env, self.masm, &mut self.context, callee)?;
        Ok(())
    }
1657
1658    fn visit_table_init(&mut self, elem: u32, table: u32) -> Self::Output {
1659        let at = self.context.stack.ensure_index_at(3)?;
1660
1661        self.context
1662            .stack
1663            .insert_many(at, &[table.try_into()?, elem.try_into()?]);
1664
1665        let builtin = self.env.builtins.table_init::<M::ABI, M::Ptr>()?;
1666        FnCall::emit::<M>(
1667            &mut self.env,
1668            self.masm,
1669            &mut self.context,
1670            Callee::Builtin(builtin.clone()),
1671        )?;
1672        self.context.pop_and_free(self.masm)
1673    }
1674
    fn visit_table_copy(&mut self, dst: u32, src: u32) -> Self::Output {
        // `table.copy` takes three operands from the value stack; ensure
        // they are present.
        let at = self.context.stack.ensure_index_at(3)?;
        // The builtin additionally expects the destination and source table
        // indices, inserted below the three operands.
        self.context
            .stack
            .insert_many(at, &[dst.try_into()?, src.try_into()?]);

        let builtin = self.env.builtins.table_copy::<M::ABI, M::Ptr>()?;
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin),
        )?;
        // Drop the value the builtin call left on the stack.
        self.context.pop_and_free(self.masm)
    }
1690
1691    fn visit_table_get(&mut self, table: u32) -> Self::Output {
1692        let table_index = TableIndex::from_u32(table);
1693        let table = self.env.table(table_index);
1694        let heap_type = table.ref_type.heap_type;
1695
1696        match heap_type {
1697            WasmHeapType::Func => self.emit_lazy_init_funcref(table_index),
1698            _ => Err(anyhow!(CodeGenError::unsupported_wasm_type())),
1699        }
1700    }
1701
1702    fn visit_table_grow(&mut self, table: u32) -> Self::Output {
1703        let table_index = TableIndex::from_u32(table);
1704        let table_ty = self.env.table(table_index);
1705        let builtin = match table_ty.ref_type.heap_type {
1706            WasmHeapType::Func => self.env.builtins.table_grow_func_ref::<M::ABI, M::Ptr>()?,
1707            _ => bail!(CodeGenError::unsupported_wasm_type()),
1708        };
1709
1710        let len = self.context.stack.len();
1711        // table.grow` requires at least 2 elements on the value stack.
1712        let at = self.context.stack.ensure_index_at(2)?;
1713
1714        // The table_grow builtin expects the parameters in a different
1715        // order.
1716        // The value stack at this point should contain:
1717        // [ init_value | delta ] (stack top)
1718        // but the builtin function expects the init value as the last
1719        // argument.
1720        self.context.stack.inner_mut().swap(len - 1, len - 2);
1721        self.context.stack.insert_many(at, &[table.try_into()?]);
1722
1723        FnCall::emit::<M>(
1724            &mut self.env,
1725            self.masm,
1726            &mut self.context,
1727            Callee::Builtin(builtin.clone()),
1728        )?;
1729
1730        Ok(())
1731    }
1732
1733    fn visit_table_size(&mut self, table: u32) -> Self::Output {
1734        let table_index = TableIndex::from_u32(table);
1735        let table_data = self.env.resolve_table_data(table_index);
1736        self.emit_compute_table_size(&table_data)
1737    }
1738
1739    fn visit_table_fill(&mut self, table: u32) -> Self::Output {
1740        let table_index = TableIndex::from_u32(table);
1741        let table_ty = self.env.table(table_index);
1742
1743        ensure!(
1744            table_ty.ref_type.heap_type == WasmHeapType::Func,
1745            CodeGenError::unsupported_wasm_type()
1746        );
1747
1748        let builtin = self.env.builtins.table_fill_func_ref::<M::ABI, M::Ptr>()?;
1749
1750        let at = self.context.stack.ensure_index_at(3)?;
1751
1752        self.context.stack.insert_many(at, &[table.try_into()?]);
1753        FnCall::emit::<M>(
1754            &mut self.env,
1755            self.masm,
1756            &mut self.context,
1757            Callee::Builtin(builtin.clone()),
1758        )?;
1759        self.context.pop_and_free(self.masm)
1760    }
1761
    fn visit_table_set(&mut self, table: u32) -> Self::Output {
        // Pointer-sized type, used below when OR-ing the init bit into the
        // funcref value.
        let ptr_type = self.env.ptr_type();
        let table_index = TableIndex::from_u32(table);
        let table_data = self.env.resolve_table_data(table_index);
        let table = self.env.table(table_index);
        match table.ref_type.heap_type {
            WasmHeapType::Func => {
                // Eagerly-initialized tables are not supported: this lowering
                // marks the stored entry with FUNCREF_INIT_BIT, which only
                // makes sense under lazy table initialization.
                ensure!(
                    self.tunables.table_lazy_init,
                    CodeGenError::unsupported_table_eager_init()
                );
                // Stack: [ .. | index | value ], value on top.
                let value = self.context.pop_to_reg(self.masm, None)?;
                let index = self.context.pop_to_reg(self.masm, None)?;
                let base = self.context.any_gpr(self.masm)?;
                // Compute the address of the table element being assigned.
                let elem_addr =
                    self.emit_compute_table_elem_addr(index.into(), base, &table_data)?;
                // Set the initialized bit.
                self.masm.or(
                    writable!(value.into()),
                    value.into(),
                    RegImm::i64(FUNCREF_INIT_BIT as i64),
                    ptr_type.try_into()?,
                )?;

                self.masm.store_ptr(value.into(), elem_addr)?;

                // All scratch registers can be released now.
                self.context.free_reg(value);
                self.context.free_reg(index);
                self.context.free_reg(base);
                Ok(())
            }
            _ => Err(anyhow!(CodeGenError::unsupported_wasm_type())),
        }
    }
1796
1797    fn visit_elem_drop(&mut self, index: u32) -> Self::Output {
1798        let elem_drop = self.env.builtins.elem_drop::<M::ABI, M::Ptr>()?;
1799        self.context.stack.extend([index.try_into()?]);
1800        FnCall::emit::<M>(
1801            &mut self.env,
1802            self.masm,
1803            &mut self.context,
1804            Callee::Builtin(elem_drop),
1805        )?;
1806        Ok(())
1807    }
1808
1809    fn visit_memory_init(&mut self, data_index: u32, mem: u32) -> Self::Output {
1810        let at = self.context.stack.ensure_index_at(3)?;
1811        self.context
1812            .stack
1813            .insert_many(at, &[mem.try_into()?, data_index.try_into()?]);
1814        let builtin = self.env.builtins.memory_init::<M::ABI, M::Ptr>()?;
1815        FnCall::emit::<M>(
1816            &mut self.env,
1817            self.masm,
1818            &mut self.context,
1819            Callee::Builtin(builtin),
1820        )?;
1821        self.context.pop_and_free(self.masm)
1822    }
1823
    fn visit_memory_copy(&mut self, dst_mem: u32, src_mem: u32) -> Self::Output {
        // At this point, the stack is expected to contain:
        //     [ dst_offset, src_offset, len ]
        // The following code inserts the missing params, so that stack contains:
        //     [ vmctx, dst_mem, dst_offset, src_mem, src_offset, len ]
        // Which is the order expected by the builtin function.
        //
        // First verify that all three operands are present.
        let _ = self.context.stack.ensure_index_at(3)?;
        // Insert `src_mem` right before `src_offset` (2 entries from the top).
        let at = self.context.stack.ensure_index_at(2)?;
        self.context.stack.insert_many(at, &[src_mem.try_into()?]);

        // One element was inserted above, so instead of 3, we use 4.
        let at = self.context.stack.ensure_index_at(4)?;
        self.context.stack.insert_many(at, &[dst_mem.try_into()?]);

        let builtin = self.env.builtins.memory_copy::<M::ABI, M::Ptr>()?;

        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin),
        )?;
        // Pop and free the builtin's return value.
        self.context.pop_and_free(self.masm)
    }
1848
1849    fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
1850        let at = self.context.stack.ensure_index_at(3)?;
1851
1852        self.context.stack.insert_many(at, &[mem.try_into()?]);
1853
1854        let builtin = self.env.builtins.memory_fill::<M::ABI, M::Ptr>()?;
1855        FnCall::emit::<M>(
1856            &mut self.env,
1857            self.masm,
1858            &mut self.context,
1859            Callee::Builtin(builtin),
1860        )?;
1861        self.context.pop_and_free(self.masm)
1862    }
1863
1864    fn visit_memory_size(&mut self, mem: u32) -> Self::Output {
1865        let heap = self.env.resolve_heap(MemoryIndex::from_u32(mem));
1866        self.emit_compute_memory_size(&heap)
1867    }
1868
    fn visit_memory_grow(&mut self, mem: u32) -> Self::Output {
        // Verify the `delta` operand is present.
        let _ = self.context.stack.ensure_index_at(1)?;
        // The stack at this point contains: [ delta ]
        // The desired state is
        //   [ vmctx, delta, index ]
        self.context.stack.extend([mem.try_into()?]);

        let heap = self.env.resolve_heap(MemoryIndex::from_u32(mem));
        let builtin = self.env.builtins.memory32_grow::<M::ABI, M::Ptr>()?;
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin),
        )?;

        // The memory32_grow builtin returns a pointer type, therefore we must
        // ensure that the return type is representative of the address space of
        // the heap type.
        match (self.env.ptr_type(), heap.index_type()) {
            (WasmValType::I64, WasmValType::I64) => Ok(()),
            // When the heap type is smaller than the pointer type, we adjust
            // the result of the memory32_grow builtin.
            (WasmValType::I64, WasmValType::I32) => {
                // Narrow the 64-bit result to 32 bits and re-push it typed
                // as an i32.
                let top: Reg = self.context.pop_to_reg(self.masm, None)?.into();
                self.masm.wrap(writable!(top.into()), top.into())?;
                self.context.stack.push(TypedReg::i32(top).into());
                Ok(())
            }
            // 32-bit host pointers are not supported by this lowering.
            _ => Err(anyhow!(CodeGenError::unsupported_32_bit_platform())),
        }
    }
1901
1902    fn visit_data_drop(&mut self, data_index: u32) -> Self::Output {
1903        self.context.stack.extend([data_index.try_into()?]);
1904
1905        let builtin = self.env.builtins.data_drop::<M::ABI, M::Ptr>()?;
1906        FnCall::emit::<M>(
1907            &mut self.env,
1908            self.masm,
1909            &mut self.context,
1910            Callee::Builtin(builtin),
1911        )
1912    }
1913
    fn visit_nop(&mut self) -> Self::Output {
        // `nop` emits no machine code.
        Ok(())
    }
1917
1918    fn visit_if(&mut self, blockty: BlockType) -> Self::Output {
1919        self.control_frames.push(ControlStackFrame::r#if(
1920            self.env.resolve_block_sig(blockty),
1921            self.masm,
1922            &mut self.context,
1923        )?);
1924
1925        Ok(())
1926    }
1927
1928    fn visit_else(&mut self) -> Self::Output {
1929        if !self.context.reachable {
1930            self.handle_unreachable_else()
1931        } else {
1932            let control = self
1933                .control_frames
1934                .last_mut()
1935                .ok_or_else(|| CodeGenError::control_frame_expected())?;
1936            control.emit_else(self.masm, &mut self.context)
1937        }
1938    }
1939
1940    fn visit_block(&mut self, blockty: BlockType) -> Self::Output {
1941        self.control_frames.push(ControlStackFrame::block(
1942            self.env.resolve_block_sig(blockty),
1943            self.masm,
1944            &mut self.context,
1945        )?);
1946
1947        Ok(())
1948    }
1949
1950    fn visit_loop(&mut self, blockty: BlockType) -> Self::Output {
1951        self.control_frames.push(ControlStackFrame::r#loop(
1952            self.env.resolve_block_sig(blockty),
1953            self.masm,
1954            &mut self.context,
1955        )?);
1956
1957        self.maybe_emit_epoch_check()?;
1958        self.maybe_emit_fuel_check()
1959    }
1960
    fn visit_br(&mut self, depth: u32) -> Self::Output {
        // Translate the relative branch depth into an index into the
        // control frame stack.
        let index = control_index(depth, self.control_frames.len())?;
        let frame = &mut self.control_frames[index];
        // Unconditionally jump to the target frame, first popping the
        // frame's results into their expected locations (the frame's
        // pre-computed return area, if any).
        self.context
            .unconditional_jump(frame, self.masm, |masm, cx, frame| {
                frame.pop_abi_results::<M, _>(cx, masm, |results, _, _| {
                    Ok(results.ret_area().copied())
                })
            })
    }
1971
    fn visit_br_if(&mut self, depth: u32) -> Self::Output {
        let index = control_index(depth, self.control_frames.len())?;
        let frame = &mut self.control_frames[index];
        frame.set_as_target();

        // Pop the branch condition into a register, taking care not to
        // clobber the frame's result registers while doing so.
        let top = {
            let top = self.context.without::<Result<TypedReg>, M, _>(
                frame.results::<M>()?.regs(),
                self.masm,
                |ctx, masm| ctx.pop_to_reg(masm, None),
            )??;
            // Explicitly save any live registers and locals before setting up
            // the branch state.
            // In some cases, calculating the `top` value above, will result in
            // a spill, thus the following one will result in a no-op.
            self.context.spill(self.masm)?;
            frame.top_abi_results::<M, _>(
                &mut self.context,
                self.masm,
                |results, context, masm| {
                    // In the case of `br_if` there's a possibility that we'll
                    // exit early from the block or fallthrough, for
                    // a fallthrough, we cannot rely on the pre-computed return area;
                    // it must be recalculated so that any values that are
                    // generated are correctly placed near the current stack
                    // pointer.
                    if results.on_stack() {
                        let stack_consumed = context.stack.sizeof(results.stack_operands_len());
                        let base = masm.sp_offset()?.as_u32() - stack_consumed;
                        let offs = base + results.size();
                        Ok(Some(RetArea::sp(SPOffset::from_u32(offs))))
                    } else {
                        Ok(None)
                    }
                },
            )?;
            top
        };

        // Emit instructions to balance the machine stack if the frame has
        // a different offset.
        // When cleanup is needed, the condition is inverted (Eq) and targets
        // a local fallthrough label so the cleanup sequence can run before
        // jumping to the frame's label; otherwise (Ne) branch straight to
        // the frame's label.
        let current_sp_offset = self.masm.sp_offset()?;
        let results_size = frame.results::<M>()?.size();
        let state = frame.stack_state();
        let (label, cmp, needs_cleanup) = if current_sp_offset > state.target_offset {
            (self.masm.get_label()?, IntCmpKind::Eq, true)
        } else {
            (*frame.label(), IntCmpKind::Ne, false)
        };

        self.masm
            .branch(cmp, top.reg.into(), top.reg.into(), label, OperandSize::S32)?;
        self.context.free_reg(top);

        if needs_cleanup {
            // Emit instructions to balance the stack and jump if not falling
            // through.
            self.masm.memmove(
                current_sp_offset,
                state.target_offset,
                results_size,
                MemMoveDirection::LowToHigh,
            )?;
            self.masm.ensure_sp_for_jump(state.target_offset)?;
            self.masm.jmp(*frame.label())?;

            // Restore sp_offset to what it was for falling through and emit
            // fallthrough label.
            self.masm.reset_stack_pointer(current_sp_offset)?;
            self.masm.bind(label)?;
        }

        Ok(())
    }
2046
    fn visit_br_table(&mut self, targets: BrTable<'a>) -> Self::Output {
        // +1 to account for the default target.
        let len = targets.len() + 1;
        // SmallVec<[_; 5]> to match the binary emission layer (e.g
        // see `JmpTableSeq'), but here we use 5 instead since we
        // bundle the default target as the last element in the array.
        let mut labels: SmallVec<[_; 5]> = smallvec![];
        for _ in 0..len {
            labels.push(self.masm.get_label()?);
        }

        let default_index = control_index(targets.default(), self.control_frames.len())?;
        let default_frame = &mut self.control_frames[default_index];
        let default_result = default_frame.results::<M>()?;

        // Pop the table index into a register and allocate a temporary for
        // the jump-table emission, keeping the default frame's result
        // registers out of the allocator's reach.
        let (index, tmp) = {
            let index_and_tmp = self.context.without::<Result<(TypedReg, _)>, M, _>(
                default_result.regs(),
                self.masm,
                |cx, masm| Ok((cx.pop_to_reg(masm, None)?, cx.any_gpr(masm)?)),
            )??;

            // Materialize any constants or locals into their result representation,
            // so that when reachability is restored, they are correctly located.
            default_frame.top_abi_results::<M, _>(
                &mut self.context,
                self.masm,
                |results, _, _| Ok(results.ret_area().copied()),
            )?;
            index_and_tmp
        };

        self.masm.jmp_table(&labels, index.into(), tmp)?;
        // Save the original stack pointer offset; we will reset the stack
        // pointer to this offset after jumping to each of the targets. Each
        // jump might adjust the stack according to the base offset of the
        // target.
        let current_sp = self.masm.sp_offset()?;

        // Emit one trampoline per target (the default target is chained as
        // the last entry).
        for (t, l) in targets
            .targets()
            .into_iter()
            .chain(std::iter::once(Ok(targets.default())))
            .zip(labels.iter())
        {
            let control_index = control_index(t?, self.control_frames.len())?;
            let frame = &mut self.control_frames[control_index];
            // Reset the stack pointer to its original offset. This is needed
            // because each jump will potentially adjust the stack pointer
            // according to the base offset of the target.
            self.masm.reset_stack_pointer(current_sp)?;

            // NB: We don't perform any result handling as it was
            // already taken care of above before jumping to the
            // jump table.
            self.masm.bind(*l)?;
            // Ensure that the stack pointer is correctly positioned before
            // jumping to the jump table code.
            let state = frame.stack_state();
            self.masm.ensure_sp_for_jump(state.target_offset)?;
            self.masm.jmp(*frame.label())?;
            frame.set_as_target();
        }
        // Finally reset the stack pointer to the original location.
        // The reachability analysis, will ensure it's correctly located
        // once reachability is restored.
        self.masm.reset_stack_pointer(current_sp)?;
        // `br_table` unconditionally transfers control, so code following
        // it is unreachable.
        self.context.reachable = false;
        self.context.free_reg(index.reg);
        self.context.free_reg(tmp);

        Ok(())
    }
2120
    fn visit_return(&mut self) -> Self::Output {
        // Grab the outermost frame, which is the function's body
        // frame. We don't rely on [`codegen::control_index`] since
        // this frame is implicit and we know that it should exist at
        // index 0.
        let outermost = &mut self.control_frames[0];
        // Jump to the function's exit, first popping the function's results
        // into their expected locations (the frame's pre-computed return
        // area, if any).
        self.context
            .unconditional_jump(outermost, self.masm, |masm, cx, frame| {
                frame.pop_abi_results::<M, _>(cx, masm, |results, _, _| {
                    Ok(results.ret_area().copied())
                })
            })
    }
2134
2135    fn visit_unreachable(&mut self) -> Self::Output {
2136        self.masm.unreachable()?;
2137        self.context.reachable = false;
2138        // Set the implicit outermost frame as target to perform the necessary
2139        // stack clean up.
2140        let outermost = &mut self.control_frames[0];
2141        outermost.set_as_target();
2142
2143        Ok(())
2144    }
2145
2146    fn visit_local_tee(&mut self, index: u32) -> Self::Output {
2147        let typed_reg = self.emit_set_local(index)?;
2148        self.context.stack.push(typed_reg.into());
2149
2150        Ok(())
2151    }
2152
2153    fn visit_global_get(&mut self, global_index: u32) -> Self::Output {
2154        let index = GlobalIndex::from_u32(global_index);
2155        let (ty, base, offset) = self.emit_get_global_addr(index)?;
2156        let addr = self.masm.address_at_reg(base, offset)?;
2157        let dst = self.context.reg_for_type(ty, self.masm)?;
2158        self.masm.load(addr, writable!(dst), ty.try_into()?)?;
2159        self.context.stack.push(Val::reg(dst, ty));
2160
2161        self.context.free_reg(base);
2162
2163        Ok(())
2164    }
2165
2166    fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
2167        let index = GlobalIndex::from_u32(global_index);
2168        let (ty, base, offset) = self.emit_get_global_addr(index)?;
2169        let addr = self.masm.address_at_reg(base, offset)?;
2170
2171        let typed_reg = self.context.pop_to_reg(self.masm, None)?;
2172        self.masm
2173            .store(typed_reg.reg.into(), addr, ty.try_into()?)?;
2174        self.context.free_reg(typed_reg.reg);
2175        self.context.free_reg(base);
2176
2177        Ok(())
2178    }
2179
    fn visit_drop(&mut self) -> Self::Output {
        // Discard the value at the top of the stack, releasing whatever
        // resource backs it: registers go back to the register allocator,
        // memory slots are freed from the machine stack, and other value
        // kinds (e.g. immediates) need no cleanup.
        self.context.drop_last(1, |regalloc, val| match val {
            Val::Reg(tr) => Ok(regalloc.free(tr.reg.into())),
            Val::Memory(m) => self.masm.free_stack(m.slot.size),
            _ => Ok(()),
        })
    }
2187
    fn visit_select(&mut self) -> Self::Output {
        // Stack: [ .. | val1 | val2 | cond ], condition on top.
        let cond = self.context.pop_to_reg(self.masm, None)?;
        let val2 = self.context.pop_to_reg(self.masm, None)?;
        let val1 = self.context.pop_to_reg(self.masm, None)?;
        self.masm
            .cmp(cond.reg.into(), RegImm::i32(0), OperandSize::S32)?;
        // Conditionally move val1 to val2 if the comparison is
        // not zero.
        self.masm.cmov(
            writable!(val2.into()),
            val1.into(),
            IntCmpKind::Ne,
            val1.ty.try_into()?,
        )?;
        // `val2` now holds the selected value; push it and release the
        // remaining registers.
        self.context.stack.push(val2.into());
        self.context.free_reg(val1.reg);
        self.context.free_reg(cond);

        Ok(())
    }
2208
    fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output {
        // Plain 32-bit load producing an i32.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::Operand(OperandSize::S32),
        )
    }
2216
    fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        // 8-bit load, sign-extended to i32.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::ScalarExtend(Extend::<Signed>::I32Extend8.into()),
        )
    }
2224
    fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        // 8-bit load, zero-extended to i32.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::ScalarExtend(Extend::<Zero>::I32Extend8.into()),
        )
    }
2232
    fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        // 16-bit load, sign-extended to i32.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::ScalarExtend(Extend::<Signed>::I32Extend16.into()),
        )
    }
2240
    fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        // 16-bit load, zero-extended to i32.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::ScalarExtend(Extend::<Zero>::I32Extend16.into()),
        )
    }
2248
    fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
        // Plain 32-bit store.
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S32))
    }
2252
    fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
        // Store the low 8 bits of an i32.
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S8))
    }
2256
    fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
        // Store the low 16 bits of an i32.
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S16))
    }
2260
    fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        // 8-bit load, sign-extended to i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Signed>::I64Extend8.into()),
        )
    }
2268
    fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        // 8-bit load, zero-extended to i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Zero>::I64Extend8.into()),
        )
    }
2276
    fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        // 16-bit load, zero-extended to i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Zero>::I64Extend16.into()),
        )
    }
2284
    fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        // 16-bit load, sign-extended to i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Signed>::I64Extend16.into()),
        )
    }
2292
    fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        // 32-bit load, zero-extended to i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Zero>::I64Extend32.into()),
        )
    }
2300
    fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
        // 32-bit load, sign-extended to i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Signed>::I64Extend32.into()),
        )
    }
2308
    fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output {
        // Plain 64-bit load producing an i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::Operand(OperandSize::S64),
        )
    }
2316
    fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
        // Plain 64-bit store.
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S64))
    }
2320
    fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
        // Store the low 8 bits of an i64.
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S8))
    }
2324
    fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
        // Store the low 16 bits of an i64.
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S16))
    }
2328
    fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
        // Store the low 32 bits of an i64.
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S32))
    }
2332
    fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output {
        // 32-bit load producing an f32.
        self.emit_wasm_load(
            &memarg,
            WasmValType::F32,
            LoadKind::Operand(OperandSize::S32),
        )
    }
2340
    fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
        // 32-bit store of an f32.
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S32))
    }
2344
    fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output {
        // 64-bit load producing an f64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::F64,
            LoadKind::Operand(OperandSize::S64),
        )
    }
2352
    fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
        // 64-bit store of an f64.
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S64))
    }
2356
    fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
        use OperandSize::*;

        // Saturating f32 -> signed i32 truncation. All `trunc_sat`
        // lowerings pass `TruncKind::Checked` (the non-trapping variant).
        self.context
            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, dst_size| {
                masm.signed_truncate(writable!(dst), src, S32, dst_size, TruncKind::Checked)
            })
    }
2365
    fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
        use OperandSize::*;

        // Saturating f32 -> unsigned i32 truncation (src size S32, dst size S32).
        self.masm
            .unsigned_truncate(&mut self.context, S32, S32, TruncKind::Checked)
    }
2372
    fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
        use OperandSize::*;

        // Saturating f64 -> signed i32 truncation.
        self.context
            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, dst_size| {
                masm.signed_truncate(writable!(dst), src, S64, dst_size, TruncKind::Checked)
            })
    }
2381
    fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
        use OperandSize::*;

        // Saturating f64 -> unsigned i32 truncation (src size S64, dst size S32).
        self.masm
            .unsigned_truncate(&mut self.context, S64, S32, TruncKind::Checked)
    }
2388
    fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
        use OperandSize::*;

        // Saturating f32 -> signed i64 truncation.
        self.context
            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, dst_size| {
                masm.signed_truncate(writable!(dst), src, S32, dst_size, TruncKind::Checked)
            })
    }
2397
    fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
        use OperandSize::*;

        // Saturating f32 -> unsigned i64 truncation (src size S32, dst size S64).
        self.masm
            .unsigned_truncate(&mut self.context, S32, S64, TruncKind::Checked)
    }
2404
    fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
        use OperandSize::*;

        // Saturating f64 -> signed i64 truncation.
        self.context
            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, dst_size| {
                masm.signed_truncate(writable!(dst), src, S64, dst_size, TruncKind::Checked)
            })
    }
2413
    fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
        use OperandSize::*;

        // Saturating f64 -> unsigned i64 truncation (src size S64, dst size S64).
        self.masm
            .unsigned_truncate(&mut self.context, S64, S64, TruncKind::Checked)
    }
2420
    fn visit_i64_add128(&mut self) -> Self::Output {
        // 128-bit addition over two (lo, hi) i64 pairs; the result is
        // written back into the lhs registers.
        self.context
            .binop128(self.masm, |masm, lhs_lo, lhs_hi, rhs_lo, rhs_hi| {
                masm.add128(
                    writable!(lhs_lo),
                    writable!(lhs_hi),
                    lhs_lo,
                    lhs_hi,
                    rhs_lo,
                    rhs_hi,
                )?;
                Ok((TypedReg::i64(lhs_lo), TypedReg::i64(lhs_hi)))
            })
    }
2435
    fn visit_i64_sub128(&mut self) -> Self::Output {
        // 128-bit subtraction over two (lo, hi) i64 pairs; the result is
        // written back into the lhs registers.
        self.context
            .binop128(self.masm, |masm, lhs_lo, lhs_hi, rhs_lo, rhs_hi| {
                masm.sub128(
                    writable!(lhs_lo),
                    writable!(lhs_hi),
                    lhs_lo,
                    lhs_hi,
                    rhs_lo,
                    rhs_hi,
                )?;
                Ok((TypedReg::i64(lhs_lo), TypedReg::i64(lhs_hi)))
            })
    }
2450
    fn visit_i64_mul_wide_s(&mut self) -> Self::Output {
        // Signed 64x64 -> 128-bit widening multiplication.
        self.masm.mul_wide(&mut self.context, MulWideKind::Signed)
    }
2454
    fn visit_i64_mul_wide_u(&mut self) -> Self::Output {
        // Unsigned 64x64 -> 128-bit widening multiplication.
        self.masm.mul_wide(&mut self.context, MulWideKind::Unsigned)
    }
2458
    fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        // Atomic 8-bit load, zero-extended to i32.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::Atomic(OperandSize::S8, Some(Extend::<Zero>::I32Extend8.into())),
        )
    }
2466
    fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        // Atomic 16-bit load, zero-extended to i32.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::Atomic(OperandSize::S16, Some(Extend::<Zero>::I32Extend16.into())),
        )
    }
2474
    fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
        // Atomic 32-bit load; no extension needed.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::Atomic(OperandSize::S32, None),
        )
    }
2482
    fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        // Atomic 8-bit load, zero-extended to i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::Atomic(OperandSize::S8, Some(Extend::<Zero>::I64Extend8.into())),
        )
    }
2490
    fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        // Atomic 16-bit load, zero-extended to i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::Atomic(OperandSize::S16, Some(Extend::<Zero>::I64Extend16.into())),
        )
    }
2498
    fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        // Atomic 32-bit load, zero-extended to i64.
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::Atomic(OperandSize::S32, Some(Extend::<Zero>::I64Extend32.into())),
        )
    }
2506
2507    fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
2508        self.emit_wasm_load(
2509            &memarg,
2510            WasmValType::I64,
2511            LoadKind::Atomic(OperandSize::S64, None),
2512        )
2513    }
2514
2515    fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
2516        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S32))
2517    }
2518
2519    fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
2520        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S64))
2521    }
2522
2523    fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
2524        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S8))
2525    }
2526
2527    fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
2528        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S16))
2529    }
2530
2531    fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
2532        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S8))
2533    }
2534
2535    fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
2536        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S16))
2537    }
2538
2539    fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
2540        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S32))
2541    }
2542
2543    fn visit_i32_atomic_rmw8_add_u(&mut self, arg: MemArg) -> Self::Output {
2544        self.emit_atomic_rmw(
2545            &arg,
2546            RmwOp::Add,
2547            OperandSize::S8,
2548            Some(Extend::<Zero>::I32Extend8),
2549        )
2550    }
2551
2552    fn visit_i32_atomic_rmw16_add_u(&mut self, arg: MemArg) -> Self::Output {
2553        self.emit_atomic_rmw(
2554            &arg,
2555            RmwOp::Add,
2556            OperandSize::S16,
2557            Some(Extend::<Zero>::I32Extend16),
2558        )
2559    }
2560
2561    fn visit_i32_atomic_rmw_add(&mut self, arg: MemArg) -> Self::Output {
2562        self.emit_atomic_rmw(&arg, RmwOp::Add, OperandSize::S32, None)
2563    }
2564
2565    fn visit_i64_atomic_rmw8_add_u(&mut self, arg: MemArg) -> Self::Output {
2566        self.emit_atomic_rmw(
2567            &arg,
2568            RmwOp::Add,
2569            OperandSize::S8,
2570            Some(Extend::<Zero>::I64Extend8),
2571        )
2572    }
2573
2574    fn visit_i64_atomic_rmw16_add_u(&mut self, arg: MemArg) -> Self::Output {
2575        self.emit_atomic_rmw(
2576            &arg,
2577            RmwOp::Add,
2578            OperandSize::S16,
2579            Some(Extend::<Zero>::I64Extend16),
2580        )
2581    }
2582
2583    fn visit_i64_atomic_rmw32_add_u(&mut self, arg: MemArg) -> Self::Output {
2584        self.emit_atomic_rmw(
2585            &arg,
2586            RmwOp::Add,
2587            OperandSize::S32,
2588            Some(Extend::<Zero>::I64Extend32),
2589        )
2590    }
2591
2592    fn visit_i64_atomic_rmw_add(&mut self, arg: MemArg) -> Self::Output {
2593        self.emit_atomic_rmw(&arg, RmwOp::Add, OperandSize::S64, None)
2594    }
2595
2596    fn visit_i32_atomic_rmw8_sub_u(&mut self, arg: MemArg) -> Self::Output {
2597        self.emit_atomic_rmw(
2598            &arg,
2599            RmwOp::Sub,
2600            OperandSize::S8,
2601            Some(Extend::<Zero>::I32Extend8),
2602        )
2603    }
2604    fn visit_i32_atomic_rmw16_sub_u(&mut self, arg: MemArg) -> Self::Output {
2605        self.emit_atomic_rmw(
2606            &arg,
2607            RmwOp::Sub,
2608            OperandSize::S16,
2609            Some(Extend::<Zero>::I32Extend16),
2610        )
2611    }
2612
2613    fn visit_i32_atomic_rmw_sub(&mut self, arg: MemArg) -> Self::Output {
2614        self.emit_atomic_rmw(&arg, RmwOp::Sub, OperandSize::S32, None)
2615    }
2616
2617    fn visit_i64_atomic_rmw8_sub_u(&mut self, arg: MemArg) -> Self::Output {
2618        self.emit_atomic_rmw(
2619            &arg,
2620            RmwOp::Sub,
2621            OperandSize::S8,
2622            Some(Extend::<Zero>::I64Extend8),
2623        )
2624    }
2625
2626    fn visit_i64_atomic_rmw16_sub_u(&mut self, arg: MemArg) -> Self::Output {
2627        self.emit_atomic_rmw(
2628            &arg,
2629            RmwOp::Sub,
2630            OperandSize::S16,
2631            Some(Extend::<Zero>::I64Extend16),
2632        )
2633    }
2634
2635    fn visit_i64_atomic_rmw32_sub_u(&mut self, arg: MemArg) -> Self::Output {
2636        self.emit_atomic_rmw(
2637            &arg,
2638            RmwOp::Sub,
2639            OperandSize::S32,
2640            Some(Extend::<Zero>::I64Extend32),
2641        )
2642    }
2643
2644    fn visit_i64_atomic_rmw_sub(&mut self, arg: MemArg) -> Self::Output {
2645        self.emit_atomic_rmw(&arg, RmwOp::Sub, OperandSize::S64, None)
2646    }
2647
2648    fn visit_i32_atomic_rmw8_xchg_u(&mut self, arg: MemArg) -> Self::Output {
2649        self.emit_atomic_rmw(
2650            &arg,
2651            RmwOp::Xchg,
2652            OperandSize::S8,
2653            Some(Extend::<Zero>::I32Extend8),
2654        )
2655    }
2656
2657    fn visit_i32_atomic_rmw16_xchg_u(&mut self, arg: MemArg) -> Self::Output {
2658        self.emit_atomic_rmw(
2659            &arg,
2660            RmwOp::Xchg,
2661            OperandSize::S16,
2662            Some(Extend::<Zero>::I32Extend16),
2663        )
2664    }
2665
2666    fn visit_i32_atomic_rmw_xchg(&mut self, arg: MemArg) -> Self::Output {
2667        self.emit_atomic_rmw(&arg, RmwOp::Xchg, OperandSize::S32, None)
2668    }
2669
2670    fn visit_i64_atomic_rmw8_xchg_u(&mut self, arg: MemArg) -> Self::Output {
2671        self.emit_atomic_rmw(
2672            &arg,
2673            RmwOp::Xchg,
2674            OperandSize::S8,
2675            Some(Extend::<Zero>::I64Extend8),
2676        )
2677    }
2678
2679    fn visit_i64_atomic_rmw16_xchg_u(&mut self, arg: MemArg) -> Self::Output {
2680        self.emit_atomic_rmw(
2681            &arg,
2682            RmwOp::Xchg,
2683            OperandSize::S16,
2684            Some(Extend::<Zero>::I64Extend16),
2685        )
2686    }
2687
2688    fn visit_i64_atomic_rmw32_xchg_u(&mut self, arg: MemArg) -> Self::Output {
2689        self.emit_atomic_rmw(
2690            &arg,
2691            RmwOp::Xchg,
2692            OperandSize::S32,
2693            Some(Extend::<Zero>::I64Extend32),
2694        )
2695    }
2696
2697    fn visit_i64_atomic_rmw_xchg(&mut self, arg: MemArg) -> Self::Output {
2698        self.emit_atomic_rmw(&arg, RmwOp::Xchg, OperandSize::S64, None)
2699    }
2700
2701    fn visit_i32_atomic_rmw8_and_u(&mut self, arg: MemArg) -> Self::Output {
2702        self.emit_atomic_rmw(
2703            &arg,
2704            RmwOp::And,
2705            OperandSize::S8,
2706            Some(Extend::<Zero>::I32Extend8),
2707        )
2708    }
2709
2710    fn visit_i32_atomic_rmw16_and_u(&mut self, arg: MemArg) -> Self::Output {
2711        self.emit_atomic_rmw(
2712            &arg,
2713            RmwOp::And,
2714            OperandSize::S16,
2715            Some(Extend::<Zero>::I32Extend16),
2716        )
2717    }
2718
2719    fn visit_i32_atomic_rmw_and(&mut self, arg: MemArg) -> Self::Output {
2720        self.emit_atomic_rmw(&arg, RmwOp::And, OperandSize::S32, None)
2721    }
2722
2723    fn visit_i64_atomic_rmw8_and_u(&mut self, arg: MemArg) -> Self::Output {
2724        self.emit_atomic_rmw(
2725            &arg,
2726            RmwOp::And,
2727            OperandSize::S8,
2728            Some(Extend::<Zero>::I64Extend8),
2729        )
2730    }
2731
2732    fn visit_i64_atomic_rmw16_and_u(&mut self, arg: MemArg) -> Self::Output {
2733        self.emit_atomic_rmw(
2734            &arg,
2735            RmwOp::And,
2736            OperandSize::S16,
2737            Some(Extend::<Zero>::I64Extend16),
2738        )
2739    }
2740
2741    fn visit_i64_atomic_rmw32_and_u(&mut self, arg: MemArg) -> Self::Output {
2742        self.emit_atomic_rmw(
2743            &arg,
2744            RmwOp::And,
2745            OperandSize::S32,
2746            Some(Extend::<Zero>::I64Extend32),
2747        )
2748    }
2749
2750    fn visit_i64_atomic_rmw_and(&mut self, arg: MemArg) -> Self::Output {
2751        self.emit_atomic_rmw(&arg, RmwOp::And, OperandSize::S64, None)
2752    }
2753
2754    fn visit_i32_atomic_rmw8_or_u(&mut self, arg: MemArg) -> Self::Output {
2755        self.emit_atomic_rmw(
2756            &arg,
2757            RmwOp::Or,
2758            OperandSize::S8,
2759            Some(Extend::<Zero>::I32Extend8),
2760        )
2761    }
2762
2763    fn visit_i32_atomic_rmw16_or_u(&mut self, arg: MemArg) -> Self::Output {
2764        self.emit_atomic_rmw(
2765            &arg,
2766            RmwOp::Or,
2767            OperandSize::S16,
2768            Some(Extend::<Zero>::I32Extend16),
2769        )
2770    }
2771
2772    fn visit_i32_atomic_rmw_or(&mut self, arg: MemArg) -> Self::Output {
2773        self.emit_atomic_rmw(&arg, RmwOp::Or, OperandSize::S32, None)
2774    }
2775
2776    fn visit_i64_atomic_rmw8_or_u(&mut self, arg: MemArg) -> Self::Output {
2777        self.emit_atomic_rmw(
2778            &arg,
2779            RmwOp::Or,
2780            OperandSize::S8,
2781            Some(Extend::<Zero>::I64Extend8),
2782        )
2783    }
2784
2785    fn visit_i64_atomic_rmw16_or_u(&mut self, arg: MemArg) -> Self::Output {
2786        self.emit_atomic_rmw(
2787            &arg,
2788            RmwOp::Or,
2789            OperandSize::S16,
2790            Some(Extend::<Zero>::I64Extend16),
2791        )
2792    }
2793
2794    fn visit_i64_atomic_rmw32_or_u(&mut self, arg: MemArg) -> Self::Output {
2795        self.emit_atomic_rmw(
2796            &arg,
2797            RmwOp::Or,
2798            OperandSize::S32,
2799            Some(Extend::<Zero>::I64Extend32),
2800        )
2801    }
2802
2803    fn visit_i64_atomic_rmw_or(&mut self, arg: MemArg) -> Self::Output {
2804        self.emit_atomic_rmw(&arg, RmwOp::Or, OperandSize::S64, None)
2805    }
2806
2807    fn visit_i32_atomic_rmw8_xor_u(&mut self, arg: MemArg) -> Self::Output {
2808        self.emit_atomic_rmw(
2809            &arg,
2810            RmwOp::Xor,
2811            OperandSize::S8,
2812            Some(Extend::<Zero>::I32Extend8),
2813        )
2814    }
2815
2816    fn visit_i32_atomic_rmw16_xor_u(&mut self, arg: MemArg) -> Self::Output {
2817        self.emit_atomic_rmw(
2818            &arg,
2819            RmwOp::Xor,
2820            OperandSize::S16,
2821            Some(Extend::<Zero>::I32Extend16),
2822        )
2823    }
2824
2825    fn visit_i32_atomic_rmw_xor(&mut self, arg: MemArg) -> Self::Output {
2826        self.emit_atomic_rmw(&arg, RmwOp::Xor, OperandSize::S32, None)
2827    }
2828
2829    fn visit_i64_atomic_rmw8_xor_u(&mut self, arg: MemArg) -> Self::Output {
2830        self.emit_atomic_rmw(
2831            &arg,
2832            RmwOp::Xor,
2833            OperandSize::S8,
2834            Some(Extend::<Zero>::I64Extend8),
2835        )
2836    }
2837
2838    fn visit_i64_atomic_rmw16_xor_u(&mut self, arg: MemArg) -> Self::Output {
2839        self.emit_atomic_rmw(
2840            &arg,
2841            RmwOp::Xor,
2842            OperandSize::S16,
2843            Some(Extend::<Zero>::I64Extend16),
2844        )
2845    }
2846
2847    fn visit_i64_atomic_rmw32_xor_u(&mut self, arg: MemArg) -> Self::Output {
2848        self.emit_atomic_rmw(
2849            &arg,
2850            RmwOp::Xor,
2851            OperandSize::S32,
2852            Some(Extend::<Zero>::I64Extend32),
2853        )
2854    }
2855
2856    fn visit_i64_atomic_rmw_xor(&mut self, arg: MemArg) -> Self::Output {
2857        self.emit_atomic_rmw(&arg, RmwOp::Xor, OperandSize::S64, None)
2858    }
2859
2860    fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
2861        self.emit_atomic_cmpxchg(&arg, OperandSize::S8, Some(Extend::I32Extend8))
2862    }
2863
2864    fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
2865        self.emit_atomic_cmpxchg(&arg, OperandSize::S16, Some(Extend::I32Extend16))
2866    }
2867
2868    fn visit_i32_atomic_rmw_cmpxchg(&mut self, arg: MemArg) -> Self::Output {
2869        self.emit_atomic_cmpxchg(&arg, OperandSize::S32, None)
2870    }
2871
2872    fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
2873        self.emit_atomic_cmpxchg(&arg, OperandSize::S8, Some(Extend::I64Extend8))
2874    }
2875
2876    fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
2877        self.emit_atomic_cmpxchg(&arg, OperandSize::S16, Some(Extend::I64Extend16))
2878    }
2879
2880    fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
2881        self.emit_atomic_cmpxchg(&arg, OperandSize::S32, Some(Extend::I64Extend32))
2882    }
2883
2884    fn visit_i64_atomic_rmw_cmpxchg(&mut self, arg: MemArg) -> Self::Output {
2885        self.emit_atomic_cmpxchg(&arg, OperandSize::S64, None)
2886    }
2887
2888    fn visit_memory_atomic_wait32(&mut self, arg: MemArg) -> Self::Output {
2889        self.emit_atomic_wait(&arg, AtomicWaitKind::Wait32)
2890    }
2891
2892    fn visit_memory_atomic_wait64(&mut self, arg: MemArg) -> Self::Output {
2893        self.emit_atomic_wait(&arg, AtomicWaitKind::Wait64)
2894    }
2895
2896    fn visit_memory_atomic_notify(&mut self, arg: MemArg) -> Self::Output {
2897        self.emit_atomic_notify(&arg)
2898    }
2899
2900    fn visit_atomic_fence(&mut self) -> Self::Output {
2901        self.masm.fence()
2902    }
2903
2904    wasmparser::for_each_visit_operator!(def_unsupported);
2905}
2906
2907impl<'a, 'translation, 'data, M> VisitSimdOperator<'a>
2908    for CodeGen<'a, 'translation, 'data, M, Emission>
2909where
2910    M: MacroAssembler,
2911{
2912    fn visit_v128_const(&mut self, val: V128) -> Self::Output {
2913        self.context.stack.push(Val::v128(val.i128()));
2914        Ok(())
2915    }
2916
2917    fn visit_v128_load(&mut self, memarg: MemArg) -> Self::Output {
2918        self.emit_wasm_load(
2919            &memarg,
2920            WasmValType::V128,
2921            LoadKind::Operand(OperandSize::S128),
2922        )
2923    }
2924
2925    fn visit_v128_store(&mut self, memarg: MemArg) -> Self::Output {
2926        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S128))
2927    }
2928
2929    fn visit_v128_load8x8_s(&mut self, memarg: MemArg) -> Self::Output {
2930        self.emit_wasm_load(
2931            &memarg,
2932            WasmValType::V128,
2933            LoadKind::VectorExtend(V128LoadExtendKind::E8x8S),
2934        )
2935    }
2936
2937    fn visit_v128_load8x8_u(&mut self, memarg: MemArg) -> Self::Output {
2938        self.emit_wasm_load(
2939            &memarg,
2940            WasmValType::V128,
2941            LoadKind::VectorExtend(V128LoadExtendKind::E8x8U),
2942        )
2943    }
2944
2945    fn visit_v128_load16x4_s(&mut self, memarg: MemArg) -> Self::Output {
2946        self.emit_wasm_load(
2947            &memarg,
2948            WasmValType::V128,
2949            LoadKind::VectorExtend(V128LoadExtendKind::E16x4S),
2950        )
2951    }
2952
2953    fn visit_v128_load16x4_u(&mut self, memarg: MemArg) -> Self::Output {
2954        self.emit_wasm_load(
2955            &memarg,
2956            WasmValType::V128,
2957            LoadKind::VectorExtend(V128LoadExtendKind::E16x4U),
2958        )
2959    }
2960
2961    fn visit_v128_load32x2_s(&mut self, memarg: MemArg) -> Self::Output {
2962        self.emit_wasm_load(
2963            &memarg,
2964            WasmValType::V128,
2965            LoadKind::VectorExtend(V128LoadExtendKind::E32x2S),
2966        )
2967    }
2968
2969    fn visit_v128_load32x2_u(&mut self, memarg: MemArg) -> Self::Output {
2970        self.emit_wasm_load(
2971            &memarg,
2972            WasmValType::V128,
2973            LoadKind::VectorExtend(V128LoadExtendKind::E32x2U),
2974        )
2975    }
2976
2977    fn visit_v128_load8_splat(&mut self, memarg: MemArg) -> Self::Output {
2978        self.emit_wasm_load(
2979            &memarg,
2980            WasmValType::V128,
2981            LoadKind::Splat(SplatLoadKind::S8),
2982        )
2983    }
2984
2985    fn visit_v128_load16_splat(&mut self, memarg: MemArg) -> Self::Output {
2986        self.emit_wasm_load(
2987            &memarg,
2988            WasmValType::V128,
2989            LoadKind::Splat(SplatLoadKind::S16),
2990        )
2991    }
2992
2993    fn visit_v128_load32_splat(&mut self, memarg: MemArg) -> Self::Output {
2994        self.emit_wasm_load(
2995            &memarg,
2996            WasmValType::V128,
2997            LoadKind::Splat(SplatLoadKind::S32),
2998        )
2999    }
3000
3001    fn visit_v128_load64_splat(&mut self, memarg: MemArg) -> Self::Output {
3002        self.emit_wasm_load(
3003            &memarg,
3004            WasmValType::V128,
3005            LoadKind::Splat(SplatLoadKind::S64),
3006        )
3007    }
3008
3009    fn visit_i8x16_splat(&mut self) -> Self::Output {
3010        self.masm.splat(&mut self.context, SplatKind::I8x16)
3011    }
3012
3013    fn visit_i16x8_splat(&mut self) -> Self::Output {
3014        self.masm.splat(&mut self.context, SplatKind::I16x8)
3015    }
3016
3017    fn visit_i32x4_splat(&mut self) -> Self::Output {
3018        self.masm.splat(&mut self.context, SplatKind::I32x4)
3019    }
3020
3021    fn visit_i64x2_splat(&mut self) -> Self::Output {
3022        self.masm.splat(&mut self.context, SplatKind::I64x2)
3023    }
3024
3025    fn visit_f32x4_splat(&mut self) -> Self::Output {
3026        self.masm.splat(&mut self.context, SplatKind::F32x4)
3027    }
3028
3029    fn visit_f64x2_splat(&mut self) -> Self::Output {
3030        self.masm.splat(&mut self.context, SplatKind::F64x2)
3031    }
3032
    fn visit_i8x16_shuffle(&mut self, lanes: [u8; 16]) -> Self::Output {
        // Operands are popped in reverse order: rhs first, then lhs.
        let rhs = self.context.pop_to_reg(self.masm, None)?;
        let lhs = self.context.pop_to_reg(self.masm, None)?;
        // The result overwrites lhs (`writable!(lhs)`), so lhs is re-pushed
        // as the v128 result and only rhs is released back to the allocator.
        self.masm
            .shuffle(writable!(lhs.into()), lhs.into(), rhs.into(), lanes)?;
        self.context.stack.push(TypedReg::v128(lhs.into()).into());
        self.context.free_reg(rhs);
        Ok(())
    }

    fn visit_i8x16_swizzle(&mut self) -> Self::Output {
        // Same register discipline as `shuffle` above: lhs serves as both
        // an input and the destination; rhs is freed after the operation.
        let rhs = self.context.pop_to_reg(self.masm, None)?;
        let lhs = self.context.pop_to_reg(self.masm, None)?;
        self.masm
            .swizzle(writable!(lhs.into()), lhs.into(), rhs.into())?;
        self.context.stack.push(TypedReg::v128(lhs.into()).into());
        self.context.free_reg(rhs);
        Ok(())
    }
3052
3053    fn visit_i8x16_extract_lane_s(&mut self, lane: u8) -> Self::Output {
3054        self.context.extract_lane_op(
3055            self.masm,
3056            ExtractLaneKind::I8x16S,
3057            |masm, src, dst, kind| masm.extract_lane(src, dst, lane, kind),
3058        )
3059    }
3060
3061    fn visit_i8x16_extract_lane_u(&mut self, lane: u8) -> Self::Output {
3062        self.context.extract_lane_op(
3063            self.masm,
3064            ExtractLaneKind::I8x16U,
3065            |masm, src, dst, kind| masm.extract_lane(src, dst, lane, kind),
3066        )
3067    }
3068
3069    fn visit_i16x8_extract_lane_s(&mut self, lane: u8) -> Self::Output {
3070        self.context.extract_lane_op(
3071            self.masm,
3072            ExtractLaneKind::I16x8S,
3073            |masm, src, dst, kind| masm.extract_lane(src, dst, lane, kind),
3074        )
3075    }
3076
3077    fn visit_i16x8_extract_lane_u(&mut self, lane: u8) -> Self::Output {
3078        self.context.extract_lane_op(
3079            self.masm,
3080            ExtractLaneKind::I16x8U,
3081            |masm, src, dst, kind| masm.extract_lane(src, dst, lane, kind),
3082        )
3083    }
3084
3085    fn visit_i32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
3086        self.context
3087            .extract_lane_op(self.masm, ExtractLaneKind::I32x4, |masm, src, dst, kind| {
3088                masm.extract_lane(src, dst, lane, kind)
3089            })
3090    }
3091
3092    fn visit_i64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
3093        self.context
3094            .extract_lane_op(self.masm, ExtractLaneKind::I64x2, |masm, src, dst, kind| {
3095                masm.extract_lane(src, dst, lane, kind)
3096            })
3097    }
3098
3099    fn visit_f32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
3100        self.context
3101            .extract_lane_op(self.masm, ExtractLaneKind::F32x4, |masm, src, dst, kind| {
3102                masm.extract_lane(src, dst, lane, kind)
3103            })
3104    }
3105
3106    fn visit_f64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
3107        self.context
3108            .extract_lane_op(self.masm, ExtractLaneKind::F64x2, |masm, src, dst, kind| {
3109                masm.extract_lane(src, dst, lane, kind)
3110            })
3111    }
3112
    // Vector lane-wise equality comparisons. In the `binop` callback, `dst`
    // serves as both the left-hand operand and the destination (note
    // `writable!(dst)` aliasing the first source); `src` is the right-hand
    // operand. The result is pushed as a v128.
    fn visit_i8x16_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::I8x16)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::I16x8)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::I32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::I64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f64x2_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::F64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }
3160
    // Vector lane-wise inequality comparisons; same register discipline as
    // the equality visitors (`dst` is both left-hand operand and result).
    fn visit_i8x16_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::I8x16)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::I16x8)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::I32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::I64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f64x2_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::F64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }
3208
    // Vector lane-wise less-than comparisons. `dst` is both the left-hand
    // operand and the destination. Wasm SIMD defines no `i64x2.lt_u`, hence
    // only the signed variant exists for 64-bit lanes.
    fn visit_i8x16_lt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_lt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I8x16U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_lt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_lt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_lt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_lt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_lt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I64x2S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_lt(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f64x2_lt(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::F64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }
3280
    // Vector lane-wise less-than-or-equal comparisons; same structure and
    // register discipline as the less-than visitors above.
    fn visit_i8x16_le_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_le_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I8x16U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_le_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_le_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_le_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_le_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_le_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I64x2S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_le(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f64x2_le(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::F64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }
3352
    // Vector lane-wise greater-than comparisons; same structure and register
    // discipline as the less-than visitors above.
    fn visit_i8x16_gt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_gt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I8x16U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_gt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_gt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_gt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_gt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_gt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I64x2S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_gt(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f64x2_gt(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::F64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }
3424
    // Lane-wise `ge` comparisons; same pattern as the `gt` group above, with
    // the lane width selected via `OperandSize` per shape.
    fn visit_i8x16_ge_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_ge_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I8x16U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_ge_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_ge_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_ge_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_ge_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    // Note: there is no `i64x2.ge_u` in the wasm SIMD proposal, only signed.
    fn visit_i64x2_ge_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I64x2S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_ge(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }
3488
3489    fn visit_f64x2_ge(&mut self) -> Self::Output {
3490        self.context
3491            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
3492                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::F64x2)?;
3493                Ok(TypedReg::v128(dst))
3494            })
3495    }
3496
    // `replace_lane` family: `replace_lane_op` pops the replacement scalar and
    // the vector; the `lane` immediate is captured by the closure and forwarded
    // to the masm together with the shape-specific `ReplaceLaneKind`.
    fn visit_i8x16_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::I8x16, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_i16x8_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::I16x8, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_i32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::I32x4, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_i64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::I64x2, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_f32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::F32x4, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_f64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::F64x2, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }
3538
    // `v128.not`: bitwise complement, performed in place on the popped operand.
    fn visit_v128_not(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_not(writable!(reg))?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }
3545
    // `v128.and`: bitwise AND. AND is commutative, so operand order into the
    // masm doesn't matter.
    fn visit_v128_and(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S128, |masm, dst, src, _size| {
                masm.v128_and(dst, src, writable!(dst))?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }
3553
    // `v128.andnot`: bitwise AND of the first operand with the complement of
    // the second.
    fn visit_v128_andnot(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S128, |masm, dst, src, _size| {
                // careful here: and_not is *not* commutative: dst = !src1 & src2,
                // hence `src` and `dst` are deliberately swapped relative to the
                // commutative ops around this one.
                masm.v128_and_not(src, dst, writable!(dst))?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }
3562
    // `v128.or`: bitwise OR.
    fn visit_v128_or(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S128, |masm, dst, src, _size| {
                // OR is commutative, so the operand order here is irrelevant.
                // (A previous comment about `and_not` was a copy-paste leftover.)
                masm.v128_or(src, dst, writable!(dst))?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }
3571
    // `v128.xor`: bitwise XOR.
    fn visit_v128_xor(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S128, |masm, dst, src, _size| {
                // XOR is commutative, so the operand order here is irrelevant.
                // (A previous comment about `and_not` was a copy-paste leftover.)
                masm.v128_xor(src, dst, writable!(dst))?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }
3580
    // `v128.bitselect`: select bits from `op1` where the mask bit is set and
    // from `op2` where it is clear. Operands are popped in reverse push order
    // (mask last-pushed, so popped first); the result goes into a freshly
    // allocated FP register and all three source registers are released.
    fn visit_v128_bitselect(&mut self) -> Self::Output {
        let mask = self.context.pop_to_reg(self.masm, None)?;
        let op2 = self.context.pop_to_reg(self.masm, None)?;
        let op1 = self.context.pop_to_reg(self.masm, None)?;
        let dst = self.context.any_fpr(self.masm)?;

        // careful here: bitselect is *not* commutative.
        self.masm
            .v128_bitselect(op1.reg, op2.reg, mask.reg, writable!(dst))?;

        self.context
            .stack
            .push(TypedReg::new(WasmValType::V128, dst).into());
        self.context.free_reg(op1);
        self.context.free_reg(op2);
        self.context.free_reg(mask);

        Ok(())
    }
3600
    // `v128.any_true`: reduce a vector to an i32 boolean. The result needs a
    // general-purpose register since the pushed value is an I32, not a V128.
    fn visit_v128_any_true(&mut self) -> Self::Output {
        let src = self.context.pop_to_reg(self.masm, None)?;
        let dst = self.context.any_gpr(self.masm)?;

        self.masm.v128_any_true(src.reg, writable!(dst))?;

        self.context
            .stack
            .push(TypedReg::new(WasmValType::I32, dst).into());
        self.context.free_reg(src);

        Ok(())
    }
3614
    // `v128.loadN_lane` / `v128.storeN_lane`: load or store a single lane of a
    // vector at the given memory argument. All variants delegate to the shared
    // wasm load/store emitters with a lane-targeted kind; only the lane width
    // differs.
    fn visit_v128_load8_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_load(
            &arg,
            WasmValType::V128,
            LoadKind::vector_lane(lane, OperandSize::S8),
        )
    }

    fn visit_v128_load16_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_load(
            &arg,
            WasmValType::V128,
            LoadKind::vector_lane(lane, OperandSize::S16),
        )
    }

    fn visit_v128_load32_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_load(
            &arg,
            WasmValType::V128,
            LoadKind::vector_lane(lane, OperandSize::S32),
        )
    }

    fn visit_v128_load64_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_load(
            &arg,
            WasmValType::V128,
            LoadKind::vector_lane(lane, OperandSize::S64),
        )
    }

    fn visit_v128_store8_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_store(&arg, StoreKind::vector_lane(lane, OperandSize::S8))
    }

    fn visit_v128_store16_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_store(&arg, StoreKind::vector_lane(lane, OperandSize::S16))
    }

    fn visit_v128_store32_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_store(&arg, StoreKind::vector_lane(lane, OperandSize::S32))
    }

    fn visit_v128_store64_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_store(&arg, StoreKind::vector_lane(lane, OperandSize::S64))
    }
3662
    // Integer-to-float conversions: in-place unops dispatched on the
    // `V128ConvertKind`. The `Low` variants convert only the low two i32 lanes
    // to f64x2.
    fn visit_f32x4_convert_i32x4_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_convert(reg, writable!(reg), V128ConvertKind::I32x4S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_f32x4_convert_i32x4_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_convert(reg, writable!(reg), V128ConvertKind::I32x4U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_f64x2_convert_low_i32x4_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_convert(reg, writable!(reg), V128ConvertKind::I32x4LowS)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_f64x2_convert_low_i32x4_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_convert(reg, writable!(reg), V128ConvertKind::I32x4LowU)?;
            Ok(TypedReg::v128(reg))
        })
    }
3690
    // `narrow` family: combine two vectors into one with half-width lanes.
    // The `OperandSize` passed to `binop` is the *source* lane width.
    fn visit_i8x16_narrow_i16x8_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_narrow(dst, src, writable!(dst), V128NarrowKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_narrow_i16x8_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_narrow(dst, src, writable!(dst), V128NarrowKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_narrow_i32x4_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_narrow(dst, src, writable!(dst), V128NarrowKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_narrow_i32x4_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_narrow(dst, src, writable!(dst), V128NarrowKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }
3722
    // Float precision conversions, performed in place on the popped operand.
    fn visit_f32x4_demote_f64x2_zero(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_demote(reg, writable!(reg))?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_f64x2_promote_low_f32x4(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_promote(reg, writable!(reg))?;
            Ok(TypedReg::v128(reg))
        })
    }
3736
    // `extend` family: widen the low or high half of a vector's lanes into
    // double-width lanes, signed or zero extended. All variants are in-place
    // unops differing only in the `V128ExtendKind`.
    fn visit_i16x8_extend_low_i8x16_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI8x16S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i16x8_extend_high_i8x16_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI8x16S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i16x8_extend_low_i8x16_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI8x16U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i16x8_extend_high_i8x16_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI8x16U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i32x4_extend_low_i16x8_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI16x8S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i32x4_extend_high_i16x8_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI16x8S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i32x4_extend_low_i16x8_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI16x8U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i32x4_extend_high_i16x8_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI16x8U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i64x2_extend_low_i32x4_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI32x4S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i64x2_extend_high_i32x4_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI32x4S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i64x2_extend_low_i32x4_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI32x4U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i64x2_extend_high_i32x4_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI32x4U)?;
            Ok(TypedReg::v128(reg))
        })
    }
3820
    // Wrapping lane-wise integer add/sub for every integer shape; only the lane
    // width and the add/sub kind vary between variants.
    fn visit_i8x16_add(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I8x16)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_add(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I16x8)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i32x4_add(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I32x4)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i64x2_add(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I64x2)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i8x16_sub(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I8x16)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_sub(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I16x8)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i32x4_sub(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I32x4)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i64x2_sub(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I64x2)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }
3884
    // Lane-wise multiplies. Unlike the add/sub group these hand the whole
    // context to the masm, which manages operand popping itself (multiplication
    // may need extra scratch registers depending on the lane width).
    fn visit_i16x8_mul(&mut self) -> Self::Output {
        self.masm.v128_mul(&mut self.context, V128MulKind::I16x8)
    }

    fn visit_i32x4_mul(&mut self) -> Self::Output {
        self.masm.v128_mul(&mut self.context, V128MulKind::I32x4)
    }

    fn visit_i64x2_mul(&mut self) -> Self::Output {
        self.masm.v128_mul(&mut self.context, V128MulKind::I64x2)
    }
3896
    // Saturating lane-wise add/sub (i8x16/i16x8 only, per the wasm SIMD
    // proposal); the saturation behavior is selected via the add/sub kind.
    fn visit_i8x16_add_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I8x16SatS)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_add_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I16x8SatS)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i8x16_add_sat_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I8x16SatU)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_add_sat_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I16x8SatU)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i8x16_sub_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I8x16SatS)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_sub_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I16x8SatS)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i8x16_sub_sat_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I8x16SatU)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_sub_sat_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I16x8SatU)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }
3960
    // Lane-wise absolute value for all integer and float shapes; in-place
    // unops selected by `V128AbsKind`.
    fn visit_i8x16_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::I8x16)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_i16x8_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::I16x8)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_i32x4_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::I32x4)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_i64x2_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::I64x2)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_f32x4_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::F32x4)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_f64x2_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::F64x2)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }
4002
    // Lane-wise integer negation; in-place unops selected by `V128NegKind`.
    fn visit_i8x16_neg(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, op| {
            masm.v128_neg(writable!(op), V128NegKind::I8x16)?;
            Ok(TypedReg::new(WasmValType::V128, op))
        })
    }

    fn visit_i16x8_neg(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, op| {
            masm.v128_neg(writable!(op), V128NegKind::I16x8)?;
            Ok(TypedReg::new(WasmValType::V128, op))
        })
    }

    fn visit_i32x4_neg(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, op| {
            masm.v128_neg(writable!(op), V128NegKind::I32x4)?;
            Ok(TypedReg::new(WasmValType::V128, op))
        })
    }

    fn visit_i64x2_neg(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, op| {
            masm.v128_neg(writable!(op), V128NegKind::I64x2)?;
            Ok(TypedReg::new(WasmValType::V128, op))
        })
    }
4030
    // Lane-wise shifts (shl / shr_u / shr_s) for every integer shape. The masm
    // receives the whole context because the shift amount operand is a scalar,
    // not a vector, so operand handling differs from the `binop` pattern.
    fn visit_i8x16_shl(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S8, ShiftKind::Shl)
    }

    fn visit_i16x8_shl(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S16, ShiftKind::Shl)
    }

    fn visit_i32x4_shl(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S32, ShiftKind::Shl)
    }

    fn visit_i64x2_shl(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S64, ShiftKind::Shl)
    }

    fn visit_i8x16_shr_u(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S8, ShiftKind::ShrU)
    }

    fn visit_i16x8_shr_u(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S16, ShiftKind::ShrU)
    }

    fn visit_i32x4_shr_u(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S32, ShiftKind::ShrU)
    }

    fn visit_i64x2_shr_u(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S64, ShiftKind::ShrU)
    }

    fn visit_i8x16_shr_s(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S8, ShiftKind::ShrS)
    }

    fn visit_i16x8_shr_s(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S16, ShiftKind::ShrS)
    }

    fn visit_i32x4_shr_s(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S32, ShiftKind::ShrS)
    }

    fn visit_i64x2_shr_s(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S64, ShiftKind::ShrS)
    }
4090
    // `i16x8.q15mulr_sat_s`: saturating rounding Q15 multiplication. The lane
    // size is forwarded to the masm via the closure's `size` parameter.
    fn visit_i16x8_q15mulr_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, size| {
                masm.v128_q15mulr_sat_s(dst, src, writable!(dst), size)?;
                Ok(TypedReg::v128(dst))
            })
    }
4098
    // `i8x16.min_s`: lane-wise signed minimum. Note the masm takes `src`
    // before `dst`, matching the other `v128_min` call sites in this file.
    fn visit_i8x16_min_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_min(src, dst, writable!(dst), V128MinKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }
4106
    // `all_true` family: reduce a vector to an i32 boolean that is true only
    // if every lane is non-zero; `v128_all_true_op` handles the gpr result.
    fn visit_i8x16_all_true(&mut self) -> Self::Output {
        self.context.v128_all_true_op(self.masm, |masm, src, dst| {
            masm.v128_all_true(src, writable!(dst), OperandSize::S8)
        })
    }

    fn visit_i16x8_all_true(&mut self) -> Self::Output {
        self.context.v128_all_true_op(self.masm, |masm, src, dst| {
            masm.v128_all_true(src, writable!(dst), OperandSize::S16)
        })
    }

    fn visit_i32x4_all_true(&mut self) -> Self::Output {
        self.context.v128_all_true_op(self.masm, |masm, src, dst| {
            masm.v128_all_true(src, writable!(dst), OperandSize::S32)
        })
    }

    fn visit_i64x2_all_true(&mut self) -> Self::Output {
        self.context.v128_all_true_op(self.masm, |masm, src, dst| {
            masm.v128_all_true(src, writable!(dst), OperandSize::S64)
        })
    }
4130
    // `bitmask` family: extract the high bit of each lane into an i32 mask;
    // `v128_bitmask_op` handles the gpr result.
    fn visit_i8x16_bitmask(&mut self) -> Self::Output {
        self.context.v128_bitmask_op(self.masm, |masm, src, dst| {
            masm.v128_bitmask(src, writable!(dst), OperandSize::S8)
        })
    }

    fn visit_i16x8_bitmask(&mut self) -> Self::Output {
        self.context.v128_bitmask_op(self.masm, |masm, src, dst| {
            masm.v128_bitmask(src, writable!(dst), OperandSize::S16)
        })
    }

    fn visit_i32x4_bitmask(&mut self) -> Self::Output {
        self.context.v128_bitmask_op(self.masm, |masm, src, dst| {
            masm.v128_bitmask(src, writable!(dst), OperandSize::S32)
        })
    }

    fn visit_i64x2_bitmask(&mut self) -> Self::Output {
        self.context.v128_bitmask_op(self.masm, |masm, src, dst| {
            masm.v128_bitmask(src, writable!(dst), OperandSize::S64)
        })
    }
4154
    // Saturating float-to-int truncations; the masm drives the context itself
    // and the `_zero` variants zero the upper lanes of the result.
    fn visit_i32x4_trunc_sat_f32x4_s(&mut self) -> Self::Output {
        self.masm
            .v128_trunc(&mut self.context, V128TruncKind::I32x4FromF32x4S)
    }

    fn visit_i32x4_trunc_sat_f32x4_u(&mut self) -> Self::Output {
        self.masm
            .v128_trunc(&mut self.context, V128TruncKind::I32x4FromF32x4U)
    }

    fn visit_i32x4_trunc_sat_f64x2_s_zero(&mut self) -> Self::Output {
        self.masm
            .v128_trunc(&mut self.context, V128TruncKind::I32x4FromF64x2SZero)
    }

    fn visit_i32x4_trunc_sat_f64x2_u_zero(&mut self) -> Self::Output {
        self.masm
            .v128_trunc(&mut self.context, V128TruncKind::I32x4FromF64x2UZero)
    }
4174
4175    fn visit_i16x8_min_s(&mut self) -> Self::Output {
4176        self.context
4177            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
4178                masm.v128_min(src, dst, writable!(dst), V128MinKind::I16x8S)?;
4179                Ok(TypedReg::v128(dst))
4180            })
4181    }
4182
4183    fn visit_i32x4_dot_i16x8_s(&mut self) -> Self::Output {
4184        self.context
4185            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4186                masm.v128_dot(dst, src, writable!(dst))?;
4187                Ok(TypedReg::v128(dst))
4188            })
4189    }
4190
    fn visit_i8x16_popcnt(&mut self) -> Self::Output {
        // Per-lane population count; operand handling and emission are
        // delegated entirely to the masm implementation.
        self.masm.v128_popcnt(&mut self.context)
    }
4194
4195    fn visit_i8x16_avgr_u(&mut self) -> Self::Output {
4196        self.context
4197            .binop(self.masm, OperandSize::S8, |masm, dst, src, size| {
4198                masm.v128_avgr(dst, src, writable!(dst), size)?;
4199                Ok(TypedReg::v128(dst))
4200            })
4201    }
4202
4203    fn visit_i32x4_min_s(&mut self) -> Self::Output {
4204        self.context
4205            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4206                masm.v128_min(src, dst, writable!(dst), V128MinKind::I32x4S)?;
4207                Ok(TypedReg::v128(dst))
4208            })
4209    }
4210
4211    fn visit_i8x16_min_u(&mut self) -> Self::Output {
4212        self.context
4213            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
4214                masm.v128_min(src, dst, writable!(dst), V128MinKind::I8x16U)?;
4215                Ok(TypedReg::v128(dst))
4216            })
4217    }
4218
4219    fn visit_i16x8_avgr_u(&mut self) -> Self::Output {
4220        self.context
4221            .binop(self.masm, OperandSize::S16, |masm, dst, src, size| {
4222                masm.v128_avgr(dst, src, writable!(dst), size)?;
4223                Ok(TypedReg::v128(dst))
4224            })
4225    }
4226
4227    fn visit_i16x8_min_u(&mut self) -> Self::Output {
4228        self.context
4229            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
4230                masm.v128_min(src, dst, writable!(dst), V128MinKind::I16x8U)?;
4231                Ok(TypedReg::v128(dst))
4232            })
4233    }
4234
4235    fn visit_i32x4_min_u(&mut self) -> Self::Output {
4236        self.context
4237            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4238                masm.v128_min(src, dst, writable!(dst), V128MinKind::I32x4U)?;
4239                Ok(TypedReg::v128(dst))
4240            })
4241    }
4242
4243    fn visit_i8x16_max_s(&mut self) -> Self::Output {
4244        self.context
4245            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
4246                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I8x16S)?;
4247                Ok(TypedReg::v128(dst))
4248            })
4249    }
4250
4251    fn visit_i16x8_max_s(&mut self) -> Self::Output {
4252        self.context
4253            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
4254                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I16x8S)?;
4255                Ok(TypedReg::v128(dst))
4256            })
4257    }
4258
4259    fn visit_i32x4_max_s(&mut self) -> Self::Output {
4260        self.context
4261            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4262                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I32x4S)?;
4263                Ok(TypedReg::v128(dst))
4264            })
4265    }
4266
4267    fn visit_i8x16_max_u(&mut self) -> Self::Output {
4268        self.context
4269            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
4270                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I8x16U)?;
4271                Ok(TypedReg::v128(dst))
4272            })
4273    }
4274
4275    fn visit_i16x8_max_u(&mut self) -> Self::Output {
4276        self.context
4277            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
4278                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I16x8U)?;
4279                Ok(TypedReg::v128(dst))
4280            })
4281    }
4282
4283    fn visit_i32x4_max_u(&mut self) -> Self::Output {
4284        self.context
4285            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4286                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I32x4U)?;
4287                Ok(TypedReg::v128(dst))
4288            })
4289    }
4290
4291    fn visit_i16x8_extmul_low_i8x16_s(&mut self) -> Self::Output {
4292        self.masm
4293            .v128_extmul(&mut self.context, V128ExtMulKind::LowI8x16S)
4294    }
4295
4296    fn visit_i32x4_extmul_low_i16x8_s(&mut self) -> Self::Output {
4297        self.masm
4298            .v128_extmul(&mut self.context, V128ExtMulKind::LowI16x8S)
4299    }
4300
4301    fn visit_i64x2_extmul_low_i32x4_s(&mut self) -> Self::Output {
4302        self.masm
4303            .v128_extmul(&mut self.context, V128ExtMulKind::LowI32x4S)
4304    }
4305
4306    fn visit_i16x8_extmul_low_i8x16_u(&mut self) -> Self::Output {
4307        self.masm
4308            .v128_extmul(&mut self.context, V128ExtMulKind::LowI8x16U)
4309    }
4310
4311    fn visit_i32x4_extmul_low_i16x8_u(&mut self) -> Self::Output {
4312        self.masm
4313            .v128_extmul(&mut self.context, V128ExtMulKind::LowI16x8U)
4314    }
4315
4316    fn visit_i64x2_extmul_low_i32x4_u(&mut self) -> Self::Output {
4317        self.masm
4318            .v128_extmul(&mut self.context, V128ExtMulKind::LowI32x4U)
4319    }
4320
4321    fn visit_i16x8_extmul_high_i8x16_u(&mut self) -> Self::Output {
4322        self.masm
4323            .v128_extmul(&mut self.context, V128ExtMulKind::HighI8x16U)
4324    }
4325
4326    fn visit_i32x4_extmul_high_i16x8_u(&mut self) -> Self::Output {
4327        self.masm
4328            .v128_extmul(&mut self.context, V128ExtMulKind::HighI16x8U)
4329    }
4330
4331    fn visit_i64x2_extmul_high_i32x4_u(&mut self) -> Self::Output {
4332        self.masm
4333            .v128_extmul(&mut self.context, V128ExtMulKind::HighI32x4U)
4334    }
4335
4336    fn visit_i16x8_extmul_high_i8x16_s(&mut self) -> Self::Output {
4337        self.masm
4338            .v128_extmul(&mut self.context, V128ExtMulKind::HighI8x16S)
4339    }
4340
4341    fn visit_i32x4_extmul_high_i16x8_s(&mut self) -> Self::Output {
4342        self.masm
4343            .v128_extmul(&mut self.context, V128ExtMulKind::HighI16x8S)
4344    }
4345
4346    fn visit_i64x2_extmul_high_i32x4_s(&mut self) -> Self::Output {
4347        self.masm
4348            .v128_extmul(&mut self.context, V128ExtMulKind::HighI32x4S)
4349    }
4350
4351    fn visit_i16x8_extadd_pairwise_i8x16_s(&mut self) -> Self::Output {
4352        self.context.unop(self.masm, |masm, op| {
4353            masm.v128_extadd_pairwise(op, writable!(op), V128ExtAddKind::I8x16S)?;
4354            Ok(TypedReg::v128(op))
4355        })
4356    }
4357
4358    fn visit_i16x8_extadd_pairwise_i8x16_u(&mut self) -> Self::Output {
4359        self.context.unop(self.masm, |masm, op| {
4360            masm.v128_extadd_pairwise(op, writable!(op), V128ExtAddKind::I8x16U)?;
4361            Ok(TypedReg::v128(op))
4362        })
4363    }
4364
4365    fn visit_i32x4_extadd_pairwise_i16x8_s(&mut self) -> Self::Output {
4366        self.context.unop(self.masm, |masm, op| {
4367            masm.v128_extadd_pairwise(op, writable!(op), V128ExtAddKind::I16x8S)?;
4368            Ok(TypedReg::v128(op))
4369        })
4370    }
4371
4372    fn visit_i32x4_extadd_pairwise_i16x8_u(&mut self) -> Self::Output {
4373        self.context.unop(self.masm, |masm, op| {
4374            masm.v128_extadd_pairwise(op, writable!(op), V128ExtAddKind::I16x8U)?;
4375            Ok(TypedReg::v128(op))
4376        })
4377    }
4378
4379    fn visit_f32x4_add(&mut self) -> Self::Output {
4380        self.context
4381            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4382                masm.v128_add(dst, src, writable!(dst), V128AddKind::F32x4)?;
4383                Ok(TypedReg::v128(dst))
4384            })
4385    }
4386
4387    fn visit_f64x2_add(&mut self) -> Self::Output {
4388        self.context
4389            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
4390                masm.v128_add(dst, src, writable!(dst), V128AddKind::F64x2)?;
4391                Ok(TypedReg::v128(dst))
4392            })
4393    }
4394
4395    fn visit_f32x4_sub(&mut self) -> Self::Output {
4396        self.context
4397            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4398                masm.v128_sub(dst, src, writable!(dst), V128SubKind::F32x4)?;
4399                Ok(TypedReg::v128(dst))
4400            })
4401    }
4402
4403    fn visit_f64x2_sub(&mut self) -> Self::Output {
4404        self.context
4405            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
4406                masm.v128_sub(dst, src, writable!(dst), V128SubKind::F64x2)?;
4407                Ok(TypedReg::v128(dst))
4408            })
4409    }
4410
4411    fn visit_f32x4_mul(&mut self) -> Self::Output {
4412        self.masm.v128_mul(&mut self.context, V128MulKind::F32x4)
4413    }
4414
4415    fn visit_f64x2_mul(&mut self) -> Self::Output {
4416        self.masm.v128_mul(&mut self.context, V128MulKind::F64x2)
4417    }
4418
4419    fn visit_f32x4_div(&mut self) -> Self::Output {
4420        self.context
4421            .binop(self.masm, OperandSize::S32, |masm, dst, src, size| {
4422                masm.v128_div(dst, src, writable!(dst), size)?;
4423                Ok(TypedReg::v128(dst))
4424            })
4425    }
4426
4427    fn visit_f64x2_div(&mut self) -> Self::Output {
4428        self.context
4429            .binop(self.masm, OperandSize::S64, |masm, dst, src, size| {
4430                masm.v128_div(dst, src, writable!(dst), size)?;
4431                Ok(TypedReg::v128(dst))
4432            })
4433    }
4434
4435    fn visit_f32x4_neg(&mut self) -> Self::Output {
4436        self.context.unop(self.masm, |masm, reg| {
4437            masm.v128_neg(writable!(reg), V128NegKind::F32x4)?;
4438            Ok(TypedReg::v128(reg))
4439        })
4440    }
4441
4442    fn visit_f32x4_ceil(&mut self) -> Self::Output {
4443        self.context.unop(self.masm, |masm, reg| {
4444            masm.v128_ceil(reg, writable!(reg), OperandSize::S32)?;
4445            Ok(TypedReg::v128(reg))
4446        })
4447    }
4448
4449    fn visit_f64x2_neg(&mut self) -> Self::Output {
4450        self.context.unop(self.masm, |masm, reg| {
4451            masm.v128_neg(writable!(reg), V128NegKind::F64x2)?;
4452            Ok(TypedReg::v128(reg))
4453        })
4454    }
4455
4456    fn visit_f64x2_ceil(&mut self) -> Self::Output {
4457        self.context.unop(self.masm, |masm, reg| {
4458            masm.v128_ceil(reg, writable!(reg), OperandSize::S64)?;
4459            Ok(TypedReg::v128(reg))
4460        })
4461    }
4462
4463    fn visit_f32x4_sqrt(&mut self) -> Self::Output {
4464        self.context.unop(self.masm, |masm, reg| {
4465            masm.v128_sqrt(reg, writable!(reg), OperandSize::S32)?;
4466            Ok(TypedReg::v128(reg))
4467        })
4468    }
4469
4470    fn visit_f32x4_floor(&mut self) -> Self::Output {
4471        self.context.unop(self.masm, |masm, reg| {
4472            masm.v128_floor(reg, writable!(reg), OperandSize::S32)?;
4473            Ok(TypedReg::v128(reg))
4474        })
4475    }
4476
4477    fn visit_f64x2_sqrt(&mut self) -> Self::Output {
4478        self.context.unop(self.masm, |masm, reg| {
4479            masm.v128_sqrt(reg, writable!(reg), OperandSize::S64)?;
4480            Ok(TypedReg::v128(reg))
4481        })
4482    }
4483
4484    fn visit_f64x2_floor(&mut self) -> Self::Output {
4485        self.context.unop(self.masm, |masm, reg| {
4486            masm.v128_floor(reg, writable!(reg), OperandSize::S64)?;
4487            Ok(TypedReg::v128(reg))
4488        })
4489    }
4490
4491    fn visit_f32x4_nearest(&mut self) -> Self::Output {
4492        self.context.unop(self.masm, |masm, reg| {
4493            masm.v128_nearest(reg, writable!(reg), OperandSize::S32)?;
4494            Ok(TypedReg::v128(reg))
4495        })
4496    }
4497
4498    fn visit_f64x2_nearest(&mut self) -> Self::Output {
4499        self.context.unop(self.masm, |masm, reg| {
4500            masm.v128_nearest(reg, writable!(reg), OperandSize::S64)?;
4501            Ok(TypedReg::v128(reg))
4502        })
4503    }
4504
4505    fn visit_f32x4_trunc(&mut self) -> Self::Output {
4506        self.masm
4507            .v128_trunc(&mut self.context, V128TruncKind::F32x4)
4508    }
4509
4510    fn visit_f64x2_trunc(&mut self) -> Self::Output {
4511        self.masm
4512            .v128_trunc(&mut self.context, V128TruncKind::F64x2)
4513    }
4514
4515    fn visit_v128_load32_zero(&mut self, memarg: MemArg) -> Self::Output {
4516        self.emit_wasm_load(
4517            &memarg,
4518            WasmValType::V128,
4519            LoadKind::VectorZero(OperandSize::S32),
4520        )
4521    }
4522
4523    fn visit_v128_load64_zero(&mut self, memarg: MemArg) -> Self::Output {
4524        self.emit_wasm_load(
4525            &memarg,
4526            WasmValType::V128,
4527            LoadKind::VectorZero(OperandSize::S64),
4528        )
4529    }
4530
4531    fn visit_f32x4_pmin(&mut self) -> Self::Output {
4532        self.context
4533            .binop(self.masm, OperandSize::S32, |masm, dst, src, size| {
4534                masm.v128_pmin(dst, src, writable!(dst), size)?;
4535                Ok(TypedReg::v128(dst))
4536            })
4537    }
4538
4539    fn visit_f64x2_pmin(&mut self) -> Self::Output {
4540        self.context
4541            .binop(self.masm, OperandSize::S64, |masm, dst, src, size| {
4542                masm.v128_pmin(dst, src, writable!(dst), size)?;
4543                Ok(TypedReg::v128(dst))
4544            })
4545    }
4546
4547    fn visit_f32x4_pmax(&mut self) -> Self::Output {
4548        self.context
4549            .binop(self.masm, OperandSize::S32, |masm, dst, src, size| {
4550                masm.v128_pmax(dst, src, writable!(dst), size)?;
4551                Ok(TypedReg::v128(dst))
4552            })
4553    }
4554
4555    fn visit_f64x2_pmax(&mut self) -> Self::Output {
4556        self.context
4557            .binop(self.masm, OperandSize::S64, |masm, dst, src, size| {
4558                masm.v128_pmax(dst, src, writable!(dst), size)?;
4559                Ok(TypedReg::v128(dst))
4560            })
4561    }
4562
4563    fn visit_f32x4_min(&mut self) -> Self::Output {
4564        self.context
4565            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4566                masm.v128_min(dst, src, writable!(dst), V128MinKind::F32x4)?;
4567                Ok(TypedReg::v128(dst))
4568            })
4569    }
4570
4571    fn visit_f64x2_min(&mut self) -> Self::Output {
4572        self.context
4573            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
4574                masm.v128_min(dst, src, writable!(dst), V128MinKind::F64x2)?;
4575                Ok(TypedReg::v128(dst))
4576            })
4577    }
4578
4579    fn visit_f32x4_max(&mut self) -> Self::Output {
4580        self.context
4581            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4582                masm.v128_max(dst, src, writable!(dst), V128MaxKind::F32x4)?;
4583                Ok(TypedReg::v128(dst))
4584            })
4585    }
4586
4587    fn visit_f64x2_max(&mut self) -> Self::Output {
4588        self.context
4589            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
4590                masm.v128_max(dst, src, writable!(dst), V128MaxKind::F64x2)?;
4591                Ok(TypedReg::v128(dst))
4592            })
4593    }
4594
    // For every SIMD operator that does not have an explicit visitor above,
    // `def_unsupported` (defined at the top of this module) generates a
    // visitor that flags the operator as unsupported.
    wasmparser::for_each_visit_simd_operator!(def_unsupported);
4596}
4597
4598impl<'a, 'translation, 'data, M> CodeGen<'a, 'translation, 'data, M, Emission>
4599where
4600    M: MacroAssembler,
4601{
4602    fn cmp_i32s(&mut self, kind: IntCmpKind) -> Result<()> {
4603        self.context.i32_binop(self.masm, |masm, dst, src, size| {
4604            masm.cmp_with_set(writable!(dst), src, kind, size)?;
4605            Ok(TypedReg::i32(dst))
4606        })
4607    }
4608
4609    fn cmp_i64s(&mut self, kind: IntCmpKind) -> Result<()> {
4610        self.context
4611            .i64_binop(self.masm, move |masm, dst, src, size| {
4612                masm.cmp_with_set(writable!(dst), src, kind, size)?;
4613                Ok(TypedReg::i32(dst)) // Return value for comparisons is an `i32`.
4614            })
4615    }
4616}
4617
4618impl TryFrom<WasmValType> for OperandSize {
4619    type Error = anyhow::Error;
4620    fn try_from(ty: WasmValType) -> Result<OperandSize> {
4621        let ty = match ty {
4622            WasmValType::I32 | WasmValType::F32 => OperandSize::S32,
4623            WasmValType::I64 | WasmValType::F64 => OperandSize::S64,
4624            WasmValType::V128 => OperandSize::S128,
4625            WasmValType::Ref(rt) => {
4626                match rt.heap_type {
4627                    // TODO: Hardcoded size, assuming 64-bit support only. Once
4628                    // Wasmtime supports 32-bit architectures, this will need
4629                    // to be updated in such a way that the calculation of the
4630                    // OperandSize will depend on the target's  pointer size.
4631                    WasmHeapType::Func => OperandSize::S64,
4632                    WasmHeapType::Extern => OperandSize::S64,
4633                    _ => bail!(CodeGenError::unsupported_wasm_type()),
4634                }
4635            }
4636        };
4637        Ok(ty)
4638    }
4639}