winch_codegen/
visitor.rs

1//! This module is the central place for machine code emission.
2//! It defines an implementation of wasmparser's Visitor trait
3//! for `CodeGen`; which defines a visitor per op-code,
4//! which validates and dispatches to the corresponding
5//! machine code emitter.
6
7use crate::abi::RetArea;
8use crate::codegen::{
9    Callee, CodeGen, CodeGenError, ConditionalBranch, ControlStackFrame, Emission, FnCall,
10    UnconditionalBranch, control_index,
11};
12use crate::masm::{
13    AtomicWaitKind, DivKind, Extend, ExtractLaneKind, FloatCmpKind, IntCmpKind, LoadKind,
14    MacroAssembler, MulWideKind, OperandSize, RegImm, RemKind, ReplaceLaneKind, RmwOp,
15    RoundingMode, SPOffset, ShiftKind, Signed, SplatKind, SplatLoadKind, StoreKind, TruncKind,
16    V128AbsKind, V128AddKind, V128ConvertKind, V128ExtAddKind, V128ExtMulKind, V128ExtendKind,
17    V128LoadExtendKind, V128MaxKind, V128MinKind, V128MulKind, V128NarrowKind, V128NegKind,
18    V128SubKind, V128TruncKind, VectorCompareKind, VectorEqualityKind, Zero,
19};
20
21use crate::reg::{Reg, writable};
22use crate::stack::{TypedReg, Val};
23use anyhow::{Result, anyhow, bail, ensure};
24use regalloc2::RegClass;
25use smallvec::{SmallVec, smallvec};
26use wasmparser::{
27    BlockType, BrTable, Ieee32, Ieee64, MemArg, V128, VisitOperator, VisitSimdOperator,
28};
29use wasmtime_cranelift::TRAP_INDIRECT_CALL_TO_NULL;
30use wasmtime_environ::{
31    FUNCREF_INIT_BIT, FuncIndex, GlobalIndex, MemoryIndex, TableIndex, TypeIndex, WasmHeapType,
32    WasmValType,
33};
34
/// A macro to define unsupported WebAssembly operators.
///
/// This macro calls itself recursively;
/// 1. It no-ops when matching a supported operator.
/// 2. Defines the visitor function and returns an
///    unimplemented-instruction error (via `CodeGenError`) when
///    matching an unsupported operator.
macro_rules! def_unsupported {
    // Entry point: invoked with wasmparser's full operator list (e.g. via
    // `wasmparser::for_each_visit_operator!`). For every operator it
    // re-invokes itself with the `emit <Op> <default visitor fn>` shape below.
    ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident $ann:tt)*) => {
        $(
            def_unsupported!(
                emit
                    $op

                fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
                    $($(let _ = $arg;)*)?

                    Err(anyhow!(CodeGenError::unimplemented_wasm_instruction()))
                }
            );
        )*
    };

    // Each `(emit <Op> ...)` arm below expands to nothing, marking <Op> as
    // supported: the default error-returning visitor is suppressed, so a
    // hand-written `visit_*` implementation must exist in the impl below.
    // These arms MUST appear before the catch-all arm at the bottom.
    (emit I32Const $($rest:tt)*) => {};
    (emit I64Const $($rest:tt)*) => {};
    (emit F32Const $($rest:tt)*) => {};
    (emit F64Const $($rest:tt)*) => {};
    (emit V128Const $($rest:tt)*) => {};
    (emit F32Add $($rest:tt)*) => {};
    (emit F64Add $($rest:tt)*) => {};
    (emit F32Sub $($rest:tt)*) => {};
    (emit F64Sub $($rest:tt)*) => {};
    (emit F32Mul $($rest:tt)*) => {};
    (emit F64Mul $($rest:tt)*) => {};
    (emit F32Div $($rest:tt)*) => {};
    (emit F64Div $($rest:tt)*) => {};
    (emit F32Min $($rest:tt)*) => {};
    (emit F64Min $($rest:tt)*) => {};
    (emit F32Max $($rest:tt)*) => {};
    (emit F64Max $($rest:tt)*) => {};
    (emit F32Copysign $($rest:tt)*) => {};
    (emit F64Copysign $($rest:tt)*) => {};
    (emit F32Abs $($rest:tt)*) => {};
    (emit F64Abs $($rest:tt)*) => {};
    (emit F32Neg $($rest:tt)*) => {};
    (emit F64Neg $($rest:tt)*) => {};
    (emit F32Floor $($rest:tt)*) => {};
    (emit F64Floor $($rest:tt)*) => {};
    (emit F32Ceil $($rest:tt)*) => {};
    (emit F64Ceil $($rest:tt)*) => {};
    (emit F32Nearest $($rest:tt)*) => {};
    (emit F64Nearest $($rest:tt)*) => {};
    (emit F32Trunc $($rest:tt)*) => {};
    (emit F64Trunc $($rest:tt)*) => {};
    (emit F32Sqrt $($rest:tt)*) => {};
    (emit F64Sqrt $($rest:tt)*) => {};
    (emit F32Eq $($rest:tt)*) => {};
    (emit F64Eq $($rest:tt)*) => {};
    (emit F32Ne $($rest:tt)*) => {};
    (emit F64Ne $($rest:tt)*) => {};
    (emit F32Lt $($rest:tt)*) => {};
    (emit F64Lt $($rest:tt)*) => {};
    (emit F32Gt $($rest:tt)*) => {};
    (emit F64Gt $($rest:tt)*) => {};
    (emit F32Le $($rest:tt)*) => {};
    (emit F64Le $($rest:tt)*) => {};
    (emit F32Ge $($rest:tt)*) => {};
    (emit F64Ge $($rest:tt)*) => {};
    (emit F32ConvertI32S $($rest:tt)*) => {};
    (emit F32ConvertI32U $($rest:tt)*) => {};
    (emit F32ConvertI64S $($rest:tt)*) => {};
    (emit F32ConvertI64U $($rest:tt)*) => {};
    (emit F64ConvertI32S $($rest:tt)*) => {};
    (emit F64ConvertI32U $($rest:tt)*) => {};
    (emit F64ConvertI64S $($rest:tt)*) => {};
    (emit F64ConvertI64U $($rest:tt)*) => {};
    (emit F32ReinterpretI32 $($rest:tt)*) => {};
    (emit F64ReinterpretI64 $($rest:tt)*) => {};
    (emit F32DemoteF64 $($rest:tt)*) => {};
    (emit F64PromoteF32 $($rest:tt)*) => {};
    (emit I32Add $($rest:tt)*) => {};
    (emit I64Add $($rest:tt)*) => {};
    (emit I32Sub $($rest:tt)*) => {};
    (emit I32Mul $($rest:tt)*) => {};
    (emit I32DivS $($rest:tt)*) => {};
    (emit I32DivU $($rest:tt)*) => {};
    (emit I64DivS $($rest:tt)*) => {};
    (emit I64DivU $($rest:tt)*) => {};
    (emit I64RemU $($rest:tt)*) => {};
    (emit I64RemS $($rest:tt)*) => {};
    (emit I32RemU $($rest:tt)*) => {};
    (emit I32RemS $($rest:tt)*) => {};
    (emit I64Mul $($rest:tt)*) => {};
    (emit I64Sub $($rest:tt)*) => {};
    (emit I32Eq $($rest:tt)*) => {};
    (emit I64Eq $($rest:tt)*) => {};
    (emit I32Ne $($rest:tt)*) => {};
    (emit I64Ne $($rest:tt)*) => {};
    (emit I32LtS $($rest:tt)*) => {};
    (emit I64LtS $($rest:tt)*) => {};
    (emit I32LtU $($rest:tt)*) => {};
    (emit I64LtU $($rest:tt)*) => {};
    (emit I32LeS $($rest:tt)*) => {};
    (emit I64LeS $($rest:tt)*) => {};
    (emit I32LeU $($rest:tt)*) => {};
    (emit I64LeU $($rest:tt)*) => {};
    (emit I32GtS $($rest:tt)*) => {};
    (emit I64GtS $($rest:tt)*) => {};
    (emit I32GtU $($rest:tt)*) => {};
    (emit I64GtU $($rest:tt)*) => {};
    (emit I32GeS $($rest:tt)*) => {};
    (emit I64GeS $($rest:tt)*) => {};
    (emit I32GeU $($rest:tt)*) => {};
    (emit I64GeU $($rest:tt)*) => {};
    (emit I32Eqz $($rest:tt)*) => {};
    (emit I64Eqz $($rest:tt)*) => {};
    (emit I32And $($rest:tt)*) => {};
    (emit I64And $($rest:tt)*) => {};
    (emit I32Or $($rest:tt)*) => {};
    (emit I64Or $($rest:tt)*) => {};
    (emit I32Xor $($rest:tt)*) => {};
    (emit I64Xor $($rest:tt)*) => {};
    (emit I32Shl $($rest:tt)*) => {};
    (emit I64Shl $($rest:tt)*) => {};
    (emit I32ShrS $($rest:tt)*) => {};
    (emit I64ShrS $($rest:tt)*) => {};
    (emit I32ShrU $($rest:tt)*) => {};
    (emit I64ShrU $($rest:tt)*) => {};
    (emit I32Rotl $($rest:tt)*) => {};
    (emit I64Rotl $($rest:tt)*) => {};
    (emit I32Rotr $($rest:tt)*) => {};
    (emit I64Rotr $($rest:tt)*) => {};
    (emit I32Clz $($rest:tt)*) => {};
    (emit I64Clz $($rest:tt)*) => {};
    (emit I32Ctz $($rest:tt)*) => {};
    (emit I64Ctz $($rest:tt)*) => {};
    (emit I32Popcnt $($rest:tt)*) => {};
    (emit I64Popcnt $($rest:tt)*) => {};
    (emit I32WrapI64 $($rest:tt)*) => {};
    (emit I64ExtendI32S $($rest:tt)*) => {};
    (emit I64ExtendI32U $($rest:tt)*) => {};
    (emit I32Extend8S $($rest:tt)*) => {};
    (emit I32Extend16S $($rest:tt)*) => {};
    (emit I64Extend8S $($rest:tt)*) => {};
    (emit I64Extend16S $($rest:tt)*) => {};
    (emit I64Extend32S $($rest:tt)*) => {};
    (emit I32TruncF32S $($rest:tt)*) => {};
    (emit I32TruncF32U $($rest:tt)*) => {};
    (emit I32TruncF64S $($rest:tt)*) => {};
    (emit I32TruncF64U $($rest:tt)*) => {};
    (emit I64TruncF32S $($rest:tt)*) => {};
    (emit I64TruncF32U $($rest:tt)*) => {};
    (emit I64TruncF64S $($rest:tt)*) => {};
    (emit I64TruncF64U $($rest:tt)*) => {};
    (emit I32ReinterpretF32 $($rest:tt)*) => {};
    (emit I64ReinterpretF64 $($rest:tt)*) => {};
    (emit LocalGet $($rest:tt)*) => {};
    (emit LocalSet $($rest:tt)*) => {};
    (emit Call $($rest:tt)*) => {};
    (emit End $($rest:tt)*) => {};
    (emit Nop $($rest:tt)*) => {};
    (emit If $($rest:tt)*) => {};
    (emit Else $($rest:tt)*) => {};
    (emit Block $($rest:tt)*) => {};
    (emit Loop $($rest:tt)*) => {};
    (emit Br $($rest:tt)*) => {};
    (emit BrIf $($rest:tt)*) => {};
    (emit Return $($rest:tt)*) => {};
    (emit Unreachable $($rest:tt)*) => {};
    (emit LocalTee $($rest:tt)*) => {};
    (emit GlobalGet $($rest:tt)*) => {};
    (emit GlobalSet $($rest:tt)*) => {};
    (emit Select $($rest:tt)*) => {};
    (emit Drop $($rest:tt)*) => {};
    (emit BrTable $($rest:tt)*) => {};
    (emit CallIndirect $($rest:tt)*) => {};
    (emit TableInit $($rest:tt)*) => {};
    (emit TableCopy $($rest:tt)*) => {};
    (emit TableGet $($rest:tt)*) => {};
    (emit TableSet $($rest:tt)*) => {};
    (emit TableGrow $($rest:tt)*) => {};
    (emit TableSize $($rest:tt)*) => {};
    (emit TableFill $($rest:tt)*) => {};
    (emit ElemDrop $($rest:tt)*) => {};
    (emit MemoryInit $($rest:tt)*) => {};
    (emit MemoryCopy $($rest:tt)*) => {};
    (emit DataDrop $($rest:tt)*) => {};
    (emit MemoryFill $($rest:tt)*) => {};
    (emit MemorySize $($rest:tt)*) => {};
    (emit MemoryGrow $($rest:tt)*) => {};
    (emit I32Load $($rest:tt)*) => {};
    (emit I32Load8S $($rest:tt)*) => {};
    (emit I32Load8U $($rest:tt)*) => {};
    (emit I32Load16S $($rest:tt)*) => {};
    (emit I32Load16U $($rest:tt)*) => {};
    (emit I64Load8S $($rest:tt)*) => {};
    (emit I64Load8U $($rest:tt)*) => {};
    (emit I64Load16S $($rest:tt)*) => {};
    (emit I64Load16U $($rest:tt)*) => {};
    (emit I64Load32S $($rest:tt)*) => {};
    (emit I64Load32U $($rest:tt)*) => {};
    (emit I64Load $($rest:tt)*) => {};
    (emit I32Store $($rest:tt)*) => {};
    (emit I32Store8 $($rest:tt)*) => {};
    (emit I32Store16 $($rest:tt)*) => {};
    (emit I64Store $($rest:tt)*) => {};
    (emit I64Store8 $($rest:tt)*) => {};
    (emit I64Store16 $($rest:tt)*) => {};
    (emit I64Store32 $($rest:tt)*) => {};
    (emit F32Load $($rest:tt)*) => {};
    (emit F32Store $($rest:tt)*) => {};
    (emit F64Load $($rest:tt)*) => {};
    (emit F64Store $($rest:tt)*) => {};
    (emit I32TruncSatF32S $($rest:tt)*) => {};
    (emit I32TruncSatF32U $($rest:tt)*) => {};
    (emit I32TruncSatF64S $($rest:tt)*) => {};
    (emit I32TruncSatF64U $($rest:tt)*) => {};
    (emit I64TruncSatF32S $($rest:tt)*) => {};
    (emit I64TruncSatF32U $($rest:tt)*) => {};
    (emit I64TruncSatF64S $($rest:tt)*) => {};
    (emit I64TruncSatF64U $($rest:tt)*) => {};
    (emit V128Load $($rest:tt)*) => {};
    (emit V128Store $($rest:tt)*) => {};
    (emit I64Add128 $($rest:tt)*) => {};
    (emit I64Sub128 $($rest:tt)*) => {};
    (emit I64MulWideS $($rest:tt)*) => {};
    (emit I64MulWideU $($rest:tt)*) => {};
    (emit I32AtomicLoad8U $($rest:tt)*) => {};
    (emit I32AtomicLoad16U $($rest:tt)*) => {};
    (emit I32AtomicLoad $($rest:tt)*) => {};
    (emit I64AtomicLoad8U $($rest:tt)*) => {};
    (emit I64AtomicLoad16U $($rest:tt)*) => {};
    (emit I64AtomicLoad32U $($rest:tt)*) => {};
    (emit I64AtomicLoad $($rest:tt)*) => {};
    (emit V128Load8x8S $($rest:tt)*) => {};
    (emit V128Load8x8U $($rest:tt)*) => {};
    (emit V128Load16x4S $($rest:tt)*) => {};
    (emit V128Load16x4U $($rest:tt)*) => {};
    (emit V128Load32x2S $($rest:tt)*) => {};
    (emit V128Load32x2U $($rest:tt)*) => {};
    (emit V128Load8Splat $($rest:tt)*) => {};
    (emit V128Load16Splat $($rest:tt)*) => {};
    (emit V128Load32Splat $($rest:tt)*) => {};
    (emit V128Load64Splat $($rest:tt)*) => {};
    (emit I8x16Splat $($rest:tt)*) => {};
    (emit I16x8Splat $($rest:tt)*) => {};
    (emit I32x4Splat $($rest:tt)*) => {};
    (emit I64x2Splat $($rest:tt)*) => {};
    (emit F32x4Splat $($rest:tt)*) => {};
    (emit F64x2Splat $($rest:tt)*) => {};
    (emit I32AtomicStore8 $($rest:tt)*) => {};
    (emit I32AtomicStore16 $($rest:tt)*) => {};
    (emit I32AtomicStore $($rest:tt)*) => {};
    (emit I64AtomicStore8 $($rest:tt)*) => {};
    (emit I64AtomicStore16 $($rest:tt)*) => {};
    (emit I64AtomicStore32 $($rest:tt)*) => {};
    (emit I64AtomicStore $($rest:tt)*) => {};
    (emit I32AtomicRmw8AddU $($rest:tt)*) => {};
    (emit I32AtomicRmw16AddU $($rest:tt)*) => {};
    (emit I32AtomicRmwAdd $($rest:tt)*) => {};
    (emit I64AtomicRmw8AddU $($rest:tt)*) => {};
    (emit I64AtomicRmw16AddU $($rest:tt)*) => {};
    (emit I64AtomicRmw32AddU $($rest:tt)*) => {};
    (emit I64AtomicRmwAdd $($rest:tt)*) => {};
    (emit I8x16Shuffle $($rest:tt)*) => {};
    (emit I8x16Swizzle $($rest:tt)*) => {};
    (emit I32AtomicRmw8SubU $($rest:tt)*) => {};
    (emit I32AtomicRmw16SubU $($rest:tt)*) => {};
    (emit I32AtomicRmwSub $($rest:tt)*) => {};
    (emit I64AtomicRmw8SubU $($rest:tt)*) => {};
    (emit I64AtomicRmw16SubU $($rest:tt)*) => {};
    (emit I64AtomicRmw32SubU $($rest:tt)*) => {};
    (emit I64AtomicRmwSub $($rest:tt)*) => {};
    (emit I32AtomicRmw8XchgU $($rest:tt)*) => {};
    (emit I32AtomicRmw16XchgU $($rest:tt)*) => {};
    (emit I32AtomicRmwXchg $($rest:tt)*) => {};
    (emit I64AtomicRmw8XchgU $($rest:tt)*) => {};
    (emit I64AtomicRmw16XchgU $($rest:tt)*) => {};
    (emit I64AtomicRmw32XchgU $($rest:tt)*) => {};
    (emit I64AtomicRmwXchg $($rest:tt)*) => {};
    (emit I8x16ExtractLaneS $($rest:tt)*) => {};
    (emit I8x16ExtractLaneU $($rest:tt)*) => {};
    (emit I16x8ExtractLaneS $($rest:tt)*) => {};
    (emit I16x8ExtractLaneU $($rest:tt)*) => {};
    (emit I32x4ExtractLane $($rest:tt)*) => {};
    (emit I64x2ExtractLane $($rest:tt)*) => {};
    (emit F32x4ExtractLane $($rest:tt)*) => {};
    (emit F64x2ExtractLane $($rest:tt)*) => {};
    (emit I32AtomicRmw8AndU $($rest:tt)*) => {};
    (emit I32AtomicRmw16AndU $($rest:tt)*) => {};
    (emit I32AtomicRmwAnd $($rest:tt)*) => {};
    (emit I64AtomicRmw8AndU $($rest:tt)*) => {};
    (emit I64AtomicRmw16AndU $($rest:tt)*) => {};
    (emit I64AtomicRmw32AndU $($rest:tt)*) => {};
    (emit I64AtomicRmwAnd $($rest:tt)*) => {};
    (emit I32AtomicRmw8OrU $($rest:tt)*) => {};
    (emit I32AtomicRmw16OrU $($rest:tt)*) => {};
    (emit I32AtomicRmwOr $($rest:tt)*) => {};
    (emit I64AtomicRmw8OrU $($rest:tt)*) => {};
    (emit I64AtomicRmw16OrU $($rest:tt)*) => {};
    (emit I64AtomicRmw32OrU $($rest:tt)*) => {};
    (emit I64AtomicRmwOr $($rest:tt)*) => {};
    (emit I32AtomicRmw8XorU $($rest:tt)*) => {};
    (emit I32AtomicRmw16XorU $($rest:tt)*) => {};
    (emit I32AtomicRmwXor $($rest:tt)*) => {};
    (emit I64AtomicRmw8XorU $($rest:tt)*) => {};
    (emit I64AtomicRmw16XorU $($rest:tt)*) => {};
    (emit I64AtomicRmw32XorU $($rest:tt)*) => {};
    (emit I64AtomicRmwXor $($rest:tt)*) => {};
    (emit I8x16ReplaceLane $($rest:tt)*) => {};
    (emit I16x8ReplaceLane $($rest:tt)*) => {};
    (emit I32x4ReplaceLane $($rest:tt)*) => {};
    (emit I64x2ReplaceLane $($rest:tt)*) => {};
    (emit F32x4ReplaceLane $($rest:tt)*) => {};
    (emit F64x2ReplaceLane $($rest:tt)*) => {};
    (emit I32AtomicRmw8CmpxchgU $($rest:tt)*) => {};
    (emit I32AtomicRmw16CmpxchgU $($rest:tt)*) => {};
    (emit I32AtomicRmwCmpxchg $($rest:tt)*) => {};
    (emit I64AtomicRmw8CmpxchgU $($rest:tt)*) => {};
    (emit I64AtomicRmw16CmpxchgU $($rest:tt)*) => {};
    (emit I64AtomicRmw32CmpxchgU $($rest:tt)*) => {};
    (emit I64AtomicRmwCmpxchg $($rest:tt)*) => {};
    (emit I8x16Eq $($rest:tt)*) => {};
    (emit I16x8Eq $($rest:tt)*) => {};
    (emit I32x4Eq $($rest:tt)*) => {};
    (emit I64x2Eq $($rest:tt)*) => {};
    (emit F32x4Eq $($rest:tt)*) => {};
    (emit F64x2Eq $($rest:tt)*) => {};
    (emit I8x16Ne $($rest:tt)*) => {};
    (emit I16x8Ne $($rest:tt)*) => {};
    (emit I32x4Ne $($rest:tt)*) => {};
    (emit I64x2Ne $($rest:tt)*) => {};
    (emit F32x4Ne $($rest:tt)*) => {};
    (emit F64x2Ne $($rest:tt)*) => {};
    (emit I8x16LtS $($rest:tt)*) => {};
    (emit I8x16LtU $($rest:tt)*) => {};
    (emit I16x8LtS $($rest:tt)*) => {};
    (emit I16x8LtU $($rest:tt)*) => {};
    (emit I32x4LtS $($rest:tt)*) => {};
    (emit I32x4LtU $($rest:tt)*) => {};
    (emit I64x2LtS $($rest:tt)*) => {};
    (emit F32x4Lt $($rest:tt)*) => {};
    (emit F64x2Lt $($rest:tt)*) => {};
    (emit I8x16LeS $($rest:tt)*) => {};
    (emit I8x16LeU $($rest:tt)*) => {};
    (emit I16x8LeS $($rest:tt)*) => {};
    (emit I16x8LeU $($rest:tt)*) => {};
    (emit I32x4LeS $($rest:tt)*) => {};
    (emit I32x4LeU $($rest:tt)*) => {};
    (emit I64x2LeS $($rest:tt)*) => {};
    (emit F32x4Le $($rest:tt)*) => {};
    (emit F64x2Le $($rest:tt)*) => {};
    (emit I8x16GtS $($rest:tt)*) => {};
    (emit I8x16GtU $($rest:tt)*) => {};
    (emit I16x8GtS $($rest:tt)*) => {};
    (emit I16x8GtU $($rest:tt)*) => {};
    (emit I32x4GtS $($rest:tt)*) => {};
    (emit I32x4GtU $($rest:tt)*) => {};
    (emit I64x2GtS $($rest:tt)*) => {};
    (emit F32x4Gt $($rest:tt)*) => {};
    (emit F64x2Gt $($rest:tt)*) => {};
    (emit I8x16GeS $($rest:tt)*) => {};
    (emit I8x16GeU $($rest:tt)*) => {};
    (emit I16x8GeS $($rest:tt)*) => {};
    (emit I16x8GeU $($rest:tt)*) => {};
    (emit I32x4GeS $($rest:tt)*) => {};
    (emit I32x4GeU $($rest:tt)*) => {};
    (emit I64x2GeS $($rest:tt)*) => {};
    (emit F32x4Ge $($rest:tt)*) => {};
    (emit F64x2Ge $($rest:tt)*) => {};
    (emit MemoryAtomicWait32 $($rest:tt)*) => {};
    (emit MemoryAtomicWait64 $($rest:tt)*) => {};
    (emit MemoryAtomicNotify $($rest:tt)*) => {};
    (emit AtomicFence $($rest:tt)*) => {};
    (emit V128Not $($rest:tt)*) => {};
    (emit V128And $($rest:tt)*) => {};
    (emit V128AndNot $($rest:tt)*) => {};
    (emit V128Or $($rest:tt)*) => {};
    (emit V128Xor $($rest:tt)*) => {};
    (emit V128Bitselect $($rest:tt)*) => {};
    (emit V128AnyTrue $($rest:tt)*) => {};
    (emit V128Load8Lane $($rest:tt)*) => {};
    (emit V128Load16Lane $($rest:tt)*) => {};
    (emit V128Load32Lane $($rest:tt)*) => {};
    (emit V128Load64Lane $($rest:tt)*) => {};
    (emit V128Store8Lane $($rest:tt)*) => {};
    (emit V128Store16Lane $($rest:tt)*) => {};
    (emit V128Store32Lane $($rest:tt)*) => {};
    (emit V128Store64Lane $($rest:tt)*) => {};
    (emit F32x4ConvertI32x4S $($rest:tt)*) => {};
    (emit F32x4ConvertI32x4U $($rest:tt)*) => {};
    (emit F64x2ConvertLowI32x4S $($rest:tt)*) => {};
    (emit F64x2ConvertLowI32x4U $($rest:tt)*) => {};
    (emit I8x16NarrowI16x8S $($rest:tt)*) => {};
    (emit I8x16NarrowI16x8U $($rest:tt)*) => {};
    (emit I16x8NarrowI32x4S $($rest:tt)*) => {};
    (emit I16x8NarrowI32x4U $($rest:tt)*) => {};
    (emit F32x4DemoteF64x2Zero $($rest:tt)*) => {};
    (emit F64x2PromoteLowF32x4 $($rest:tt)*) => {};
    (emit I16x8ExtendLowI8x16S $($rest:tt)*) => {};
    (emit I16x8ExtendHighI8x16S $($rest:tt)*) => {};
    (emit I16x8ExtendLowI8x16U $($rest:tt)*) => {};
    (emit I16x8ExtendHighI8x16U $($rest:tt)*) => {};
    (emit I32x4ExtendLowI16x8S $($rest:tt)*) => {};
    (emit I32x4ExtendHighI16x8S $($rest:tt)*) => {};
    (emit I32x4ExtendLowI16x8U $($rest:tt)*) => {};
    (emit I32x4ExtendHighI16x8U $($rest:tt)*) => {};
    (emit I64x2ExtendLowI32x4S $($rest:tt)*) => {};
    (emit I64x2ExtendHighI32x4S $($rest:tt)*) => {};
    (emit I64x2ExtendLowI32x4U $($rest:tt)*) => {};
    (emit I64x2ExtendHighI32x4U $($rest:tt)*) => {};
    (emit I8x16Add $($rest:tt)*) => {};
    (emit I16x8Add $($rest:tt)*) => {};
    (emit I32x4Add $($rest:tt)*) => {};
    (emit I64x2Add $($rest:tt)*) => {};
    (emit I8x16Sub $($rest:tt)*) => {};
    (emit I16x8Sub $($rest:tt)*) => {};
    (emit I32x4Sub $($rest:tt)*) => {};
    (emit I64x2Sub $($rest:tt)*) => {};
    (emit I16x8Mul $($rest:tt)*) => {};
    (emit I32x4Mul $($rest:tt)*) => {};
    (emit I64x2Mul $($rest:tt)*) => {};
    (emit I8x16AddSatS $($rest:tt)*) => {};
    (emit I16x8AddSatS $($rest:tt)*) => {};
    (emit I8x16AddSatU $($rest:tt)*) => {};
    (emit I16x8AddSatU $($rest:tt)*) => {};
    (emit I8x16SubSatS $($rest:tt)*) => {};
    (emit I16x8SubSatS $($rest:tt)*) => {};
    (emit I8x16SubSatU $($rest:tt)*) => {};
    (emit I16x8SubSatU $($rest:tt)*) => {};
    (emit I8x16Abs $($rest:tt)*) => {};
    (emit I16x8Abs $($rest:tt)*) => {};
    (emit I32x4Abs $($rest:tt)*) => {};
    (emit I64x2Abs $($rest:tt)*) => {};
    (emit F32x4Abs $($rest:tt)*) => {};
    (emit F64x2Abs $($rest:tt)*) => {};
    (emit I8x16Neg $($rest:tt)*) => {};
    (emit I16x8Neg $($rest:tt)*) => {};
    (emit I32x4Neg $($rest:tt)*) => {};
    (emit I64x2Neg $($rest:tt)*) => {};
    (emit I8x16Shl $($rest:tt)*) => {};
    (emit I16x8Shl $($rest:tt)*) => {};
    (emit I32x4Shl $($rest:tt)*) => {};
    (emit I64x2Shl $($rest:tt)*) => {};
    (emit I8x16ShrU $($rest:tt)*) => {};
    (emit I16x8ShrU $($rest:tt)*) => {};
    (emit I32x4ShrU $($rest:tt)*) => {};
    (emit I64x2ShrU $($rest:tt)*) => {};
    (emit I8x16ShrS $($rest:tt)*) => {};
    (emit I16x8ShrS $($rest:tt)*) => {};
    (emit I32x4ShrS $($rest:tt)*) => {};
    (emit I64x2ShrS $($rest:tt)*) => {};
    (emit I16x8Q15MulrSatS $($rest:tt)*) => {};
    (emit I8x16AllTrue $($rest:tt)*) => {};
    (emit I16x8AllTrue $($rest:tt)*) => {};
    (emit I32x4AllTrue $($rest:tt)*) => {};
    (emit I64x2AllTrue $($rest:tt)*) => {};
    (emit I8x16Bitmask $($rest:tt)*) => {};
    (emit I16x8Bitmask $($rest:tt)*) => {};
    (emit I32x4Bitmask $($rest:tt)*) => {};
    (emit I64x2Bitmask $($rest:tt)*) => {};
    (emit I32x4TruncSatF32x4S $($rest:tt)*) => {};
    (emit I32x4TruncSatF32x4U $($rest:tt)*) => {};
    (emit I32x4TruncSatF64x2SZero $($rest:tt)*) => {};
    (emit I32x4TruncSatF64x2UZero $($rest:tt)*) => {};
    (emit I8x16MinU $($rest:tt)*) => {};
    (emit I16x8MinU $($rest:tt)*) => {};
    (emit I32x4MinU $($rest:tt)*) => {};
    (emit I8x16MinS $($rest:tt)*) => {};
    (emit I16x8MinS $($rest:tt)*) => {};
    (emit I32x4MinS $($rest:tt)*) => {};
    (emit I8x16MaxU $($rest:tt)*) => {};
    (emit I16x8MaxU $($rest:tt)*) => {};
    (emit I32x4MaxU $($rest:tt)*) => {};
    (emit I8x16MaxS $($rest:tt)*) => {};
    (emit I16x8MaxS $($rest:tt)*) => {};
    (emit I32x4MaxS $($rest:tt)*) => {};
    (emit I16x8ExtMulLowI8x16S $($rest:tt)*) => {};
    (emit I32x4ExtMulLowI16x8S $($rest:tt)*) => {};
    (emit I64x2ExtMulLowI32x4S $($rest:tt)*) => {};
    (emit I16x8ExtMulHighI8x16S $($rest:tt)*) => {};
    (emit I32x4ExtMulHighI16x8S $($rest:tt)*) => {};
    (emit I64x2ExtMulHighI32x4S $($rest:tt)*) => {};
    (emit I16x8ExtMulLowI8x16U $($rest:tt)*) => {};
    (emit I32x4ExtMulLowI16x8U $($rest:tt)*) => {};
    (emit I64x2ExtMulLowI32x4U $($rest:tt)*) => {};
    (emit I16x8ExtMulHighI8x16U $($rest:tt)*) => {};
    (emit I32x4ExtMulHighI16x8U $($rest:tt)*) => {};
    (emit I64x2ExtMulHighI32x4U $($rest:tt)*) => {};
    (emit I16x8ExtAddPairwiseI8x16U $($rest:tt)*) => {};
    (emit I16x8ExtAddPairwiseI8x16S $($rest:tt)*) => {};
    (emit I32x4ExtAddPairwiseI16x8U $($rest:tt)*) => {};
    (emit I32x4ExtAddPairwiseI16x8S $($rest:tt)*) => {};
    (emit I32x4DotI16x8S $($rest:tt)*) => {};
    (emit I8x16Popcnt $($rest:tt)*) => {};
    (emit I8x16AvgrU $($rest:tt)*) => {};
    (emit I16x8AvgrU $($rest:tt)*) => {};
    (emit F32x4Add $($rest:tt)*) => {};
    (emit F64x2Add $($rest:tt)*) => {};
    (emit F32x4Sub $($rest:tt)*) => {};
    (emit F64x2Sub $($rest:tt)*) => {};
    (emit F32x4Mul $($rest:tt)*) => {};
    (emit F64x2Mul $($rest:tt)*) => {};
    (emit F32x4Div $($rest:tt)*) => {};
    (emit F64x2Div $($rest:tt)*) => {};
    (emit F32x4Neg $($rest:tt)*) => {};
    (emit F64x2Neg $($rest:tt)*) => {};
    (emit F32x4Sqrt $($rest:tt)*) => {};
    (emit F64x2Sqrt $($rest:tt)*) => {};
    (emit F32x4Ceil $($rest:tt)*) => {};
    (emit F64x2Ceil $($rest:tt)*) => {};
    (emit F32x4Floor $($rest:tt)*) => {};
    (emit F64x2Floor $($rest:tt)*) => {};
    (emit F32x4Nearest $($rest:tt)*) => {};
    (emit F64x2Nearest $($rest:tt)*) => {};
    (emit F32x4Trunc $($rest:tt)*) => {};
    (emit F64x2Trunc $($rest:tt)*) => {};
    (emit V128Load32Zero $($rest:tt)*) => {};
    (emit V128Load64Zero $($rest:tt)*) => {};
    (emit F32x4PMin $($rest:tt)*) => {};
    (emit F64x2PMin $($rest:tt)*) => {};
    (emit F32x4PMax $($rest:tt)*) => {};
    (emit F64x2PMax $($rest:tt)*) => {};
    (emit F32x4Min $($rest:tt)*) => {};
    (emit F64x2Min $($rest:tt)*) => {};
    (emit F32x4Max $($rest:tt)*) => {};
    (emit F64x2Max $($rest:tt)*) => {};

    // Fallback: any operator not matched by an arm above is unsupported, so
    // expand to the default visitor (passed in `$rest`), which discards its
    // arguments and returns the unimplemented-instruction error.
    (emit $unsupported:tt $($rest:tt)*) => {$($rest)*};
}
564
565impl<'a, 'translation, 'data, M> VisitOperator<'a> for CodeGen<'a, 'translation, 'data, M, Emission>
566where
567    M: MacroAssembler,
568{
569    type Output = Result<()>;
570
571    fn visit_i32_const(&mut self, val: i32) -> Self::Output {
572        self.context.stack.push(Val::i32(val));
573
574        Ok(())
575    }
576
577    fn visit_i64_const(&mut self, val: i64) -> Self::Output {
578        self.context.stack.push(Val::i64(val));
579        Ok(())
580    }
581
582    fn visit_f32_const(&mut self, val: Ieee32) -> Self::Output {
583        self.context.stack.push(Val::f32(val));
584        Ok(())
585    }
586
587    fn visit_f64_const(&mut self, val: Ieee64) -> Self::Output {
588        self.context.stack.push(Val::f64(val));
589        Ok(())
590    }
591
592    fn visit_f32_add(&mut self) -> Self::Output {
593        self.context.binop(
594            self.masm,
595            OperandSize::S32,
596            &mut |masm: &mut M, dst, src, size| {
597                masm.float_add(writable!(dst), dst, src, size)?;
598                Ok(TypedReg::f32(dst))
599            },
600        )
601    }
602
603    fn visit_f64_add(&mut self) -> Self::Output {
604        self.context.binop(
605            self.masm,
606            OperandSize::S64,
607            &mut |masm: &mut M, dst, src, size| {
608                masm.float_add(writable!(dst), dst, src, size)?;
609                Ok(TypedReg::f64(dst))
610            },
611        )
612    }
613
614    fn visit_f32_sub(&mut self) -> Self::Output {
615        self.context.binop(
616            self.masm,
617            OperandSize::S32,
618            &mut |masm: &mut M, dst, src, size| {
619                masm.float_sub(writable!(dst), dst, src, size)?;
620                Ok(TypedReg::f32(dst))
621            },
622        )
623    }
624
625    fn visit_f64_sub(&mut self) -> Self::Output {
626        self.context.binop(
627            self.masm,
628            OperandSize::S64,
629            &mut |masm: &mut M, dst, src, size| {
630                masm.float_sub(writable!(dst), dst, src, size)?;
631                Ok(TypedReg::f64(dst))
632            },
633        )
634    }
635
636    fn visit_f32_mul(&mut self) -> Self::Output {
637        self.context.binop(
638            self.masm,
639            OperandSize::S32,
640            &mut |masm: &mut M, dst, src, size| {
641                masm.float_mul(writable!(dst), dst, src, size)?;
642                Ok(TypedReg::f32(dst))
643            },
644        )
645    }
646
647    fn visit_f64_mul(&mut self) -> Self::Output {
648        self.context.binop(
649            self.masm,
650            OperandSize::S64,
651            &mut |masm: &mut M, dst, src, size| {
652                masm.float_mul(writable!(dst), dst, src, size)?;
653                Ok(TypedReg::f64(dst))
654            },
655        )
656    }
657
658    fn visit_f32_div(&mut self) -> Self::Output {
659        self.context.binop(
660            self.masm,
661            OperandSize::S32,
662            &mut |masm: &mut M, dst, src, size| {
663                masm.float_div(writable!(dst), dst, src, size)?;
664                Ok(TypedReg::f32(dst))
665            },
666        )
667    }
668
669    fn visit_f64_div(&mut self) -> Self::Output {
670        self.context.binop(
671            self.masm,
672            OperandSize::S64,
673            &mut |masm: &mut M, dst, src, size| {
674                masm.float_div(writable!(dst), dst, src, size)?;
675                Ok(TypedReg::f64(dst))
676            },
677        )
678    }
679
680    fn visit_f32_min(&mut self) -> Self::Output {
681        self.context.binop(
682            self.masm,
683            OperandSize::S32,
684            &mut |masm: &mut M, dst, src, size| {
685                masm.float_min(writable!(dst), dst, src, size)?;
686                Ok(TypedReg::f32(dst))
687            },
688        )
689    }
690
691    fn visit_f64_min(&mut self) -> Self::Output {
692        self.context.binop(
693            self.masm,
694            OperandSize::S64,
695            &mut |masm: &mut M, dst, src, size| {
696                masm.float_min(writable!(dst), dst, src, size)?;
697                Ok(TypedReg::f64(dst))
698            },
699        )
700    }
701
702    fn visit_f32_max(&mut self) -> Self::Output {
703        self.context.binop(
704            self.masm,
705            OperandSize::S32,
706            &mut |masm: &mut M, dst, src, size| {
707                masm.float_max(writable!(dst), dst, src, size)?;
708                Ok(TypedReg::f32(dst))
709            },
710        )
711    }
712
713    fn visit_f64_max(&mut self) -> Self::Output {
714        self.context.binop(
715            self.masm,
716            OperandSize::S64,
717            &mut |masm: &mut M, dst, src, size| {
718                masm.float_max(writable!(dst), dst, src, size)?;
719                Ok(TypedReg::f64(dst))
720            },
721        )
722    }
723
724    fn visit_f32_copysign(&mut self) -> Self::Output {
725        self.context.binop(
726            self.masm,
727            OperandSize::S32,
728            &mut |masm: &mut M, dst, src, size| {
729                masm.float_copysign(writable!(dst), dst, src, size)?;
730                Ok(TypedReg::f32(dst))
731            },
732        )
733    }
734
735    fn visit_f64_copysign(&mut self) -> Self::Output {
736        self.context.binop(
737            self.masm,
738            OperandSize::S64,
739            &mut |masm: &mut M, dst, src, size| {
740                masm.float_copysign(writable!(dst), dst, src, size)?;
741                Ok(TypedReg::f64(dst))
742            },
743        )
744    }
745
746    fn visit_f32_abs(&mut self) -> Self::Output {
747        self.context.unop(self.masm, |masm, reg| {
748            masm.float_abs(writable!(reg), OperandSize::S32)?;
749            Ok(TypedReg::f32(reg))
750        })
751    }
752
753    fn visit_f64_abs(&mut self) -> Self::Output {
754        self.context.unop(self.masm, |masm, reg| {
755            masm.float_abs(writable!(reg), OperandSize::S64)?;
756            Ok(TypedReg::f64(reg))
757        })
758    }
759
760    fn visit_f32_neg(&mut self) -> Self::Output {
761        self.context.unop(self.masm, |masm, reg| {
762            masm.float_neg(writable!(reg), OperandSize::S32)?;
763            Ok(TypedReg::f32(reg))
764        })
765    }
766
767    fn visit_f64_neg(&mut self) -> Self::Output {
768        self.context.unop(self.masm, |masm, reg| {
769            masm.float_neg(writable!(reg), OperandSize::S64)?;
770            Ok(TypedReg::f64(reg))
771        })
772    }
773
774    fn visit_f32_floor(&mut self) -> Self::Output {
775        self.masm.float_round(
776            RoundingMode::Down,
777            &mut self.env,
778            &mut self.context,
779            OperandSize::S32,
780            |env, cx, masm| {
781                let builtin = env.builtins.floor_f32::<M::ABI, M::Ptr>()?;
782                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
783            },
784        )
785    }
786
787    fn visit_f64_floor(&mut self) -> Self::Output {
788        self.masm.float_round(
789            RoundingMode::Down,
790            &mut self.env,
791            &mut self.context,
792            OperandSize::S64,
793            |env, cx, masm| {
794                let builtin = env.builtins.floor_f64::<M::ABI, M::Ptr>()?;
795                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
796            },
797        )
798    }
799
800    fn visit_f32_ceil(&mut self) -> Self::Output {
801        self.masm.float_round(
802            RoundingMode::Up,
803            &mut self.env,
804            &mut self.context,
805            OperandSize::S32,
806            |env, cx, masm| {
807                let builtin = env.builtins.ceil_f32::<M::ABI, M::Ptr>()?;
808                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
809            },
810        )
811    }
812
813    fn visit_f64_ceil(&mut self) -> Self::Output {
814        self.masm.float_round(
815            RoundingMode::Up,
816            &mut self.env,
817            &mut self.context,
818            OperandSize::S64,
819            |env, cx, masm| {
820                let builtin = env.builtins.ceil_f64::<M::ABI, M::Ptr>()?;
821                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
822            },
823        )
824    }
825
826    fn visit_f32_nearest(&mut self) -> Self::Output {
827        self.masm.float_round(
828            RoundingMode::Nearest,
829            &mut self.env,
830            &mut self.context,
831            OperandSize::S32,
832            |env, cx, masm| {
833                let builtin = env.builtins.nearest_f32::<M::ABI, M::Ptr>()?;
834                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
835            },
836        )
837    }
838
839    fn visit_f64_nearest(&mut self) -> Self::Output {
840        self.masm.float_round(
841            RoundingMode::Nearest,
842            &mut self.env,
843            &mut self.context,
844            OperandSize::S64,
845            |env, cx, masm| {
846                let builtin = env.builtins.nearest_f64::<M::ABI, M::Ptr>()?;
847                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
848            },
849        )
850    }
851
852    fn visit_f32_trunc(&mut self) -> Self::Output {
853        self.masm.float_round(
854            RoundingMode::Zero,
855            &mut self.env,
856            &mut self.context,
857            OperandSize::S32,
858            |env, cx, masm| {
859                let builtin = env.builtins.trunc_f32::<M::ABI, M::Ptr>()?;
860                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
861            },
862        )
863    }
864
865    fn visit_f64_trunc(&mut self) -> Self::Output {
866        self.masm.float_round(
867            RoundingMode::Zero,
868            &mut self.env,
869            &mut self.context,
870            OperandSize::S64,
871            |env, cx, masm| {
872                let builtin = env.builtins.trunc_f64::<M::ABI, M::Ptr>()?;
873                FnCall::emit::<M>(env, masm, cx, Callee::Builtin(builtin))
874            },
875        )
876    }
877
878    fn visit_f32_sqrt(&mut self) -> Self::Output {
879        self.context.unop(self.masm, |masm, reg| {
880            masm.float_sqrt(writable!(reg), reg, OperandSize::S32)?;
881            Ok(TypedReg::f32(reg))
882        })
883    }
884
885    fn visit_f64_sqrt(&mut self) -> Self::Output {
886        self.context.unop(self.masm, |masm, reg| {
887            masm.float_sqrt(writable!(reg), reg, OperandSize::S64)?;
888            Ok(TypedReg::f64(reg))
889        })
890    }
891
892    fn visit_f32_eq(&mut self) -> Self::Output {
893        self.context.float_cmp_op(
894            self.masm,
895            OperandSize::S32,
896            &mut |masm: &mut M, dst, src1, src2, size| {
897                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Eq, size)
898            },
899        )
900    }
901
902    fn visit_f64_eq(&mut self) -> Self::Output {
903        self.context.float_cmp_op(
904            self.masm,
905            OperandSize::S64,
906            &mut |masm: &mut M, dst, src1, src2, size| {
907                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Eq, size)
908            },
909        )
910    }
911
912    fn visit_f32_ne(&mut self) -> Self::Output {
913        self.context.float_cmp_op(
914            self.masm,
915            OperandSize::S32,
916            &mut |masm: &mut M, dst, src1, src2, size| {
917                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Ne, size)
918            },
919        )
920    }
921
922    fn visit_f64_ne(&mut self) -> Self::Output {
923        self.context.float_cmp_op(
924            self.masm,
925            OperandSize::S64,
926            &mut |masm: &mut M, dst, src1, src2, size| {
927                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Ne, size)
928            },
929        )
930    }
931
932    fn visit_f32_lt(&mut self) -> Self::Output {
933        self.context.float_cmp_op(
934            self.masm,
935            OperandSize::S32,
936            &mut |masm: &mut M, dst, src1, src2, size| {
937                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Lt, size)
938            },
939        )
940    }
941
942    fn visit_f64_lt(&mut self) -> Self::Output {
943        self.context.float_cmp_op(
944            self.masm,
945            OperandSize::S64,
946            &mut |masm: &mut M, dst, src1, src2, size| {
947                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Lt, size)
948            },
949        )
950    }
951
952    fn visit_f32_gt(&mut self) -> Self::Output {
953        self.context.float_cmp_op(
954            self.masm,
955            OperandSize::S32,
956            &mut |masm: &mut M, dst, src1, src2, size| {
957                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Gt, size)
958            },
959        )
960    }
961
962    fn visit_f64_gt(&mut self) -> Self::Output {
963        self.context.float_cmp_op(
964            self.masm,
965            OperandSize::S64,
966            &mut |masm: &mut M, dst, src1, src2, size| {
967                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Gt, size)
968            },
969        )
970    }
971
972    fn visit_f32_le(&mut self) -> Self::Output {
973        self.context.float_cmp_op(
974            self.masm,
975            OperandSize::S32,
976            &mut |masm: &mut M, dst, src1, src2, size| {
977                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Le, size)
978            },
979        )
980    }
981
982    fn visit_f64_le(&mut self) -> Self::Output {
983        self.context.float_cmp_op(
984            self.masm,
985            OperandSize::S64,
986            &mut |masm: &mut M, dst, src1, src2, size| {
987                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Le, size)
988            },
989        )
990    }
991
992    fn visit_f32_ge(&mut self) -> Self::Output {
993        self.context.float_cmp_op(
994            self.masm,
995            OperandSize::S32,
996            &mut |masm: &mut M, dst, src1, src2, size| {
997                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Ge, size)
998            },
999        )
1000    }
1001
1002    fn visit_f64_ge(&mut self) -> Self::Output {
1003        self.context.float_cmp_op(
1004            self.masm,
1005            OperandSize::S64,
1006            &mut |masm: &mut M, dst, src1, src2, size| {
1007                masm.float_cmp_with_set(writable!(dst), src1, src2, FloatCmpKind::Ge, size)
1008            },
1009        )
1010    }
1011
1012    fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
1013        self.context
1014            .convert_op(self.masm, WasmValType::F32, |masm, dst, src, dst_size| {
1015                masm.signed_convert(writable!(dst), src, OperandSize::S32, dst_size)
1016            })
1017    }
1018
1019    fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
1020        self.context.convert_op_with_tmp_reg(
1021            self.masm,
1022            WasmValType::F32,
1023            RegClass::Int,
1024            |masm, dst, src, tmp_gpr, dst_size| {
1025                masm.unsigned_convert(writable!(dst), src, tmp_gpr, OperandSize::S32, dst_size)
1026            },
1027        )
1028    }
1029
1030    fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
1031        self.context
1032            .convert_op(self.masm, WasmValType::F32, |masm, dst, src, dst_size| {
1033                masm.signed_convert(writable!(dst), src, OperandSize::S64, dst_size)
1034            })
1035    }
1036
1037    fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
1038        self.context.convert_op_with_tmp_reg(
1039            self.masm,
1040            WasmValType::F32,
1041            RegClass::Int,
1042            |masm, dst, src, tmp_gpr, dst_size| {
1043                masm.unsigned_convert(writable!(dst), src, tmp_gpr, OperandSize::S64, dst_size)
1044            },
1045        )
1046    }
1047
1048    fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
1049        self.context
1050            .convert_op(self.masm, WasmValType::F64, |masm, dst, src, dst_size| {
1051                masm.signed_convert(writable!(dst), src, OperandSize::S32, dst_size)
1052            })
1053    }
1054
1055    fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
1056        self.context.convert_op_with_tmp_reg(
1057            self.masm,
1058            WasmValType::F64,
1059            RegClass::Int,
1060            |masm, dst, src, tmp_gpr, dst_size| {
1061                masm.unsigned_convert(writable!(dst), src, tmp_gpr, OperandSize::S32, dst_size)
1062            },
1063        )
1064    }
1065
1066    fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
1067        self.context
1068            .convert_op(self.masm, WasmValType::F64, |masm, dst, src, dst_size| {
1069                masm.signed_convert(writable!(dst), src, OperandSize::S64, dst_size)
1070            })
1071    }
1072
1073    fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
1074        self.context.convert_op_with_tmp_reg(
1075            self.masm,
1076            WasmValType::F64,
1077            RegClass::Int,
1078            |masm, dst, src, tmp_gpr, dst_size| {
1079                masm.unsigned_convert(writable!(dst), src, tmp_gpr, OperandSize::S64, dst_size)
1080            },
1081        )
1082    }
1083
1084    fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
1085        self.context
1086            .convert_op(self.masm, WasmValType::F32, |masm, dst, src, size| {
1087                masm.reinterpret_int_as_float(writable!(dst), src, size)
1088            })
1089    }
1090
1091    fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
1092        self.context
1093            .convert_op(self.masm, WasmValType::F64, |masm, dst, src, size| {
1094                masm.reinterpret_int_as_float(writable!(dst), src, size)
1095            })
1096    }
1097
1098    fn visit_f32_demote_f64(&mut self) -> Self::Output {
1099        self.context.unop(self.masm, |masm, reg| {
1100            masm.demote(writable!(reg), reg)?;
1101            Ok(TypedReg::f32(reg))
1102        })
1103    }
1104
1105    fn visit_f64_promote_f32(&mut self) -> Self::Output {
1106        self.context.unop(self.masm, |masm, reg| {
1107            masm.promote(writable!(reg), reg)?;
1108            Ok(TypedReg::f64(reg))
1109        })
1110    }
1111
1112    fn visit_i32_add(&mut self) -> Self::Output {
1113        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1114            masm.add(writable!(dst), dst, src, size)?;
1115            Ok(TypedReg::i32(dst))
1116        })
1117    }
1118
1119    fn visit_i64_add(&mut self) -> Self::Output {
1120        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1121            masm.add(writable!(dst), dst, src, size)?;
1122            Ok(TypedReg::i64(dst))
1123        })
1124    }
1125
1126    fn visit_i32_sub(&mut self) -> Self::Output {
1127        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1128            masm.sub(writable!(dst), dst, src, size)?;
1129            Ok(TypedReg::i32(dst))
1130        })
1131    }
1132
1133    fn visit_i64_sub(&mut self) -> Self::Output {
1134        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1135            masm.sub(writable!(dst), dst, src, size)?;
1136            Ok(TypedReg::i64(dst))
1137        })
1138    }
1139
1140    fn visit_i32_mul(&mut self) -> Self::Output {
1141        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1142            masm.mul(writable!(dst), dst, src, size)?;
1143            Ok(TypedReg::i32(dst))
1144        })
1145    }
1146
1147    fn visit_i64_mul(&mut self) -> Self::Output {
1148        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1149            masm.mul(writable!(dst), dst, src, size)?;
1150            Ok(TypedReg::i64(dst))
1151        })
1152    }
1153
1154    fn visit_i32_div_s(&mut self) -> Self::Output {
1155        use DivKind::*;
1156        use OperandSize::*;
1157
1158        self.masm.div(&mut self.context, Signed, S32)
1159    }
1160
1161    fn visit_i32_div_u(&mut self) -> Self::Output {
1162        use DivKind::*;
1163        use OperandSize::*;
1164
1165        self.masm.div(&mut self.context, Unsigned, S32)
1166    }
1167
1168    fn visit_i64_div_s(&mut self) -> Self::Output {
1169        use DivKind::*;
1170        use OperandSize::*;
1171
1172        self.masm.div(&mut self.context, Signed, S64)
1173    }
1174
1175    fn visit_i64_div_u(&mut self) -> Self::Output {
1176        use DivKind::*;
1177        use OperandSize::*;
1178
1179        self.masm.div(&mut self.context, Unsigned, S64)
1180    }
1181
1182    fn visit_i32_rem_s(&mut self) -> Self::Output {
1183        use OperandSize::*;
1184        use RemKind::*;
1185
1186        self.masm.rem(&mut self.context, Signed, S32)
1187    }
1188
1189    fn visit_i32_rem_u(&mut self) -> Self::Output {
1190        use OperandSize::*;
1191        use RemKind::*;
1192
1193        self.masm.rem(&mut self.context, Unsigned, S32)
1194    }
1195
1196    fn visit_i64_rem_s(&mut self) -> Self::Output {
1197        use OperandSize::*;
1198        use RemKind::*;
1199
1200        self.masm.rem(&mut self.context, Signed, S64)
1201    }
1202
1203    fn visit_i64_rem_u(&mut self) -> Self::Output {
1204        use OperandSize::*;
1205        use RemKind::*;
1206
1207        self.masm.rem(&mut self.context, Unsigned, S64)
1208    }
1209
    /// `i32.eq`: integer equality comparison.
    fn visit_i32_eq(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::Eq)
    }
1213
    /// `i64.eq`: integer equality comparison.
    fn visit_i64_eq(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::Eq)
    }
1217
    /// `i32.ne`: integer inequality comparison.
    fn visit_i32_ne(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::Ne)
    }
1221
    /// `i64.ne`: integer inequality comparison.
    fn visit_i64_ne(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::Ne)
    }
1225
    /// `i32.lt_s`: signed less-than comparison.
    fn visit_i32_lt_s(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::LtS)
    }
1229
    /// `i64.lt_s`: signed less-than comparison.
    fn visit_i64_lt_s(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::LtS)
    }
1233
    /// `i32.lt_u`: unsigned less-than comparison.
    fn visit_i32_lt_u(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::LtU)
    }
1237
    /// `i64.lt_u`: unsigned less-than comparison.
    fn visit_i64_lt_u(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::LtU)
    }
1241
    /// `i32.le_s`: signed less-than-or-equal comparison.
    fn visit_i32_le_s(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::LeS)
    }
1245
    /// `i64.le_s`: signed less-than-or-equal comparison.
    fn visit_i64_le_s(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::LeS)
    }
1249
    /// `i32.le_u`: unsigned less-than-or-equal comparison.
    fn visit_i32_le_u(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::LeU)
    }
1253
    /// `i64.le_u`: unsigned less-than-or-equal comparison.
    fn visit_i64_le_u(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::LeU)
    }
1257
    /// `i32.gt_s`: signed greater-than comparison.
    fn visit_i32_gt_s(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::GtS)
    }
1261
    /// `i64.gt_s`: signed greater-than comparison.
    fn visit_i64_gt_s(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::GtS)
    }
1265
    /// `i32.gt_u`: unsigned greater-than comparison.
    fn visit_i32_gt_u(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::GtU)
    }
1269
    /// `i64.gt_u`: unsigned greater-than comparison.
    fn visit_i64_gt_u(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::GtU)
    }
1273
    /// `i32.ge_s`: signed greater-than-or-equal comparison.
    fn visit_i32_ge_s(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::GeS)
    }
1277
    /// `i64.ge_s`: signed greater-than-or-equal comparison.
    fn visit_i64_ge_s(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::GeS)
    }
1281
    /// `i32.ge_u`: unsigned greater-than-or-equal comparison.
    fn visit_i32_ge_u(&mut self) -> Self::Output {
        self.cmp_i32s(IntCmpKind::GeU)
    }
1285
    /// `i64.ge_u`: unsigned greater-than-or-equal comparison.
    fn visit_i64_ge_u(&mut self) -> Self::Output {
        self.cmp_i64s(IntCmpKind::GeU)
    }
1289
1290    fn visit_i32_eqz(&mut self) -> Self::Output {
1291        use OperandSize::*;
1292
1293        self.context.unop(self.masm, |masm, reg| {
1294            masm.cmp_with_set(writable!(reg), RegImm::i32(0), IntCmpKind::Eq, S32)?;
1295            Ok(TypedReg::i32(reg))
1296        })
1297    }
1298
1299    fn visit_i64_eqz(&mut self) -> Self::Output {
1300        use OperandSize::*;
1301
1302        self.context.unop(self.masm, |masm, reg| {
1303            masm.cmp_with_set(writable!(reg), RegImm::i64(0), IntCmpKind::Eq, S64)?;
1304            Ok(TypedReg::i32(reg)) // Return value for `i64.eqz` is an `i32`.
1305        })
1306    }
1307
1308    fn visit_i32_clz(&mut self) -> Self::Output {
1309        use OperandSize::*;
1310
1311        self.context.unop(self.masm, |masm, reg| {
1312            masm.clz(writable!(reg), reg, S32)?;
1313            Ok(TypedReg::i32(reg))
1314        })
1315    }
1316
1317    fn visit_i64_clz(&mut self) -> Self::Output {
1318        use OperandSize::*;
1319
1320        self.context.unop(self.masm, |masm, reg| {
1321            masm.clz(writable!(reg), reg, S64)?;
1322            Ok(TypedReg::i64(reg))
1323        })
1324    }
1325
1326    fn visit_i32_ctz(&mut self) -> Self::Output {
1327        use OperandSize::*;
1328
1329        self.context.unop(self.masm, |masm, reg| {
1330            masm.ctz(writable!(reg), reg, S32)?;
1331            Ok(TypedReg::i32(reg))
1332        })
1333    }
1334
1335    fn visit_i64_ctz(&mut self) -> Self::Output {
1336        use OperandSize::*;
1337
1338        self.context.unop(self.masm, |masm, reg| {
1339            masm.ctz(writable!(reg), reg, S64)?;
1340            Ok(TypedReg::i64(reg))
1341        })
1342    }
1343
1344    fn visit_i32_and(&mut self) -> Self::Output {
1345        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1346            masm.and(writable!(dst), dst, src, size)?;
1347            Ok(TypedReg::i32(dst))
1348        })
1349    }
1350
1351    fn visit_i64_and(&mut self) -> Self::Output {
1352        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1353            masm.and(writable!(dst), dst, src, size)?;
1354            Ok(TypedReg::i64(dst))
1355        })
1356    }
1357
1358    fn visit_i32_or(&mut self) -> Self::Output {
1359        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1360            masm.or(writable!(dst), dst, src, size)?;
1361            Ok(TypedReg::i32(dst))
1362        })
1363    }
1364
1365    fn visit_i64_or(&mut self) -> Self::Output {
1366        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1367            masm.or(writable!(dst), dst, src, size)?;
1368            Ok(TypedReg::i64(dst))
1369        })
1370    }
1371
1372    fn visit_i32_xor(&mut self) -> Self::Output {
1373        self.context.i32_binop(self.masm, |masm, dst, src, size| {
1374            masm.xor(writable!(dst), dst, src, size)?;
1375            Ok(TypedReg::i32(dst))
1376        })
1377    }
1378
1379    fn visit_i64_xor(&mut self) -> Self::Output {
1380        self.context.i64_binop(self.masm, |masm, dst, src, size| {
1381            masm.xor(writable!(dst), dst, src, size)?;
1382            Ok(TypedReg::i64(dst))
1383        })
1384    }
1385
1386    fn visit_i32_shl(&mut self) -> Self::Output {
1387        use ShiftKind::*;
1388
1389        self.context.i32_shift(self.masm, Shl)
1390    }
1391
1392    fn visit_i64_shl(&mut self) -> Self::Output {
1393        use ShiftKind::*;
1394
1395        self.context.i64_shift(self.masm, Shl)
1396    }
1397
1398    fn visit_i32_shr_s(&mut self) -> Self::Output {
1399        use ShiftKind::*;
1400
1401        self.context.i32_shift(self.masm, ShrS)
1402    }
1403
1404    fn visit_i64_shr_s(&mut self) -> Self::Output {
1405        use ShiftKind::*;
1406
1407        self.context.i64_shift(self.masm, ShrS)
1408    }
1409
1410    fn visit_i32_shr_u(&mut self) -> Self::Output {
1411        use ShiftKind::*;
1412
1413        self.context.i32_shift(self.masm, ShrU)
1414    }
1415
1416    fn visit_i64_shr_u(&mut self) -> Self::Output {
1417        use ShiftKind::*;
1418
1419        self.context.i64_shift(self.masm, ShrU)
1420    }
1421
1422    fn visit_i32_rotl(&mut self) -> Self::Output {
1423        use ShiftKind::*;
1424
1425        self.context.i32_shift(self.masm, Rotl)
1426    }
1427
1428    fn visit_i64_rotl(&mut self) -> Self::Output {
1429        use ShiftKind::*;
1430
1431        self.context.i64_shift(self.masm, Rotl)
1432    }
1433
1434    fn visit_i32_rotr(&mut self) -> Self::Output {
1435        use ShiftKind::*;
1436
1437        self.context.i32_shift(self.masm, Rotr)
1438    }
1439
1440    fn visit_i64_rotr(&mut self) -> Self::Output {
1441        use ShiftKind::*;
1442
1443        self.context.i64_shift(self.masm, Rotr)
1444    }
1445
    /// Handles the `end` opcode, closing the innermost control frame.
    fn visit_end(&mut self) -> Self::Output {
        if !self.context.reachable {
            // Unreachable code needs special frame bookkeeping rather than
            // normal end-of-block emission.
            self.handle_unreachable_end()
        } else {
            let mut control = self.pop_control_frame()?;
            control.emit_end(self.masm, &mut self.context)
        }
    }
1454
1455    fn visit_i32_popcnt(&mut self) -> Self::Output {
1456        use OperandSize::*;
1457        self.masm.popcnt(&mut self.context, S32)
1458    }
1459
1460    fn visit_i64_popcnt(&mut self) -> Self::Output {
1461        use OperandSize::*;
1462
1463        self.masm.popcnt(&mut self.context, S64)
1464    }
1465
1466    fn visit_i32_wrap_i64(&mut self) -> Self::Output {
1467        self.context.unop(self.masm, |masm, reg| {
1468            masm.wrap(writable!(reg), reg)?;
1469            Ok(TypedReg::i32(reg))
1470        })
1471    }
1472
1473    fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
1474        self.context.unop(self.masm, |masm, reg| {
1475            masm.extend(writable!(reg), reg, Extend::<Signed>::I64Extend32.into())?;
1476            Ok(TypedReg::i64(reg))
1477        })
1478    }
1479
1480    fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
1481        self.context.unop(self.masm, |masm, reg| {
1482            masm.extend(writable!(reg), reg, Extend::<Zero>::I64Extend32.into())?;
1483            Ok(TypedReg::i64(reg))
1484        })
1485    }
1486
1487    fn visit_i32_extend8_s(&mut self) -> Self::Output {
1488        self.context.unop(self.masm, |masm, reg| {
1489            masm.extend(writable!(reg), reg, Extend::<Signed>::I32Extend8.into())?;
1490            Ok(TypedReg::i32(reg))
1491        })
1492    }
1493
1494    fn visit_i32_extend16_s(&mut self) -> Self::Output {
1495        self.context.unop(self.masm, |masm, reg| {
1496            masm.extend(writable!(reg), reg, Extend::<Signed>::I32Extend16.into())?;
1497            Ok(TypedReg::i32(reg))
1498        })
1499    }
1500
1501    fn visit_i64_extend8_s(&mut self) -> Self::Output {
1502        self.context.unop(self.masm, |masm, reg| {
1503            masm.extend(writable!(reg), reg, Extend::<Signed>::I64Extend8.into())?;
1504            Ok(TypedReg::i64(reg))
1505        })
1506    }
1507
1508    fn visit_i64_extend16_s(&mut self) -> Self::Output {
1509        self.context.unop(self.masm, |masm, reg| {
1510            masm.extend(writable!(reg), reg, Extend::<Signed>::I64Extend16.into())?;
1511            Ok(TypedReg::i64(reg))
1512        })
1513    }
1514
1515    fn visit_i64_extend32_s(&mut self) -> Self::Output {
1516        self.context.unop(self.masm, |masm, reg| {
1517            masm.extend(writable!(reg), reg, Extend::<Signed>::I64Extend32.into())?;
1518            Ok(TypedReg::i64(reg))
1519        })
1520    }
1521
1522    fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
1523        use OperandSize::*;
1524
1525        self.context
1526            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, dst_size| {
1527                masm.signed_truncate(writable!(dst), src, S32, dst_size, TruncKind::Unchecked)
1528            })
1529    }
1530
1531    fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
1532        use OperandSize::*;
1533
1534        self.masm
1535            .unsigned_truncate(&mut self.context, S32, S32, TruncKind::Unchecked)
1536    }
1537
1538    fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
1539        use OperandSize::*;
1540
1541        self.context
1542            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, dst_size| {
1543                masm.signed_truncate(writable!(dst), src, S64, dst_size, TruncKind::Unchecked)
1544            })
1545    }
1546
1547    fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
1548        use OperandSize::*;
1549        self.masm
1550            .unsigned_truncate(&mut self.context, S64, S32, TruncKind::Unchecked)
1551    }
1552
1553    fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
1554        use OperandSize::*;
1555
1556        self.context
1557            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, dst_size| {
1558                masm.signed_truncate(writable!(dst), src, S32, dst_size, TruncKind::Unchecked)
1559            })
1560    }
1561
1562    fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
1563        use OperandSize::*;
1564
1565        self.masm
1566            .unsigned_truncate(&mut self.context, S32, S64, TruncKind::Unchecked)
1567    }
1568
1569    fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
1570        use OperandSize::*;
1571
1572        self.context
1573            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, dst_size| {
1574                masm.signed_truncate(writable!(dst), src, S64, dst_size, TruncKind::Unchecked)
1575            })
1576    }
1577
1578    fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
1579        use OperandSize::*;
1580
1581        self.masm
1582            .unsigned_truncate(&mut self.context, S64, S64, TruncKind::Unchecked)
1583    }
1584
1585    fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
1586        self.context
1587            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, size| {
1588                masm.reinterpret_float_as_int(writable!(dst), src, size)
1589            })
1590    }
1591
1592    fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
1593        self.context
1594            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, size| {
1595                masm.reinterpret_float_as_int(writable!(dst), src, size)
1596            })
1597    }
1598
    /// `local.get`: pushes a lazy reference to the local at `index` onto the
    /// value stack; no machine code is emitted here.
    fn visit_local_get(&mut self, index: u32) -> Self::Output {
        use WasmValType::*;
        let context = &mut self.context;
        let slot = context.frame.get_wasm_local(index);
        match slot.ty {
            I32 | I64 | F32 | F64 | V128 => context.stack.push(Val::local(index, slot.ty)),
            Ref(rt) => match rt.heap_type {
                // Only function references are supported; other reference
                // types (e.g. externref) are rejected.
                WasmHeapType::Func => context.stack.push(Val::local(index, slot.ty)),
                _ => bail!(CodeGenError::unsupported_wasm_type()),
            },
        }

        Ok(())
    }
1613
1614    fn visit_local_set(&mut self, index: u32) -> Self::Output {
1615        let src = self.emit_set_local(index)?;
1616        self.context.free_reg(src);
1617        Ok(())
1618    }
1619
    /// `call`: emits a direct call to the function at `index`.
    fn visit_call(&mut self, index: u32) -> Self::Output {
        let callee = self.env.callee_from_index(FuncIndex::from_u32(index));
        FnCall::emit::<M>(&mut self.env, self.masm, &mut self.context, callee)?;
        Ok(())
    }
1625
    /// `call_indirect`: looks up a funcref in a table, null-checks and
    /// type-checks it, then emits the indirect call.
    fn visit_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        // Spill now because `emit_lazy_init_funcref` and the `FnCall::emit`
        // invocations will both trigger spills since they both call functions.
        // However, the machine instructions for the spill emitted by
        // `emit_lazy_funcref` will be jumped over if the funcref was previously
        // initialized which may result in the machine stack becoming
        // unbalanced.
        self.context.spill(self.masm)?;

        let type_index = TypeIndex::from_u32(type_index);
        let table_index = TableIndex::from_u32(table_index);

        self.emit_lazy_init_funcref(table_index)?;

        // Perform the indirect call.
        // This code assumes that [`Self::emit_lazy_init_funcref`] will
        // push the funcref to the value stack.
        let funcref_ptr = self
            .context
            .stack
            .peek()
            .map(|v| v.unwrap_reg())
            .ok_or_else(|| CodeGenError::missing_values_in_stack())?;
        // Trap on a null funcref before attempting the signature check.
        self.masm
            .trapz(funcref_ptr.into(), TRAP_INDIRECT_CALL_TO_NULL)?;
        self.emit_typecheck_funcref(funcref_ptr.into(), type_index)?;

        let callee = self.env.funcref(type_index);
        FnCall::emit::<M>(&mut self.env, self.masm, &mut self.context, callee)?;
        Ok(())
    }
1657
1658    fn visit_table_init(&mut self, elem: u32, table: u32) -> Self::Output {
1659        let at = self.context.stack.ensure_index_at(3)?;
1660
1661        self.context
1662            .stack
1663            .insert_many(at, &[table.try_into()?, elem.try_into()?]);
1664
1665        let builtin = self.env.builtins.table_init::<M::ABI, M::Ptr>()?;
1666        FnCall::emit::<M>(
1667            &mut self.env,
1668            self.masm,
1669            &mut self.context,
1670            Callee::Builtin(builtin.clone()),
1671        )?;
1672        self.context.pop_and_free(self.masm)
1673    }
1674
1675    fn visit_table_copy(&mut self, dst: u32, src: u32) -> Self::Output {
1676        let at = self.context.stack.ensure_index_at(3)?;
1677        self.context
1678            .stack
1679            .insert_many(at, &[dst.try_into()?, src.try_into()?]);
1680
1681        let builtin = self.env.builtins.table_copy::<M::ABI, M::Ptr>()?;
1682        FnCall::emit::<M>(
1683            &mut self.env,
1684            self.masm,
1685            &mut self.context,
1686            Callee::Builtin(builtin),
1687        )?;
1688        self.context.pop_and_free(self.masm)
1689    }
1690
    /// `table.get`: reads an entry from a table. Only funcref tables are
    /// supported; the element is materialized via lazy funcref
    /// initialization.
    fn visit_table_get(&mut self, table: u32) -> Self::Output {
        let table_index = TableIndex::from_u32(table);
        let table = self.env.table(table_index);
        let heap_type = table.ref_type.heap_type;

        match heap_type {
            WasmHeapType::Func => self.emit_lazy_init_funcref(table_index),
            _ => Err(anyhow!(CodeGenError::unsupported_wasm_type())),
        }
    }
1701
1702    fn visit_table_grow(&mut self, table: u32) -> Self::Output {
1703        let table_index = TableIndex::from_u32(table);
1704        let table_ty = self.env.table(table_index);
1705        let builtin = match table_ty.ref_type.heap_type {
1706            WasmHeapType::Func => self.env.builtins.table_grow_func_ref::<M::ABI, M::Ptr>()?,
1707            _ => bail!(CodeGenError::unsupported_wasm_type()),
1708        };
1709
1710        let len = self.context.stack.len();
1711        // table.grow` requires at least 2 elements on the value stack.
1712        let at = self.context.stack.ensure_index_at(2)?;
1713
1714        // The table_grow builtin expects the parameters in a different
1715        // order.
1716        // The value stack at this point should contain:
1717        // [ init_value | delta ] (stack top)
1718        // but the builtin function expects the init value as the last
1719        // argument.
1720        self.context.stack.inner_mut().swap(len - 1, len - 2);
1721        self.context.stack.insert_many(at, &[table.try_into()?]);
1722
1723        FnCall::emit::<M>(
1724            &mut self.env,
1725            self.masm,
1726            &mut self.context,
1727            Callee::Builtin(builtin.clone()),
1728        )?;
1729
1730        Ok(())
1731    }
1732
1733    fn visit_table_size(&mut self, table: u32) -> Self::Output {
1734        let table_index = TableIndex::from_u32(table);
1735        let table_data = self.env.resolve_table_data(table_index);
1736        self.emit_compute_table_size(&table_data)
1737    }
1738
1739    fn visit_table_fill(&mut self, table: u32) -> Self::Output {
1740        let table_index = TableIndex::from_u32(table);
1741        let table_ty = self.env.table(table_index);
1742
1743        ensure!(
1744            table_ty.ref_type.heap_type == WasmHeapType::Func,
1745            CodeGenError::unsupported_wasm_type()
1746        );
1747
1748        let builtin = self.env.builtins.table_fill_func_ref::<M::ABI, M::Ptr>()?;
1749
1750        let at = self.context.stack.ensure_index_at(3)?;
1751
1752        self.context.stack.insert_many(at, &[table.try_into()?]);
1753        FnCall::emit::<M>(
1754            &mut self.env,
1755            self.masm,
1756            &mut self.context,
1757            Callee::Builtin(builtin.clone()),
1758        )?;
1759        self.context.pop_and_free(self.masm)
1760    }
1761
    fn visit_table_set(&mut self, table: u32) -> Self::Output {
        let ptr_type = self.env.ptr_type();
        let table_index = TableIndex::from_u32(table);
        let table_data = self.env.resolve_table_data(table_index);
        let table = self.env.table(table_index);
        match table.ref_type.heap_type {
            WasmHeapType::Func => {
                // The store below unconditionally tags the entry with
                // FUNCREF_INIT_BIT, which is only valid under lazy table
                // initialization; reject eager-init configurations.
                ensure!(
                    self.tunables.table_lazy_init,
                    CodeGenError::unsupported_table_eager_init()
                );
                // Operand order on the value stack: value on top, element
                // index beneath it.
                let value = self.context.pop_to_reg(self.masm, None)?;
                let index = self.context.pop_to_reg(self.masm, None)?;
                // Scratch register for the element address computation.
                let base = self.context.any_gpr(self.masm)?;
                let elem_addr =
                    self.emit_compute_table_elem_addr(index.into(), base, &table_data)?;
                // Set the initialized bit.
                self.masm.or(
                    writable!(value.into()),
                    value.into(),
                    RegImm::i64(FUNCREF_INIT_BIT as i64),
                    ptr_type.try_into()?,
                )?;

                self.masm.store_ptr(value.into(), elem_addr)?;

                // Release all registers used by this sequence.
                self.context.free_reg(value);
                self.context.free_reg(index);
                self.context.free_reg(base);
                Ok(())
            }
            _ => Err(anyhow!(CodeGenError::unsupported_wasm_type())),
        }
    }
1796
1797    fn visit_elem_drop(&mut self, index: u32) -> Self::Output {
1798        let elem_drop = self.env.builtins.elem_drop::<M::ABI, M::Ptr>()?;
1799        self.context.stack.extend([index.try_into()?]);
1800        FnCall::emit::<M>(
1801            &mut self.env,
1802            self.masm,
1803            &mut self.context,
1804            Callee::Builtin(elem_drop),
1805        )?;
1806        Ok(())
1807    }
1808
1809    fn visit_memory_init(&mut self, data_index: u32, mem: u32) -> Self::Output {
1810        let at = self.context.stack.ensure_index_at(3)?;
1811        self.context
1812            .stack
1813            .insert_many(at, &[mem.try_into()?, data_index.try_into()?]);
1814        let builtin = self.env.builtins.memory_init::<M::ABI, M::Ptr>()?;
1815        FnCall::emit::<M>(
1816            &mut self.env,
1817            self.masm,
1818            &mut self.context,
1819            Callee::Builtin(builtin),
1820        )?;
1821        self.context.pop_and_free(self.masm)
1822    }
1823
    fn visit_memory_copy(&mut self, dst_mem: u32, src_mem: u32) -> Self::Output {
        // At this point, the stack is expected to contain:
        //     [ dst_offset, src_offset, len ]
        // The following code inserts the missing params, so that stack contains:
        //     [ vmctx, dst_mem, dst_offset, src_mem, src_offset, len ]
        // Which is the order expected by the builtin function.
        //
        // First verify that all three Wasm operands are present.
        let _ = self.context.stack.ensure_index_at(3)?;
        // Insert `src_mem` just before `src_offset` (2 values from the top).
        let at = self.context.stack.ensure_index_at(2)?;
        self.context.stack.insert_many(at, &[src_mem.try_into()?]);

        // One element was inserted above, so instead of 3, we use 4.
        let at = self.context.stack.ensure_index_at(4)?;
        self.context.stack.insert_many(at, &[dst_mem.try_into()?]);

        let builtin = self.env.builtins.memory_copy::<M::ABI, M::Ptr>()?;

        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin),
        )?;
        // Discard the builtin's return value.
        self.context.pop_and_free(self.masm)
    }
1848
1849    fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
1850        let at = self.context.stack.ensure_index_at(3)?;
1851
1852        self.context.stack.insert_many(at, &[mem.try_into()?]);
1853
1854        let builtin = self.env.builtins.memory_fill::<M::ABI, M::Ptr>()?;
1855        FnCall::emit::<M>(
1856            &mut self.env,
1857            self.masm,
1858            &mut self.context,
1859            Callee::Builtin(builtin),
1860        )?;
1861        self.context.pop_and_free(self.masm)
1862    }
1863
1864    fn visit_memory_size(&mut self, mem: u32) -> Self::Output {
1865        let heap = self.env.resolve_heap(MemoryIndex::from_u32(mem));
1866        self.emit_compute_memory_size(&heap)
1867    }
1868
    fn visit_memory_grow(&mut self, mem: u32) -> Self::Output {
        // Verify that the `delta` operand is present.
        let _ = self.context.stack.ensure_index_at(1)?;
        // The stack at this point contains: [ delta ]
        // The desired state is
        //   [ vmctx, delta, index ]
        self.context.stack.extend([mem.try_into()?]);

        let heap = self.env.resolve_heap(MemoryIndex::from_u32(mem));
        let builtin = self.env.builtins.memory_grow::<M::ABI, M::Ptr>()?;
        FnCall::emit::<M>(
            &mut self.env,
            self.masm,
            &mut self.context,
            Callee::Builtin(builtin),
        )?;

        // The memory32_grow builtin returns a pointer type, therefore we must
        // ensure that the return type is representative of the address space of
        // the heap type.
        match (self.env.ptr_type(), heap.index_type()) {
            (WasmValType::I64, WasmValType::I64) => Ok(()),
            // When the heap type is smaller than the pointer type, we adjust
            // the result of the memory32_grow builtin.
            (WasmValType::I64, WasmValType::I32) => {
                // Narrow the 64-bit result to 32 bits and re-push it tagged
                // as an i32.
                let top: Reg = self.context.pop_to_reg(self.masm, None)?.into();
                self.masm.wrap(writable!(top), top)?;
                self.context.stack.push(TypedReg::i32(top).into());
                Ok(())
            }
            // 32-bit host platforms are not supported.
            _ => Err(anyhow!(CodeGenError::unsupported_32_bit_platform())),
        }
    }
1901
1902    fn visit_data_drop(&mut self, data_index: u32) -> Self::Output {
1903        self.context.stack.extend([data_index.try_into()?]);
1904
1905        let builtin = self.env.builtins.data_drop::<M::ABI, M::Ptr>()?;
1906        FnCall::emit::<M>(
1907            &mut self.env,
1908            self.masm,
1909            &mut self.context,
1910            Callee::Builtin(builtin),
1911        )
1912    }
1913
    fn visit_nop(&mut self) -> Self::Output {
        // `nop` emits no machine code.
        Ok(())
    }
1917
1918    fn visit_if(&mut self, blockty: BlockType) -> Self::Output {
1919        self.control_frames.push(ControlStackFrame::r#if(
1920            self.env.resolve_block_sig(blockty)?,
1921            self.masm,
1922            &mut self.context,
1923        )?);
1924
1925        Ok(())
1926    }
1927
1928    fn visit_else(&mut self) -> Self::Output {
1929        if !self.context.reachable {
1930            self.handle_unreachable_else()
1931        } else {
1932            let control = self
1933                .control_frames
1934                .last_mut()
1935                .ok_or_else(|| CodeGenError::control_frame_expected())?;
1936            control.emit_else(self.masm, &mut self.context)
1937        }
1938    }
1939
1940    fn visit_block(&mut self, blockty: BlockType) -> Self::Output {
1941        self.control_frames.push(ControlStackFrame::block(
1942            self.env.resolve_block_sig(blockty)?,
1943            self.masm,
1944            &mut self.context,
1945        )?);
1946
1947        Ok(())
1948    }
1949
1950    fn visit_loop(&mut self, blockty: BlockType) -> Self::Output {
1951        self.control_frames.push(ControlStackFrame::r#loop(
1952            self.env.resolve_block_sig(blockty)?,
1953            self.masm,
1954            &mut self.context,
1955        )?);
1956
1957        self.maybe_emit_epoch_check()?;
1958        self.maybe_emit_fuel_check()
1959    }
1960
    fn visit_br(&mut self, depth: u32) -> Self::Output {
        // Translate the relative branch depth into an absolute index into
        // the control frame stack.
        let index = control_index(depth, self.control_frames.len())?;
        let frame = &mut self.control_frames[index];
        // Emit an unconditional branch to the target frame, first popping
        // the frame's ABI results into place; the frame's precomputed
        // return area (if any) is reused as-is.
        self.context
            .br::<_, _, UnconditionalBranch>(frame, self.masm, |masm, cx, frame| {
                frame.pop_abi_results::<M, _>(cx, masm, |results, _, _| {
                    Ok(results.ret_area().copied())
                })
            })
    }
1971
    fn visit_br_if(&mut self, depth: u32) -> Self::Output {
        // Translate the relative branch depth into an absolute index into
        // the control frame stack.
        let index = control_index(depth, self.control_frames.len())?;
        let frame = &mut self.control_frames[index];
        frame.set_as_target();

        let top = {
            // Pop the branch condition, making sure its register does not
            // alias any of the frame's result registers.
            let top = self.context.without::<Result<TypedReg>, M, _>(
                frame.results::<M>()?.regs(),
                self.masm,
                |ctx, masm| ctx.pop_to_reg(masm, None),
            )??;
            // Explicitly save any live registers and locals before setting up
            // the branch state.
            // In some cases, calculating the `top` value above, will result in
            // a spill, thus the following one will result in a no-op.
            self.context.spill(self.masm)?;
            frame.top_abi_results::<M, _>(
                &mut self.context,
                self.masm,
                |results, context, masm| {
                    // In the case of `br_if` there's a possibility that we'll
                    // exit early from the block or fallthrough, for
                    // a fallthrough, we cannot rely on the pre-computed return area;
                    // it must be recalculated so that any values that are
                    // generated are correctly placed near the current stack
                    // pointer.
                    if results.on_stack() {
                        let stack_consumed = context.stack.sizeof(results.stack_operands_len());
                        let base = masm.sp_offset()?.as_u32() - stack_consumed;
                        let offs = base + results.size();
                        Ok(Some(RetArea::sp(SPOffset::from_u32(offs))))
                    } else {
                        Ok(None)
                    }
                },
            )?;
            top
        };

        // Emit instructions to balance the machine stack.
        let current_sp_offset = self.masm.sp_offset()?;
        let unbalanced = frame.unbalanced(self.masm)?;
        // When the target frame expects a different SP offset than the
        // current one, we can't jump to it directly: invert the condition
        // and branch *over* the balancing code (to a local label) when the
        // condition is zero. Otherwise branch straight to the frame's label
        // when the condition is non-zero.
        let (label, cmp) = if unbalanced {
            (self.masm.get_label()?, IntCmpKind::Eq)
        } else {
            (*frame.label(), IntCmpKind::Ne)
        };

        self.masm
            .branch(cmp, top.reg, top.reg.into(), label, OperandSize::S32)?;
        self.context.free_reg(top);

        if unbalanced {
            // Taken path: balance the stack and jump to the target frame.
            self.context
                .br::<_, _, ConditionalBranch>(frame, self.masm, |_, _, _| Ok(()))?;

            // Restore sp_offset to what it was for falling through and emit
            // fallthrough label.
            self.masm.reset_stack_pointer(current_sp_offset)?;
            self.masm.bind(label)?;
        }

        Ok(())
    }
2036
    fn visit_br_table(&mut self, targets: BrTable<'a>) -> Self::Output {
        // +1 to account for the default target.
        let len = targets.len() + 1;
        // SmallVec<[_; 5]> to match the binary emission layer (e.g
        // see `JmpTableSeq'), but here we use 5 instead since we
        // bundle the default target as the last element in the array.
        let mut labels: SmallVec<[_; 5]> = smallvec![];
        for _ in 0..len {
            labels.push(self.masm.get_label()?);
        }

        // Find the innermost target and use it as the relative frame
        // for result handling below.
        //
        // This approach ensures that
        // 1. The stack pointer offset is correctly positioned
        //    according to the expectations of the innermost block end
        //    sequence.
        // 2. We meet the jump site invariants introduced by
        //    `CodegenContext::br`, which take advantage of Wasm
        //    semantics given that all jumps are "outward".
        let mut innermost = targets.default();
        for target in targets.targets() {
            let target = target?;
            if target < innermost {
                innermost = target;
            }
        }

        let innermost_index = control_index(innermost, self.control_frames.len())?;
        let innermost_frame = &mut self.control_frames[innermost_index];
        let innermost_result = innermost_frame.results::<M>()?;

        // Pop the table index operand and allocate a scratch register for
        // the jump-table sequence, avoiding the innermost frame's result
        // registers.
        let (index, tmp) = {
            let index_and_tmp = self.context.without::<Result<(TypedReg, _)>, M, _>(
                innermost_result.regs(),
                self.masm,
                |cx, masm| Ok((cx.pop_to_reg(masm, None)?, cx.any_gpr(masm)?)),
            )??;

            // Materialize any constants or locals into their result
            // representation, so that when reachability is restored,
            // they are correctly located.  NB: the results are popped
            // in function of the innermost branch specified for
            // `br_table`, which implies that the machine stack will
            // be correctly balanced, by virtue of calling
            // `pop_abi_results`.

            // It's possible that we need to balance the stack for the
            // rest of the targets, which will be done before emitting
            // the unconditional jump below.
            innermost_frame.pop_abi_results::<M, _>(
                &mut self.context,
                self.masm,
                |results, _, _| Ok(results.ret_area().copied()),
            )?;
            index_and_tmp
        };

        self.masm.jmp_table(&labels, index.into(), tmp)?;
        // Save the original stack pointer offset; we will reset the stack
        // pointer to this offset after jumping to each of the targets. Each
        // jump might adjust the stack according to the base offset of the
        // target.
        let current_sp = self.masm.sp_offset()?;

        // Emit one trampoline per target (plus the default target, appended
        // last): bind the table label, then branch outward to the frame.
        for (t, l) in targets
            .targets()
            .chain(std::iter::once(Ok(targets.default())))
            .zip(labels.iter())
        {
            let control_index = control_index(t?, self.control_frames.len())?;
            let frame = &mut self.control_frames[control_index];
            // Reset the stack pointer to its original offset. This is needed
            // because each jump will potentially adjust the stack pointer
            // according to the base offset of the target.
            self.masm.reset_stack_pointer(current_sp)?;

            // NB: We don't perform any result handling as it was
            // already taken care of above before jumping to the
            // jump table.
            self.masm.bind(*l)?;
            // Ensure that the stack pointer is correctly positioned before
            // jumping to the jump table code.
            self.context
                .br::<_, _, UnconditionalBranch>(frame, self.masm, |_, _, _| Ok(()))?;
        }
        // Finally reset the stack pointer to the original location.
        // The reachability analysis, will ensure it's correctly located
        // once reachability is restored.
        self.masm.reset_stack_pointer(current_sp)?;
        // `br_table` is a terminator: code following it is unreachable.
        self.context.reachable = false;
        self.context.free_reg(index.reg);
        self.context.free_reg(tmp);

        Ok(())
    }
2134
    fn visit_return(&mut self) -> Self::Output {
        // Grab the outermost frame, which is the function's body
        // frame. We don't rely on [`codegen::control_index`] since
        // this frame is implicit and we know that it should exist at
        // index 0.
        let outermost = &mut self.control_frames[0];
        // A `return` is an unconditional branch to the function frame:
        // pop the ABI results into place and jump to the epilogue.
        self.context
            .br::<_, _, UnconditionalBranch>(outermost, self.masm, |masm, cx, frame| {
                frame.pop_abi_results::<M, _>(cx, masm, |results, _, _| {
                    Ok(results.ret_area().copied())
                })
            })
    }
2148
2149    fn visit_unreachable(&mut self) -> Self::Output {
2150        self.masm.unreachable()?;
2151        self.context.reachable = false;
2152        // Set the implicit outermost frame as target to perform the necessary
2153        // stack clean up.
2154        let outermost = &mut self.control_frames[0];
2155        outermost.set_as_target();
2156
2157        Ok(())
2158    }
2159
2160    fn visit_local_tee(&mut self, index: u32) -> Self::Output {
2161        let typed_reg = self.emit_set_local(index)?;
2162        self.context.stack.push(typed_reg.into());
2163
2164        Ok(())
2165    }
2166
2167    fn visit_global_get(&mut self, global_index: u32) -> Self::Output {
2168        let index = GlobalIndex::from_u32(global_index);
2169        let (ty, base, offset) = self.emit_get_global_addr(index)?;
2170        let addr = self.masm.address_at_reg(base, offset)?;
2171        let dst = self.context.reg_for_type(ty, self.masm)?;
2172        self.masm.load(addr, writable!(dst), ty.try_into()?)?;
2173        self.context.stack.push(Val::reg(dst, ty));
2174
2175        self.context.free_reg(base);
2176
2177        Ok(())
2178    }
2179
2180    fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
2181        let index = GlobalIndex::from_u32(global_index);
2182        let (ty, base, offset) = self.emit_get_global_addr(index)?;
2183        let addr = self.masm.address_at_reg(base, offset)?;
2184
2185        let typed_reg = self.context.pop_to_reg(self.masm, None)?;
2186        self.masm
2187            .store(typed_reg.reg.into(), addr, ty.try_into()?)?;
2188        self.context.free_reg(typed_reg.reg);
2189        self.context.free_reg(base);
2190
2191        Ok(())
2192    }
2193
    fn visit_drop(&mut self) -> Self::Output {
        // Discard the value at the top of the stack, releasing whatever
        // backs it: a register is returned to the allocator, a memory
        // value's stack slot is freed, and other values (e.g. immediates)
        // require no cleanup.
        self.context.drop_last(1, |regalloc, val| match val {
            Val::Reg(tr) => Ok(regalloc.free(tr.reg)),
            Val::Memory(m) => self.masm.free_stack(m.slot.size),
            _ => Ok(()),
        })
    }
2201
    fn visit_select(&mut self) -> Self::Output {
        // Operand order: condition on top, then the second value, then the
        // first value.
        let cond = self.context.pop_to_reg(self.masm, None)?;
        let val2 = self.context.pop_to_reg(self.masm, None)?;
        let val1 = self.context.pop_to_reg(self.masm, None)?;
        self.masm.cmp(cond.reg, RegImm::i32(0), OperandSize::S32)?;
        // Conditionally move val1 to val2 if the comparison is
        // not zero.
        self.masm.cmov(
            writable!(val2.into()),
            val1.into(),
            IntCmpKind::Ne,
            val1.ty.try_into()?,
        )?;
        // `val2`'s register now holds the selected value and becomes the
        // result; the other two registers are released.
        self.context.stack.push(val2.into());
        self.context.free_reg(val1.reg);
        self.context.free_reg(cond);

        Ok(())
    }
2221
    // --- i32 loads/stores -------------------------------------------------
    // Thin dispatch wrappers: each opcode forwards to `emit_wasm_load` /
    // `emit_wasm_store` with the operand size and (for sub-word loads) the
    // sign/zero extension kind encoded by the opcode.

    fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::Operand(OperandSize::S32),
        )
    }

    fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::ScalarExtend(Extend::<Signed>::I32Extend8.into()),
        )
    }

    fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::ScalarExtend(Extend::<Zero>::I32Extend8.into()),
        )
    }

    fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::ScalarExtend(Extend::<Signed>::I32Extend16.into()),
        )
    }

    fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::ScalarExtend(Extend::<Zero>::I32Extend16.into()),
        )
    }

    fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S32))
    }

    // Narrowing stores only write the low 8/16 bits; no extension needed.
    fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S8))
    }

    fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S16))
    }
2273
    // --- i64 loads/stores -------------------------------------------------
    // Same dispatch pattern as the i32 wrappers above, with 64-bit result
    // type and the corresponding I64 extension kinds.

    fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Signed>::I64Extend8.into()),
        )
    }

    fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Zero>::I64Extend8.into()),
        )
    }

    fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Zero>::I64Extend16.into()),
        )
    }

    fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Signed>::I64Extend16.into()),
        )
    }

    fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Zero>::I64Extend32.into()),
        )
    }

    fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::ScalarExtend(Extend::<Signed>::I64Extend32.into()),
        )
    }

    fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::Operand(OperandSize::S64),
        )
    }

    fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S64))
    }

    // Narrowing stores only write the low 8/16/32 bits of the operand.
    fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S8))
    }

    fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S16))
    }

    fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S32))
    }
2345
    // --- float loads/stores -----------------------------------------------
    // Float memory accesses are plain operand-sized loads/stores; no
    // extension variants exist for f32/f64.

    fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::F32,
            LoadKind::Operand(OperandSize::S32),
        )
    }

    fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S32))
    }

    fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::F64,
            LoadKind::Operand(OperandSize::S64),
        )
    }

    fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S64))
    }
2369
    // --- saturating float-to-int truncations -------------------------------
    // All `*.trunc_sat_*` opcodes dispatch with `TruncKind::Checked`, which
    // selects the non-trapping truncation lowering in the masm. Signed
    // variants go through `convert_op`/`signed_truncate`; unsigned variants
    // are handled entirely by `unsigned_truncate`, which manages its own
    // register allocation.

    fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
        use OperandSize::*;

        self.context
            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, dst_size| {
                masm.signed_truncate(writable!(dst), src, S32, dst_size, TruncKind::Checked)
            })
    }

    fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
        use OperandSize::*;

        self.masm
            .unsigned_truncate(&mut self.context, S32, S32, TruncKind::Checked)
    }

    fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
        use OperandSize::*;

        self.context
            .convert_op(self.masm, WasmValType::I32, |masm, dst, src, dst_size| {
                masm.signed_truncate(writable!(dst), src, S64, dst_size, TruncKind::Checked)
            })
    }

    fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
        use OperandSize::*;

        self.masm
            .unsigned_truncate(&mut self.context, S64, S32, TruncKind::Checked)
    }

    fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
        use OperandSize::*;

        self.context
            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, dst_size| {
                masm.signed_truncate(writable!(dst), src, S32, dst_size, TruncKind::Checked)
            })
    }

    fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
        use OperandSize::*;

        self.masm
            .unsigned_truncate(&mut self.context, S32, S64, TruncKind::Checked)
    }

    fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
        use OperandSize::*;

        self.context
            .convert_op(self.masm, WasmValType::I64, |masm, dst, src, dst_size| {
                masm.signed_truncate(writable!(dst), src, S64, dst_size, TruncKind::Checked)
            })
    }

    fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
        use OperandSize::*;

        self.masm
            .unsigned_truncate(&mut self.context, S64, S64, TruncKind::Checked)
    }
2433
    // --- 128-bit arithmetic (wide-arithmetic proposal) ----------------------

    fn visit_i64_add128(&mut self) -> Self::Output {
        // 128-bit add over two (lo, hi) register pairs; the lhs pair is
        // overwritten in place and re-pushed as the result.
        self.context
            .binop128(self.masm, |masm, lhs_lo, lhs_hi, rhs_lo, rhs_hi| {
                masm.add128(
                    writable!(lhs_lo),
                    writable!(lhs_hi),
                    lhs_lo,
                    lhs_hi,
                    rhs_lo,
                    rhs_hi,
                )?;
                Ok((TypedReg::i64(lhs_lo), TypedReg::i64(lhs_hi)))
            })
    }

    fn visit_i64_sub128(&mut self) -> Self::Output {
        // 128-bit subtract; same in-place (lo, hi) pair convention as
        // `visit_i64_add128`.
        self.context
            .binop128(self.masm, |masm, lhs_lo, lhs_hi, rhs_lo, rhs_hi| {
                masm.sub128(
                    writable!(lhs_lo),
                    writable!(lhs_hi),
                    lhs_lo,
                    lhs_hi,
                    rhs_lo,
                    rhs_hi,
                )?;
                Ok((TypedReg::i64(lhs_lo), TypedReg::i64(lhs_hi)))
            })
    }

    // 64x64 -> 128-bit multiplies; signedness is carried by `MulWideKind`.
    fn visit_i64_mul_wide_s(&mut self) -> Self::Output {
        self.masm.mul_wide(&mut self.context, MulWideKind::Signed)
    }

    fn visit_i64_mul_wide_u(&mut self) -> Self::Output {
        self.masm.mul_wide(&mut self.context, MulWideKind::Unsigned)
    }
2471
    // --- atomic loads -------------------------------------------------------
    // Atomic loads use `LoadKind::Atomic`; sub-word variants are always
    // zero-extending (the threads proposal defines no sign-extending atomic
    // loads), and full-width variants pass `None` for the extension.

    fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::Atomic(OperandSize::S8, Some(Extend::<Zero>::I32Extend8.into())),
        )
    }

    fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::Atomic(OperandSize::S16, Some(Extend::<Zero>::I32Extend16.into())),
        )
    }

    fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I32,
            LoadKind::Atomic(OperandSize::S32, None),
        )
    }

    fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::Atomic(OperandSize::S8, Some(Extend::<Zero>::I64Extend8.into())),
        )
    }

    fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::Atomic(OperandSize::S16, Some(Extend::<Zero>::I64Extend16.into())),
        )
    }

    fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::Atomic(OperandSize::S32, Some(Extend::<Zero>::I64Extend32.into())),
        )
    }

    fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::I64,
            LoadKind::Atomic(OperandSize::S64, None),
        )
    }
2527
    // Atomic store family (threads proposal). All dispatch to
    // `emit_wasm_store` with `StoreKind::Atomic(width)`; the `OperandSize`
    // selects the memory access width for the narrow variants.
    fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S32))
    }

    fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S64))
    }

    fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S8))
    }

    fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S16))
    }

    fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S8))
    }

    fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S16))
    }

    fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Atomic(OperandSize::S32))
    }
2555
    // Atomic RMW `add` family: dispatched to `emit_atomic_rmw` with the
    // access width and, for the narrow `_u` variants, the zero-extension
    // applied to the value read from memory.
    fn visit_i32_atomic_rmw8_add_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Add,
            OperandSize::S8,
            Some(Extend::<Zero>::I32Extend8),
        )
    }

    fn visit_i32_atomic_rmw16_add_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Add,
            OperandSize::S16,
            Some(Extend::<Zero>::I32Extend16),
        )
    }

    fn visit_i32_atomic_rmw_add(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Add, OperandSize::S32, None)
    }

    fn visit_i64_atomic_rmw8_add_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Add,
            OperandSize::S8,
            Some(Extend::<Zero>::I64Extend8),
        )
    }

    fn visit_i64_atomic_rmw16_add_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Add,
            OperandSize::S16,
            Some(Extend::<Zero>::I64Extend16),
        )
    }

    fn visit_i64_atomic_rmw32_add_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Add,
            OperandSize::S32,
            Some(Extend::<Zero>::I64Extend32),
        )
    }

    fn visit_i64_atomic_rmw_add(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Add, OperandSize::S64, None)
    }
2608
    // Atomic RMW `sub` family: same dispatch shape as the `add` family,
    // with `RmwOp::Sub` and the matching width/zero-extend pairings.
    fn visit_i32_atomic_rmw8_sub_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Sub,
            OperandSize::S8,
            Some(Extend::<Zero>::I32Extend8),
        )
    }
    fn visit_i32_atomic_rmw16_sub_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Sub,
            OperandSize::S16,
            Some(Extend::<Zero>::I32Extend16),
        )
    }

    fn visit_i32_atomic_rmw_sub(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Sub, OperandSize::S32, None)
    }

    fn visit_i64_atomic_rmw8_sub_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Sub,
            OperandSize::S8,
            Some(Extend::<Zero>::I64Extend8),
        )
    }

    fn visit_i64_atomic_rmw16_sub_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Sub,
            OperandSize::S16,
            Some(Extend::<Zero>::I64Extend16),
        )
    }

    fn visit_i64_atomic_rmw32_sub_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Sub,
            OperandSize::S32,
            Some(Extend::<Zero>::I64Extend32),
        )
    }

    fn visit_i64_atomic_rmw_sub(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Sub, OperandSize::S64, None)
    }
2660
    // Atomic RMW `xchg` (swap) family: same dispatch shape as `add`/`sub`,
    // with `RmwOp::Xchg` and the matching width/zero-extend pairings.
    fn visit_i32_atomic_rmw8_xchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xchg,
            OperandSize::S8,
            Some(Extend::<Zero>::I32Extend8),
        )
    }

    fn visit_i32_atomic_rmw16_xchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xchg,
            OperandSize::S16,
            Some(Extend::<Zero>::I32Extend16),
        )
    }

    fn visit_i32_atomic_rmw_xchg(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Xchg, OperandSize::S32, None)
    }

    fn visit_i64_atomic_rmw8_xchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xchg,
            OperandSize::S8,
            Some(Extend::<Zero>::I64Extend8),
        )
    }

    fn visit_i64_atomic_rmw16_xchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xchg,
            OperandSize::S16,
            Some(Extend::<Zero>::I64Extend16),
        )
    }

    fn visit_i64_atomic_rmw32_xchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xchg,
            OperandSize::S32,
            Some(Extend::<Zero>::I64Extend32),
        )
    }

    fn visit_i64_atomic_rmw_xchg(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Xchg, OperandSize::S64, None)
    }
2713
    // Atomic RMW `and` family: same dispatch shape as the other RMW
    // families, with `RmwOp::And`.
    fn visit_i32_atomic_rmw8_and_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::And,
            OperandSize::S8,
            Some(Extend::<Zero>::I32Extend8),
        )
    }

    fn visit_i32_atomic_rmw16_and_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::And,
            OperandSize::S16,
            Some(Extend::<Zero>::I32Extend16),
        )
    }

    fn visit_i32_atomic_rmw_and(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::And, OperandSize::S32, None)
    }

    fn visit_i64_atomic_rmw8_and_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::And,
            OperandSize::S8,
            Some(Extend::<Zero>::I64Extend8),
        )
    }

    fn visit_i64_atomic_rmw16_and_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::And,
            OperandSize::S16,
            Some(Extend::<Zero>::I64Extend16),
        )
    }

    fn visit_i64_atomic_rmw32_and_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::And,
            OperandSize::S32,
            Some(Extend::<Zero>::I64Extend32),
        )
    }

    fn visit_i64_atomic_rmw_and(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::And, OperandSize::S64, None)
    }
2766
    // Atomic RMW `or` family: same dispatch shape as the other RMW
    // families, with `RmwOp::Or`.
    fn visit_i32_atomic_rmw8_or_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Or,
            OperandSize::S8,
            Some(Extend::<Zero>::I32Extend8),
        )
    }

    fn visit_i32_atomic_rmw16_or_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Or,
            OperandSize::S16,
            Some(Extend::<Zero>::I32Extend16),
        )
    }

    fn visit_i32_atomic_rmw_or(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Or, OperandSize::S32, None)
    }

    fn visit_i64_atomic_rmw8_or_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Or,
            OperandSize::S8,
            Some(Extend::<Zero>::I64Extend8),
        )
    }

    fn visit_i64_atomic_rmw16_or_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Or,
            OperandSize::S16,
            Some(Extend::<Zero>::I64Extend16),
        )
    }

    fn visit_i64_atomic_rmw32_or_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Or,
            OperandSize::S32,
            Some(Extend::<Zero>::I64Extend32),
        )
    }

    fn visit_i64_atomic_rmw_or(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Or, OperandSize::S64, None)
    }
2819
    // Atomic RMW `xor` family: same dispatch shape as the other RMW
    // families, with `RmwOp::Xor`.
    fn visit_i32_atomic_rmw8_xor_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xor,
            OperandSize::S8,
            Some(Extend::<Zero>::I32Extend8),
        )
    }

    fn visit_i32_atomic_rmw16_xor_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xor,
            OperandSize::S16,
            Some(Extend::<Zero>::I32Extend16),
        )
    }

    fn visit_i32_atomic_rmw_xor(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Xor, OperandSize::S32, None)
    }

    fn visit_i64_atomic_rmw8_xor_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xor,
            OperandSize::S8,
            Some(Extend::<Zero>::I64Extend8),
        )
    }

    fn visit_i64_atomic_rmw16_xor_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xor,
            OperandSize::S16,
            Some(Extend::<Zero>::I64Extend16),
        )
    }

    fn visit_i64_atomic_rmw32_xor_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(
            &arg,
            RmwOp::Xor,
            OperandSize::S32,
            Some(Extend::<Zero>::I64Extend32),
        )
    }

    fn visit_i64_atomic_rmw_xor(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_rmw(&arg, RmwOp::Xor, OperandSize::S64, None)
    }
2872
    // Atomic compare-exchange family: dispatched to `emit_atomic_cmpxchg`
    // with the access width; narrow `_u` variants also pass the extend to
    // apply to the loaded value (extend flavor inferred at the callee).
    fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_cmpxchg(&arg, OperandSize::S8, Some(Extend::I32Extend8))
    }

    fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_cmpxchg(&arg, OperandSize::S16, Some(Extend::I32Extend16))
    }

    fn visit_i32_atomic_rmw_cmpxchg(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_cmpxchg(&arg, OperandSize::S32, None)
    }

    fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_cmpxchg(&arg, OperandSize::S8, Some(Extend::I64Extend8))
    }

    fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_cmpxchg(&arg, OperandSize::S16, Some(Extend::I64Extend16))
    }

    fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_cmpxchg(&arg, OperandSize::S32, Some(Extend::I64Extend32))
    }

    fn visit_i64_atomic_rmw_cmpxchg(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_cmpxchg(&arg, OperandSize::S64, None)
    }
2900
    // `memory.atomic.wait{32,64}` and `memory.atomic.notify` dispatch to
    // the shared wait/notify emitters; `atomic.fence` emits a fence
    // directly via the macro assembler.
    fn visit_memory_atomic_wait32(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_wait(&arg, AtomicWaitKind::Wait32)
    }

    fn visit_memory_atomic_wait64(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_wait(&arg, AtomicWaitKind::Wait64)
    }

    fn visit_memory_atomic_notify(&mut self, arg: MemArg) -> Self::Output {
        self.emit_atomic_notify(&arg)
    }

    fn visit_atomic_fence(&mut self) -> Self::Output {
        self.masm.fence()
    }
2916
2917    wasmparser::for_each_visit_operator!(def_unsupported);
2918}
2919
2920impl<'a, 'translation, 'data, M> VisitSimdOperator<'a>
2921    for CodeGen<'a, 'translation, 'data, M, Emission>
2922where
2923    M: MacroAssembler,
2924{
2925    fn visit_v128_const(&mut self, val: V128) -> Self::Output {
2926        self.context.stack.push(Val::v128(val.i128()));
2927        Ok(())
2928    }
2929
    // Plain 128-bit vector load/store, lowered as ordinary (non-atomic)
    // full-width memory operands.
    fn visit_v128_load(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::Operand(OperandSize::S128),
        )
    }

    fn visit_v128_store(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_store(&memarg, StoreKind::Operand(OperandSize::S128))
    }
2941
    // `v128.load{8x8,16x4,32x2}_{s,u}`: load a 64-bit value and widen each
    // lane to twice its width, signed or unsigned per the
    // `V128LoadExtendKind`.
    fn visit_v128_load8x8_s(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::VectorExtend(V128LoadExtendKind::E8x8S),
        )
    }

    fn visit_v128_load8x8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::VectorExtend(V128LoadExtendKind::E8x8U),
        )
    }

    fn visit_v128_load16x4_s(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::VectorExtend(V128LoadExtendKind::E16x4S),
        )
    }

    fn visit_v128_load16x4_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::VectorExtend(V128LoadExtendKind::E16x4U),
        )
    }

    fn visit_v128_load32x2_s(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::VectorExtend(V128LoadExtendKind::E32x2S),
        )
    }

    fn visit_v128_load32x2_u(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::VectorExtend(V128LoadExtendKind::E32x2U),
        )
    }
2989
    // `v128.load{8,16,32,64}_splat`: load a single scalar of the given
    // width and broadcast it to every lane of the result vector.
    fn visit_v128_load8_splat(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::Splat(SplatLoadKind::S8),
        )
    }

    fn visit_v128_load16_splat(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::Splat(SplatLoadKind::S16),
        )
    }

    fn visit_v128_load32_splat(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::Splat(SplatLoadKind::S32),
        )
    }

    fn visit_v128_load64_splat(&mut self, memarg: MemArg) -> Self::Output {
        self.emit_wasm_load(
            &memarg,
            WasmValType::V128,
            LoadKind::Splat(SplatLoadKind::S64),
        )
    }
3021
    // `{i8x16,i16x8,i32x4,i64x2,f32x4,f64x2}.splat`: broadcast the scalar
    // on top of the stack into every lane; `SplatKind` selects lane shape.
    fn visit_i8x16_splat(&mut self) -> Self::Output {
        self.masm.splat(&mut self.context, SplatKind::I8x16)
    }

    fn visit_i16x8_splat(&mut self) -> Self::Output {
        self.masm.splat(&mut self.context, SplatKind::I16x8)
    }

    fn visit_i32x4_splat(&mut self) -> Self::Output {
        self.masm.splat(&mut self.context, SplatKind::I32x4)
    }

    fn visit_i64x2_splat(&mut self) -> Self::Output {
        self.masm.splat(&mut self.context, SplatKind::I64x2)
    }

    fn visit_f32x4_splat(&mut self) -> Self::Output {
        self.masm.splat(&mut self.context, SplatKind::F32x4)
    }

    fn visit_f64x2_splat(&mut self) -> Self::Output {
        self.masm.splat(&mut self.context, SplatKind::F64x2)
    }
3045
3046    fn visit_i8x16_shuffle(&mut self, lanes: [u8; 16]) -> Self::Output {
3047        let rhs = self.context.pop_to_reg(self.masm, None)?;
3048        let lhs = self.context.pop_to_reg(self.masm, None)?;
3049        self.masm
3050            .shuffle(writable!(lhs.into()), lhs.into(), rhs.into(), lanes)?;
3051        self.context.stack.push(TypedReg::v128(lhs.into()).into());
3052        self.context.free_reg(rhs);
3053        Ok(())
3054    }
3055
3056    fn visit_i8x16_swizzle(&mut self) -> Self::Output {
3057        let rhs = self.context.pop_to_reg(self.masm, None)?;
3058        let lhs = self.context.pop_to_reg(self.masm, None)?;
3059        self.masm
3060            .swizzle(writable!(lhs.into()), lhs.into(), rhs.into())?;
3061        self.context.stack.push(TypedReg::v128(lhs.into()).into());
3062        self.context.free_reg(rhs);
3063        Ok(())
3064    }
3065
    // `extract_lane` family: pull one lane out of a vector as a scalar.
    // Narrow integer lanes (i8/i16) come in signed and unsigned flavors,
    // reflected in the `ExtractLaneKind`; the lane index is a validated
    // immediate captured by the closure.
    fn visit_i8x16_extract_lane_s(&mut self, lane: u8) -> Self::Output {
        self.context.extract_lane_op(
            self.masm,
            ExtractLaneKind::I8x16S,
            |masm, src, dst, kind| masm.extract_lane(src, dst, lane, kind),
        )
    }

    fn visit_i8x16_extract_lane_u(&mut self, lane: u8) -> Self::Output {
        self.context.extract_lane_op(
            self.masm,
            ExtractLaneKind::I8x16U,
            |masm, src, dst, kind| masm.extract_lane(src, dst, lane, kind),
        )
    }

    fn visit_i16x8_extract_lane_s(&mut self, lane: u8) -> Self::Output {
        self.context.extract_lane_op(
            self.masm,
            ExtractLaneKind::I16x8S,
            |masm, src, dst, kind| masm.extract_lane(src, dst, lane, kind),
        )
    }

    fn visit_i16x8_extract_lane_u(&mut self, lane: u8) -> Self::Output {
        self.context.extract_lane_op(
            self.masm,
            ExtractLaneKind::I16x8U,
            |masm, src, dst, kind| masm.extract_lane(src, dst, lane, kind),
        )
    }

    fn visit_i32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .extract_lane_op(self.masm, ExtractLaneKind::I32x4, |masm, src, dst, kind| {
                masm.extract_lane(src, dst, lane, kind)
            })
    }

    fn visit_i64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .extract_lane_op(self.masm, ExtractLaneKind::I64x2, |masm, src, dst, kind| {
                masm.extract_lane(src, dst, lane, kind)
            })
    }

    fn visit_f32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .extract_lane_op(self.masm, ExtractLaneKind::F32x4, |masm, src, dst, kind| {
                masm.extract_lane(src, dst, lane, kind)
            })
    }

    fn visit_f64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .extract_lane_op(self.masm, ExtractLaneKind::F64x2, |masm, src, dst, kind| {
                masm.extract_lane(src, dst, lane, kind)
            })
    }
3125
    // Lane-wise vector equality (`eq`) family: a binop where the result is
    // written in place over the destination operand; the `OperandSize`
    // matches the lane width and `VectorEqualityKind` the lane shape.
    fn visit_i8x16_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::I8x16)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::I16x8)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::I32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::I64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f64x2_eq(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_eq(writable!(dst), dst, src, VectorEqualityKind::F64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }
3173
    // Lane-wise vector inequality (`ne`) family: same structure as the
    // `eq` family above, dispatching to `v128_ne`.
    fn visit_i8x16_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::I8x16)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::I16x8)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::I32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::I64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f64x2_ne(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_ne(writable!(dst), dst, src, VectorEqualityKind::F64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }
3221
    // Lane-wise `lt` comparison family. Integer lanes have signed and
    // unsigned variants via `VectorCompareKind`; the Wasm SIMD proposal
    // defines only a signed 64x2 comparison, hence no `i64x2_lt_u`.
    fn visit_i8x16_lt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_lt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I8x16U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_lt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_lt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_lt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_lt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_lt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::I64x2S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_lt(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f64x2_lt(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_lt(writable!(dst), dst, src, VectorCompareKind::F64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }
3293
    // Lane-wise `le` comparison family: same structure as the `lt` family
    // above, dispatching to `v128_le` (signed-only for 64x2 lanes).
    fn visit_i8x16_le_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_le_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I8x16U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_le_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_le_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_le_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_le_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_le_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::I64x2S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_le(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f64x2_le(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_le(writable!(dst), dst, src, VectorCompareKind::F64x2)?;
                Ok(TypedReg::v128(dst))
            })
    }
3365
    // Lane-wise `gt` comparison family (integer variants): same structure
    // as the `lt`/`le` families, dispatching to `v128_gt`.
    fn visit_i8x16_gt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_gt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I8x16U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_gt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_gt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_gt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_gt_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_gt_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::I64x2S)?;
                Ok(TypedReg::v128(dst))
            })
    }
3421
3422    fn visit_f32x4_gt(&mut self) -> Self::Output {
3423        self.context
3424            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
3425                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::F32x4)?;
3426                Ok(TypedReg::v128(dst))
3427            })
3428    }
3429
3430    fn visit_f64x2_gt(&mut self) -> Self::Output {
3431        self.context
3432            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
3433                masm.v128_gt(writable!(dst), dst, src, VectorCompareKind::F64x2)?;
3434                Ok(TypedReg::v128(dst))
3435            })
3436    }
3437
    // Lane-wise `ge` (greater-than-or-equal) SIMD comparisons; same
    // dispatch shape as the `le`/`gt` visitors above.
    fn visit_i8x16_ge_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_ge_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I8x16U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_ge_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_ge_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_ge_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i32x4_ge_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i64x2_ge_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::I64x2S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_ge(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::F32x4)?;
                Ok(TypedReg::v128(dst))
            })
    }
3501
3502    fn visit_f64x2_ge(&mut self) -> Self::Output {
3503        self.context
3504            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
3505                masm.v128_ge(writable!(dst), dst, src, VectorCompareKind::F64x2)?;
3506                Ok(TypedReg::v128(dst))
3507            })
3508    }
3509
    // `replace_lane` visitors: each forwards the immediate lane index
    // (captured by the closure) together with a `ReplaceLaneKind`
    // selecting the lane shape.
    fn visit_i8x16_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::I8x16, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_i16x8_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::I16x8, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_i32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::I32x4, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_i64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::I64x2, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_f32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::F32x4, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }

    fn visit_f64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.context
            .replace_lane_op(self.masm, ReplaceLaneKind::F64x2, |masm, src, dst, kind| {
                masm.replace_lane(src, dst, lane, kind)
            })
    }
3551
    fn visit_v128_not(&mut self) -> Self::Output {
        // Bitwise complement of the single v128 operand, in place.
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_not(writable!(reg))?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_v128_and(&mut self) -> Self::Output {
        // `v128.and` is commutative, so the operand order is irrelevant.
        self.context
            .binop(self.masm, OperandSize::S128, |masm, dst, src, _size| {
                masm.v128_and(dst, src, writable!(dst))?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_v128_andnot(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S128, |masm, dst, src, _size| {
                // careful here: and_not is *not* commutative: dst = !src1 & src2
                masm.v128_and_not(src, dst, writable!(dst))?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_v128_or(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S128, |masm, dst, src, _size| {
                // `v128.or` is commutative, so the operand order is
                // irrelevant. (A previous comment about `and_not` here was
                // a copy-paste leftover from `visit_v128_andnot`.)
                masm.v128_or(src, dst, writable!(dst))?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_v128_xor(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S128, |masm, dst, src, _size| {
                // `v128.xor` is commutative, so the operand order is
                // irrelevant. (A previous comment about `and_not` here was
                // a copy-paste leftover from `visit_v128_andnot`.)
                masm.v128_xor(src, dst, writable!(dst))?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }
3593
    fn visit_v128_bitselect(&mut self) -> Self::Output {
        // Stack order: op1, op2, mask pushed in that order, so they pop in
        // reverse. The result goes into a freshly allocated FP register and
        // all three source registers are released afterwards.
        let mask = self.context.pop_to_reg(self.masm, None)?;
        let op2 = self.context.pop_to_reg(self.masm, None)?;
        let op1 = self.context.pop_to_reg(self.masm, None)?;
        let dst = self.context.any_fpr(self.masm)?;

        // careful here: bitselect is *not* commutative.
        self.masm
            .v128_bitselect(op1.reg, op2.reg, mask.reg, writable!(dst))?;

        self.context
            .stack
            .push(TypedReg::new(WasmValType::V128, dst).into());
        self.context.free_reg(op1);
        self.context.free_reg(op2);
        self.context.free_reg(mask);

        Ok(())
    }

    fn visit_v128_any_true(&mut self) -> Self::Output {
        // `v128.any_true` produces an i32 result, hence the GPR destination.
        let src = self.context.pop_to_reg(self.masm, None)?;
        let dst = self.context.any_gpr(self.masm)?;

        self.masm.v128_any_true(src.reg, writable!(dst))?;

        self.context
            .stack
            .push(TypedReg::new(WasmValType::I32, dst).into());
        self.context.free_reg(src);

        Ok(())
    }
3627
    // Per-lane v128 loads and stores: the lane index and lane width are
    // encoded into the `LoadKind`/`StoreKind`, and the shared
    // `emit_wasm_load`/`emit_wasm_store` paths handle addressing and
    // bounds checks.
    fn visit_v128_load8_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_load(
            &arg,
            WasmValType::V128,
            LoadKind::vector_lane(lane, OperandSize::S8),
        )
    }

    fn visit_v128_load16_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_load(
            &arg,
            WasmValType::V128,
            LoadKind::vector_lane(lane, OperandSize::S16),
        )
    }

    fn visit_v128_load32_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_load(
            &arg,
            WasmValType::V128,
            LoadKind::vector_lane(lane, OperandSize::S32),
        )
    }

    fn visit_v128_load64_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_load(
            &arg,
            WasmValType::V128,
            LoadKind::vector_lane(lane, OperandSize::S64),
        )
    }

    fn visit_v128_store8_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_store(&arg, StoreKind::vector_lane(lane, OperandSize::S8))
    }

    fn visit_v128_store16_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_store(&arg, StoreKind::vector_lane(lane, OperandSize::S16))
    }

    fn visit_v128_store32_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_store(&arg, StoreKind::vector_lane(lane, OperandSize::S32))
    }

    fn visit_v128_store64_lane(&mut self, arg: MemArg, lane: u8) -> Self::Output {
        self.emit_wasm_store(&arg, StoreKind::vector_lane(lane, OperandSize::S64))
    }
3675
    // Int→float conversions: in-place unops selected by `V128ConvertKind`.
    fn visit_f32x4_convert_i32x4_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_convert(reg, writable!(reg), V128ConvertKind::I32x4S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_f32x4_convert_i32x4_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_convert(reg, writable!(reg), V128ConvertKind::I32x4U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_f64x2_convert_low_i32x4_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_convert(reg, writable!(reg), V128ConvertKind::I32x4LowS)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_f64x2_convert_low_i32x4_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_convert(reg, writable!(reg), V128ConvertKind::I32x4LowU)?;
            Ok(TypedReg::v128(reg))
        })
    }

    // Narrowing binops: note that the `OperandSize` reflects the *source*
    // lane width (e.g. S16 for `i8x16.narrow_i16x8_*`).
    fn visit_i8x16_narrow_i16x8_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_narrow(dst, src, writable!(dst), V128NarrowKind::I16x8S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i8x16_narrow_i16x8_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_narrow(dst, src, writable!(dst), V128NarrowKind::I16x8U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_narrow_i32x4_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_narrow(dst, src, writable!(dst), V128NarrowKind::I32x4S)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_i16x8_narrow_i32x4_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_narrow(dst, src, writable!(dst), V128NarrowKind::I32x4U)?;
                Ok(TypedReg::v128(dst))
            })
    }

    fn visit_f32x4_demote_f64x2_zero(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_demote(reg, writable!(reg))?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_f64x2_promote_low_f32x4(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_promote(reg, writable!(reg))?;
            Ok(TypedReg::v128(reg))
        })
    }
3749
    // Integer widening (extend low/high, signed/unsigned): in-place unops
    // selected by `V128ExtendKind`.
    fn visit_i16x8_extend_low_i8x16_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI8x16S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i16x8_extend_high_i8x16_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI8x16S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i16x8_extend_low_i8x16_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI8x16U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i16x8_extend_high_i8x16_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI8x16U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i32x4_extend_low_i16x8_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI16x8S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i32x4_extend_high_i16x8_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI16x8S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i32x4_extend_low_i16x8_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI16x8U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i32x4_extend_high_i16x8_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI16x8U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i64x2_extend_low_i32x4_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI32x4S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i64x2_extend_high_i32x4_s(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI32x4S)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i64x2_extend_low_i32x4_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::LowI32x4U)?;
            Ok(TypedReg::v128(reg))
        })
    }

    fn visit_i64x2_extend_high_i32x4_u(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_extend(reg, writable!(reg), V128ExtendKind::HighI32x4U)?;
            Ok(TypedReg::v128(reg))
        })
    }
3833
    // Wrapping lane-wise add/sub; subtraction is *not* commutative, so the
    // `dst, src` argument order into `v128_sub` is significant. The mul
    // visitors delegate entirely to the masm (the lowering differs per
    // lane width, e.g. i64x2 has no single-instruction multiply on x64).
    fn visit_i8x16_add(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I8x16)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_add(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I16x8)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i32x4_add(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I32x4)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i64x2_add(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I64x2)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i8x16_sub(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I8x16)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_sub(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I16x8)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i32x4_sub(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I32x4)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i64x2_sub(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I64x2)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_mul(&mut self) -> Self::Output {
        self.masm.v128_mul(&mut self.context, V128MulKind::I16x8)
    }

    fn visit_i32x4_mul(&mut self) -> Self::Output {
        self.masm.v128_mul(&mut self.context, V128MulKind::I32x4)
    }

    fn visit_i64x2_mul(&mut self) -> Self::Output {
        self.masm.v128_mul(&mut self.context, V128MulKind::I64x2)
    }
3909
    // Saturating lane-wise add/sub, selected by the `Sat{S,U}` variants of
    // `V128AddKind`/`V128SubKind`.
    fn visit_i8x16_add_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I8x16SatS)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_add_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I16x8SatS)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i8x16_add_sat_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I8x16SatU)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_add_sat_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_add(dst, src, writable!(dst), V128AddKind::I16x8SatU)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i8x16_sub_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I8x16SatS)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_sub_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I16x8SatS)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i8x16_sub_sat_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I8x16SatU)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }

    fn visit_i16x8_sub_sat_u(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
                masm.v128_sub(dst, src, writable!(dst), V128SubKind::I16x8SatU)?;
                Ok(TypedReg::new(WasmValType::V128, dst))
            })
    }
3973
    // Lane-wise absolute value and negation: in-place unops selected by
    // `V128AbsKind`/`V128NegKind`.
    fn visit_i8x16_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::I8x16)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_i16x8_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::I16x8)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_i32x4_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::I32x4)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_i64x2_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::I64x2)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_f32x4_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::F32x4)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_f64x2_abs(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, reg| {
            masm.v128_abs(reg, writable!(reg), V128AbsKind::F64x2)?;
            Ok(TypedReg::new(WasmValType::V128, reg))
        })
    }

    fn visit_i8x16_neg(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, op| {
            masm.v128_neg(writable!(op), V128NegKind::I8x16)?;
            Ok(TypedReg::new(WasmValType::V128, op))
        })
    }

    fn visit_i16x8_neg(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, op| {
            masm.v128_neg(writable!(op), V128NegKind::I16x8)?;
            Ok(TypedReg::new(WasmValType::V128, op))
        })
    }

    fn visit_i32x4_neg(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, op| {
            masm.v128_neg(writable!(op), V128NegKind::I32x4)?;
            Ok(TypedReg::new(WasmValType::V128, op))
        })
    }

    fn visit_i64x2_neg(&mut self) -> Self::Output {
        self.context.unop(self.masm, |masm, op| {
            masm.v128_neg(writable!(op), V128NegKind::I64x2)?;
            Ok(TypedReg::new(WasmValType::V128, op))
        })
    }
4043
    // Lane-wise shifts. These delegate entirely to `MacroAssembler::
    // v128_shift`, which pops both the shift amount and vector from the
    // context; `OperandSize` carries the lane width and `ShiftKind` the
    // direction/signedness.
    fn visit_i8x16_shl(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S8, ShiftKind::Shl)
    }

    fn visit_i16x8_shl(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S16, ShiftKind::Shl)
    }

    fn visit_i32x4_shl(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S32, ShiftKind::Shl)
    }

    fn visit_i64x2_shl(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S64, ShiftKind::Shl)
    }

    fn visit_i8x16_shr_u(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S8, ShiftKind::ShrU)
    }

    fn visit_i16x8_shr_u(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S16, ShiftKind::ShrU)
    }

    fn visit_i32x4_shr_u(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S32, ShiftKind::ShrU)
    }

    fn visit_i64x2_shr_u(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S64, ShiftKind::ShrU)
    }

    fn visit_i8x16_shr_s(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S8, ShiftKind::ShrS)
    }

    fn visit_i16x8_shr_s(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S16, ShiftKind::ShrS)
    }

    fn visit_i32x4_shr_s(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S32, ShiftKind::ShrS)
    }

    fn visit_i64x2_shr_s(&mut self) -> Self::Output {
        self.masm
            .v128_shift(&mut self.context, OperandSize::S64, ShiftKind::ShrS)
    }
4103
    // `i16x8.q15mulr_sat_s`: saturating, rounding Q15 fixed-point multiply.
    fn visit_i16x8_q15mulr_sat_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S16, |masm, dst, src, size| {
                masm.v128_q15mulr_sat_s(dst, src, writable!(dst), size)?;
                Ok(TypedReg::v128(dst))
            })
    }

    // Lane-wise signed minimum; integer `min` is commutative, so the
    // `src, dst` order into `v128_min` does not affect the result.
    fn visit_i8x16_min_s(&mut self) -> Self::Output {
        self.context
            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
                masm.v128_min(src, dst, writable!(dst), V128MinKind::I8x16S)?;
                Ok(TypedReg::v128(dst))
            })
    }
4119
    // `all_true` and `bitmask` visitors: both consume a v128 and produce an
    // i32; the shared context helpers handle the vector-in/scalar-out
    // register plumbing, parameterized only by lane width.
    fn visit_i8x16_all_true(&mut self) -> Self::Output {
        self.context.v128_all_true_op(self.masm, |masm, src, dst| {
            masm.v128_all_true(src, writable!(dst), OperandSize::S8)
        })
    }

    fn visit_i16x8_all_true(&mut self) -> Self::Output {
        self.context.v128_all_true_op(self.masm, |masm, src, dst| {
            masm.v128_all_true(src, writable!(dst), OperandSize::S16)
        })
    }

    fn visit_i32x4_all_true(&mut self) -> Self::Output {
        self.context.v128_all_true_op(self.masm, |masm, src, dst| {
            masm.v128_all_true(src, writable!(dst), OperandSize::S32)
        })
    }

    fn visit_i64x2_all_true(&mut self) -> Self::Output {
        self.context.v128_all_true_op(self.masm, |masm, src, dst| {
            masm.v128_all_true(src, writable!(dst), OperandSize::S64)
        })
    }

    fn visit_i8x16_bitmask(&mut self) -> Self::Output {
        self.context.v128_bitmask_op(self.masm, |masm, src, dst| {
            masm.v128_bitmask(src, writable!(dst), OperandSize::S8)
        })
    }

    fn visit_i16x8_bitmask(&mut self) -> Self::Output {
        self.context.v128_bitmask_op(self.masm, |masm, src, dst| {
            masm.v128_bitmask(src, writable!(dst), OperandSize::S16)
        })
    }

    fn visit_i32x4_bitmask(&mut self) -> Self::Output {
        self.context.v128_bitmask_op(self.masm, |masm, src, dst| {
            masm.v128_bitmask(src, writable!(dst), OperandSize::S32)
        })
    }

    fn visit_i64x2_bitmask(&mut self) -> Self::Output {
        self.context.v128_bitmask_op(self.masm, |masm, src, dst| {
            masm.v128_bitmask(src, writable!(dst), OperandSize::S64)
        })
    }
4167
    // Saturating float→int truncations; the `_zero` variants operate on
    // f64x2 input and zero the upper lanes per the wasm SIMD spec.
    fn visit_i32x4_trunc_sat_f32x4_s(&mut self) -> Self::Output {
        self.masm
            .v128_trunc(&mut self.context, V128TruncKind::I32x4FromF32x4S)
    }

    fn visit_i32x4_trunc_sat_f32x4_u(&mut self) -> Self::Output {
        self.masm
            .v128_trunc(&mut self.context, V128TruncKind::I32x4FromF32x4U)
    }

    fn visit_i32x4_trunc_sat_f64x2_s_zero(&mut self) -> Self::Output {
        self.masm
            .v128_trunc(&mut self.context, V128TruncKind::I32x4FromF64x2SZero)
    }

    fn visit_i32x4_trunc_sat_f64x2_u_zero(&mut self) -> Self::Output {
        self.masm
            .v128_trunc(&mut self.context, V128TruncKind::I32x4FromF64x2UZero)
    }
4187
4188    fn visit_i16x8_min_s(&mut self) -> Self::Output {
4189        self.context
4190            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
4191                masm.v128_min(src, dst, writable!(dst), V128MinKind::I16x8S)?;
4192                Ok(TypedReg::v128(dst))
4193            })
4194    }
4195
4196    fn visit_i32x4_dot_i16x8_s(&mut self) -> Self::Output {
4197        self.context
4198            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4199                masm.v128_dot(dst, src, writable!(dst))?;
4200                Ok(TypedReg::v128(dst))
4201            })
4202    }
4203
    /// Emits machine code for `i8x16.popcnt`; the population count is
    /// implemented entirely by the masm helper.
    fn visit_i8x16_popcnt(&mut self) -> Self::Output {
        self.masm.v128_popcnt(&mut self.context)
    }
4207
4208    fn visit_i8x16_avgr_u(&mut self) -> Self::Output {
4209        self.context
4210            .binop(self.masm, OperandSize::S8, |masm, dst, src, size| {
4211                masm.v128_avgr(dst, src, writable!(dst), size)?;
4212                Ok(TypedReg::v128(dst))
4213            })
4214    }
4215
4216    fn visit_i32x4_min_s(&mut self) -> Self::Output {
4217        self.context
4218            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4219                masm.v128_min(src, dst, writable!(dst), V128MinKind::I32x4S)?;
4220                Ok(TypedReg::v128(dst))
4221            })
4222    }
4223
4224    fn visit_i8x16_min_u(&mut self) -> Self::Output {
4225        self.context
4226            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
4227                masm.v128_min(src, dst, writable!(dst), V128MinKind::I8x16U)?;
4228                Ok(TypedReg::v128(dst))
4229            })
4230    }
4231
4232    fn visit_i16x8_avgr_u(&mut self) -> Self::Output {
4233        self.context
4234            .binop(self.masm, OperandSize::S16, |masm, dst, src, size| {
4235                masm.v128_avgr(dst, src, writable!(dst), size)?;
4236                Ok(TypedReg::v128(dst))
4237            })
4238    }
4239
4240    fn visit_i16x8_min_u(&mut self) -> Self::Output {
4241        self.context
4242            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
4243                masm.v128_min(src, dst, writable!(dst), V128MinKind::I16x8U)?;
4244                Ok(TypedReg::v128(dst))
4245            })
4246    }
4247
4248    fn visit_i32x4_min_u(&mut self) -> Self::Output {
4249        self.context
4250            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4251                masm.v128_min(src, dst, writable!(dst), V128MinKind::I32x4U)?;
4252                Ok(TypedReg::v128(dst))
4253            })
4254    }
4255
4256    fn visit_i8x16_max_s(&mut self) -> Self::Output {
4257        self.context
4258            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
4259                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I8x16S)?;
4260                Ok(TypedReg::v128(dst))
4261            })
4262    }
4263
4264    fn visit_i16x8_max_s(&mut self) -> Self::Output {
4265        self.context
4266            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
4267                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I16x8S)?;
4268                Ok(TypedReg::v128(dst))
4269            })
4270    }
4271
4272    fn visit_i32x4_max_s(&mut self) -> Self::Output {
4273        self.context
4274            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4275                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I32x4S)?;
4276                Ok(TypedReg::v128(dst))
4277            })
4278    }
4279
4280    fn visit_i8x16_max_u(&mut self) -> Self::Output {
4281        self.context
4282            .binop(self.masm, OperandSize::S8, |masm, dst, src, _size| {
4283                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I8x16U)?;
4284                Ok(TypedReg::v128(dst))
4285            })
4286    }
4287
4288    fn visit_i16x8_max_u(&mut self) -> Self::Output {
4289        self.context
4290            .binop(self.masm, OperandSize::S16, |masm, dst, src, _size| {
4291                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I16x8U)?;
4292                Ok(TypedReg::v128(dst))
4293            })
4294    }
4295
4296    fn visit_i32x4_max_u(&mut self) -> Self::Output {
4297        self.context
4298            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4299                masm.v128_max(src, dst, writable!(dst), V128MaxKind::I32x4U)?;
4300                Ok(TypedReg::v128(dst))
4301            })
4302    }
4303
4304    fn visit_i16x8_extmul_low_i8x16_s(&mut self) -> Self::Output {
4305        self.masm
4306            .v128_extmul(&mut self.context, V128ExtMulKind::LowI8x16S)
4307    }
4308
4309    fn visit_i32x4_extmul_low_i16x8_s(&mut self) -> Self::Output {
4310        self.masm
4311            .v128_extmul(&mut self.context, V128ExtMulKind::LowI16x8S)
4312    }
4313
4314    fn visit_i64x2_extmul_low_i32x4_s(&mut self) -> Self::Output {
4315        self.masm
4316            .v128_extmul(&mut self.context, V128ExtMulKind::LowI32x4S)
4317    }
4318
4319    fn visit_i16x8_extmul_low_i8x16_u(&mut self) -> Self::Output {
4320        self.masm
4321            .v128_extmul(&mut self.context, V128ExtMulKind::LowI8x16U)
4322    }
4323
4324    fn visit_i32x4_extmul_low_i16x8_u(&mut self) -> Self::Output {
4325        self.masm
4326            .v128_extmul(&mut self.context, V128ExtMulKind::LowI16x8U)
4327    }
4328
4329    fn visit_i64x2_extmul_low_i32x4_u(&mut self) -> Self::Output {
4330        self.masm
4331            .v128_extmul(&mut self.context, V128ExtMulKind::LowI32x4U)
4332    }
4333
4334    fn visit_i16x8_extmul_high_i8x16_u(&mut self) -> Self::Output {
4335        self.masm
4336            .v128_extmul(&mut self.context, V128ExtMulKind::HighI8x16U)
4337    }
4338
4339    fn visit_i32x4_extmul_high_i16x8_u(&mut self) -> Self::Output {
4340        self.masm
4341            .v128_extmul(&mut self.context, V128ExtMulKind::HighI16x8U)
4342    }
4343
4344    fn visit_i64x2_extmul_high_i32x4_u(&mut self) -> Self::Output {
4345        self.masm
4346            .v128_extmul(&mut self.context, V128ExtMulKind::HighI32x4U)
4347    }
4348
4349    fn visit_i16x8_extmul_high_i8x16_s(&mut self) -> Self::Output {
4350        self.masm
4351            .v128_extmul(&mut self.context, V128ExtMulKind::HighI8x16S)
4352    }
4353
4354    fn visit_i32x4_extmul_high_i16x8_s(&mut self) -> Self::Output {
4355        self.masm
4356            .v128_extmul(&mut self.context, V128ExtMulKind::HighI16x8S)
4357    }
4358
4359    fn visit_i64x2_extmul_high_i32x4_s(&mut self) -> Self::Output {
4360        self.masm
4361            .v128_extmul(&mut self.context, V128ExtMulKind::HighI32x4S)
4362    }
4363
4364    fn visit_i16x8_extadd_pairwise_i8x16_s(&mut self) -> Self::Output {
4365        self.context.unop(self.masm, |masm, op| {
4366            masm.v128_extadd_pairwise(op, writable!(op), V128ExtAddKind::I8x16S)?;
4367            Ok(TypedReg::v128(op))
4368        })
4369    }
4370
4371    fn visit_i16x8_extadd_pairwise_i8x16_u(&mut self) -> Self::Output {
4372        self.context.unop(self.masm, |masm, op| {
4373            masm.v128_extadd_pairwise(op, writable!(op), V128ExtAddKind::I8x16U)?;
4374            Ok(TypedReg::v128(op))
4375        })
4376    }
4377
4378    fn visit_i32x4_extadd_pairwise_i16x8_s(&mut self) -> Self::Output {
4379        self.context.unop(self.masm, |masm, op| {
4380            masm.v128_extadd_pairwise(op, writable!(op), V128ExtAddKind::I16x8S)?;
4381            Ok(TypedReg::v128(op))
4382        })
4383    }
4384
4385    fn visit_i32x4_extadd_pairwise_i16x8_u(&mut self) -> Self::Output {
4386        self.context.unop(self.masm, |masm, op| {
4387            masm.v128_extadd_pairwise(op, writable!(op), V128ExtAddKind::I16x8U)?;
4388            Ok(TypedReg::v128(op))
4389        })
4390    }
4391
4392    fn visit_f32x4_add(&mut self) -> Self::Output {
4393        self.context
4394            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4395                masm.v128_add(dst, src, writable!(dst), V128AddKind::F32x4)?;
4396                Ok(TypedReg::v128(dst))
4397            })
4398    }
4399
4400    fn visit_f64x2_add(&mut self) -> Self::Output {
4401        self.context
4402            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
4403                masm.v128_add(dst, src, writable!(dst), V128AddKind::F64x2)?;
4404                Ok(TypedReg::v128(dst))
4405            })
4406    }
4407
4408    fn visit_f32x4_sub(&mut self) -> Self::Output {
4409        self.context
4410            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4411                masm.v128_sub(dst, src, writable!(dst), V128SubKind::F32x4)?;
4412                Ok(TypedReg::v128(dst))
4413            })
4414    }
4415
4416    fn visit_f64x2_sub(&mut self) -> Self::Output {
4417        self.context
4418            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
4419                masm.v128_sub(dst, src, writable!(dst), V128SubKind::F64x2)?;
4420                Ok(TypedReg::v128(dst))
4421            })
4422    }
4423
4424    fn visit_f32x4_mul(&mut self) -> Self::Output {
4425        self.masm.v128_mul(&mut self.context, V128MulKind::F32x4)
4426    }
4427
4428    fn visit_f64x2_mul(&mut self) -> Self::Output {
4429        self.masm.v128_mul(&mut self.context, V128MulKind::F64x2)
4430    }
4431
4432    fn visit_f32x4_div(&mut self) -> Self::Output {
4433        self.context
4434            .binop(self.masm, OperandSize::S32, |masm, dst, src, size| {
4435                masm.v128_div(dst, src, writable!(dst), size)?;
4436                Ok(TypedReg::v128(dst))
4437            })
4438    }
4439
4440    fn visit_f64x2_div(&mut self) -> Self::Output {
4441        self.context
4442            .binop(self.masm, OperandSize::S64, |masm, dst, src, size| {
4443                masm.v128_div(dst, src, writable!(dst), size)?;
4444                Ok(TypedReg::v128(dst))
4445            })
4446    }
4447
4448    fn visit_f32x4_neg(&mut self) -> Self::Output {
4449        self.context.unop(self.masm, |masm, reg| {
4450            masm.v128_neg(writable!(reg), V128NegKind::F32x4)?;
4451            Ok(TypedReg::v128(reg))
4452        })
4453    }
4454
4455    fn visit_f32x4_ceil(&mut self) -> Self::Output {
4456        self.context.unop(self.masm, |masm, reg| {
4457            masm.v128_ceil(reg, writable!(reg), OperandSize::S32)?;
4458            Ok(TypedReg::v128(reg))
4459        })
4460    }
4461
4462    fn visit_f64x2_neg(&mut self) -> Self::Output {
4463        self.context.unop(self.masm, |masm, reg| {
4464            masm.v128_neg(writable!(reg), V128NegKind::F64x2)?;
4465            Ok(TypedReg::v128(reg))
4466        })
4467    }
4468
4469    fn visit_f64x2_ceil(&mut self) -> Self::Output {
4470        self.context.unop(self.masm, |masm, reg| {
4471            masm.v128_ceil(reg, writable!(reg), OperandSize::S64)?;
4472            Ok(TypedReg::v128(reg))
4473        })
4474    }
4475
4476    fn visit_f32x4_sqrt(&mut self) -> Self::Output {
4477        self.context.unop(self.masm, |masm, reg| {
4478            masm.v128_sqrt(reg, writable!(reg), OperandSize::S32)?;
4479            Ok(TypedReg::v128(reg))
4480        })
4481    }
4482
4483    fn visit_f32x4_floor(&mut self) -> Self::Output {
4484        self.context.unop(self.masm, |masm, reg| {
4485            masm.v128_floor(reg, writable!(reg), OperandSize::S32)?;
4486            Ok(TypedReg::v128(reg))
4487        })
4488    }
4489
4490    fn visit_f64x2_sqrt(&mut self) -> Self::Output {
4491        self.context.unop(self.masm, |masm, reg| {
4492            masm.v128_sqrt(reg, writable!(reg), OperandSize::S64)?;
4493            Ok(TypedReg::v128(reg))
4494        })
4495    }
4496
4497    fn visit_f64x2_floor(&mut self) -> Self::Output {
4498        self.context.unop(self.masm, |masm, reg| {
4499            masm.v128_floor(reg, writable!(reg), OperandSize::S64)?;
4500            Ok(TypedReg::v128(reg))
4501        })
4502    }
4503
4504    fn visit_f32x4_nearest(&mut self) -> Self::Output {
4505        self.context.unop(self.masm, |masm, reg| {
4506            masm.v128_nearest(reg, writable!(reg), OperandSize::S32)?;
4507            Ok(TypedReg::v128(reg))
4508        })
4509    }
4510
4511    fn visit_f64x2_nearest(&mut self) -> Self::Output {
4512        self.context.unop(self.masm, |masm, reg| {
4513            masm.v128_nearest(reg, writable!(reg), OperandSize::S64)?;
4514            Ok(TypedReg::v128(reg))
4515        })
4516    }
4517
4518    fn visit_f32x4_trunc(&mut self) -> Self::Output {
4519        self.masm
4520            .v128_trunc(&mut self.context, V128TruncKind::F32x4)
4521    }
4522
4523    fn visit_f64x2_trunc(&mut self) -> Self::Output {
4524        self.masm
4525            .v128_trunc(&mut self.context, V128TruncKind::F64x2)
4526    }
4527
4528    fn visit_v128_load32_zero(&mut self, memarg: MemArg) -> Self::Output {
4529        self.emit_wasm_load(
4530            &memarg,
4531            WasmValType::V128,
4532            LoadKind::VectorZero(OperandSize::S32),
4533        )
4534    }
4535
4536    fn visit_v128_load64_zero(&mut self, memarg: MemArg) -> Self::Output {
4537        self.emit_wasm_load(
4538            &memarg,
4539            WasmValType::V128,
4540            LoadKind::VectorZero(OperandSize::S64),
4541        )
4542    }
4543
4544    fn visit_f32x4_pmin(&mut self) -> Self::Output {
4545        self.context
4546            .binop(self.masm, OperandSize::S32, |masm, dst, src, size| {
4547                masm.v128_pmin(dst, src, writable!(dst), size)?;
4548                Ok(TypedReg::v128(dst))
4549            })
4550    }
4551
4552    fn visit_f64x2_pmin(&mut self) -> Self::Output {
4553        self.context
4554            .binop(self.masm, OperandSize::S64, |masm, dst, src, size| {
4555                masm.v128_pmin(dst, src, writable!(dst), size)?;
4556                Ok(TypedReg::v128(dst))
4557            })
4558    }
4559
4560    fn visit_f32x4_pmax(&mut self) -> Self::Output {
4561        self.context
4562            .binop(self.masm, OperandSize::S32, |masm, dst, src, size| {
4563                masm.v128_pmax(dst, src, writable!(dst), size)?;
4564                Ok(TypedReg::v128(dst))
4565            })
4566    }
4567
4568    fn visit_f64x2_pmax(&mut self) -> Self::Output {
4569        self.context
4570            .binop(self.masm, OperandSize::S64, |masm, dst, src, size| {
4571                masm.v128_pmax(dst, src, writable!(dst), size)?;
4572                Ok(TypedReg::v128(dst))
4573            })
4574    }
4575
4576    fn visit_f32x4_min(&mut self) -> Self::Output {
4577        self.context
4578            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4579                masm.v128_min(dst, src, writable!(dst), V128MinKind::F32x4)?;
4580                Ok(TypedReg::v128(dst))
4581            })
4582    }
4583
4584    fn visit_f64x2_min(&mut self) -> Self::Output {
4585        self.context
4586            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
4587                masm.v128_min(dst, src, writable!(dst), V128MinKind::F64x2)?;
4588                Ok(TypedReg::v128(dst))
4589            })
4590    }
4591
4592    fn visit_f32x4_max(&mut self) -> Self::Output {
4593        self.context
4594            .binop(self.masm, OperandSize::S32, |masm, dst, src, _size| {
4595                masm.v128_max(dst, src, writable!(dst), V128MaxKind::F32x4)?;
4596                Ok(TypedReg::v128(dst))
4597            })
4598    }
4599
4600    fn visit_f64x2_max(&mut self) -> Self::Output {
4601        self.context
4602            .binop(self.masm, OperandSize::S64, |masm, dst, src, _size| {
4603                masm.v128_max(dst, src, writable!(dst), V128MaxKind::F64x2)?;
4604                Ok(TypedReg::v128(dst))
4605            })
4606    }
4607
    // Expand `def_unsupported` over every SIMD operator: it no-ops for the
    // operators explicitly implemented above and defines a panicking visitor
    // for any operator Winch does not yet support.
    wasmparser::for_each_visit_simd_operator!(def_unsupported);
4609}
4610
4611impl<'a, 'translation, 'data, M> CodeGen<'a, 'translation, 'data, M, Emission>
4612where
4613    M: MacroAssembler,
4614{
4615    fn cmp_i32s(&mut self, kind: IntCmpKind) -> Result<()> {
4616        self.context.i32_binop(self.masm, |masm, dst, src, size| {
4617            masm.cmp_with_set(writable!(dst), src, kind, size)?;
4618            Ok(TypedReg::i32(dst))
4619        })
4620    }
4621
4622    fn cmp_i64s(&mut self, kind: IntCmpKind) -> Result<()> {
4623        self.context
4624            .i64_binop(self.masm, move |masm, dst, src, size| {
4625                masm.cmp_with_set(writable!(dst), src, kind, size)?;
4626                Ok(TypedReg::i32(dst)) // Return value for comparisons is an `i32`.
4627            })
4628    }
4629}
4630
4631impl TryFrom<WasmValType> for OperandSize {
4632    type Error = anyhow::Error;
4633    fn try_from(ty: WasmValType) -> Result<OperandSize> {
4634        let ty = match ty {
4635            WasmValType::I32 | WasmValType::F32 => OperandSize::S32,
4636            WasmValType::I64 | WasmValType::F64 => OperandSize::S64,
4637            WasmValType::V128 => OperandSize::S128,
4638            WasmValType::Ref(rt) => {
4639                match rt.heap_type {
4640                    // TODO: Hardcoded size, assuming 64-bit support only. Once
4641                    // Wasmtime supports 32-bit architectures, this will need
4642                    // to be updated in such a way that the calculation of the
4643                    // OperandSize will depend on the target's  pointer size.
4644                    WasmHeapType::Func => OperandSize::S64,
4645                    WasmHeapType::Extern => OperandSize::S64,
4646                    _ => bail!(CodeGenError::unsupported_wasm_type()),
4647                }
4648            }
4649        };
4650        Ok(ty)
4651    }
4652}