use super::*;
use crate::ir::ExternalName;
use crate::machinst::abi::StackAMode;
use pulley_interpreter::encode;
use pulley_interpreter::regs::Reg as _;
use std::fmt;

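/// Helper macro for defining a newtype of `Reg` restricted to a single
/// register class, along with a `Writable<_>` alias and conversions to and
/// from the corresponding `pulley_interpreter` register type.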
macro_rules! newtype_of_reg {
    (
        $newtype_reg:ident,
        $newtype_writable_reg:ident,
        $class:expr
    ) => {
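        /// A newtype wrapper around `Reg` that is guaranteed to be of one
        /// particular register class.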
        #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
        pub struct $newtype_reg(Reg);

        impl PartialEq<Reg> for $newtype_reg {
            fn eq(&self, other: &Reg) -> bool {
                self.0 == *other
            }
        }

        impl From<$newtype_reg> for Reg {
            fn from(r: $newtype_reg) -> Self {
                r.0
            }
        }

        impl TryFrom<Reg> for $newtype_reg {
            type Error = ();
            fn try_from(r: Reg) -> Result<Self, Self::Error> {
                Self::new(r).ok_or(())
            }
        }

        impl $newtype_reg {
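            /// Create this newtype from the given register, returning `None`
            /// if the register is not of the required class.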
            pub fn new(reg: Reg) -> Option<Self> {
                if reg.class() == $class {
                    Some(Self(reg))
                } else {
                    None
                }
            }

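            /// Get this newtype's underlying `Reg`.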
            pub fn to_reg(self) -> Reg {
                self.0
            }
        }

        impl std::ops::Deref for $newtype_reg {
            type Target = Reg;

            fn deref(&self) -> &Reg {
                &self.0
            }
        }

        impl AsMut<Reg> for $newtype_reg {
            fn as_mut(&mut self) -> &mut Reg {
                &mut self.0
            }
        }

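        /// A writable version of the register newtype.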
        pub type $newtype_writable_reg = Writable<$newtype_reg>;

        impl From<pulley_interpreter::regs::$newtype_reg> for $newtype_reg {
            fn from(r: pulley_interpreter::regs::$newtype_reg) -> Self {
                Self::new(regalloc2::PReg::new(usize::from(r as u8), $class).into()).unwrap()
            }
        }
        impl From<$newtype_reg> for pulley_interpreter::regs::$newtype_reg {
            fn from(r: $newtype_reg) -> Self {
                Self::new(r.to_real_reg().unwrap().hw_enc()).unwrap()
            }
        }
        impl<'a> From<&'a $newtype_reg> for pulley_interpreter::regs::$newtype_reg {
            fn from(r: &'a $newtype_reg) -> Self {
                Self::new(r.to_real_reg().unwrap().hw_enc()).unwrap()
            }
        }
        impl From<$newtype_writable_reg> for pulley_interpreter::regs::$newtype_reg {
            fn from(r: $newtype_writable_reg) -> Self {
                Self::new(r.to_reg().to_real_reg().unwrap().hw_enc()).unwrap()
            }
        }
        impl<'a> From<&'a $newtype_writable_reg> for pulley_interpreter::regs::$newtype_reg {
            fn from(r: &'a $newtype_writable_reg) -> Self {
                Self::new(r.to_reg().to_real_reg().unwrap().hw_enc()).unwrap()
            }
        }

        impl TryFrom<Writable<Reg>> for $newtype_writable_reg {
            type Error = ();
            fn try_from(r: Writable<Reg>) -> Result<Self, Self::Error> {
                let r = r.to_reg();
                match $newtype_reg::new(r) {
                    Some(r) => Ok(Writable::from_reg(r)),
                    None => Err(()),
                }
            }
        }
    };
}

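// Register newtypes for the integer, float, and vector register classes.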
newtype_of_reg!(XReg, WritableXReg, RegClass::Int);
newtype_of_reg!(FReg, WritableFReg, RegClass::Float);
newtype_of_reg!(VReg, WritableVReg, RegClass::Vector);

impl XReg {
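    /// Index of the first "special" register, mirroring
    /// `pulley_interpreter::regs::XReg::SPECIAL_START`.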
    pub const SPECIAL_START: u8 = pulley_interpreter::regs::XReg::SPECIAL_START;

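    /// Returns whether this register is one of Pulley's special `x` registers.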
    pub fn is_special(&self) -> bool {
        match self.as_pulley() {
            Some(reg) => reg.is_special(),
            None => false,
        }
    }

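    /// Returns the `pulley_interpreter` view of this register, if it has been
    /// assigned a real (physical) register.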
    pub fn as_pulley(&self) -> Option<pulley_interpreter::XReg> {
        let enc = self.to_real_reg()?.hw_enc();
        Some(pulley_interpreter::XReg::new(enc).unwrap())
    }
}

pub use super::super::lower::isle::generated_code::Amode;

impl Amode {
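    /// Collect this addressing mode's register operands for register
    /// allocation.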
    pub(crate) fn get_operands(&mut self, collector: &mut impl OperandVisitor) {
        match self {
            Amode::RegOffset { base, offset: _ } => collector.reg_use(base),
            Amode::SpOffset { .. } | Amode::Stack { .. } => {}
        }
    }

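    /// Returns the base register of this addressing mode, if any; stack-based
    /// modes use the stack pointer register.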
    pub(crate) fn get_base_register(&self) -> Option<XReg> {
        match self {
            Amode::RegOffset { base, offset: _ } => Some((*base).into()),
            Amode::SpOffset { .. } | Amode::Stack { .. } => Some(XReg::new(stack_reg()).unwrap()),
        }
    }

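    /// Resolve this addressing mode's offset to a concrete value, using the
    /// emission state's frame layout to turn stack-based amodes into
    /// SP-relative offsets.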
    pub(crate) fn get_offset_with_state<P>(&self, state: &EmitState<P>) -> i32
    where
        P: PulleyTargetKind,
    {
        match self {
            Amode::RegOffset { base: _, offset } | Amode::SpOffset { offset } => *offset,
            Amode::Stack { amode } => {
                let offset64 = match amode {
                    StackAMode::IncomingArg(offset, stack_args_size) => {
                        let offset = i64::from(*stack_args_size) - *offset;
                        let frame_layout = state.frame_layout();
                        let sp_offset = frame_layout.tail_args_size
                            + frame_layout.setup_area_size
                            + frame_layout.clobber_size
                            + frame_layout.fixed_frame_storage_size
                            + frame_layout.outgoing_args_size;
                        i64::from(sp_offset) - offset
                    }
                    StackAMode::Slot(offset) => {
                        offset + i64::from(state.frame_layout().outgoing_args_size)
                    }
                    StackAMode::OutgoingArg(offset) => *offset,
                };
                i32::try_from(offset64).unwrap()
            }
        }
    }
}

impl core::fmt::Display for Amode {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self {
            Amode::SpOffset { offset } => {
                if *offset >= 0 {
                    write!(f, "sp+{offset}")
                } else {
                    write!(f, "sp{offset}")
                }
            }
            Amode::RegOffset { base, offset } => {
                let name = reg_name(**base);
                if *offset >= 0 {
                    write!(f, "{name}+{offset}")
                } else {
                    write!(f, "{name}{offset}")
                }
            }
            Amode::Stack { amode } => core::fmt::Debug::fmt(amode, f),
        }
    }
}

impl From<StackAMode> for Amode {
    fn from(amode: StackAMode) -> Self {
        Amode::Stack { amode }
    }
}

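/// The size of an integer operand: either 32 or 64 bits.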
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OperandSize {
    Size32,
    Size64,
}

pub use crate::isa::pulley_shared::lower::isle::generated_code::Cond;

impl Cond {
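    /// Collect this condition's register operands for register allocation.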
    pub fn get_operands(&mut self, collector: &mut impl OperandVisitor) {
        match self {
            Cond::If32 { reg } | Cond::IfNot32 { reg } => collector.reg_use(reg),

            Cond::IfXeq32 { src1, src2 }
            | Cond::IfXneq32 { src1, src2 }
            | Cond::IfXslt32 { src1, src2 }
            | Cond::IfXslteq32 { src1, src2 }
            | Cond::IfXult32 { src1, src2 }
            | Cond::IfXulteq32 { src1, src2 }
            | Cond::IfXeq64 { src1, src2 }
            | Cond::IfXneq64 { src1, src2 }
            | Cond::IfXslt64 { src1, src2 }
            | Cond::IfXslteq64 { src1, src2 }
            | Cond::IfXult64 { src1, src2 }
            | Cond::IfXulteq64 { src1, src2 } => {
                collector.reg_use(src1);
                collector.reg_use(src2);
            }

            Cond::IfXeq32I32 { src1, src2 }
            | Cond::IfXneq32I32 { src1, src2 }
            | Cond::IfXslt32I32 { src1, src2 }
            | Cond::IfXslteq32I32 { src1, src2 }
            | Cond::IfXsgt32I32 { src1, src2 }
            | Cond::IfXsgteq32I32 { src1, src2 }
            | Cond::IfXeq64I32 { src1, src2 }
            | Cond::IfXneq64I32 { src1, src2 }
            | Cond::IfXslt64I32 { src1, src2 }
            | Cond::IfXslteq64I32 { src1, src2 }
            | Cond::IfXsgt64I32 { src1, src2 }
            | Cond::IfXsgteq64I32 { src1, src2 } => {
                collector.reg_use(src1);
                let _: &mut i32 = src2;
            }

            Cond::IfXult32I32 { src1, src2 }
            | Cond::IfXulteq32I32 { src1, src2 }
            | Cond::IfXugt32I32 { src1, src2 }
            | Cond::IfXugteq32I32 { src1, src2 }
            | Cond::IfXult64I32 { src1, src2 }
            | Cond::IfXulteq64I32 { src1, src2 }
            | Cond::IfXugt64I32 { src1, src2 }
            | Cond::IfXugteq64I32 { src1, src2 } => {
                collector.reg_use(src1);
                let _: &mut u32 = src2;
            }
        }
    }

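    /// Encode this condition as a Pulley conditional-branch instruction into
    /// `sink`. The branch offset operand is encoded as 0 here; the real offset
    /// is expected to be patched in by the caller. Immediate forms use the
    /// narrower 8-bit encoding when the immediate fits.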
    pub fn encode(&self, sink: &mut impl Extend<u8>) {
        match *self {
            Cond::If32 { reg } => encode::br_if32(sink, reg, 0),
            Cond::IfNot32 { reg } => encode::br_if_not32(sink, reg, 0),
            Cond::IfXeq32 { src1, src2 } => encode::br_if_xeq32(sink, src1, src2, 0),
            Cond::IfXneq32 { src1, src2 } => encode::br_if_xneq32(sink, src1, src2, 0),
            Cond::IfXslt32 { src1, src2 } => encode::br_if_xslt32(sink, src1, src2, 0),
            Cond::IfXslteq32 { src1, src2 } => encode::br_if_xslteq32(sink, src1, src2, 0),
            Cond::IfXult32 { src1, src2 } => encode::br_if_xult32(sink, src1, src2, 0),
            Cond::IfXulteq32 { src1, src2 } => encode::br_if_xulteq32(sink, src1, src2, 0),
            Cond::IfXeq64 { src1, src2 } => encode::br_if_xeq64(sink, src1, src2, 0),
            Cond::IfXneq64 { src1, src2 } => encode::br_if_xneq64(sink, src1, src2, 0),
            Cond::IfXslt64 { src1, src2 } => encode::br_if_xslt64(sink, src1, src2, 0),
            Cond::IfXslteq64 { src1, src2 } => encode::br_if_xslteq64(sink, src1, src2, 0),
            Cond::IfXult64 { src1, src2 } => encode::br_if_xult64(sink, src1, src2, 0),
            Cond::IfXulteq64 { src1, src2 } => encode::br_if_xulteq64(sink, src1, src2, 0),

            Cond::IfXeq32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xeq32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xeq32_i32(sink, src1, src2, 0),
            },
            Cond::IfXneq32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xneq32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xneq32_i32(sink, src1, src2, 0),
            },
            Cond::IfXslt32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xslt32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xslt32_i32(sink, src1, src2, 0),
            },
            Cond::IfXslteq32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xslteq32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xslteq32_i32(sink, src1, src2, 0),
            },
            Cond::IfXsgt32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xsgt32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xsgt32_i32(sink, src1, src2, 0),
            },
            Cond::IfXsgteq32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xsgteq32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xsgteq32_i32(sink, src1, src2, 0),
            },
            Cond::IfXult32I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xult32_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xult32_u32(sink, src1, src2, 0),
            },
            Cond::IfXulteq32I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xulteq32_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xulteq32_u32(sink, src1, src2, 0),
            },
            Cond::IfXugt32I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xugt32_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xugt32_u32(sink, src1, src2, 0),
            },
            Cond::IfXugteq32I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xugteq32_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xugteq32_u32(sink, src1, src2, 0),
            },

            Cond::IfXeq64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xeq64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xeq64_i32(sink, src1, src2, 0),
            },
            Cond::IfXneq64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xneq64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xneq64_i32(sink, src1, src2, 0),
            },
            Cond::IfXslt64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xslt64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xslt64_i32(sink, src1, src2, 0),
            },
            Cond::IfXslteq64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xslteq64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xslteq64_i32(sink, src1, src2, 0),
            },
            Cond::IfXsgt64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xsgt64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xsgt64_i32(sink, src1, src2, 0),
            },
            Cond::IfXsgteq64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xsgteq64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xsgteq64_i32(sink, src1, src2, 0),
            },
            Cond::IfXult64I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xult64_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xult64_u32(sink, src1, src2, 0),
            },
            Cond::IfXulteq64I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xulteq64_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xulteq64_u32(sink, src1, src2, 0),
            },
            Cond::IfXugt64I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xugt64_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xugt64_u32(sink, src1, src2, 0),
            },
            Cond::IfXugteq64I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xugteq64_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xugteq64_u32(sink, src1, src2, 0),
            },
        }
    }

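    /// Returns the inverse of this condition: the inverted condition is true
    /// exactly when `self` is false. Register/register inequalities are
    /// inverted by swapping operands (e.g. `a < b` becomes `b <= a`).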
    pub fn invert(&self) -> Cond {
        match *self {
            Cond::If32 { reg } => Cond::IfNot32 { reg },
            Cond::IfNot32 { reg } => Cond::If32 { reg },
            Cond::IfXeq32 { src1, src2 } => Cond::IfXneq32 { src1, src2 },
            Cond::IfXneq32 { src1, src2 } => Cond::IfXeq32 { src1, src2 },
            Cond::IfXeq64 { src1, src2 } => Cond::IfXneq64 { src1, src2 },
            Cond::IfXneq64 { src1, src2 } => Cond::IfXeq64 { src1, src2 },

            Cond::IfXslt32 { src1, src2 } => Cond::IfXslteq32 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXslteq32 { src1, src2 } => Cond::IfXslt32 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXult32 { src1, src2 } => Cond::IfXulteq32 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXulteq32 { src1, src2 } => Cond::IfXult32 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXslt64 { src1, src2 } => Cond::IfXslteq64 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXslteq64 { src1, src2 } => Cond::IfXslt64 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXult64 { src1, src2 } => Cond::IfXulteq64 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXulteq64 { src1, src2 } => Cond::IfXult64 {
                src1: src2,
                src2: src1,
            },

            Cond::IfXeq32I32 { src1, src2 } => Cond::IfXneq32I32 { src1, src2 },
            Cond::IfXneq32I32 { src1, src2 } => Cond::IfXeq32I32 { src1, src2 },
            Cond::IfXslt32I32 { src1, src2 } => Cond::IfXsgteq32I32 { src1, src2 },
            Cond::IfXslteq32I32 { src1, src2 } => Cond::IfXsgt32I32 { src1, src2 },
            Cond::IfXult32I32 { src1, src2 } => Cond::IfXugteq32I32 { src1, src2 },
            Cond::IfXulteq32I32 { src1, src2 } => Cond::IfXugt32I32 { src1, src2 },
            Cond::IfXsgt32I32 { src1, src2 } => Cond::IfXslteq32I32 { src1, src2 },
            Cond::IfXsgteq32I32 { src1, src2 } => Cond::IfXslt32I32 { src1, src2 },
            Cond::IfXugt32I32 { src1, src2 } => Cond::IfXulteq32I32 { src1, src2 },
            Cond::IfXugteq32I32 { src1, src2 } => Cond::IfXult32I32 { src1, src2 },

            Cond::IfXeq64I32 { src1, src2 } => Cond::IfXneq64I32 { src1, src2 },
            Cond::IfXneq64I32 { src1, src2 } => Cond::IfXeq64I32 { src1, src2 },
            Cond::IfXslt64I32 { src1, src2 } => Cond::IfXsgteq64I32 { src1, src2 },
            Cond::IfXslteq64I32 { src1, src2 } => Cond::IfXsgt64I32 { src1, src2 },
            Cond::IfXult64I32 { src1, src2 } => Cond::IfXugteq64I32 { src1, src2 },
            Cond::IfXulteq64I32 { src1, src2 } => Cond::IfXugt64I32 { src1, src2 },
            Cond::IfXsgt64I32 { src1, src2 } => Cond::IfXslteq64I32 { src1, src2 },
            Cond::IfXsgteq64I32 { src1, src2 } => Cond::IfXslt64I32 { src1, src2 },
            Cond::IfXugt64I32 { src1, src2 } => Cond::IfXulteq64I32 { src1, src2 },
            Cond::IfXugteq64I32 { src1, src2 } => Cond::IfXult64I32 { src1, src2 },
        }
    }
}

impl fmt::Display for Cond {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Cond::If32 { reg } => write!(f, "if32 {}", reg_name(**reg)),
            Cond::IfNot32 { reg } => write!(f, "if_not32 {}", reg_name(**reg)),
            Cond::IfXeq32 { src1, src2 } => {
                write!(f, "if_xeq32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXneq32 { src1, src2 } => {
                write!(f, "if_xneq32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXslt32 { src1, src2 } => {
                write!(f, "if_xslt32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXslteq32 { src1, src2 } => {
                write!(f, "if_xslteq32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXult32 { src1, src2 } => {
                write!(f, "if_xult32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXulteq32 { src1, src2 } => {
                write!(f, "if_xulteq32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXeq64 { src1, src2 } => {
                write!(f, "if_xeq64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXneq64 { src1, src2 } => {
                write!(f, "if_xneq64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXslt64 { src1, src2 } => {
                write!(f, "if_xslt64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXslteq64 { src1, src2 } => {
                write!(f, "if_xslteq64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXult64 { src1, src2 } => {
                write!(f, "if_xult64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXulteq64 { src1, src2 } => {
                write!(f, "if_xulteq64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXeq32I32 { src1, src2 } => {
                write!(f, "if_xeq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXneq32I32 { src1, src2 } => {
                write!(f, "if_xneq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXslt32I32 { src1, src2 } => {
                write!(f, "if_xslt32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXslteq32I32 { src1, src2 } => {
                write!(f, "if_xslteq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXsgt32I32 { src1, src2 } => {
                write!(f, "if_xsgt32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXsgteq32I32 { src1, src2 } => {
                write!(f, "if_xsgteq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXult32I32 { src1, src2 } => {
                write!(f, "if_xult32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXulteq32I32 { src1, src2 } => {
                write!(f, "if_xulteq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXugt32I32 { src1, src2 } => {
                write!(f, "if_xugt32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXugteq32I32 { src1, src2 } => {
                write!(f, "if_xugteq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXeq64I32 { src1, src2 } => {
                write!(f, "if_xeq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXneq64I32 { src1, src2 } => {
                write!(f, "if_xneq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXslt64I32 { src1, src2 } => {
                write!(f, "if_xslt64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXslteq64I32 { src1, src2 } => {
                write!(f, "if_xslteq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXsgt64I32 { src1, src2 } => {
                write!(f, "if_xsgt64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXsgteq64I32 { src1, src2 } => {
                write!(f, "if_xsgteq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXult64I32 { src1, src2 } => {
                write!(f, "if_xult64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXulteq64I32 { src1, src2 } => {
                write!(f, "if_xulteq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXugt64I32 { src1, src2 } => {
                write!(f, "if_xugt64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXugteq64I32 { src1, src2 } => {
                write!(f, "if_xugteq64_i32 {}, {src2}", reg_name(**src1))
            }
        }
    }
}

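/// Describes a call to the function `name` with register arguments `args`.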
#[derive(Clone, Debug)]
pub struct PulleyCall {
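    /// The external name of the function being called.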
    pub name: ExternalName,
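    /// Arguments to the call passed in `x` registers.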
    pub args: SmallVec<[XReg; 4]>,
}

pub use super::super::lower::isle::generated_code::AddrO32;

impl Copy for AddrO32 {}

impl AddrO32 {
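    /// Collect this addressing mode's register operands for register
    /// allocation.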
    pub fn collect_operands(&mut self, collector: &mut impl OperandVisitor) {
        match self {
            AddrO32::Base { addr, offset: _ } => {
                collector.reg_use(addr);
            }
        }
    }
}

impl From<AddrO32> for pulley_interpreter::AddrO32 {
    fn from(addr: AddrO32) -> Self {
        match addr {
            AddrO32::Base { addr, offset } => Self {
                addr: addr.into(),
                offset,
            },
        }
    }
}

impl fmt::Display for AddrO32 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AddrO32::Base { addr, offset } => {
                let addr = reg_name(**addr);
                write!(f, "{addr}, {offset}")
            }
        }
    }
}

pub use super::super::lower::isle::generated_code::AddrZ;

impl Copy for AddrZ {}

impl AddrZ {
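    /// Collect this addressing mode's register operands for register
    /// allocation.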
    pub fn collect_operands(&mut self, collector: &mut impl OperandVisitor) {
        match self {
            AddrZ::Base { addr, offset: _ } => {
                collector.reg_use(addr);
            }
        }
    }
}

impl From<AddrZ> for pulley_interpreter::AddrZ {
    fn from(addr: AddrZ) -> Self {
        match addr {
            AddrZ::Base { addr, offset } => Self {
                addr: addr.into(),
                offset,
            },
        }
    }
}

impl fmt::Display for AddrZ {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AddrZ::Base { addr, offset } => {
                let addr = reg_name(**addr);
                write!(f, "{addr}, {offset}")
            }
        }
    }
}

pub use super::super::lower::isle::generated_code::AddrG32;

impl Copy for AddrG32 {}

impl AddrG32 {
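    /// Collect this addressing mode's register operands for register
    /// allocation.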
    pub fn collect_operands(&mut self, collector: &mut impl OperandVisitor) {
        match self {
            AddrG32::RegisterBound {
                host_heap_base,
                host_heap_bound,
                wasm_addr,
                offset: _,
            } => {
                collector.reg_use(host_heap_base);
                collector.reg_use(host_heap_bound);
                collector.reg_use(wasm_addr);
            }
        }
    }
}

impl From<AddrG32> for pulley_interpreter::AddrG32 {
    fn from(addr: AddrG32) -> Self {
        match addr {
            AddrG32::RegisterBound {
                host_heap_base,
                host_heap_bound,
                wasm_addr,
                offset,
            } => Self {
                host_heap_base: host_heap_base.into(),
                host_heap_bound: host_heap_bound.into(),
                wasm_addr: wasm_addr.into(),
                offset,
            },
        }
    }
}

impl fmt::Display for AddrG32 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AddrG32::RegisterBound {
                host_heap_base,
                host_heap_bound,
                wasm_addr,
                offset,
            } => {
                let host_heap_base = reg_name(**host_heap_base);
                let host_heap_bound = reg_name(**host_heap_bound);
                let wasm_addr = reg_name(**wasm_addr);
                write!(
                    f,
                    "{host_heap_base}, {host_heap_bound}, {wasm_addr}, {offset}",
                )
            }
        }
    }
}

pub use super::super::lower::isle::generated_code::AddrG32Bne;

impl Copy for AddrG32Bne {}

impl AddrG32Bne {
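    /// Collect this addressing mode's register operands for register
    /// allocation.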
    pub fn collect_operands(&mut self, collector: &mut impl OperandVisitor) {
        match self {
            AddrG32Bne::BoundNe {
                host_heap_base,
                host_heap_bound_addr,
                host_heap_bound_offset: _,
                wasm_addr,
                offset: _,
            } => {
                collector.reg_use(host_heap_base);
                collector.reg_use(host_heap_bound_addr);
                collector.reg_use(wasm_addr);
            }
        }
    }
}

impl From<AddrG32Bne> for pulley_interpreter::AddrG32Bne {
    fn from(addr: AddrG32Bne) -> Self {
        match addr {
            AddrG32Bne::BoundNe {
                host_heap_base,
                host_heap_bound_addr,
                host_heap_bound_offset,
                wasm_addr,
                offset,
            } => Self {
                host_heap_base: host_heap_base.into(),
                host_heap_bound_addr: host_heap_bound_addr.into(),
                host_heap_bound_offset,
                wasm_addr: wasm_addr.into(),
                offset,
            },
        }
    }
}

impl fmt::Display for AddrG32Bne {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AddrG32Bne::BoundNe {
                host_heap_base,
                host_heap_bound_addr,
                host_heap_bound_offset,
                wasm_addr,
                offset,
            } => {
                let host_heap_base = reg_name(**host_heap_base);
                let host_heap_bound_addr = reg_name(**host_heap_bound_addr);
                let wasm_addr = reg_name(**wasm_addr);
                write!(
                    f,
                    "{host_heap_base}, \
                     *[{host_heap_bound_addr} + {host_heap_bound_offset}], \
                     {wasm_addr}, \
                     {offset}",
                )
            }
        }
    }
}