use core::marker::PhantomData;

use crate::binemit::{Addend, CodeOffset, Reloc};
use crate::ir::types::{self, F32, F64, I8, I8X16, I16, I32, I64, I128};
use crate::ir::{self, MemFlags, Type};
use crate::isa::FunctionAlignment;
use crate::isa::pulley_shared::abi::PulleyMachineDeps;
use crate::{CodegenError, CodegenResult, settings};
use crate::{machinst::*, trace};
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use regalloc2::RegClass;
use smallvec::SmallVec;

pub mod regs;
pub use self::regs::*;
pub mod args;
pub use self::args::*;
pub mod emit;
pub use self::emit::*;

/// The Pulley machine instruction type, generated from the ISLE definitions.
pub use crate::isa::pulley_shared::lower::isle::generated_code::MInst as Inst;

/// "Raw" Pulley instructions which correspond directly to opcodes in the
/// Pulley bytecode.
pub use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;

impl From<RawInst> for Inst {
    fn from(raw: RawInst) -> Inst {
        Inst::Raw { raw }
    }
}

use super::PulleyTargetKind;

mod generated {
    use super::*;
    use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;

    include!(concat!(env!("OUT_DIR"), "/pulley_inst_gen.rs"));
}

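/// Out-of-line information describing a return call (tail call).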
#[derive(Clone, Debug)]
pub struct ReturnCallInfo<T> {
    /// The destination of the return call.
    pub dest: T,

    /// The size, in bytes, of the stack-argument area for the new frame.
    pub new_stack_arg_size: u32,

    /// The argument registers for the call, with their fixed-register
    /// assignments.
    pub uses: CallArgList,
}

impl Inst {
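    /// Generate a load of `ty` from `mem` into `dst`, selecting the x/f/v
    /// register-class instruction based on the type.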
    pub fn gen_load(dst: Writable<Reg>, mem: Amode, ty: Type, flags: MemFlags) -> Inst {
        if ty.is_vector() {
            assert_eq!(ty.bytes(), 16);
            Inst::VLoad {
                dst: dst.map(|r| VReg::new(r).unwrap()),
                mem,
                ty,
                flags,
            }
        } else if ty.is_int() {
            assert!(ty.bytes() <= 8);
            Inst::XLoad {
                dst: dst.map(|r| XReg::new(r).unwrap()),
                mem,
                ty,
                flags,
            }
        } else {
            Inst::FLoad {
                dst: dst.map(|r| FReg::new(r).unwrap()),
                mem,
                ty,
                flags,
            }
        }
    }

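    /// Generate a store of `from_reg`, of type `ty`, to `mem`, selecting the
    /// x/f/v register-class instruction based on the type.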
    pub fn gen_store(mem: Amode, from_reg: Reg, ty: Type, flags: MemFlags) -> Inst {
        if ty.is_vector() {
            assert_eq!(ty.bytes(), 16);
            Inst::VStore {
                mem,
                src: VReg::new(from_reg).unwrap(),
                ty,
                flags,
            }
        } else if ty.is_int() {
            assert!(ty.bytes() <= 8);
            Inst::XStore {
                mem,
                src: XReg::new(from_reg).unwrap(),
                ty,
                flags,
            }
        } else {
            Inst::FStore {
                mem,
                src: FReg::new(from_reg).unwrap(),
                ty,
                flags,
            }
        }
    }
}

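/// Report the register operands of `inst` to the register allocator via
/// `collector`.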
fn pulley_get_operands(inst: &mut Inst, collector: &mut impl OperandVisitor) {
    match inst {
        Inst::Args { args } => {
            for ArgPair { vreg, preg } in args {
                collector.reg_fixed_def(vreg, *preg);
            }
        }
        Inst::Rets { rets } => {
            for RetPair { vreg, preg } in rets {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::DummyUse { reg } => {
            collector.reg_use(reg);
        }

        Inst::Nop => {}

        Inst::TrapIf { cond, code: _ } => {
            cond.get_operands(collector);
        }

        Inst::GetSpecial { dst, reg } => {
            collector.reg_def(dst);
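            // `reg` is a special register (e.g. the stack or frame pointer)
            // which does not participate in register allocation, so it is not
            // reported to the collector.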
            assert!(reg.is_special());
        }

        Inst::LoadExtName {
            dst,
            name: _,
            offset: _,
        } => {
            collector.reg_def(dst);
        }

        Inst::Call { info } => {
            let CallInfo {
                uses, defs, dest, ..
            } = &mut **info;

            let PulleyCall { args, .. } = dest;
            for arg in args {
                collector.reg_use(arg);
            }

            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(info.clobbers);
        }
        Inst::IndirectCallHost { info } => {
            let CallInfo { uses, defs, .. } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(info.clobbers);
        }
        Inst::IndirectCall { info } => {
            collector.reg_use(&mut info.dest);
            let CallInfo { uses, defs, .. } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(info.clobbers);
        }
        Inst::ReturnCall { info } => {
            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }
        Inst::ReturnIndirectCall { info } => {
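            // Pin the destination to a fixed caller-saved register (`x15`) so
            // that it does not end up in a callee-saved register, which would
            // be restored (and thus clobbered) just before the tail call.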
            collector.reg_fixed_use(&mut info.dest, regs::x15());

            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::Jump { .. } => {}

        Inst::BrIf {
            cond,
            taken: _,
            not_taken: _,
        } => {
            cond.get_operands(collector);
        }

        Inst::LoadAddr { dst, mem } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::XLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::XStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::FLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::FStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::VLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::VStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::BrTable { idx, .. } => {
            collector.reg_use(idx);
        }

        Inst::Raw { raw } => generated::get_operands(raw, collector),

        Inst::EmitIsland { .. } => {}
    }
}

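/// A Pulley instruction paired with a phantom marker for the target kind
/// (32- or 64-bit Pulley); this is the type the backend implements `MachInst`
/// for.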
#[derive(Clone, Debug)]
pub struct InstAndKind<P>
where
    P: PulleyTargetKind,
{
    inst: Inst,
    kind: PhantomData<P>,
}

impl<P> From<Inst> for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    fn from(inst: Inst) -> Self {
        Self {
            inst,
            kind: PhantomData,
        }
    }
}

impl<P> From<RawInst> for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    fn from(inst: RawInst) -> Self {
        Self {
            inst: inst.into(),
            kind: PhantomData,
        }
    }
}

impl<P> From<InstAndKind<P>> for Inst
where
    P: PulleyTargetKind,
{
    fn from(inst: InstAndKind<P>) -> Self {
        inst.inst
    }
}

impl<P> core::ops::Deref for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    type Target = Inst;

    fn deref(&self) -> &Self::Target {
        &self.inst
    }
}

impl<P> core::ops::DerefMut for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inst
    }
}

impl<P> MachInst for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    type LabelUse = LabelUse;
    type ABIMachineSpec = PulleyMachineDeps<P>;

    const TRAP_OPCODE: &'static [u8] = TRAP_OPCODE;

    fn gen_dummy_use(reg: Reg) -> Self {
        Inst::DummyUse { reg }.into()
    }

    fn canonical_type_for_rc(rc: RegClass) -> Type {
        match rc {
            regalloc2::RegClass::Int => I64,
            regalloc2::RegClass::Float => F64,
            regalloc2::RegClass::Vector => I8X16,
        }
    }

    fn is_safepoint(&self) -> bool {
        match self.inst {
            Inst::Raw {
                raw: RawInst::Trap { .. },
            }
            | Inst::Call { .. }
            | Inst::IndirectCall { .. }
            | Inst::IndirectCallHost { .. } => true,
            _ => false,
        }
    }

    fn get_operands(&mut self, collector: &mut impl OperandVisitor) {
        pulley_get_operands(self, collector);
    }

    fn is_move(&self) -> Option<(Writable<Reg>, Reg)> {
        match self.inst {
            Inst::Raw {
                raw: RawInst::Xmov { dst, src },
            } => Some((Writable::from_reg(*dst.to_reg()), *src)),
            _ => None,
        }
    }

    fn is_included_in_clobbers(&self) -> bool {
        !self.is_args()
    }

    fn is_trap(&self) -> bool {
        match self.inst {
            Inst::Raw {
                raw: RawInst::Trap { .. },
            } => true,
            _ => false,
        }
    }

    fn is_args(&self) -> bool {
        match self.inst {
            Inst::Args { .. } => true,
            _ => false,
        }
    }

    fn is_term(&self) -> MachTerminator {
        match &self.inst {
            Inst::Raw {
                raw: RawInst::Ret { .. },
            }
            | Inst::Rets { .. } => MachTerminator::Ret,
            Inst::Jump { .. } => MachTerminator::Branch,
            Inst::BrIf { .. } => MachTerminator::Branch,
            Inst::BrTable { .. } => MachTerminator::Branch,
            Inst::ReturnCall { .. } | Inst::ReturnIndirectCall { .. } => MachTerminator::RetCall,
            Inst::Call { info } if info.try_call_info.is_some() => MachTerminator::Branch,
            Inst::IndirectCall { info } if info.try_call_info.is_some() => MachTerminator::Branch,
            _ => MachTerminator::None,
        }
    }

    fn is_mem_access(&self) -> bool {
        todo!()
    }

    fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Self {
        match ty {
            ir::types::I8 | ir::types::I16 | ir::types::I32 | ir::types::I64 => RawInst::Xmov {
                dst: WritableXReg::try_from(to_reg).unwrap(),
                src: XReg::new(from_reg).unwrap(),
            }
            .into(),
            ir::types::F32 | ir::types::F64 => RawInst::Fmov {
                dst: WritableFReg::try_from(to_reg).unwrap(),
                src: FReg::new(from_reg).unwrap(),
            }
            .into(),
            _ if ty.is_vector() => RawInst::Vmov {
                dst: WritableVReg::try_from(to_reg).unwrap(),
                src: VReg::new(from_reg).unwrap(),
            }
            .into(),
            _ => panic!("don't know how to generate a move for type {ty}"),
        }
    }

    fn gen_nop(_preferred_size: usize) -> Self {
        todo!()
    }

    fn rc_for_type(ty: Type) -> CodegenResult<(&'static [RegClass], &'static [Type])> {
        match ty {
            I8 => Ok((&[RegClass::Int], &[I8])),
            I16 => Ok((&[RegClass::Int], &[I16])),
            I32 => Ok((&[RegClass::Int], &[I32])),
            I64 => Ok((&[RegClass::Int], &[I64])),
            F32 => Ok((&[RegClass::Float], &[F32])),
            F64 => Ok((&[RegClass::Float], &[F64])),
            I128 => Ok((&[RegClass::Int, RegClass::Int], &[I64, I64])),
            _ if ty.is_vector() => {
                debug_assert!(ty.bits() <= 512);

                // One canonical vector type per power-of-two byte width,
                // indexed by `log2(bytes) - 1` below.
                const SIMD_TYPES: [[Type; 1]; 6] = [
                    [types::I8X2],
                    [types::I8X4],
                    [types::I8X8],
                    [types::I8X16],
                    [types::I16X16],
                    [types::I32X16],
                ];
                let idx = (ty.bytes().ilog2() - 1) as usize;
                let ty = &SIMD_TYPES[idx][..];

                Ok((&[RegClass::Vector], ty))
            }
            _ => Err(CodegenError::Unsupported(format!(
                "Unexpected SSA-value type: {ty}"
            ))),
        }
    }

    fn gen_jump(label: MachLabel) -> Self {
        Inst::Jump { label }.into()
    }

    fn worst_case_size() -> CodeOffset {
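        // A conservative upper bound, in bytes, on the encoded size of any
        // single Pulley instruction.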
        22
    }

    fn ref_type_regclass(_settings: &settings::Flags) -> RegClass {
        RegClass::Int
    }

    fn function_alignment() -> FunctionAlignment {
        FunctionAlignment {
            minimum: 1,
            preferred: 1,
        }
    }
}

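/// The raw encoding of a Pulley `trap` instruction: the `ExtendedOp` opcode
/// followed by the 16-bit `Trap` extended opcode in little-endian order.
/// Checked against the interpreter's encoder in the test below.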
const TRAP_OPCODE: &'static [u8] = &[
    pulley_interpreter::opcode::Opcode::ExtendedOp as u8,
    ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 0) as u8,
    ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 8) as u8,
];

#[test]
fn test_trap_encoding() {
    let mut dst = std::vec::Vec::new();
    pulley_interpreter::encode::trap(&mut dst);
    assert_eq!(dst, TRAP_OPCODE);
}

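/// Return a human-readable name for `reg`: the special names `sp`, `lr`,
/// `fp`, `tmp0`, and `tmp1` where applicable, otherwise `x{n}`, `f{n}`, or
/// `v{n}` by register class, and the debug representation for virtual
/// registers.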
pub fn reg_name(reg: Reg) -> String {
    match reg.to_real_reg() {
        Some(real) => {
            let n = real.hw_enc();
            match (real.class(), n) {
                (RegClass::Int, 63) => format!("sp"),
                (RegClass::Int, 62) => format!("lr"),
                (RegClass::Int, 61) => format!("fp"),
                (RegClass::Int, 60) => format!("tmp0"),
                (RegClass::Int, 59) => format!("tmp1"),

                (RegClass::Int, _) => format!("x{n}"),
                (RegClass::Float, _) => format!("f{n}"),
                (RegClass::Vector, _) => format!("v{n}"),
            }
        }
        None => {
            format!("{reg:?}")
        }
    }
}

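/// Render the exception-table portion of a try-call (its continuation label
/// and exception destinations) for pretty-printing.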
fn pretty_print_try_call(info: &TryCallInfo) -> String {
    let dests = info
        .exception_dests
        .iter()
        .map(|(tag, label)| format!("{tag:?}: {label:?}"))
        .collect::<Vec<_>>()
        .join(", ");
    format!("; jump {:?}; catch [{dests}]", info.continuation)
}

impl Inst {
    fn print_with_state<P>(&self, _state: &mut EmitState<P>) -> String
    where
        P: PulleyTargetKind,
    {
        use core::fmt::Write;

        let format_reg = |reg: Reg| -> String { reg_name(reg) };

        match self {
            Inst::Args { args } => {
                let mut s = "args".to_string();
                for arg in args {
                    let preg = format_reg(arg.preg);
                    let def = format_reg(arg.vreg.to_reg());
                    write!(&mut s, " {def}={preg}").unwrap();
                }
                s
            }
            Inst::Rets { rets } => {
                let mut s = "rets".to_string();
                for ret in rets {
                    let preg = format_reg(ret.preg);
                    let vreg = format_reg(ret.vreg);
                    write!(&mut s, " {vreg}={preg}").unwrap();
                }
                s
            }

            Inst::DummyUse { reg } => {
                let reg = format_reg(*reg);
                format!("dummy_use {reg}")
            }

            Inst::TrapIf { cond, code } => {
                format!("trap_{cond} // code = {code:?}")
            }

            Inst::Nop => format!("nop"),

            Inst::GetSpecial { dst, reg } => {
                let dst = format_reg(*dst.to_reg());
                let reg = format_reg(**reg);
                format!("xmov {dst}, {reg}")
            }

            Inst::LoadExtName { dst, name, offset } => {
                let dst = format_reg(*dst.to_reg());
                format!("{dst} = load_ext_name {name:?}, {offset}")
            }

            Inst::Call { info } => {
                let try_call = info
                    .try_call_info
                    .as_ref()
                    .map(|tci| pretty_print_try_call(tci))
                    .unwrap_or_default();
                format!("call {info:?}{try_call}")
            }

            Inst::IndirectCall { info } => {
                let callee = format_reg(*info.dest);
                let try_call = info
                    .try_call_info
                    .as_ref()
                    .map(|tci| pretty_print_try_call(tci))
                    .unwrap_or_default();
                format!("indirect_call {callee}, {info:?}{try_call}")
            }

            Inst::ReturnCall { info } => {
                format!("return_call {info:?}")
            }

            Inst::ReturnIndirectCall { info } => {
                let callee = format_reg(*info.dest);
                format!("return_indirect_call {callee}, {info:?}")
            }

            Inst::IndirectCallHost { info } => {
                format!("indirect_call_host {info:?}")
            }

            Inst::Jump { label } => format!("jump {}", label.to_string()),

            Inst::BrIf {
                cond,
                taken,
                not_taken,
            } => {
                let taken = taken.to_string();
                let not_taken = not_taken.to_string();
                format!("br_{cond}, {taken}; jump {not_taken}")
            }

            Inst::LoadAddr { dst, mem } => {
                let dst = format_reg(*dst.to_reg());
                let mem = mem.to_string();
                format!("{dst} = load_addr {mem}")
            }

            Inst::XLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = xload{ty} {mem} // flags ={flags}")
            }

            Inst::XStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("xstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::FLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = fload{ty} {mem} // flags ={flags}")
            }

            Inst::FStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("fstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::VLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = vload{ty} {mem} // flags ={flags}")
            }

            Inst::VStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("vstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::BrTable {
                idx,
                default,
                targets,
            } => {
                let idx = format_reg(**idx);
                format!("br_table {idx} {default:?} {targets:?}")
            }
            Inst::Raw { raw } => generated::print(raw),

            Inst::EmitIsland { space_needed } => format!("emit_island {space_needed}"),
        }
    }
}

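/// The kinds of label references used by Pulley instructions, patched in
/// place once the label's final offset is known.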
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LabelUse {
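    /// A 32-bit PC-relative branch offset with the given addend, written in
    /// place as a little-endian `i32`.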
    Jump(u32),
}

impl MachInstLabelUse for LabelUse {
    const ALIGN: CodeOffset = 1;

    fn max_pos_range(self) -> CodeOffset {
        match self {
            Self::Jump(_) => 0x7fff_ffff,
        }
    }

    fn max_neg_range(self) -> CodeOffset {
        match self {
            Self::Jump(_) => 0x8000_0000,
        }
    }

    fn patch_size(self) -> CodeOffset {
        match self {
            Self::Jump(_) => 4,
        }
    }

    fn patch(self, buffer: &mut [u8], use_offset: CodeOffset, label_offset: CodeOffset) {
        let use_relative = (label_offset as i64) - (use_offset as i64);
        debug_assert!(use_relative <= self.max_pos_range() as i64);
        debug_assert!(use_relative >= -(self.max_neg_range() as i64));
        let pc_rel = i32::try_from(use_relative).unwrap() as u32;
        match self {
            Self::Jump(addend) => {
                let value = pc_rel.wrapping_add(addend);
                trace!(
                    "patching label use @ {use_offset:#x} to label {label_offset:#x} via \
                     PC-relative offset {pc_rel:#x}"
                );
                buffer.copy_from_slice(&value.to_le_bytes()[..]);
            }
        }
    }

    fn supports_veneer(self) -> bool {
        match self {
            Self::Jump(_) => false,
        }
    }

    fn veneer_size(self) -> CodeOffset {
        match self {
            Self::Jump(_) => 0,
        }
    }

    fn worst_case_veneer_size() -> CodeOffset {
        0
    }

    fn generate_veneer(
        self,
        _buffer: &mut [u8],
        _veneer_offset: CodeOffset,
    ) -> (CodeOffset, LabelUse) {
        match self {
            Self::Jump(_) => panic!("veneer not supported for {self:?}"),
        }
    }

    fn from_reloc(reloc: Reloc, addend: Addend) -> Option<LabelUse> {
        match reloc {
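            // A 32-bit PC-relative relocation (represented with the
            // `X86CallPCRel4` kind) with a negative addend can be resolved in
            // place as a `Jump` label use.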
            Reloc::X86CallPCRel4 if addend < 0 => {
                Some(LabelUse::Jump(i32::try_from(-addend).unwrap() as u32))
            }
            _ => None,
        }
    }
}