1use core::marker::PhantomData;
4
5use crate::binemit::{Addend, CodeOffset, Reloc};
6use crate::ir::types::{self, F32, F64, I128, I16, I32, I64, I8, I8X16};
7use crate::ir::{self, MemFlags, Type};
8use crate::isa::pulley_shared::abi::PulleyMachineDeps;
9use crate::isa::FunctionAlignment;
10use crate::{machinst::*, trace};
11use crate::{settings, CodegenError, CodegenResult};
12use alloc::string::{String, ToString};
13use regalloc2::RegClass;
14use smallvec::SmallVec;
15
16pub mod regs;
17pub use self::regs::*;
18pub mod args;
19pub use self::args::*;
20pub mod emit;
21pub use self::emit::*;
22
23pub use crate::isa::pulley_shared::lower::isle::generated_code::MInst as Inst;
27pub use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;
28
29impl From<RawInst> for Inst {
30 fn from(raw: RawInst) -> Inst {
31 Inst::Raw { raw }
32 }
33}
34
35use super::PulleyTargetKind;
36
/// Helpers generated at build time from the Pulley instruction definitions.
mod generated {
    use super::*;
    use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;

    // The build script emits `pulley_inst_gen.rs` into `OUT_DIR`; it supplies
    // the `get_operands` and `print` functions for `RawInst` used below.
    include!(concat!(env!("OUT_DIR"), "/pulley_inst_gen.rs"));
}
43
/// Additional information carried by return-call (tail-call) instructions.
#[derive(Clone, Debug)]
pub struct ReturnCallInfo<T> {
    /// Where this return-call transfers control to.
    pub dest: T,

    /// Size, in bytes, of the outgoing stack-argument area for the new frame.
    pub new_stack_arg_size: u32,

    /// Register arguments, each paired with the physical register it must
    /// occupy at the call site (see `pulley_get_operands`).
    pub uses: CallArgList,
}
57
58impl Inst {
59 pub fn gen_load(dst: Writable<Reg>, mem: Amode, ty: Type, flags: MemFlags) -> Inst {
61 if ty.is_vector() {
62 assert_eq!(ty.bytes(), 16);
63 Inst::VLoad {
64 dst: dst.map(|r| VReg::new(r).unwrap()),
65 mem,
66 ty,
67 flags,
68 }
69 } else if ty.is_int() {
70 assert!(ty.bytes() <= 8);
71 Inst::XLoad {
72 dst: dst.map(|r| XReg::new(r).unwrap()),
73 mem,
74 ty,
75 flags,
76 }
77 } else {
78 Inst::FLoad {
79 dst: dst.map(|r| FReg::new(r).unwrap()),
80 mem,
81 ty,
82 flags,
83 }
84 }
85 }
86
87 pub fn gen_store(mem: Amode, from_reg: Reg, ty: Type, flags: MemFlags) -> Inst {
89 if ty.is_vector() {
90 assert_eq!(ty.bytes(), 16);
91 Inst::VStore {
92 mem,
93 src: VReg::new(from_reg).unwrap(),
94 ty,
95 flags,
96 }
97 } else if ty.is_int() {
98 assert!(ty.bytes() <= 8);
99 Inst::XStore {
100 mem,
101 src: XReg::new(from_reg).unwrap(),
102 ty,
103 flags,
104 }
105 } else {
106 Inst::FStore {
107 mem,
108 src: FReg::new(from_reg).unwrap(),
109 ty,
110 flags,
111 }
112 }
113 }
114}
115
/// Reports every register operand of `inst` to the register allocator's
/// `collector`.
///
/// NOTE(review): the order of `collector` calls within each arm is kept
/// exactly as-is; operand visiting order can matter to the allocator, so do
/// not reorder these casually.
fn pulley_get_operands(inst: &mut Inst, collector: &mut impl OperandVisitor) {
    match inst {
        // Incoming ABI arguments: each vreg is defined in a fixed physical reg.
        Inst::Args { args } => {
            for ArgPair { vreg, preg } in args {
                collector.reg_fixed_def(vreg, *preg);
            }
        }
        // Outgoing ABI returns: each vreg must be placed in a fixed physical reg.
        Inst::Rets { rets } => {
            for RetPair { vreg, preg } in rets {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::DummyUse { reg } => {
            collector.reg_use(reg);
        }

        Inst::Nop => {}

        Inst::TrapIf { cond, code: _ } => {
            // The condition owns any register operands; trap code has none.
            cond.get_operands(collector);
        }

        Inst::GetSpecial { dst, reg } => {
            collector.reg_def(dst);
            // The source must be a special register; it is intentionally not
            // handed to the collector (no `reg_use` call here).
            assert!(reg.is_special());
        }

        Inst::LoadExtName {
            dst,
            name: _,
            offset: _,
        } => {
            collector.reg_def(dst);
        }

        Inst::Call { info } => {
            let CallInfo {
                uses, defs, dest, ..
            } = &mut **info;

            // Direct-call register arguments are plain (non-fixed) uses.
            let PulleyCall { args, .. } = dest;
            for arg in args {
                collector.reg_use(arg);
            }

            // ABI-constrained arguments and return values are pinned to
            // their physical registers.
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, preg } in defs {
                collector.reg_fixed_def(vreg, *preg);
            }
            collector.reg_clobbers(info.clobbers);
        }
        Inst::IndirectCallHost { info } => {
            let CallInfo { uses, defs, .. } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, preg } in defs {
                collector.reg_fixed_def(vreg, *preg);
            }
            collector.reg_clobbers(info.clobbers);
        }
        Inst::IndirectCall { info } => {
            // The callee address is an ordinary (allocatable) register use.
            collector.reg_use(&mut info.dest);
            let CallInfo { uses, defs, .. } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, preg } in defs {
                collector.reg_fixed_def(vreg, *preg);
            }
            collector.reg_clobbers(info.clobbers);
        }
        Inst::ReturnCall { info } => {
            // Tail calls have no defs/clobbers to report: this instruction
            // never returns to this frame.
            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }
        Inst::ReturnIndirectCall { info } => {
            // The indirect tail-call target is pinned to x15.
            collector.reg_fixed_use(&mut info.dest, regs::x15());

            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::Jump { .. } => {}

        Inst::BrIf {
            cond,
            taken: _,
            not_taken: _,
        } => {
            cond.get_operands(collector);
        }

        Inst::LoadAddr { dst, mem } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::XLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::XStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::FLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::FStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::VLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::VStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::BrTable { idx, .. } => {
            collector.reg_use(idx);
        }

        // All generated instructions delegate to build-generated code.
        Inst::Raw { raw } => generated::get_operands(raw, collector),
    }
}
303
/// An `Inst` paired, at the type level only, with a Pulley target kind `P`.
///
/// The marker is zero-sized (`PhantomData`), so this wrapper adds no runtime
/// data; it exists to select target-kind-specific trait impls below.
#[derive(Clone, Debug)]
pub struct InstAndKind<P>
where
    P: PulleyTargetKind,
{
    inst: Inst,
    // Zero-sized marker tying this instruction to a particular target kind.
    kind: PhantomData<P>,
}
317
318impl<P> From<Inst> for InstAndKind<P>
319where
320 P: PulleyTargetKind,
321{
322 fn from(inst: Inst) -> Self {
323 Self {
324 inst,
325 kind: PhantomData,
326 }
327 }
328}
329
330impl<P> From<RawInst> for InstAndKind<P>
331where
332 P: PulleyTargetKind,
333{
334 fn from(inst: RawInst) -> Self {
335 Self {
336 inst: inst.into(),
337 kind: PhantomData,
338 }
339 }
340}
341
342impl<P> From<InstAndKind<P>> for Inst
343where
344 P: PulleyTargetKind,
345{
346 fn from(inst: InstAndKind<P>) -> Self {
347 inst.inst
348 }
349}
350
351impl<P> core::ops::Deref for InstAndKind<P>
352where
353 P: PulleyTargetKind,
354{
355 type Target = Inst;
356
357 fn deref(&self) -> &Self::Target {
358 &self.inst
359 }
360}
361
362impl<P> core::ops::DerefMut for InstAndKind<P>
363where
364 P: PulleyTargetKind,
365{
366 fn deref_mut(&mut self) -> &mut Self::Target {
367 &mut self.inst
368 }
369}
370
371impl<P> MachInst for InstAndKind<P>
372where
373 P: PulleyTargetKind,
374{
375 type LabelUse = LabelUse;
376 type ABIMachineSpec = PulleyMachineDeps<P>;
377
378 const TRAP_OPCODE: &'static [u8] = TRAP_OPCODE;
379
380 fn gen_dummy_use(reg: Reg) -> Self {
381 Inst::DummyUse { reg }.into()
382 }
383
384 fn canonical_type_for_rc(rc: RegClass) -> Type {
385 match rc {
386 regalloc2::RegClass::Int => I64,
387 regalloc2::RegClass::Float => F64,
388 regalloc2::RegClass::Vector => I8X16,
389 }
390 }
391
392 fn is_safepoint(&self) -> bool {
393 match self.inst {
394 Inst::Raw {
395 raw: RawInst::Trap { .. },
396 }
397 | Inst::Call { .. }
398 | Inst::IndirectCall { .. }
399 | Inst::IndirectCallHost { .. } => true,
400 _ => false,
401 }
402 }
403
404 fn get_operands(&mut self, collector: &mut impl OperandVisitor) {
405 pulley_get_operands(self, collector);
406 }
407
408 fn is_move(&self) -> Option<(Writable<Reg>, Reg)> {
409 match self.inst {
410 Inst::Raw {
411 raw: RawInst::Xmov { dst, src },
412 } => Some((Writable::from_reg(*dst.to_reg()), *src)),
413 _ => None,
414 }
415 }
416
417 fn is_included_in_clobbers(&self) -> bool {
418 !self.is_args()
419 }
420
421 fn is_trap(&self) -> bool {
422 match self.inst {
423 Inst::Raw {
424 raw: RawInst::Trap { .. },
425 } => true,
426 _ => false,
427 }
428 }
429
430 fn is_args(&self) -> bool {
431 match self.inst {
432 Inst::Args { .. } => true,
433 _ => false,
434 }
435 }
436
437 fn is_term(&self) -> MachTerminator {
438 match self.inst {
439 Inst::Raw {
440 raw: RawInst::Ret { .. },
441 }
442 | Inst::Rets { .. } => MachTerminator::Ret,
443 Inst::Jump { .. } => MachTerminator::Uncond,
444 Inst::BrIf { .. } => MachTerminator::Cond,
445 Inst::BrTable { .. } => MachTerminator::Indirect,
446 Inst::ReturnCall { .. } | Inst::ReturnIndirectCall { .. } => MachTerminator::Indirect,
447 _ => MachTerminator::None,
448 }
449 }
450
451 fn is_mem_access(&self) -> bool {
452 todo!()
453 }
454
455 fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Self {
456 match ty {
457 ir::types::I8 | ir::types::I16 | ir::types::I32 | ir::types::I64 => RawInst::Xmov {
458 dst: WritableXReg::try_from(to_reg).unwrap(),
459 src: XReg::new(from_reg).unwrap(),
460 }
461 .into(),
462 ir::types::F32 | ir::types::F64 => RawInst::Fmov {
463 dst: WritableFReg::try_from(to_reg).unwrap(),
464 src: FReg::new(from_reg).unwrap(),
465 }
466 .into(),
467 _ if ty.is_vector() => RawInst::Vmov {
468 dst: WritableVReg::try_from(to_reg).unwrap(),
469 src: VReg::new(from_reg).unwrap(),
470 }
471 .into(),
472 _ => panic!("don't know how to generate a move for type {ty}"),
473 }
474 }
475
476 fn gen_nop(_preferred_size: usize) -> Self {
477 todo!()
478 }
479
480 fn rc_for_type(ty: Type) -> CodegenResult<(&'static [RegClass], &'static [Type])> {
481 match ty {
482 I8 => Ok((&[RegClass::Int], &[I8])),
483 I16 => Ok((&[RegClass::Int], &[I16])),
484 I32 => Ok((&[RegClass::Int], &[I32])),
485 I64 => Ok((&[RegClass::Int], &[I64])),
486 F32 => Ok((&[RegClass::Float], &[F32])),
487 F64 => Ok((&[RegClass::Float], &[F64])),
488 I128 => Ok((&[RegClass::Int, RegClass::Int], &[I64, I64])),
489 _ if ty.is_vector() => {
490 debug_assert!(ty.bits() <= 512);
491
492 const SIMD_TYPES: [[Type; 1]; 6] = [
496 [types::I8X2],
497 [types::I8X4],
498 [types::I8X8],
499 [types::I8X16],
500 [types::I16X16],
501 [types::I32X16],
502 ];
503 let idx = (ty.bytes().ilog2() - 1) as usize;
504 let ty = &SIMD_TYPES[idx][..];
505
506 Ok((&[RegClass::Vector], ty))
507 }
508 _ => Err(CodegenError::Unsupported(format!(
509 "Unexpected SSA-value type: {ty}"
510 ))),
511 }
512 }
513
514 fn gen_jump(label: MachLabel) -> Self {
515 Inst::Jump { label }.into()
516 }
517
518 fn worst_case_size() -> CodeOffset {
519 22
524 }
525
526 fn ref_type_regclass(_settings: &settings::Flags) -> RegClass {
527 RegClass::Int
528 }
529
530 fn function_alignment() -> FunctionAlignment {
531 FunctionAlignment {
532 minimum: 1,
533 preferred: 1,
534 }
535 }
536}
537
/// The encoded bytes of the Pulley `trap` instruction: the `ExtendedOp`
/// prefix opcode followed by the 16-bit `Trap` extended opcode in
/// little-endian byte order (low byte first). Kept in sync with the encoder
/// by `test_trap_encoding` below.
const TRAP_OPCODE: &'static [u8] = &[
    pulley_interpreter::opcode::Opcode::ExtendedOp as u8,
    ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 0) as u8,
    ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 8) as u8,
];
543
#[test]
fn test_trap_encoding() {
    // The hand-written `TRAP_OPCODE` bytes must stay in sync with the real
    // encoder in `pulley_interpreter`.
    let mut encoded = std::vec::Vec::new();
    pulley_interpreter::encode::trap(&mut encoded);
    assert_eq!(encoded, TRAP_OPCODE);
}
550
551pub fn reg_name(reg: Reg) -> String {
555 match reg.to_real_reg() {
556 Some(real) => {
557 let n = real.hw_enc();
558 match (real.class(), n) {
559 (RegClass::Int, 63) => format!("sp"),
560 (RegClass::Int, 62) => format!("lr"),
561 (RegClass::Int, 61) => format!("fp"),
562 (RegClass::Int, 60) => format!("tmp0"),
563 (RegClass::Int, 59) => format!("tmp1"),
564
565 (RegClass::Int, _) => format!("x{n}"),
566 (RegClass::Float, _) => format!("f{n}"),
567 (RegClass::Vector, _) => format!("v{n}"),
568 }
569 }
570 None => {
571 format!("{reg:?}")
572 }
573 }
574}
575
impl Inst {
    /// Renders this instruction as a human-readable string for disassembly
    /// listings and debug output. `_state` is currently unused.
    ///
    /// NOTE(review): these strings appear in golden test output, so any
    /// change to the formatting here (even whitespace) alters test
    /// expectations elsewhere.
    fn print_with_state<P>(&self, _state: &mut EmitState<P>) -> String
    where
        P: PulleyTargetKind,
    {
        use core::fmt::Write;

        let format_reg = |reg: Reg| -> String { reg_name(reg) };

        match self {
            Inst::Args { args } => {
                let mut s = "args".to_string();
                for arg in args {
                    let preg = format_reg(arg.preg);
                    let def = format_reg(arg.vreg.to_reg());
                    write!(&mut s, " {def}={preg}").unwrap();
                }
                s
            }
            Inst::Rets { rets } => {
                let mut s = "rets".to_string();
                for ret in rets {
                    let preg = format_reg(ret.preg);
                    let vreg = format_reg(ret.vreg);
                    write!(&mut s, " {vreg}={preg}").unwrap();
                }
                s
            }

            Inst::DummyUse { reg } => {
                let reg = format_reg(*reg);
                format!("dummy_use {reg}")
            }

            Inst::TrapIf { cond, code } => {
                format!("trap_{cond} // code = {code:?}")
            }

            Inst::Nop => format!("nop"),

            // Rendered as `xmov` since it moves a special register into `dst`.
            Inst::GetSpecial { dst, reg } => {
                let dst = format_reg(*dst.to_reg());
                let reg = format_reg(**reg);
                format!("xmov {dst}, {reg}")
            }

            Inst::LoadExtName { dst, name, offset } => {
                let dst = format_reg(*dst.to_reg());
                format!("{dst} = load_ext_name {name:?}, {offset}")
            }

            Inst::Call { info } => {
                format!("call {info:?}")
            }

            Inst::IndirectCall { info } => {
                let callee = format_reg(*info.dest);
                format!("indirect_call {callee}, {info:?}")
            }

            Inst::ReturnCall { info } => {
                format!("return_call {info:?}")
            }

            Inst::ReturnIndirectCall { info } => {
                let callee = format_reg(*info.dest);
                format!("return_indirect_call {callee}, {info:?}")
            }

            Inst::IndirectCallHost { info } => {
                format!("indirect_call_host {info:?}")
            }

            Inst::Jump { label } => format!("jump {}", label.to_string()),

            Inst::BrIf {
                cond,
                taken,
                not_taken,
            } => {
                let taken = taken.to_string();
                let not_taken = not_taken.to_string();
                format!("br_{cond}, {taken}; jump {not_taken}")
            }

            Inst::LoadAddr { dst, mem } => {
                let dst = format_reg(*dst.to_reg());
                let mem = mem.to_string();
                format!("{dst} = load_addr {mem}")
            }

            // NOTE(review): the load arms print `flags ={flags}` (no space
            // after `=`) while the store arms print `flags = {flags}`. This
            // looks unintentional, but fixing it would change golden test
            // output, so it is left as-is and only flagged here.
            Inst::XLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = xload{ty} {mem} // flags ={flags}")
            }

            Inst::XStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("xstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::FLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = fload{ty} {mem} // flags ={flags}")
            }

            Inst::FStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("fstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::VLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = vload{ty} {mem} // flags ={flags}")
            }

            Inst::VStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("vstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::BrTable {
                idx,
                default,
                targets,
            } => {
                let idx = format_reg(**idx);
                format!("br_table {idx} {default:?} {targets:?}")
            }
            // Generated instructions are printed by build-generated code.
            Inst::Raw { raw } => generated::print(raw),
        }
    }
}
751
/// The kinds of label references this backend's `MachBuffer` can patch.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LabelUse {
    /// A 32-bit PC-relative jump offset, stored as 4 little-endian bytes,
    /// with the carried `u32` addend folded into the patched value.
    Jump(u32),
}
760
761impl MachInstLabelUse for LabelUse {
762 const ALIGN: CodeOffset = 1;
765
766 fn max_pos_range(self) -> CodeOffset {
768 match self {
769 Self::Jump(_) => 0x7fff_ffff,
770 }
771 }
772
773 fn max_neg_range(self) -> CodeOffset {
775 match self {
776 Self::Jump(_) => 0x8000_0000,
777 }
778 }
779
780 fn patch_size(self) -> CodeOffset {
782 match self {
783 Self::Jump(_) => 4,
784 }
785 }
786
787 fn patch(self, buffer: &mut [u8], use_offset: CodeOffset, label_offset: CodeOffset) {
789 let use_relative = (label_offset as i64) - (use_offset as i64);
790 debug_assert!(use_relative <= self.max_pos_range() as i64);
791 debug_assert!(use_relative >= -(self.max_neg_range() as i64));
792 let pc_rel = i32::try_from(use_relative).unwrap() as u32;
793 match self {
794 Self::Jump(addend) => {
795 let value = pc_rel.wrapping_add(addend);
796 trace!(
797 "patching label use @ {use_offset:#x} to label {label_offset:#x} via \
798 PC-relative offset {pc_rel:#x}"
799 );
800 buffer.copy_from_slice(&value.to_le_bytes()[..]);
801 }
802 }
803 }
804
805 fn supports_veneer(self) -> bool {
807 match self {
808 Self::Jump(_) => false,
809 }
810 }
811
812 fn veneer_size(self) -> CodeOffset {
814 match self {
815 Self::Jump(_) => 0,
816 }
817 }
818
819 fn worst_case_veneer_size() -> CodeOffset {
820 0
821 }
822
823 fn generate_veneer(
826 self,
827 _buffer: &mut [u8],
828 _veneer_offset: CodeOffset,
829 ) -> (CodeOffset, LabelUse) {
830 match self {
831 Self::Jump(_) => panic!("veneer not supported for {self:?}"),
832 }
833 }
834
835 fn from_reloc(reloc: Reloc, addend: Addend) -> Option<LabelUse> {
836 match reloc {
837 Reloc::X86CallPCRel4 if addend < 0 => {
838 Some(LabelUse::Jump(i32::try_from(-addend).unwrap() as u32))
843 }
844 _ => None,
845 }
846 }
847}