pub mod generated_code;
5use generated_code::MInst;
6use inst::InstAndKind;
7
8use crate::ir::{condcodes::*, immediates::*, types::*, *};
10use crate::isa::pulley_shared::{
11 abi::*,
12 inst::{
13 FReg, OperandSize, PulleyCall, ReturnCallInfo, VReg, WritableFReg, WritableVReg,
14 WritableXReg, XReg,
15 },
16 lower::{regs, Cond},
17 *,
18};
19use crate::machinst::{
20 abi::{ArgPair, RetPair, StackAMode},
21 isle::*,
22 CallInfo, IsTailCall, MachInst, Reg, VCodeConstant, VCodeConstantData,
23};
24use alloc::boxed::Box;
25use pulley_interpreter::U6;
26use regalloc2::PReg;
// Concrete type names referenced by the ISLE-generated code in
// `generated_code`. ISLE terms are lowered to these Rust types.
type Unit = ();
type VecArgPair = Vec<ArgPair>;
type VecRetPair = Vec<RetPair>;
// Call-info payloads, boxed to keep the `MInst` enum variants small.
type BoxCallInfo = Box<CallInfo<PulleyCall>>;
type BoxCallIndInfo = Box<CallInfo<XReg>>;
type BoxCallIndirectHostInfo = Box<CallInfo<ExternalName>>;
type BoxReturnCallInfo = Box<ReturnCallInfo<ExternalName>>;
type BoxReturnCallIndInfo = Box<ReturnCallInfo<XReg>>;
type BoxExternalName = Box<ExternalName>;
// Register set drawn from the upper half of Pulley's `XReg` file.
type UpperXRegSet = pulley_interpreter::UpperRegSet<pulley_interpreter::XReg>;
37
38#[expect(
39 unused_imports,
40 reason = "used on other backends, used here to suppress warning elsewhere"
41)]
42use crate::machinst::isle::UnwindInst as _;
43
/// Context handed to the ISLE-generated lowering code: bundles the
/// shared per-function lowering state with the (immutable) backend.
pub(crate) struct PulleyIsleContext<'a, 'b, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    /// Mutable lowering state (instruction emission, vreg allocation, ...).
    pub lower_ctx: &'a mut Lower<'b, I>,
    /// Backend, used for ISA flags (e.g. endianness) during lowering.
    pub backend: &'a B,
}
52
53impl<'a, 'b, P> PulleyIsleContext<'a, 'b, InstAndKind<P>, PulleyBackend<P>>
54where
55 P: PulleyTargetKind,
56{
57 fn new(lower_ctx: &'a mut Lower<'b, InstAndKind<P>>, backend: &'a PulleyBackend<P>) -> Self {
58 Self { lower_ctx, backend }
59 }
60}
61
/// Implementation of the ISLE-generated `Context` trait: the external
/// "constructors"/"extractors" that the generated rules call into.
impl<P> generated_code::Context for PulleyIsleContext<'_, '_, InstAndKind<P>, PulleyBackend<P>>
where
    P: PulleyTargetKind,
{
    // Prelude helpers shared by all backends, instantiated for Pulley's
    // instruction and ABI call-site types.
    crate::isle_lower_prelude_methods!(InstAndKind<P>);
    crate::isle_prelude_caller_methods!(PulleyABICallSite<P>);

    // Conversions between plain `Reg`/`WritableReg` and Pulley's typed
    // register-class newtypes (`VReg`/`XReg`/`FReg`). The `new(..).unwrap()`
    // calls panic if a register of the wrong class is passed — that would
    // be a lowering bug, not a recoverable condition.
    fn vreg_new(&mut self, r: Reg) -> VReg {
        VReg::new(r).unwrap()
    }
    fn writable_vreg_new(&mut self, r: WritableReg) -> WritableVReg {
        r.map(|wr| VReg::new(wr).unwrap())
    }
    fn writable_vreg_to_vreg(&mut self, arg0: WritableVReg) -> VReg {
        arg0.to_reg()
    }
    fn writable_vreg_to_writable_reg(&mut self, arg0: WritableVReg) -> WritableReg {
        arg0.map(|vr| vr.to_reg())
    }
    fn vreg_to_reg(&mut self, arg0: VReg) -> Reg {
        *arg0
    }
    fn xreg_new(&mut self, r: Reg) -> XReg {
        XReg::new(r).unwrap()
    }
    fn writable_xreg_new(&mut self, r: WritableReg) -> WritableXReg {
        r.map(|wr| XReg::new(wr).unwrap())
    }
    fn writable_xreg_to_xreg(&mut self, arg0: WritableXReg) -> XReg {
        arg0.to_reg()
    }
    fn writable_xreg_to_writable_reg(&mut self, arg0: WritableXReg) -> WritableReg {
        arg0.map(|xr| xr.to_reg())
    }
    fn xreg_to_reg(&mut self, arg0: XReg) -> Reg {
        *arg0
    }
    fn freg_new(&mut self, r: Reg) -> FReg {
        FReg::new(r).unwrap()
    }
    fn writable_freg_new(&mut self, r: WritableReg) -> WritableFReg {
        r.map(|wr| FReg::new(wr).unwrap())
    }
    fn writable_freg_to_freg(&mut self, arg0: WritableFReg) -> FReg {
        arg0.to_reg()
    }
    fn writable_freg_to_writable_reg(&mut self, arg0: WritableFReg) -> WritableReg {
        arg0.map(|fr| fr.to_reg())
    }
    fn freg_to_reg(&mut self, arg0: FReg) -> Reg {
        *arg0
    }

    /// Emit one machine instruction into the current lowering buffer.
    #[inline]
    fn emit(&mut self, arg0: &MInst) -> Unit {
        self.lower_ctx.emit(arg0.clone().into());
    }

    /// The stack-pointer register, wrapped as an `XReg`.
    fn sp_reg(&mut self) -> XReg {
        XReg::new(regs::stack_reg()).unwrap()
    }

    /// Logical negation of a branch condition.
    fn cond_invert(&mut self, cond: &Cond) -> Cond {
        cond.invert()
    }

    /// Narrow a `u8` to a 6-bit immediate; `None` if it doesn't fit.
    fn u6_from_u8(&mut self, imm: u8) -> Option<U6> {
        U6::new(imm)
    }

    /// Resolve the effective endianness of a memory access: the flags'
    /// explicit endianness if set, otherwise the ISA's native one.
    fn endianness(&mut self, flags: MemFlags) -> Endianness {
        flags.endianness(self.backend.isa_flags.endianness())
    }

    /// True if `endianness` matches the target's native endianness.
    fn is_native_endianness(&mut self, endianness: &Endianness) -> bool {
        *endianness == self.backend.isa_flags.endianness()
    }

    /// Pointer width of the Pulley target being compiled for.
    fn pointer_width(&mut self) -> PointerWidth {
        P::pointer_width()
    }

    /// True if this memory access carries no trap code (cannot trap).
    fn memflags_nontrapping(&mut self, flags: MemFlags) -> bool {
        flags.trap_code().is_none()
    }

    /// True if this access looks like a wasm heap access: it traps with
    /// `HEAP_OUT_OF_BOUNDS` and resolves to little-endian.
    fn memflags_is_wasm(&mut self, flags: MemFlags) -> bool {
        flags.trap_code() == Some(TrapCode::HEAP_OUT_OF_BOUNDS)
            && self.endianness(flags) == Endianness::Little
    }

    /// Validate a load offset for a "g32" addressing pattern.
    ///
    /// Succeeds only when `load_offset` is non-negative, the bound that
    /// was checked equals `load_offset + size_of(load_ty)` (i.e. the
    /// bounds check covers exactly this access), and the offset fits in
    /// a `u16` immediate. Returns the offset as `u16` on success.
    fn g32_offset(
        &mut self,
        load_offset: i32,
        load_ty: Type,
        bound_check_offset: u64,
    ) -> Option<u16> {
        // Negative offsets are rejected here via the failed conversion.
        let load_offset = u64::try_from(load_offset).ok()?;
        let load_bytes = u64::from(load_ty.bytes());
        // Cannot overflow: load_offset <= i32::MAX and load_bytes is small.
        if bound_check_offset != load_offset + load_bytes {
            return None;
        }
        u16::try_from(load_offset).ok()
    }
}
168
169pub(crate) fn lower<P>(
171 lower_ctx: &mut Lower<InstAndKind<P>>,
172 backend: &PulleyBackend<P>,
173 inst: Inst,
174) -> Option<InstOutput>
175where
176 P: PulleyTargetKind,
177{
178 let mut isle_ctx = PulleyIsleContext::new(lower_ctx, backend);
181 generated_code::constructor_lower(&mut isle_ctx, inst)
182}
183
184pub(crate) fn lower_branch<P>(
186 lower_ctx: &mut Lower<InstAndKind<P>>,
187 backend: &PulleyBackend<P>,
188 branch: Inst,
189 targets: &[MachLabel],
190) -> Option<()>
191where
192 P: PulleyTargetKind,
193{
194 let mut isle_ctx = PulleyIsleContext::new(lower_ctx, backend);
197 generated_code::constructor_lower_branch(&mut isle_ctx, branch, targets)
198}