// cranelift_codegen/machinst/isle.rs

1use crate::ir::{BlockCall, Value, ValueList};
2use alloc::boxed::Box;
3use alloc::vec::Vec;
4use smallvec::SmallVec;
5use std::cell::Cell;
6
7pub use super::MachLabel;
8use super::RetPair;
9pub use crate::ir::{condcodes::CondCode, *};
10pub use crate::isa::{unwind::UnwindInst, TargetIsa};
11pub use crate::machinst::{
12    ABIArg, ABIArgSlot, ABIMachineSpec, CallSite, InputSourceInst, Lower, LowerBackend, RealReg,
13    Reg, RelocDistance, Sig, VCodeInst, Writable,
14};
15pub use crate::settings::{StackSwitchModel, TlsModel};
16
/// Unit type for ISLE terms that have no interesting return value.
pub type Unit = ();
/// A `ValueList` paired with an offset: the "rest" of the list from that
/// offset onward. Used to iterate variadic operand lists from ISLE.
pub type ValueSlice = (ValueList, usize);
pub type ValueArray2 = [Value; 2];
pub type ValueArray3 = [Value; 3];
pub type BlockArray2 = [BlockCall; 2];
pub type WritableReg = Writable<Reg>;
pub type VecRetPair = Vec<RetPair>;
pub type VecMask = Vec<u8>;
pub type ValueRegs = crate::machinst::ValueRegs<Reg>;
pub type WritableValueRegs = crate::machinst::ValueRegs<WritableReg>;
/// The result registers of a lowered instruction; inline capacity of 2
/// covers the common zero/one/two-result cases without heap allocation.
pub type InstOutput = SmallVec<[ValueRegs; 2]>;
/// `Cell`-wrapped `InstOutput` so ISLE-generated code can build outputs
/// incrementally through a shared reference.
pub type InstOutputBuilder = Cell<InstOutput>;
pub type BoxExternalName = Box<ExternalName>;
/// A half-open `(start, end)` index range.
pub type Range = (usize, usize);
pub type MachLabelSlice = [MachLabel];
pub type BoxVecMachLabel = Box<Vec<MachLabel>>;

/// A destructured view of a [`Range`]: either empty, or the first index
/// plus the remaining range.
pub enum RangeView {
    Empty,
    NonEmpty { index: usize, rest: Range },
}
38
/// Helper macro to define methods in `prelude.isle` within `impl Context for
/// ...` for each backend. These methods are shared amongst all backends.
#[macro_export]
#[doc(hidden)]
macro_rules! isle_lower_prelude_methods {
    // Default the machine-instruction type to the backend's `MInst`.
    () => {
        crate::isle_lower_prelude_methods!(MInst);
    };
    ($inst:ty) => {
        // Also pull in the target-independent prelude methods.
        crate::isle_common_prelude_methods!();

        /// Look up the IR type of the given SSA value.
        #[inline]
        fn value_type(&mut self, val: Value) -> Type {
            self.lower_ctx.dfg().value_type(val)
        }

        /// Wrap a single register as a `ValueRegs`.
        #[inline]
        fn value_reg(&mut self, reg: Reg) -> ValueRegs {
            ValueRegs::one(reg)
        }

        /// Build a two-register `ValueRegs`.
        #[inline]
        fn value_regs(&mut self, r1: Reg, r2: Reg) -> ValueRegs {
            ValueRegs::two(r1, r2)
        }

        /// Build a two-register writable `ValueRegs`.
        #[inline]
        fn writable_value_regs(&mut self, r1: WritableReg, r2: WritableReg) -> WritableValueRegs {
            WritableValueRegs::two(r1, r2)
        }

        /// Wrap a single writable register as a `WritableValueRegs`.
        #[inline]
        fn writable_value_reg(&mut self, r: WritableReg) -> WritableValueRegs {
            WritableValueRegs::one(r)
        }

        /// The invalid-sentinel `ValueRegs`.
        #[inline]
        fn value_regs_invalid(&mut self) -> ValueRegs {
            ValueRegs::invalid()
        }

        /// An empty instruction output (no results).
        #[inline]
        fn output_none(&mut self) -> InstOutput {
            smallvec::smallvec![]
        }

        /// An instruction output with a single result.
        #[inline]
        fn output(&mut self, regs: ValueRegs) -> InstOutput {
            smallvec::smallvec![regs]
        }

        /// An instruction output with two results.
        #[inline]
        fn output_pair(&mut self, r1: ValueRegs, r2: ValueRegs) -> InstOutput {
            smallvec::smallvec![r1, r2]
        }

        /// Start building an `InstOutput` incrementally. The `Cell` wrapper
        /// lets ISLE-generated code mutate the builder through `&self`.
        #[inline]
        fn output_builder_new(&mut self) -> InstOutputBuilder {
            std::cell::Cell::new(InstOutput::new())
        }

        /// Append one result to an in-progress `InstOutput` builder.
        #[inline]
        fn output_builder_push(&mut self, builder: &InstOutputBuilder, regs: ValueRegs) -> Unit {
            // Take the vector out of the cell, mutate, and put it back.
            let mut vec = builder.take();
            vec.push(regs);
            builder.set(vec);
        }

        /// Finish the builder, yielding the accumulated `InstOutput`.
        #[inline]
        fn output_builder_finish(&mut self, builder: &InstOutputBuilder) -> InstOutput {
            builder.take()
        }

        /// Allocate a fresh temporary register of type `ty`.
        ///
        /// Panics if `ty` requires more than one register.
        #[inline]
        fn temp_writable_reg(&mut self, ty: Type) -> WritableReg {
            let value_regs = self.lower_ctx.alloc_tmp(ty);
            value_regs.only_reg().unwrap()
        }

        /// Whether `reg` is a real register (not the invalid sentinel).
        #[inline]
        fn is_valid_reg(&mut self, reg: Reg) -> bool {
            use crate::machinst::valueregs::InvalidSentinel;
            !reg.is_invalid_sentinel()
        }

        /// The invalid-sentinel register value.
        #[inline]
        fn invalid_reg(&mut self) -> Reg {
            use crate::machinst::valueregs::InvalidSentinel;
            Reg::invalid_sentinel()
        }

        /// Record an extra lowered use of `val`, keeping its defining
        /// instruction alive through lowering.
        #[inline]
        fn mark_value_used(&mut self, val: Value) {
            self.lower_ctx.increment_lowered_uses(val);
        }

        /// Put `val` into a single register; panics if it needs two.
        #[inline]
        fn put_in_reg(&mut self, val: Value) -> Reg {
            self.put_in_regs(val).only_reg().unwrap()
        }

        /// Put `val` into one or more registers.
        #[inline]
        fn put_in_regs(&mut self, val: Value) -> ValueRegs {
            self.lower_ctx.put_value_in_regs(val)
        }

        /// Ensure `reg` is a virtual register, copying if necessary.
        #[inline]
        fn ensure_in_vreg(&mut self, reg: Reg, ty: Type) -> Reg {
            self.lower_ctx.ensure_in_vreg(reg, ty)
        }

        /// Register `i` of a `ValueRegs`; panics if out of range.
        #[inline]
        fn value_regs_get(&mut self, regs: ValueRegs, i: usize) -> Reg {
            regs.regs()[i]
        }

        /// Number of registers in a `ValueRegs`.
        #[inline]
        fn value_regs_len(&mut self, regs: ValueRegs) -> usize {
            regs.regs().len()
        }

        /// View a `ValueList` as a `ValueSlice` starting at offset 0.
        #[inline]
        fn value_list_slice(&mut self, list: ValueList) -> ValueSlice {
            (list, 0)
        }

        /// Match an empty `ValueSlice` (offset at or past the end).
        #[inline]
        fn value_slice_empty(&mut self, slice: ValueSlice) -> Option<()> {
            let (list, off) = slice;
            if off >= list.len(&self.lower_ctx.dfg().value_lists) {
                Some(())
            } else {
                None
            }
        }

        /// Split a non-empty `ValueSlice` into head value and the rest.
        #[inline]
        fn value_slice_unwrap(&mut self, slice: ValueSlice) -> Option<(Value, ValueSlice)> {
            let (list, off) = slice;
            if let Some(val) = list.get(off, &self.lower_ctx.dfg().value_lists) {
                Some((val, (list, off + 1)))
            } else {
                None
            }
        }

        /// Remaining length of a `ValueSlice`.
        #[inline]
        fn value_slice_len(&mut self, slice: ValueSlice) -> usize {
            let (list, off) = slice;
            list.len(&self.lower_ctx.dfg().value_lists) - off
        }

        /// Index into a `ValueSlice`; panics if `idx` is out of range.
        #[inline]
        fn value_slice_get(&mut self, slice: ValueSlice, idx: usize) -> Value {
            let (list, off) = slice;
            list.get(off + idx, &self.lower_ctx.dfg().value_lists)
                .unwrap()
        }

        /// Convert a writable register to its read-only form.
        #[inline]
        fn writable_reg_to_reg(&mut self, r: WritableReg) -> Reg {
            r.to_reg()
        }

        /// The results of `inst`, as a `ValueSlice`.
        #[inline]
        fn inst_results(&mut self, inst: Inst) -> ValueSlice {
            (self.lower_ctx.dfg().inst_results_list(inst), 0)
        }

        /// The first result of `inst`, if it has any results.
        #[inline]
        fn first_result(&mut self, inst: Inst) -> Option<Value> {
            self.lower_ctx.dfg().inst_results(inst).first().copied()
        }

        /// The `InstructionData` payload for `inst`.
        #[inline]
        fn inst_data(&mut self, inst: Inst) -> InstructionData {
            self.lower_ctx.dfg().insts[inst]
        }

        /// The instruction defining `val`, if it is an instruction result
        /// (as opposed to, e.g., a block parameter).
        #[inline]
        fn def_inst(&mut self, val: Value) -> Option<Inst> {
            self.lower_ctx.dfg().value_def(val).inst()
        }

        /// If `val` is defined by an `iconst`, return its value
        /// sign-extended from the constant's type width to 64 bits.
        #[inline]
        fn i64_from_iconst(&mut self, val: Value) -> Option<i64> {
            let inst = self.def_inst(val)?;
            let constant = match self.lower_ctx.data(inst) {
                InstructionData::UnaryImm {
                    opcode: Opcode::Iconst,
                    imm,
                } => imm.bits(),
                _ => return None,
            };
            let ty = self.lower_ctx.output_ty(inst, 0);
            // Shift left then arithmetic-shift right to sign-extend from
            // the type's bit width.
            let shift_amt = std::cmp::max(0, 64 - self.ty_bits(ty));
            Some((constant << shift_amt) >> shift_amt)
        }

        /// Like `i64_from_iconst`, but only when the value fits in `i32`.
        fn i32_from_iconst(&mut self, val: Value) -> Option<i32> {
            self.i64_from_iconst(val)?.try_into().ok()
        }

        /// Match `value` if it is a constant zero: an all-zero
        /// `vconst`/`f128const`, a zero integer or float immediate, or a
        /// `splat` of any of these. Returns the matched value itself.
        fn zero_value(&mut self, value: Value) -> Option<Value> {
            let insn = self.def_inst(value);
            if insn.is_some() {
                let insn = insn.unwrap();
                let inst_data = self.lower_ctx.data(insn);
                match inst_data {
                    InstructionData::Unary {
                        opcode: Opcode::Splat,
                        arg,
                    } => {
                        // A splat is zero iff its scalar operand is zero.
                        let arg = arg.clone();
                        return self.zero_value(arg);
                    }
                    InstructionData::UnaryConst {
                        opcode: Opcode::Vconst | Opcode::F128const,
                        constant_handle,
                    } => {
                        // Zero iff every byte of the constant pool data is 0.
                        let constant_data =
                            self.lower_ctx.get_constant_data(*constant_handle).clone();
                        if constant_data.into_vec().iter().any(|&x| x != 0) {
                            return None;
                        } else {
                            return Some(value);
                        }
                    }
                    InstructionData::UnaryImm { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    InstructionData::UnaryIeee16 { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    InstructionData::UnaryIeee32 { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    InstructionData::UnaryIeee64 { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    _ => None,
                }
            } else {
                None
            }
        }

        /// The configured TLS model (the type argument is unused).
        #[inline]
        fn tls_model(&mut self, _: Type) -> TlsModel {
            self.backend.flags().tls_model()
        }

        /// Match only when the TLS model is ELF general-dynamic.
        #[inline]
        fn tls_model_is_elf_gd(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::ElfGd {
                Some(())
            } else {
                None
            }
        }

        /// Match only when the TLS model is Mach-O.
        #[inline]
        fn tls_model_is_macho(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::Macho {
                Some(())
            } else {
                None
            }
        }

        /// Match only when the TLS model is COFF.
        #[inline]
        fn tls_model_is_coff(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::Coff {
                Some(())
            } else {
                None
            }
        }

        /// Match only when the `preserve_frame_pointers` flag is set.
        #[inline]
        fn preserve_frame_pointers(&mut self) -> Option<()> {
            if self.backend.flags().preserve_frame_pointers() {
                Some(())
            } else {
                None
            }
        }

        /// The configured stack-switch model.
        #[inline]
        fn stack_switch_model(&mut self) -> Option<StackSwitchModel> {
            Some(self.backend.flags().stack_switch_model())
        }

        /// Signature, name, and relocation distance of an external function.
        #[inline]
        fn func_ref_data(&mut self, func_ref: FuncRef) -> (SigRef, ExternalName, RelocDistance) {
            let funcdata = &self.lower_ctx.dfg().ext_funcs[func_ref];
            // Colocated functions are reachable with near (PC-relative)
            // relocations; anything else needs a far reference.
            let reloc_distance = if funcdata.colocated {
                RelocDistance::Near
            } else {
                RelocDistance::Far
            };
            (funcdata.signature, funcdata.name.clone(), reloc_distance)
        }

        /// Box an `ExternalName` (ISLE passes the boxed form around).
        #[inline]
        fn box_external_name(&mut self, extname: ExternalName) -> BoxExternalName {
            Box::new(extname)
        }

        /// Name, relocation distance, and offset of a symbolic global
        /// value, if `global_value` refers to a symbol.
        #[inline]
        fn symbol_value_data(
            &mut self,
            global_value: GlobalValue,
        ) -> Option<(ExternalName, RelocDistance, i64)> {
            let (name, reloc, offset) = self.lower_ctx.symbol_value_data(global_value)?;
            Some((name.clone(), reloc, offset))
        }

        /// Match only when `dist` is `RelocDistance::Near`.
        #[inline]
        fn reloc_distance_near(&mut self, dist: RelocDistance) -> Option<()> {
            if dist == RelocDistance::Near {
                Some(())
            } else {
                None
            }
        }

        /// Interpret the immediate as a little-endian `u128`; `None` unless
        /// it is exactly 16 bytes.
        #[inline]
        fn u128_from_immediate(&mut self, imm: Immediate) -> Option<u128> {
            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some(u128::from_le_bytes(bytes.try_into().ok()?))
        }

        /// Intern the bytes of `imm` as a vcode constant.
        #[inline]
        fn vconst_from_immediate(&mut self, imm: Immediate) -> Option<VCodeConstant> {
            Some(self.lower_ctx.use_constant(VCodeConstantData::Generated(
                self.lower_ctx.get_immediate_data(imm).clone(),
            )))
        }

        /// The 16 bytes of `imm` as a mask vector; `None` if not 16 bytes.
        #[inline]
        fn vec_mask_from_immediate(&mut self, imm: Immediate) -> Option<VecMask> {
            let data = self.lower_ctx.get_immediate_data(imm);
            if data.len() == 16 {
                Some(Vec::from(data.as_slice()))
            } else {
                None
            }
        }

        /// Interpret the constant as a little-endian `u64`; `None` unless
        /// it is exactly 8 bytes.
        #[inline]
        fn u64_from_constant(&mut self, constant: Constant) -> Option<u64> {
            let bytes = self.lower_ctx.get_constant_data(constant).as_slice();
            Some(u64::from_le_bytes(bytes.try_into().ok()?))
        }

        /// Interpret the constant as a little-endian `u128`; `None` unless
        /// it is exactly 16 bytes.
        #[inline]
        fn u128_from_constant(&mut self, constant: Constant) -> Option<u128> {
            let bytes = self.lower_ctx.get_constant_data(constant).as_slice();
            Some(u128::from_le_bytes(bytes.try_into().ok()?))
        }

        /// Intern `value` (little-endian) as an 8-byte vcode constant.
        #[inline]
        fn emit_u64_le_const(&mut self, value: u64) -> VCodeConstant {
            let data = VCodeConstantData::U64(value.to_le_bytes());
            self.lower_ctx.use_constant(data)
        }

        /// Intern `value` (little-endian) as a 16-byte vcode constant.
        #[inline]
        fn emit_u128_le_const(&mut self, value: u128) -> VCodeConstant {
            let data = VCodeConstantData::Generated(value.to_le_bytes().as_slice().into());
            self.lower_ctx.use_constant(data)
        }

        /// Intern an IR constant-pool entry as a vcode constant.
        #[inline]
        fn const_to_vconst(&mut self, constant: Constant) -> VCodeConstant {
            self.lower_ctx.use_constant(VCodeConstantData::Pool(
                constant,
                self.lower_ctx.get_constant_data(constant).clone(),
            ))
        }

        /// The single writable register of `regs`, if there is exactly one.
        fn only_writable_reg(&mut self, regs: WritableValueRegs) -> Option<WritableReg> {
            regs.only_reg()
        }

        /// Writable register `idx` of `regs`; panics if out of range.
        fn writable_regs_get(&mut self, regs: WritableValueRegs, idx: usize) -> WritableReg {
            regs.regs()[idx]
        }

        /// Number of ABI arguments in signature `abi`.
        fn abi_num_args(&mut self, abi: Sig) -> usize {
            self.lower_ctx.sigs().num_args(abi)
        }

        /// ABI argument `idx` of signature `abi`.
        fn abi_get_arg(&mut self, abi: Sig, idx: usize) -> ABIArg {
            self.lower_ctx.sigs().get_arg(abi, idx)
        }

        /// Number of ABI returns in signature `abi`.
        fn abi_num_rets(&mut self, abi: Sig) -> usize {
            self.lower_ctx.sigs().num_rets(abi)
        }

        /// ABI return `idx` of signature `abi`.
        fn abi_get_ret(&mut self, abi: Sig, idx: usize) -> ABIArg {
            self.lower_ctx.sigs().get_ret(abi, idx)
        }

        /// The return-area pointer argument of `abi`, if it has one.
        fn abi_ret_arg(&mut self, abi: Sig) -> Option<ABIArg> {
            self.lower_ctx.sigs().get_ret_arg(abi)
        }

        /// Match only when `abi` has no return-area pointer argument.
        fn abi_no_ret_arg(&mut self, abi: Sig) -> Option<()> {
            if let Some(_) = self.lower_ctx.sigs().get_ret_arg(abi) {
                None
            } else {
                Some(())
            }
        }

        /// The single slot of an ABI argument, if it occupies exactly one.
        fn abi_arg_only_slot(&mut self, arg: &ABIArg) -> Option<ABIArgSlot> {
            match arg {
                &ABIArg::Slots { ref slots, .. } => {
                    if slots.len() == 1 {
                        Some(slots[0])
                    } else {
                        None
                    }
                }
                _ => None,
            }
        }

        /// Destructure an implicit-pointer ABI argument into its pointer
        /// slot, offset, and type.
        fn abi_arg_implicit_pointer(&mut self, arg: &ABIArg) -> Option<(ABIArgSlot, i64, Type)> {
            match arg {
                &ABIArg::ImplicitPtrArg {
                    pointer,
                    offset,
                    ty,
                    ..
                } => Some((pointer, offset, ty)),
                _ => None,
            }
        }

        /// The register holding the return-area pointer; panics if the
        /// current function has none.
        fn abi_unwrap_ret_area_ptr(&mut self) -> Reg {
            self.lower_ctx.abi().ret_area_ptr().unwrap()
        }

        /// Instruction computing the address of a sized stack slot (plus a
        /// non-negative `offset`) into `dst`.
        fn abi_stackslot_addr(
            &mut self,
            dst: WritableReg,
            stack_slot: StackSlot,
            offset: Offset32,
        ) -> MInst {
            // Offsets into stack slots must be non-negative; panic otherwise.
            let offset = u32::try_from(i32::from(offset)).unwrap();
            self.lower_ctx
                .abi()
                .sized_stackslot_addr(stack_slot, offset, dst)
                .into()
        }

        /// Instruction computing the address of a dynamic stack slot into
        /// `dst`.
        fn abi_dynamic_stackslot_addr(
            &mut self,
            dst: WritableReg,
            stack_slot: DynamicStackSlot,
        ) -> MInst {
            assert!(self
                .lower_ctx
                .abi()
                .dynamic_stackslot_offsets()
                .is_valid(stack_slot));
            self.lower_ctx
                .abi()
                .dynamic_stackslot_addr(stack_slot, dst)
                .into()
        }

        /// Convert a real (physical) register to a `Reg`.
        fn real_reg_to_reg(&mut self, reg: RealReg) -> Reg {
            Reg::from(reg)
        }

        /// Convert a real (physical) register to a writable `Reg`.
        fn real_reg_to_writable_reg(&mut self, reg: RealReg) -> WritableReg {
            Writable::from_reg(Reg::from(reg))
        }

        /// The defining instruction of `val`, if `val` is its unique use
        /// and so the instruction may be sunk to the use site.
        fn is_sinkable_inst(&mut self, val: Value) -> Option<Inst> {
            let input = self.lower_ctx.get_value_as_source_or_const(val);

            if let InputSourceInst::UniqueUse(inst, _) = input.inst {
                Some(inst)
            } else {
                None
            }
        }

        /// Sink `inst` into its use site; it will not be lowered separately.
        #[inline]
        fn sink_inst(&mut self, inst: Inst) {
            self.lower_ctx.sink_inst(inst);
        }

        /// Strip off a `uextend` if `value` is defined by one; otherwise
        /// return `value` unchanged. Always matches.
        #[inline]
        fn maybe_uextend(&mut self, value: Value) -> Option<Value> {
            if let Some(def_inst) = self.def_inst(value) {
                if let InstructionData::Unary {
                    opcode: Opcode::Uextend,
                    arg,
                } = self.lower_ctx.data(def_inst)
                {
                    return Some(*arg);
                }
            }

            Some(value)
        }

        /// Narrow an `Imm64` to a `u8`, if it fits.
        #[inline]
        fn uimm8(&mut self, x: Imm64) -> Option<u8> {
            let x64: i64 = x.into();
            let x8: u8 = x64.try_into().ok()?;
            Some(x8)
        }

        /// Convert a physical register to a `Reg`.
        #[inline]
        fn preg_to_reg(&mut self, preg: PReg) -> Reg {
            preg.into()
        }

        /// A register-to-register move instruction of type `ty`.
        #[inline]
        fn gen_move(&mut self, ty: Type, dst: WritableReg, src: Reg) -> MInst {
            <$inst>::gen_move(dst, src, ty).into()
        }

        /// Generate the return instruction.
        fn gen_return(&mut self, (list, off): ValueSlice) {
            // Place every remaining return value in registers, in order.
            let rets = (off..list.len(&self.lower_ctx.dfg().value_lists))
                .map(|ix| {
                    let val = list.get(ix, &self.lower_ctx.dfg().value_lists).unwrap();
                    self.put_in_regs(val)
                })
                .collect();
            self.lower_ctx.gen_return(rets);
        }

        /// Same as `shuffle32_from_imm`, but for 64-bit lane shuffles.
        fn shuffle64_from_imm(&mut self, imm: Immediate) -> Option<(u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;

            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(8, &bytes[0..8])?,
                shuffle_imm_as_le_lane_idx(8, &bytes[8..16])?,
            ))
        }

        /// Attempts to interpret the shuffle immediate `imm` as a shuffle of
        /// 32-bit lanes, returning four integers, each of which is less than 8,
        /// which represents a permutation of 32-bit lanes as specified by
        /// `imm`.
        ///
        /// For example the shuffle immediate
        ///
        /// `0 1 2 3 8 9 10 11 16 17 18 19 24 25 26 27`
        ///
        /// would return `Some((0, 2, 4, 6))`.
        fn shuffle32_from_imm(&mut self, imm: Immediate) -> Option<(u8, u8, u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;

            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(4, &bytes[0..4])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[4..8])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[8..12])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[12..16])?,
            ))
        }

        /// Same as `shuffle32_from_imm`, but for 16-bit lane shuffles.
        fn shuffle16_from_imm(
            &mut self,
            imm: Immediate,
        ) -> Option<(u8, u8, u8, u8, u8, u8, u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;
            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(2, &bytes[0..2])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[2..4])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[4..6])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[6..8])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[8..10])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[10..12])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[12..14])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[14..16])?,
            ))
        }

        /// The divisor `val`, masked to the width of `ty`, unless it is 0
        /// or -1 (the two divisors that can trap or overflow a division).
        fn safe_divisor_from_imm64(&mut self, ty: Type, val: Imm64) -> Option<u64> {
            // All-ones mask for the type's width; also the value -1.
            let minus_one = if ty.bytes() == 8 {
                -1
            } else {
                (1 << (ty.bytes() * 8)) - 1
            };
            let bits = val.bits() & minus_one;
            if bits == 0 || bits == minus_one {
                None
            } else {
                Some(bits as u64)
            }
        }

        /// The single branch target, if there is exactly one.
        fn single_target(&mut self, targets: &MachLabelSlice) -> Option<MachLabel> {
            if targets.len() == 1 {
                Some(targets[0])
            } else {
                None
            }
        }

        /// The two branch targets, if there are exactly two.
        fn two_targets(&mut self, targets: &MachLabelSlice) -> Option<(MachLabel, MachLabel)> {
            if targets.len() == 2 {
                Some((targets[0], targets[1]))
            } else {
                None
            }
        }

        /// Split jump-table targets into the default label (first entry)
        /// and the boxed list of remaining table entries.
        fn jump_table_targets(
            &mut self,
            targets: &MachLabelSlice,
        ) -> Option<(MachLabel, BoxVecMachLabel)> {
            use std::boxed::Box;
            if targets.is_empty() {
                return None;
            }

            let default_label = targets[0];
            let jt_targets = Box::new(targets[1..].to_vec());
            Some((default_label, jt_targets))
        }

        /// Number of entries in a jump table (excluding the default).
        fn jump_table_size(&mut self, targets: &BoxVecMachLabel) -> u32 {
            targets.len() as u32
        }

        /// Attach a range fact (`min..=max` over `bits` bits) to `reg` and
        /// pass the register through unchanged.
        fn add_range_fact(&mut self, reg: Reg, bits: u16, min: u64, max: u64) -> Reg {
            self.lower_ctx.add_range_fact(reg, bits, min, max);
            reg
        }

        /// Whether `val` has no remaining uses to lower.
        fn value_is_unused(&mut self, val: Value) -> bool {
            self.lower_ctx.value_is_unused(val)
        }
    };
}
707
/// Returns the `size`-byte lane referred to by the shuffle immediate specified
/// in `bytes`.
///
/// This helper is used by `shuffleNN_from_imm` above and is used to interpret a
/// byte-based shuffle as a higher-level shuffle of bigger lanes. This will see
/// if the `bytes` specified, which must have `size` length, specifies a lane in
/// vectors aligned to a `size`-byte boundary.
///
/// Returns `None` if `bytes` doesn't specify a `size`-byte lane aligned
/// appropriately, or returns `Some(n)` where `n` is the index of the lane being
/// shuffled.
pub fn shuffle_imm_as_le_lane_idx(size: u8, bytes: &[u8]) -> Option<u8> {
    assert_eq!(bytes.len(), usize::from(size));

    // A valid `size`-byte lane selector must start on a `size`-byte
    // boundary...
    if bytes[0] % size != 0 {
        return None;
    }

    // ... and must then name each successive byte of that lane, i.e. the
    // selectors form a strictly consecutive run (little-endian order).
    let consecutive = bytes.windows(2).all(|pair| pair[0] + 1 == pair[1]);
    if !consecutive {
        return None;
    }

    // The bytes describe one whole aligned lane; viewed as `size`-byte
    // lanes, its index is the first byte divided by the lane size.
    Some(bytes[0] / size)
}
743
/// Helpers specifically for machines that use `abi::CallSite`.
#[macro_export]
#[doc(hidden)]
macro_rules! isle_prelude_caller_methods {
    ($abicaller:ty) => {
        /// Lower a direct (by-name) call to `extname` with signature
        /// `sig_ref`, passing the values in `args`.
        fn gen_call(
            &mut self,
            sig_ref: SigRef,
            extname: ExternalName,
            dist: RelocDistance,
            args @ (inputs, off): ValueSlice,
        ) -> InstOutput {
            let caller_conv = self.lower_ctx.abi().call_conv(self.lower_ctx.sigs());
            let sig = &self.lower_ctx.dfg().signatures[sig_ref];
            let num_rets = sig.returns.len();
            let caller = <$abicaller>::from_func(
                self.lower_ctx.sigs(),
                sig_ref,
                &extname,
                IsTailCall::No,
                dist,
                caller_conv,
                self.backend.flags().clone(),
            );

            // The argument count must match the callee signature exactly.
            assert_eq!(
                inputs.len(&self.lower_ctx.dfg().value_lists) - off,
                sig.params.len()
            );

            crate::machinst::isle::gen_call_common(&mut self.lower_ctx, num_rets, caller, args)
        }

        /// Lower an indirect call through the pointer in `val` with
        /// signature `sig_ref`, passing the values in `args`.
        fn gen_call_indirect(
            &mut self,
            sig_ref: SigRef,
            val: Value,
            args @ (inputs, off): ValueSlice,
        ) -> InstOutput {
            let caller_conv = self.lower_ctx.abi().call_conv(self.lower_ctx.sigs());
            let ptr = self.put_in_reg(val);
            let sig = &self.lower_ctx.dfg().signatures[sig_ref];
            let num_rets = sig.returns.len();
            let caller = <$abicaller>::from_ptr(
                self.lower_ctx.sigs(),
                sig_ref,
                ptr,
                IsTailCall::No,
                caller_conv,
                self.backend.flags().clone(),
            );

            // The argument count must match the callee signature exactly.
            assert_eq!(
                inputs.len(&self.lower_ctx.dfg().value_lists) - off,
                sig.params.len()
            );

            crate::machinst::isle::gen_call_common(&mut self.lower_ctx, num_rets, caller, args)
        }

        /// Lower a direct tail call (`return_call`) to `callee`. Produces
        /// no results: control does not return to this function.
        fn gen_return_call(
            &mut self,
            callee_sig: SigRef,
            callee: ExternalName,
            distance: RelocDistance,
            args: ValueSlice,
        ) -> InstOutput {
            let caller_conv = isa::CallConv::Tail;
            debug_assert_eq!(
                self.lower_ctx.abi().call_conv(self.lower_ctx.sigs()),
                caller_conv,
                "Can only do `return_call`s from within a `tail` calling convention function"
            );

            let call_site = <$abicaller>::from_func(
                self.lower_ctx.sigs(),
                callee_sig,
                &callee,
                IsTailCall::Yes,
                distance,
                caller_conv,
                self.backend.flags().clone(),
            );
            call_site.emit_return_call(self.lower_ctx, args, self.backend);

            InstOutput::new()
        }

        /// Lower an indirect tail call (`return_call_indirect`) through the
        /// pointer in `callee`. Produces no results.
        fn gen_return_call_indirect(
            &mut self,
            callee_sig: SigRef,
            callee: Value,
            args: ValueSlice,
        ) -> InstOutput {
            let caller_conv = isa::CallConv::Tail;
            debug_assert_eq!(
                self.lower_ctx.abi().call_conv(self.lower_ctx.sigs()),
                caller_conv,
                "Can only do `return_call`s from within a `tail` calling convention function"
            );

            let callee = self.put_in_reg(callee);

            let call_site = <$abicaller>::from_ptr(
                self.lower_ctx.sigs(),
                callee_sig,
                callee,
                IsTailCall::Yes,
                caller_conv,
                self.backend.flags().clone(),
            );
            call_site.emit_return_call(self.lower_ctx, args, self.backend);

            InstOutput::new()
        }
    };
}
861
862fn gen_call_common_args<M: ABIMachineSpec>(
863    ctx: &mut Lower<'_, M::I>,
864    call_site: &mut CallSite<M>,
865    (inputs, off): ValueSlice,
866) {
867    let num_args = call_site.num_args(ctx.sigs());
868
869    assert_eq!(inputs.len(&ctx.dfg().value_lists) - off, num_args);
870    let mut arg_regs = vec![];
871    for i in 0..num_args {
872        let input = inputs.get(off + i, &ctx.dfg().value_lists).unwrap();
873        arg_regs.push(ctx.put_value_in_regs(input));
874    }
875    for (i, arg_regs) in arg_regs.iter().enumerate() {
876        call_site.emit_copy_regs_to_buffer(ctx, i, *arg_regs);
877    }
878    for (i, arg_regs) in arg_regs.iter().enumerate() {
879        call_site.gen_arg(ctx, i, *arg_regs);
880    }
881}
882
883pub fn gen_call_common<M: ABIMachineSpec>(
884    ctx: &mut Lower<'_, M::I>,
885    num_rets: usize,
886    mut caller: CallSite<M>,
887    args: ValueSlice,
888) -> InstOutput {
889    gen_call_common_args(ctx, &mut caller, args);
890
891    // Handle retvals prior to emitting call, so the
892    // constraints are on the call instruction; but buffer the
893    // instructions till after the call.
894    let mut outputs = InstOutput::new();
895    let mut retval_insts = crate::machinst::abi::SmallInstVec::new();
896    // We take the *last* `num_rets` returns of the sig:
897    // this skips a StructReturn, if any, that is present.
898    let sigdata_num_rets = caller.num_rets(ctx.sigs());
899    debug_assert!(num_rets <= sigdata_num_rets);
900    for i in (sigdata_num_rets - num_rets)..sigdata_num_rets {
901        let (retval_inst, retval_regs) = caller.gen_retval(ctx, i);
902        retval_insts.extend(retval_inst.into_iter());
903        outputs.push(retval_regs);
904    }
905
906    caller.emit_call(ctx);
907
908    for inst in retval_insts {
909        ctx.emit(inst);
910    }
911
912    outputs
913}
914
/// This structure is used to implement the ISLE-generated `Context` trait and
/// internally has a temporary reference to a machinst `LowerCtx`.
pub(crate) struct IsleContext<'a, 'b, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    /// Lowering context for the function currently being compiled.
    pub lower_ctx: &'a mut Lower<'b, I>,
    /// The `LowerBackend` implementation for the current target.
    pub backend: &'a B,
}