1use crate::ir;
4use crate::ir::types::*;
5
6use crate::isa;
7
8use crate::isa::CallConv;
9use crate::isa::riscv64::inst::*;
10use crate::machinst::*;
11
12use crate::CodegenResult;
13use crate::ir::LibCall;
14use crate::ir::Signature;
15use crate::isa::riscv64::settings::Flags as RiscvFlags;
16use crate::isa::unwind::UnwindInst;
17use crate::settings;
18use alloc::boxed::Box;
19use alloc::vec::Vec;
20use regalloc2::{MachineEnv, PReg, PRegSet};
21
22use smallvec::{SmallVec, smallvec};
23use std::borrow::ToOwned;
24use std::sync::OnceLock;
25
/// Concrete `Callee` instantiation for the riscv64 backend.
pub(crate) type Riscv64Callee = Callee<Riscv64MachineDeps>;

/// Marker type implementing [`ABIMachineSpec`]: supplies all ABI and
/// frame-layout hooks the shared `machinst` code needs for riscv64.
pub struct Riscv64MachineDeps;
32
33impl IsaFlags for RiscvFlags {}
34
35impl RiscvFlags {
36 pub(crate) fn min_vec_reg_size(&self) -> u64 {
37 let entries = [
38 (self.has_zvl65536b(), 65536),
39 (self.has_zvl32768b(), 32768),
40 (self.has_zvl16384b(), 16384),
41 (self.has_zvl8192b(), 8192),
42 (self.has_zvl4096b(), 4096),
43 (self.has_zvl2048b(), 2048),
44 (self.has_zvl1024b(), 1024),
45 (self.has_zvl512b(), 512),
46 (self.has_zvl256b(), 256),
47 (self.has_v(), 128),
50 (self.has_zvl128b(), 128),
51 (self.has_zvl64b(), 64),
52 (self.has_zvl32b(), 32),
53 ];
54
55 for (has_flag, size) in entries.into_iter() {
56 if !has_flag {
57 continue;
58 }
59
60 return std::cmp::min(size, 1024);
63 }
64
65 return 0;
66 }
67}
68
69impl ABIMachineSpec for Riscv64MachineDeps {
70 type I = Inst;
71 type F = RiscvFlags;
72
73 const STACK_ARG_RET_SIZE_LIMIT: u32 = 128 * 1024 * 1024;
77
78 fn word_bits() -> u32 {
79 64
80 }
81
82 fn stack_align(_call_conv: isa::CallConv) -> u32 {
84 16
85 }
86
87 fn compute_arg_locs(
88 call_conv: isa::CallConv,
89 flags: &settings::Flags,
90 params: &[ir::AbiParam],
91 args_or_rets: ArgsOrRets,
92 add_ret_area_ptr: bool,
93 mut args: ArgsAccumulator,
94 ) -> CodegenResult<(u32, Option<usize>)> {
95 assert_ne!(
98 call_conv,
99 isa::CallConv::Winch,
100 "riscv64 does not support the 'winch' calling convention yet"
101 );
102
103 let (x_start, x_end, f_start, f_end) = match args_or_rets {
106 ArgsOrRets::Args => (10, 17, 10, 17),
107 ArgsOrRets::Rets => (10, 11, 10, 11),
108 };
109 let mut next_x_reg = x_start;
110 let mut next_f_reg = f_start;
111 let mut next_stack: u32 = 0;
113
114 let ret_area_ptr = if add_ret_area_ptr {
115 assert!(ArgsOrRets::Args == args_or_rets);
116 next_x_reg += 1;
117 Some(ABIArg::reg(
118 x_reg(x_start).to_real_reg().unwrap(),
119 I64,
120 ir::ArgumentExtension::None,
121 ir::ArgumentPurpose::Normal,
122 ))
123 } else {
124 None
125 };
126
127 for param in params {
128 if let ir::ArgumentPurpose::StructArgument(_) = param.purpose {
129 panic!(
130 "StructArgument parameters are not supported on riscv64. \
131 Use regular pointer arguments instead."
132 );
133 }
134
135 let (rcs, reg_tys) = Inst::rc_for_type(param.value_type)?;
137 let mut slots = ABIArgSlotVec::new();
138 for (rc, reg_ty) in rcs.iter().zip(reg_tys.iter()) {
139 let next_reg = if (next_x_reg <= x_end) && *rc == RegClass::Int {
140 let x = Some(x_reg(next_x_reg));
141 next_x_reg += 1;
142 x
143 } else if (next_f_reg <= f_end) && *rc == RegClass::Float {
144 let x = Some(f_reg(next_f_reg));
145 next_f_reg += 1;
146 x
147 } else {
148 None
149 };
150 if let Some(reg) = next_reg {
151 slots.push(ABIArgSlot::Reg {
152 reg: reg.to_real_reg().unwrap(),
153 ty: *reg_ty,
154 extension: param.extension,
155 });
156 } else {
157 if args_or_rets == ArgsOrRets::Rets && !flags.enable_multi_ret_implicit_sret() {
158 return Err(crate::CodegenError::Unsupported(
159 "Too many return values to fit in registers. \
160 Use a StructReturn argument instead. (#9510)"
161 .to_owned(),
162 ));
163 }
164
165 let size = reg_ty.bits() / 8;
168 let size = std::cmp::max(size, 8);
169 debug_assert!(size.is_power_of_two());
171 next_stack = align_to(next_stack, size);
172 slots.push(ABIArgSlot::Stack {
173 offset: next_stack as i64,
174 ty: *reg_ty,
175 extension: param.extension,
176 });
177 next_stack += size;
178 }
179 }
180 args.push(ABIArg::Slots {
181 slots,
182 purpose: param.purpose,
183 });
184 }
185 let pos = if let Some(ret_area_ptr) = ret_area_ptr {
186 args.push_non_formal(ret_area_ptr);
187 Some(args.args().len() - 1)
188 } else {
189 None
190 };
191
192 next_stack = align_to(next_stack, Self::stack_align(call_conv));
193
194 Ok((next_stack, pos))
195 }
196
197 fn gen_load_stack(mem: StackAMode, into_reg: Writable<Reg>, ty: Type) -> Inst {
198 Inst::gen_load(into_reg, mem.into(), ty, MemFlags::trusted())
199 }
200
201 fn gen_store_stack(mem: StackAMode, from_reg: Reg, ty: Type) -> Inst {
202 Inst::gen_store(mem.into(), from_reg, ty, MemFlags::trusted())
203 }
204
205 fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Inst {
206 Inst::gen_move(to_reg, from_reg, ty)
207 }
208
209 fn gen_extend(
210 to_reg: Writable<Reg>,
211 from_reg: Reg,
212 signed: bool,
213 from_bits: u8,
214 to_bits: u8,
215 ) -> Inst {
216 assert!(from_bits < to_bits);
217 Inst::Extend {
218 rd: to_reg,
219 rn: from_reg,
220 signed,
221 from_bits,
222 to_bits,
223 }
224 }
225
226 fn get_ext_mode(
227 _call_conv: isa::CallConv,
228 specified: ir::ArgumentExtension,
229 ) -> ir::ArgumentExtension {
230 specified
231 }
232
233 fn gen_args(args: Vec<ArgPair>) -> Inst {
234 Inst::Args { args }
235 }
236
237 fn gen_rets(rets: Vec<RetPair>) -> Inst {
238 Inst::Rets { rets }
239 }
240
241 fn get_stacklimit_reg(_call_conv: isa::CallConv) -> Reg {
242 spilltmp_reg()
243 }
244
245 fn gen_add_imm(
246 _call_conv: isa::CallConv,
247 into_reg: Writable<Reg>,
248 from_reg: Reg,
249 imm: u32,
250 ) -> SmallInstVec<Inst> {
251 let mut insts = SmallInstVec::new();
252 if let Some(imm12) = Imm12::maybe_from_u64(imm as u64) {
253 insts.push(Inst::AluRRImm12 {
254 alu_op: AluOPRRI::Addi,
255 rd: into_reg,
256 rs: from_reg,
257 imm12,
258 });
259 } else {
260 insts.extend(Inst::load_constant_u32(
261 writable_spilltmp_reg2(),
262 imm as u64,
263 ));
264 insts.push(Inst::AluRRR {
265 alu_op: AluOPRRR::Add,
266 rd: into_reg,
267 rs1: spilltmp_reg2(),
268 rs2: from_reg,
269 });
270 }
271 insts
272 }
273
274 fn gen_stack_lower_bound_trap(limit_reg: Reg) -> SmallInstVec<Inst> {
275 let mut insts = SmallVec::new();
276 insts.push(Inst::TrapIf {
277 cc: IntCC::UnsignedLessThan,
278 rs1: stack_reg(),
279 rs2: limit_reg,
280 trap_code: ir::TrapCode::STACK_OVERFLOW,
281 });
282 insts
283 }
284
285 fn gen_get_stack_addr(mem: StackAMode, into_reg: Writable<Reg>) -> Inst {
286 Inst::LoadAddr {
287 rd: into_reg,
288 mem: mem.into(),
289 }
290 }
291
292 fn gen_load_base_offset(into_reg: Writable<Reg>, base: Reg, offset: i32, ty: Type) -> Inst {
293 let mem = AMode::RegOffset(base, offset as i64);
294 Inst::gen_load(into_reg, mem, ty, MemFlags::trusted())
295 }
296
297 fn gen_store_base_offset(base: Reg, offset: i32, from_reg: Reg, ty: Type) -> Inst {
298 let mem = AMode::RegOffset(base, offset as i64);
299 Inst::gen_store(mem, from_reg, ty, MemFlags::trusted())
300 }
301
302 fn gen_sp_reg_adjust(amount: i32) -> SmallInstVec<Inst> {
303 let mut insts = SmallVec::new();
304
305 if amount == 0 {
306 return insts;
307 }
308
309 if let Some(imm) = Imm12::maybe_from_i64(amount as i64) {
310 insts.push(Inst::AluRRImm12 {
311 alu_op: AluOPRRI::Addi,
312 rd: writable_stack_reg(),
313 rs: stack_reg(),
314 imm12: imm,
315 })
316 } else {
317 let tmp = writable_spilltmp_reg();
318 insts.extend(Inst::load_constant_u64(tmp, amount as i64 as u64));
319 insts.push(Inst::AluRRR {
320 alu_op: AluOPRRR::Add,
321 rd: writable_stack_reg(),
322 rs1: stack_reg(),
323 rs2: tmp.to_reg(),
324 });
325 }
326
327 insts
328 }
329
330 fn gen_prologue_frame_setup(
331 _call_conv: isa::CallConv,
332 flags: &settings::Flags,
333 _isa_flags: &RiscvFlags,
334 frame_layout: &FrameLayout,
335 ) -> SmallInstVec<Inst> {
336 let mut insts = SmallVec::new();
337
338 if frame_layout.setup_area_size > 0 {
339 insts.extend(Self::gen_sp_reg_adjust(-16));
344 insts.push(Inst::gen_store(
345 AMode::SPOffset(8),
346 link_reg(),
347 I64,
348 MemFlags::trusted(),
349 ));
350 insts.push(Inst::gen_store(
351 AMode::SPOffset(0),
352 fp_reg(),
353 I64,
354 MemFlags::trusted(),
355 ));
356
357 if flags.unwind_info() {
358 insts.push(Inst::Unwind {
359 inst: UnwindInst::PushFrameRegs {
360 offset_upward_to_caller_sp: frame_layout.setup_area_size,
361 },
362 });
363 }
364 insts.push(Inst::Mov {
365 rd: writable_fp_reg(),
366 rm: stack_reg(),
367 ty: I64,
368 });
369 }
370
371 insts
372 }
373 fn gen_epilogue_frame_restore(
375 call_conv: isa::CallConv,
376 _flags: &settings::Flags,
377 _isa_flags: &RiscvFlags,
378 frame_layout: &FrameLayout,
379 ) -> SmallInstVec<Inst> {
380 let mut insts = SmallVec::new();
381
382 if frame_layout.setup_area_size > 0 {
383 insts.push(Inst::gen_load(
384 writable_link_reg(),
385 AMode::SPOffset(8),
386 I64,
387 MemFlags::trusted(),
388 ));
389 insts.push(Inst::gen_load(
390 writable_fp_reg(),
391 AMode::SPOffset(0),
392 I64,
393 MemFlags::trusted(),
394 ));
395 insts.extend(Self::gen_sp_reg_adjust(16));
396 }
397
398 if call_conv == isa::CallConv::Tail && frame_layout.tail_args_size > 0 {
399 insts.extend(Self::gen_sp_reg_adjust(
400 frame_layout.tail_args_size.try_into().unwrap(),
401 ));
402 }
403
404 insts
405 }
406
407 fn gen_return(
408 _call_conv: isa::CallConv,
409 _isa_flags: &RiscvFlags,
410 _frame_layout: &FrameLayout,
411 ) -> SmallInstVec<Inst> {
412 smallvec![Inst::Ret {}]
413 }
414
415 fn gen_probestack(insts: &mut SmallInstVec<Self::I>, frame_size: u32) {
416 insts.extend(Inst::load_constant_u32(writable_a0(), frame_size as u64));
417 let mut info = CallInfo::empty(
418 ExternalName::LibCall(LibCall::Probestack),
419 CallConv::SystemV,
420 );
421 info.uses.push(CallArgPair {
422 vreg: a0(),
423 preg: a0(),
424 });
425 insts.push(Inst::Call {
426 info: Box::new(info),
427 });
428 }
429
430 fn gen_clobber_save(
431 _call_conv: isa::CallConv,
432 flags: &settings::Flags,
433 frame_layout: &FrameLayout,
434 ) -> SmallVec<[Inst; 16]> {
435 let mut insts = SmallVec::new();
436 let setup_frame = frame_layout.setup_area_size > 0;
437
438 let incoming_args_diff = frame_layout.tail_args_size - frame_layout.incoming_args_size;
439 if incoming_args_diff > 0 {
440 insts.extend(Self::gen_sp_reg_adjust(-(incoming_args_diff as i32)));
442
443 if setup_frame {
444 insts.push(Inst::gen_store(
447 AMode::SPOffset(8),
448 link_reg(),
449 I64,
450 MemFlags::trusted(),
451 ));
452 insts.push(Inst::gen_load(
453 writable_fp_reg(),
454 AMode::SPOffset(i64::from(incoming_args_diff)),
455 I64,
456 MemFlags::trusted(),
457 ));
458 insts.push(Inst::gen_store(
459 AMode::SPOffset(0),
460 fp_reg(),
461 I64,
462 MemFlags::trusted(),
463 ));
464
465 insts.push(Inst::gen_move(writable_fp_reg(), stack_reg(), I64));
467 }
468 }
469
470 if flags.unwind_info() && setup_frame {
471 insts.push(Inst::Unwind {
474 inst: UnwindInst::DefineNewFrame {
475 offset_downward_to_clobbers: frame_layout.clobber_size,
476 offset_upward_to_caller_sp: frame_layout.setup_area_size,
477 },
478 });
479 }
480
481 let stack_size = frame_layout.clobber_size
484 + frame_layout.fixed_frame_storage_size
485 + frame_layout.outgoing_args_size;
486
487 if stack_size > 0 {
490 insts.extend(Self::gen_sp_reg_adjust(-(stack_size as i32)));
491
492 let mut cur_offset = 0;
493 for reg in &frame_layout.clobbered_callee_saves {
494 let r_reg = reg.to_reg();
495 let ty = match r_reg.class() {
496 RegClass::Int => I64,
497 RegClass::Float => F64,
498 RegClass::Vector => I8X16,
499 };
500 cur_offset = align_to(cur_offset, ty.bytes());
501 insts.push(Inst::gen_store(
502 AMode::SPOffset(i64::from(stack_size - cur_offset - ty.bytes())),
503 Reg::from(reg.to_reg()),
504 ty,
505 MemFlags::trusted(),
506 ));
507
508 if flags.unwind_info() {
509 insts.push(Inst::Unwind {
510 inst: UnwindInst::SaveReg {
511 clobber_offset: frame_layout.clobber_size - cur_offset - ty.bytes(),
512 reg: r_reg,
513 },
514 });
515 }
516
517 cur_offset += ty.bytes();
518 assert!(cur_offset <= stack_size);
519 }
520 }
521 insts
522 }
523
524 fn gen_clobber_restore(
525 _call_conv: isa::CallConv,
526 _flags: &settings::Flags,
527 frame_layout: &FrameLayout,
528 ) -> SmallVec<[Inst; 16]> {
529 let mut insts = SmallVec::new();
530
531 let stack_size = frame_layout.clobber_size
532 + frame_layout.fixed_frame_storage_size
533 + frame_layout.outgoing_args_size;
534 let mut cur_offset = 0;
535
536 for reg in &frame_layout.clobbered_callee_saves {
537 let rreg = reg.to_reg();
538 let ty = match rreg.class() {
539 RegClass::Int => I64,
540 RegClass::Float => F64,
541 RegClass::Vector => I8X16,
542 };
543 cur_offset = align_to(cur_offset, ty.bytes());
544 insts.push(Inst::gen_load(
545 reg.map(Reg::from),
546 AMode::SPOffset(i64::from(stack_size - cur_offset - ty.bytes())),
547 ty,
548 MemFlags::trusted(),
549 ));
550 cur_offset += ty.bytes();
551 }
552
553 if stack_size > 0 {
554 insts.extend(Self::gen_sp_reg_adjust(stack_size as i32));
555 }
556
557 insts
558 }
559
560 fn gen_memcpy<F: FnMut(Type) -> Writable<Reg>>(
561 call_conv: isa::CallConv,
562 dst: Reg,
563 src: Reg,
564 size: usize,
565 mut alloc_tmp: F,
566 ) -> SmallVec<[Self::I; 8]> {
567 let mut insts = SmallVec::new();
568 let arg0 = Writable::from_reg(x_reg(10));
569 let arg1 = Writable::from_reg(x_reg(11));
570 let arg2 = Writable::from_reg(x_reg(12));
571 let tmp = alloc_tmp(Self::word_type());
572 insts.extend(Inst::load_constant_u64(tmp, size as u64));
573 insts.push(Inst::Call {
574 info: Box::new(CallInfo {
575 dest: ExternalName::LibCall(LibCall::Memcpy),
576 uses: smallvec![
577 CallArgPair {
578 vreg: dst,
579 preg: arg0.to_reg()
580 },
581 CallArgPair {
582 vreg: src,
583 preg: arg1.to_reg()
584 },
585 CallArgPair {
586 vreg: tmp.to_reg(),
587 preg: arg2.to_reg()
588 }
589 ],
590 defs: smallvec![],
591 clobbers: Self::get_regs_clobbered_by_call(call_conv, false),
592 caller_conv: call_conv,
593 callee_conv: call_conv,
594 callee_pop_size: 0,
595 try_call_info: None,
596 }),
597 });
598 insts
599 }
600
601 fn get_number_of_spillslots_for_value(
602 rc: RegClass,
603 _target_vector_bytes: u32,
604 isa_flags: &RiscvFlags,
605 ) -> u32 {
606 match rc {
608 RegClass::Int => 1,
609 RegClass::Float => 1,
610 RegClass::Vector => (isa_flags.min_vec_reg_size() / 8) as u32,
611 }
612 }
613
614 fn get_machine_env(_flags: &settings::Flags, _call_conv: isa::CallConv) -> &MachineEnv {
615 static MACHINE_ENV: OnceLock<MachineEnv> = OnceLock::new();
616 MACHINE_ENV.get_or_init(create_reg_environment)
617 }
618
619 fn get_regs_clobbered_by_call(
620 call_conv_of_callee: isa::CallConv,
621 is_exception: bool,
622 ) -> PRegSet {
623 match call_conv_of_callee {
624 isa::CallConv::Tail if is_exception => ALL_CLOBBERS,
625 isa::CallConv::Patchable => NO_CLOBBERS,
626 _ => DEFAULT_CLOBBERS,
627 }
628 }
629
630 fn compute_frame_layout(
631 call_conv: isa::CallConv,
632 flags: &settings::Flags,
633 _sig: &Signature,
634 regs: &[Writable<RealReg>],
635 function_calls: FunctionCalls,
636 incoming_args_size: u32,
637 tail_args_size: u32,
638 stackslots_size: u32,
639 fixed_frame_storage_size: u32,
640 outgoing_args_size: u32,
641 ) -> FrameLayout {
642 let is_callee_saved = |reg: &Writable<RealReg>| match call_conv {
643 isa::CallConv::Patchable => true,
644 _ => DEFAULT_CALLEE_SAVES.contains(reg.to_reg().into()),
645 };
646 let mut regs: Vec<Writable<RealReg>> =
647 regs.iter().cloned().filter(is_callee_saved).collect();
648
649 regs.sort_unstable();
650
651 let clobber_size = compute_clobber_size(®s);
653
654 let setup_area_size = if flags.preserve_frame_pointers()
656 || function_calls != FunctionCalls::None
657 || incoming_args_size > 0
660 || clobber_size > 0
661 || fixed_frame_storage_size > 0
662 {
663 16 } else {
665 0
666 };
667
668 FrameLayout {
670 word_bytes: 8,
671 incoming_args_size,
672 tail_args_size,
673 setup_area_size,
674 clobber_size,
675 fixed_frame_storage_size,
676 stackslots_size,
677 outgoing_args_size,
678 clobbered_callee_saves: regs,
679 function_calls,
680 }
681 }
682
683 fn gen_inline_probestack(
684 insts: &mut SmallInstVec<Self::I>,
685 _call_conv: isa::CallConv,
686 frame_size: u32,
687 guard_size: u32,
688 ) {
689 const PROBE_MAX_UNROLL: u32 = 3;
691
692 let probe_count = frame_size / guard_size;
695 if probe_count == 0 {
696 return;
698 }
699
700 let tmp = Writable::from_reg(x_reg(28)); if probe_count <= PROBE_MAX_UNROLL {
704 Self::gen_probestack_unroll(insts, tmp, guard_size, probe_count)
705 } else {
706 insts.push(Inst::StackProbeLoop {
707 guard_size,
708 probe_count,
709 tmp,
710 });
711 }
712 }
713
714 fn retval_temp_reg(_call_conv_of_callee: isa::CallConv) -> Writable<Reg> {
715 Writable::from_reg(regs::x_reg(12))
718 }
719
720 fn exception_payload_regs(call_conv: isa::CallConv) -> &'static [Reg] {
721 const PAYLOAD_REGS: &'static [Reg] = &[regs::a0(), regs::a1()];
722 match call_conv {
723 isa::CallConv::SystemV | isa::CallConv::Tail => PAYLOAD_REGS,
724 _ => &[],
725 }
726 }
727}
728
729const DEFAULT_CALLEE_SAVES: PRegSet = PRegSet::empty()
731 .with(px_reg(2))
733 .with(px_reg(8))
734 .with(px_reg(9))
735 .with(px_reg(18))
736 .with(px_reg(19))
737 .with(px_reg(20))
738 .with(px_reg(21))
739 .with(px_reg(22))
740 .with(px_reg(23))
741 .with(px_reg(24))
742 .with(px_reg(25))
743 .with(px_reg(26))
744 .with(px_reg(27))
745 .with(pf_reg(8))
747 .with(pf_reg(18))
748 .with(pf_reg(19))
749 .with(pf_reg(20))
750 .with(pf_reg(21))
751 .with(pf_reg(22))
752 .with(pf_reg(23))
753 .with(pf_reg(24))
754 .with(pf_reg(25))
755 .with(pf_reg(26))
756 .with(pf_reg(27));
757
758fn compute_clobber_size(clobbers: &[Writable<RealReg>]) -> u32 {
759 let mut clobbered_size = 0;
760 for reg in clobbers {
761 match reg.to_reg().class() {
762 RegClass::Int => {
763 clobbered_size += 8;
764 }
765 RegClass::Float => {
766 clobbered_size += 8;
767 }
768 RegClass::Vector => {
769 clobbered_size = align_to(clobbered_size, 16);
770 clobbered_size += 16;
771 }
772 }
773 }
774 align_to(clobbered_size, 16)
775}
776
/// Registers treated as clobbered across an ordinary call: the
/// caller-saved x registers (ra, t0-t6, a0-a7), a set of f registers, and
/// every vector register (v0-v31, since the vector state is fully
/// caller-saved here).
///
/// NOTE(review): pf_reg(9) (fs1) is callee-saved per the RISC-V psABI;
/// listing it as a call clobber is conservative for our own calls, but it
/// should be cross-checked against `DEFAULT_CALLEE_SAVES` — verify this
/// asymmetry is intentional.
const DEFAULT_CLOBBERS: PRegSet = PRegSet::empty()
    // X registers (ra, t0-t2, a0-a7, t3-t6).
    .with(px_reg(1))
    .with(px_reg(5))
    .with(px_reg(6))
    .with(px_reg(7))
    .with(px_reg(10))
    .with(px_reg(11))
    .with(px_reg(12))
    .with(px_reg(13))
    .with(px_reg(14))
    .with(px_reg(15))
    .with(px_reg(16))
    .with(px_reg(17))
    .with(px_reg(28))
    .with(px_reg(29))
    .with(px_reg(30))
    .with(px_reg(31))
    // F registers.
    .with(pf_reg(0))
    .with(pf_reg(1))
    .with(pf_reg(2))
    .with(pf_reg(3))
    .with(pf_reg(4))
    .with(pf_reg(5))
    .with(pf_reg(6))
    .with(pf_reg(7))
    .with(pf_reg(9))
    .with(pf_reg(10))
    .with(pf_reg(11))
    .with(pf_reg(12))
    .with(pf_reg(13))
    .with(pf_reg(14))
    .with(pf_reg(15))
    .with(pf_reg(16))
    .with(pf_reg(17))
    .with(pf_reg(28))
    .with(pf_reg(29))
    .with(pf_reg(30))
    .with(pf_reg(31))
    // V registers: all of v0-v31.
    .with(pv_reg(0))
    .with(pv_reg(1))
    .with(pv_reg(2))
    .with(pv_reg(3))
    .with(pv_reg(4))
    .with(pv_reg(5))
    .with(pv_reg(6))
    .with(pv_reg(7))
    .with(pv_reg(8))
    .with(pv_reg(9))
    .with(pv_reg(10))
    .with(pv_reg(11))
    .with(pv_reg(12))
    .with(pv_reg(13))
    .with(pv_reg(14))
    .with(pv_reg(15))
    .with(pv_reg(16))
    .with(pv_reg(17))
    .with(pv_reg(18))
    .with(pv_reg(19))
    .with(pv_reg(20))
    .with(pv_reg(21))
    .with(pv_reg(22))
    .with(pv_reg(23))
    .with(pv_reg(24))
    .with(pv_reg(25))
    .with(pv_reg(26))
    .with(pv_reg(27))
    .with(pv_reg(28))
    .with(pv_reg(29))
    .with(pv_reg(30))
    .with(pv_reg(31));
849
/// Clobber set used for exceptional returns through `tail` calls: every
/// x register from x3 up, and all f and v registers. x0 (zero), x1 (ra),
/// and x2 (sp) are excluded — presumably because they are restored
/// explicitly by the unwind path; confirm against the unwinder.
const ALL_CLOBBERS: PRegSet = PRegSet::empty()
    // X registers (x3..=x31).
    .with(px_reg(3))
    .with(px_reg(4))
    .with(px_reg(5))
    .with(px_reg(6))
    .with(px_reg(7))
    .with(px_reg(8))
    .with(px_reg(9))
    .with(px_reg(10))
    .with(px_reg(11))
    .with(px_reg(12))
    .with(px_reg(13))
    .with(px_reg(14))
    .with(px_reg(15))
    .with(px_reg(16))
    .with(px_reg(17))
    .with(px_reg(18))
    .with(px_reg(19))
    .with(px_reg(20))
    .with(px_reg(21))
    .with(px_reg(22))
    .with(px_reg(23))
    .with(px_reg(24))
    .with(px_reg(25))
    .with(px_reg(26))
    .with(px_reg(27))
    .with(px_reg(28))
    .with(px_reg(29))
    .with(px_reg(30))
    .with(px_reg(31))
    // F registers (all of f0..=f31).
    .with(pf_reg(0))
    .with(pf_reg(1))
    .with(pf_reg(2))
    .with(pf_reg(3))
    .with(pf_reg(4))
    .with(pf_reg(5))
    .with(pf_reg(6))
    .with(pf_reg(7))
    .with(pf_reg(8))
    .with(pf_reg(9))
    .with(pf_reg(10))
    .with(pf_reg(11))
    .with(pf_reg(12))
    .with(pf_reg(13))
    .with(pf_reg(14))
    .with(pf_reg(15))
    .with(pf_reg(16))
    .with(pf_reg(17))
    .with(pf_reg(18))
    .with(pf_reg(19))
    .with(pf_reg(20))
    .with(pf_reg(21))
    .with(pf_reg(22))
    .with(pf_reg(23))
    .with(pf_reg(24))
    .with(pf_reg(25))
    .with(pf_reg(26))
    .with(pf_reg(27))
    .with(pf_reg(28))
    .with(pf_reg(29))
    .with(pf_reg(30))
    .with(pf_reg(31))
    // V registers (all of v0..=v31).
    .with(pv_reg(0))
    .with(pv_reg(1))
    .with(pv_reg(2))
    .with(pv_reg(3))
    .with(pv_reg(4))
    .with(pv_reg(5))
    .with(pv_reg(6))
    .with(pv_reg(7))
    .with(pv_reg(8))
    .with(pv_reg(9))
    .with(pv_reg(10))
    .with(pv_reg(11))
    .with(pv_reg(12))
    .with(pv_reg(13))
    .with(pv_reg(14))
    .with(pv_reg(15))
    .with(pv_reg(16))
    .with(pv_reg(17))
    .with(pv_reg(18))
    .with(pv_reg(19))
    .with(pv_reg(20))
    .with(pv_reg(21))
    .with(pv_reg(22))
    .with(pv_reg(23))
    .with(pv_reg(24))
    .with(pv_reg(25))
    .with(pv_reg(26))
    .with(pv_reg(27))
    .with(pv_reg(28))
    .with(pv_reg(29))
    .with(pv_reg(30))
    .with(pv_reg(31));
947
948const NO_CLOBBERS: PRegSet = PRegSet::empty();
949
/// Build the allocatable-register environment handed to regalloc2.
///
/// Preferred registers are a subset of the caller-saved argument
/// registers (x10-x15, f10-f15, and v8-v15); the non-preferred lists put
/// the remaining caller-saved registers first and the callee-saved ones
/// last, since using a callee-save costs a prologue save/restore.
/// x0-x4, x8 (fp), and x30/x31 (the spill temporaries) are never made
/// allocatable — TODO confirm against `spilltmp_reg`/`spilltmp_reg2`.
fn create_reg_environment() -> MachineEnv {
    let preferred_regs_by_class: [Vec<PReg>; 3] = {
        let x_registers: Vec<PReg> = (10..=15).map(px_reg).collect();
        let f_registers: Vec<PReg> = (10..=15).map(pf_reg).collect();
        let v_registers: Vec<PReg> = (8..=15).map(pv_reg).collect();

        [x_registers, f_registers, v_registers]
    };

    let non_preferred_regs_by_class: [Vec<PReg>; 3] = {
        // Remaining caller-saved x registers first (t0-t2, a6-a7, t3-t4),
        // then the callee-saved ones (s1, s2-s11).
        let x_registers: Vec<PReg> = (5..=7)
            .chain(16..=17)
            .chain(28..=29)
            .chain(9..=9)
            .chain(18..=27)
            .map(px_reg)
            .collect();

        // Same ordering idea for f registers: caller-saved, then f8-f9
        // and f18-f27 last.
        let f_registers: Vec<PReg> = (0..=7)
            .chain(16..=17)
            .chain(28..=31)
            .chain(8..=9)
            .chain(18..=27)
            .map(pf_reg)
            .collect();

        let v_registers = (0..=7).chain(16..=31).map(pv_reg).collect();

        [x_registers, f_registers, v_registers]
    };

    MachineEnv {
        preferred_regs_by_class,
        non_preferred_regs_by_class,
        fixed_stack_slots: vec![],
        scratch_by_class: [None, None, None],
    }
}
1008
impl Riscv64MachineDeps {
    /// Emit `probe_count` unrolled stack probes: repeatedly move SP down
    /// by `guard_size` and store a zero word at the new SP so each guard
    /// page is touched, then restore SP to its original value.
    fn gen_probestack_unroll(
        insts: &mut SmallInstVec<Inst>,
        tmp: Writable<Reg>,
        guard_size: u32,
        probe_count: u32,
    ) {
        // Materialize -guard_size once in `tmp`; each probe is then a
        // single `add sp, sp, tmp`.
        insts.extend(Inst::load_constant_u64(tmp, (-(guard_size as i64)) as u64));

        for _ in 0..probe_count {
            insts.push(Inst::AluRRR {
                alu_op: AluOPRRR::Add,
                rd: writable_stack_reg(),
                rs1: stack_reg(),
                rs2: tmp.to_reg(),
            });

            // Touch the page: sw zero, 0(sp).
            insts.push(Inst::gen_store(
                AMode::SPOffset(0),
                zero_reg(),
                I32,
                MemFlags::trusted(),
            ));
        }

        // Undo all the probe decrements, restoring the original SP.
        insts.extend(Self::gen_sp_reg_adjust((guard_size * probe_count) as i32));
    }
}