//! Implementation of a standard RISC-V 64-bit ABI.

use crate::ir;
use crate::ir::types::*;

use crate::isa;

use crate::isa::riscv64::{inst::*, Riscv64Backend};
use crate::isa::CallConv;
use crate::machinst::*;

use crate::ir::LibCall;
use crate::ir::Signature;
use crate::isa::riscv64::settings::Flags as RiscvFlags;
use crate::isa::unwind::UnwindInst;
use crate::settings;
use crate::CodegenResult;
use alloc::boxed::Box;
use alloc::vec::Vec;
use regalloc2::{MachineEnv, PReg, PRegSet};

use smallvec::{smallvec, SmallVec};
use std::borrow::ToOwned;
use std::sync::OnceLock;

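/// Support for the RISC-V ABI from the callee side (within a function body).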
pub(crate) type Riscv64Callee = Callee<Riscv64MachineDeps>;

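/// Support for the RISC-V ABI from the caller side (at a callsite).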
pub(crate) type Riscv64ABICallSite = CallSite<Riscv64MachineDeps>;

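/// RISC-V 64 ABI information shared between body (callee) and caller.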
pub struct Riscv64MachineDeps;

impl IsaFlags for RiscvFlags {}

impl RiscvFlags {
    pub(crate) fn min_vec_reg_size(&self) -> u64 {
        // Entries are ordered from largest to smallest; the first enabled
        // extension determines the guaranteed minimum vector register size.
        let entries = [
            (self.has_zvl65536b(), 65536),
            (self.has_zvl32768b(), 32768),
            (self.has_zvl16384b(), 16384),
            (self.has_zvl8192b(), 8192),
            (self.has_zvl4096b(), 4096),
            (self.has_zvl2048b(), 2048),
            (self.has_zvl1024b(), 1024),
            (self.has_zvl512b(), 512),
            (self.has_zvl256b(), 256),
            // In order to claim the Application Profile V extension, a minimum
            // register size of 128 is required.
            (self.has_v(), 128),
            (self.has_zvl128b(), 128),
            (self.has_zvl64b(), 64),
            (self.has_zvl32b(), 32),
        ];

        for (has_flag, size) in entries.into_iter() {
            if !has_flag {
                continue;
            }

            // Due to a limitation in regalloc2, we can't support types larger
            // than 1024 bytes, so limit the reported size here.
            return std::cmp::min(size, 1024);
        }

        0
    }
}

impl ABIMachineSpec for Riscv64MachineDeps {
    type I = Inst;
    type F = RiscvFlags;

    /// This is the limit for the size of argument and return-value areas on
    /// the stack. We place a reasonable limit here to avoid integer overflow
    /// issues with 32-bit arithmetic: for now, 128 MB.
    const STACK_ARG_RET_SIZE_LIMIT: u32 = 128 * 1024 * 1024;

    fn word_bits() -> u32 {
        64
    }

    /// Return required stack alignment in bytes.
    fn stack_align(_call_conv: isa::CallConv) -> u32 {
        16
    }

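    /// Compute locations for arguments and return values. Integer values are
    /// passed in x10-x17 (a0-a7) and floats in f10-f17 (fa0-fa7); returns may
    /// use only the first two registers of each class. Values that do not fit
    /// in registers are passed on the stack.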
    fn compute_arg_locs(
        call_conv: isa::CallConv,
        flags: &settings::Flags,
        params: &[ir::AbiParam],
        args_or_rets: ArgsOrRets,
        add_ret_area_ptr: bool,
        mut args: ArgsAccumulator,
    ) -> CodegenResult<(u32, Option<usize>)> {
        assert_ne!(
            call_conv,
            isa::CallConv::Winch,
            "riscv64 does not support the 'winch' calling convention yet"
        );

        // All registers that can be used as parameters or rets; both the
        // start and end of each range are inclusive.
        let (x_start, x_end, f_start, f_end) = match args_or_rets {
            ArgsOrRets::Args => (10, 17, 10, 17),
            ArgsOrRets::Rets => (10, 11, 10, 11),
        };
        let mut next_x_reg = x_start;
        let mut next_f_reg = f_start;
        // Stack space used so far, in bytes.
        let mut next_stack: u32 = 0;

        let ret_area_ptr = if add_ret_area_ptr {
            assert!(ArgsOrRets::Args == args_or_rets);
            next_x_reg += 1;
            // The return-area pointer occupies the first argument register;
            // normal arguments start one register later.
            Some(ABIArg::reg(
                x_reg(x_start).to_real_reg().unwrap(),
                I64,
                ir::ArgumentExtension::None,
                ir::ArgumentPurpose::Normal,
            ))
        } else {
            None
        };

        for param in params {
            if let ir::ArgumentPurpose::StructArgument(_) = param.purpose {
                panic!(
                    "StructArgument parameters are not supported on riscv64. \
                     Use regular pointer arguments instead."
                );
            }

            // Find the register class(es) needed to hold a value of this type.
            let (rcs, reg_tys) = Inst::rc_for_type(param.value_type)?;
            let mut slots = ABIArgSlotVec::new();
            for (rc, reg_ty) in rcs.iter().zip(reg_tys.iter()) {
                let next_reg = if (next_x_reg <= x_end) && *rc == RegClass::Int {
                    let x = Some(x_reg(next_x_reg));
                    next_x_reg += 1;
                    x
                } else if (next_f_reg <= f_end) && *rc == RegClass::Float {
                    let x = Some(f_reg(next_f_reg));
                    next_f_reg += 1;
                    x
                } else {
                    None
                };
                if let Some(reg) = next_reg {
                    slots.push(ABIArgSlot::Reg {
                        reg: reg.to_real_reg().unwrap(),
                        ty: *reg_ty,
                        extension: param.extension,
                    });
                } else {
                    if args_or_rets == ArgsOrRets::Rets && !flags.enable_multi_ret_implicit_sret() {
                        return Err(crate::CodegenError::Unsupported(
                            "Too many return values to fit in registers. \
                             Use a StructReturn argument instead. (#9510)"
                                .to_owned(),
                        ));
                    }

                    // Compute the stack slot's size: at least word-sized, and
                    // aligned to its own (power-of-two) size.
                    let size = reg_ty.bits() / 8;
                    let size = std::cmp::max(size, 8);
                    debug_assert!(size.is_power_of_two());
                    next_stack = align_to(next_stack, size);
                    slots.push(ABIArgSlot::Stack {
                        offset: next_stack as i64,
                        ty: *reg_ty,
                        extension: param.extension,
                    });
                    next_stack += size;
                }
            }
            args.push(ABIArg::Slots {
                slots,
                purpose: param.purpose,
            });
        }
        let pos = if let Some(ret_area_ptr) = ret_area_ptr {
            args.push_non_formal(ret_area_ptr);
            Some(args.args().len() - 1)
        } else {
            None
        };

        next_stack = align_to(next_stack, Self::stack_align(call_conv));

        Ok((next_stack, pos))
    }

    fn gen_load_stack(mem: StackAMode, into_reg: Writable<Reg>, ty: Type) -> Inst {
        Inst::gen_load(into_reg, mem.into(), ty, MemFlags::trusted())
    }

    fn gen_store_stack(mem: StackAMode, from_reg: Reg, ty: Type) -> Inst {
        Inst::gen_store(mem.into(), from_reg, ty, MemFlags::trusted())
    }

    fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Inst {
        Inst::gen_move(to_reg, from_reg, ty)
    }

    fn gen_extend(
        to_reg: Writable<Reg>,
        from_reg: Reg,
        signed: bool,
        from_bits: u8,
        to_bits: u8,
    ) -> Inst {
        assert!(from_bits < to_bits);
        Inst::Extend {
            rd: to_reg,
            rn: from_reg,
            signed,
            from_bits,
            to_bits,
        }
    }

    fn get_ext_mode(
        _call_conv: isa::CallConv,
        specified: ir::ArgumentExtension,
    ) -> ir::ArgumentExtension {
        specified
    }

    fn gen_args(args: Vec<ArgPair>) -> Inst {
        Inst::Args { args }
    }

    fn gen_rets(rets: Vec<RetPair>) -> Inst {
        Inst::Rets { rets }
    }

    fn get_stacklimit_reg(_call_conv: isa::CallConv) -> Reg {
        spilltmp_reg()
    }

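    /// Add a constant to `from_reg`, writing the result to `into_reg`. If the
    /// constant does not fit in an `Imm12`, it is first materialized into the
    /// second spill temporary.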
    fn gen_add_imm(
        _call_conv: isa::CallConv,
        into_reg: Writable<Reg>,
        from_reg: Reg,
        imm: u32,
    ) -> SmallInstVec<Inst> {
        let mut insts = SmallInstVec::new();
        if let Some(imm12) = Imm12::maybe_from_u64(imm as u64) {
            insts.push(Inst::AluRRImm12 {
                alu_op: AluOPRRI::Addi,
                rd: into_reg,
                rs: from_reg,
                imm12,
            });
        } else {
            insts.extend(Inst::load_constant_u32(
                writable_spilltmp_reg2(),
                imm as u64,
            ));
            insts.push(Inst::AluRRR {
                alu_op: AluOPRRR::Add,
                rd: into_reg,
                rs1: spilltmp_reg2(),
                rs2: from_reg,
            });
        }
        insts
    }

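    /// Trap with `STACK_OVERFLOW` if SP has dropped below `limit_reg`.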
    fn gen_stack_lower_bound_trap(limit_reg: Reg) -> SmallInstVec<Inst> {
        let mut insts = SmallVec::new();
        insts.push(Inst::TrapIf {
            cc: IntCC::UnsignedLessThan,
            rs1: stack_reg(),
            rs2: limit_reg,
            trap_code: ir::TrapCode::STACK_OVERFLOW,
        });
        insts
    }

    fn gen_get_stack_addr(mem: StackAMode, into_reg: Writable<Reg>) -> Inst {
        Inst::LoadAddr {
            rd: into_reg,
            mem: mem.into(),
        }
    }

    fn gen_load_base_offset(into_reg: Writable<Reg>, base: Reg, offset: i32, ty: Type) -> Inst {
        let mem = AMode::RegOffset(base, offset as i64);
        Inst::gen_load(into_reg, mem, ty, MemFlags::trusted())
    }

    fn gen_store_base_offset(base: Reg, offset: i32, from_reg: Reg, ty: Type) -> Inst {
        let mem = AMode::RegOffset(base, offset as i64);
        Inst::gen_store(mem, from_reg, ty, MemFlags::trusted())
    }

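    /// Adjust SP by `amount` bytes, materializing the offset in the spill
    /// temporary when it does not fit in an `Imm12`.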
    fn gen_sp_reg_adjust(amount: i32) -> SmallInstVec<Inst> {
        let mut insts = SmallVec::new();

        if amount == 0 {
            return insts;
        }

        if let Some(imm) = Imm12::maybe_from_i64(amount as i64) {
            insts.push(Inst::AluRRImm12 {
                alu_op: AluOPRRI::Addi,
                rd: writable_stack_reg(),
                rs: stack_reg(),
                imm12: imm,
            })
        } else {
            let tmp = writable_spilltmp_reg();
            insts.extend(Inst::load_constant_u64(tmp, amount as i64 as u64));
            insts.push(Inst::AluRRR {
                alu_op: AluOPRRR::Add,
                rd: writable_stack_reg(),
                rs1: stack_reg(),
                rs2: tmp.to_reg(),
            });
        }

        insts
    }

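    /// Set up the standard frame: push a 16-byte setup area holding the
    /// return address and the caller's frame pointer, then point FP at it.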
    fn gen_prologue_frame_setup(
        _call_conv: isa::CallConv,
        flags: &settings::Flags,
        _isa_flags: &RiscvFlags,
        frame_layout: &FrameLayout,
    ) -> SmallInstVec<Inst> {
        let mut insts = SmallVec::new();

        if frame_layout.setup_area_size > 0 {
            // add  sp,sp,-16    ;; alloc stack space for fp.
            // sd   ra,8(sp)     ;; save ra.
            // sd   fp,0(sp)     ;; store old fp.
            // mv   fp,sp        ;; set fp to sp.
            insts.extend(Self::gen_sp_reg_adjust(-16));
            insts.push(Inst::gen_store(
                AMode::SPOffset(8),
                link_reg(),
                I64,
                MemFlags::trusted(),
            ));
            insts.push(Inst::gen_store(
                AMode::SPOffset(0),
                fp_reg(),
                I64,
                MemFlags::trusted(),
            ));

            if flags.unwind_info() {
                insts.push(Inst::Unwind {
                    inst: UnwindInst::PushFrameRegs {
                        offset_upward_to_caller_sp: frame_layout.setup_area_size,
                    },
                });
            }
            insts.push(Inst::Mov {
                rd: writable_fp_reg(),
                rm: stack_reg(),
                ty: I64,
            });
        }

        insts
    }

    fn gen_epilogue_frame_restore(
        call_conv: isa::CallConv,
        _flags: &settings::Flags,
        _isa_flags: &RiscvFlags,
        frame_layout: &FrameLayout,
    ) -> SmallInstVec<Inst> {
        let mut insts = SmallVec::new();

        if frame_layout.setup_area_size > 0 {
            insts.push(Inst::gen_load(
                writable_link_reg(),
                AMode::SPOffset(8),
                I64,
                MemFlags::trusted(),
            ));
            insts.push(Inst::gen_load(
                writable_fp_reg(),
                AMode::SPOffset(0),
                I64,
                MemFlags::trusted(),
            ));
            insts.extend(Self::gen_sp_reg_adjust(16));
        }

        // The tail-call convention has the callee deallocate the incoming
        // stack-argument area on return, so pop it here as well.
        if call_conv == isa::CallConv::Tail && frame_layout.tail_args_size > 0 {
            insts.extend(Self::gen_sp_reg_adjust(
                frame_layout.tail_args_size.try_into().unwrap(),
            ));
        }

        insts
    }

    fn gen_return(
        _call_conv: isa::CallConv,
        _isa_flags: &RiscvFlags,
        _frame_layout: &FrameLayout,
    ) -> SmallInstVec<Inst> {
        smallvec![Inst::Ret {}]
    }

    fn gen_probestack(insts: &mut SmallInstVec<Self::I>, frame_size: u32) {
        // The probestack libcall takes the frame size in a0.
        insts.extend(Inst::load_constant_u32(writable_a0(), frame_size as u64));
        let mut info = CallInfo::empty(
            ExternalName::LibCall(LibCall::Probestack),
            CallConv::SystemV,
        );
        info.uses.push(CallArgPair {
            vreg: a0(),
            preg: a0(),
        });
        insts.push(Inst::Call {
            info: Box::new(info),
        });
    }

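    /// Allocate the remainder of the frame and save the clobbered
    /// callee-saved registers above the fixed frame, emitting unwind info
    /// along the way.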
    fn gen_clobber_save(
        _call_conv: isa::CallConv,
        flags: &settings::Flags,
        frame_layout: &FrameLayout,
    ) -> SmallVec<[Inst; 16]> {
        let mut insts = SmallVec::new();
        let setup_frame = frame_layout.setup_area_size > 0;

        let incoming_args_diff = frame_layout.tail_args_size - frame_layout.incoming_args_size;
        if incoming_args_diff > 0 {
            // Decrement SP by the amount of additional incoming argument
            // space we need.
            insts.extend(Self::gen_sp_reg_adjust(-(incoming_args_diff as i32)));

            if setup_frame {
                // Write the return-address position on the stack again; it
                // hasn't changed since it was pushed in
                // `gen_prologue_frame_setup`.
                insts.push(Inst::gen_store(
                    AMode::SPOffset(8),
                    link_reg(),
                    I64,
                    MemFlags::trusted(),
                ));
                insts.push(Inst::gen_load(
                    writable_fp_reg(),
                    AMode::SPOffset(i64::from(incoming_args_diff)),
                    I64,
                    MemFlags::trusted(),
                ));
                insts.push(Inst::gen_store(
                    AMode::SPOffset(0),
                    fp_reg(),
                    I64,
                    MemFlags::trusted(),
                ));

                // Finally, sync the frame pointer with SP.
                insts.push(Inst::gen_move(writable_fp_reg(), stack_reg(), I64));
            }
        }

        if flags.unwind_info() && setup_frame {
            // The unwind frame starts at the clobber area, just below the
            // saved FP/return-address pair.
            insts.push(Inst::Unwind {
                inst: UnwindInst::DefineNewFrame {
                    offset_downward_to_clobbers: frame_layout.clobber_size,
                    offset_upward_to_caller_sp: frame_layout.setup_area_size,
                },
            });
        }

        // Adjust the stack pointer downward for clobbers, the function fixed
        // frame (spillslots and storage slots), and outgoing arguments.
        let stack_size = frame_layout.clobber_size
            + frame_layout.fixed_frame_storage_size
            + frame_layout.outgoing_args_size;

        // Store each clobbered register in order at offsets from SP, placing
        // them above the function fixed frame.
        if stack_size > 0 {
            insts.extend(Self::gen_sp_reg_adjust(-(stack_size as i32)));

            let mut cur_offset = 8;
            for reg in &frame_layout.clobbered_callee_saves {
                let r_reg = reg.to_reg();
                let ty = match r_reg.class() {
                    RegClass::Int => I64,
                    RegClass::Float => F64,
                    RegClass::Vector => unimplemented!("Vector Clobber Saves"),
                };
                insts.push(Inst::gen_store(
                    AMode::SPOffset((stack_size - cur_offset) as i64),
                    Reg::from(reg.to_reg()),
                    ty,
                    MemFlags::trusted(),
                ));

                if flags.unwind_info() {
                    insts.push(Inst::Unwind {
                        inst: UnwindInst::SaveReg {
                            clobber_offset: frame_layout.clobber_size - cur_offset,
                            reg: r_reg,
                        },
                    });
                }

                cur_offset += 8;
            }
        }
        insts
    }

    fn gen_clobber_restore(
        _call_conv: isa::CallConv,
        _flags: &settings::Flags,
        frame_layout: &FrameLayout,
    ) -> SmallVec<[Inst; 16]> {
        let mut insts = SmallVec::new();

        let stack_size = frame_layout.clobber_size
            + frame_layout.fixed_frame_storage_size
            + frame_layout.outgoing_args_size;

        // Reload the clobbered callee-saved registers from the same offsets
        // used in `gen_clobber_save`.
        let mut cur_offset = 8;
        for reg in &frame_layout.clobbered_callee_saves {
            let rreg = reg.to_reg();
            let ty = match rreg.class() {
                RegClass::Int => I64,
                RegClass::Float => F64,
                RegClass::Vector => unimplemented!("Vector Clobber Restores"),
            };
            insts.push(Inst::gen_load(
                reg.map(Reg::from),
                AMode::SPOffset(i64::from(stack_size - cur_offset)),
                ty,
                MemFlags::trusted(),
            ));
            cur_offset += 8;
        }

        if stack_size > 0 {
            insts.extend(Self::gen_sp_reg_adjust(stack_size as i32));
        }

        insts
    }

    fn gen_call(dest: &CallDest, tmp: Writable<Reg>, info: CallInfo<()>) -> SmallVec<[Self::I; 2]> {
        let mut insts = SmallVec::new();
        match &dest {
            CallDest::ExtName(name, RelocDistance::Near) => {
                let info = Box::new(info.map(|()| name.clone()));
                insts.push(Inst::Call { info })
            }
            CallDest::ExtName(name, RelocDistance::Far) => {
                insts.push(Inst::LoadExtName {
                    rd: tmp,
                    name: Box::new(name.clone()),
                    offset: 0,
                });
                let info = Box::new(info.map(|()| tmp.to_reg()));
                insts.push(Inst::CallInd { info });
            }
            CallDest::Reg(reg) => {
                let info = Box::new(info.map(|()| *reg));
                insts.push(Inst::CallInd { info });
            }
        }
        insts
    }

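    /// Emit a call to the `Memcpy` libcall, passing `dst`, `src`, and `size`
    /// in a0, a1, and a2 respectively.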
    fn gen_memcpy<F: FnMut(Type) -> Writable<Reg>>(
        call_conv: isa::CallConv,
        dst: Reg,
        src: Reg,
        size: usize,
        mut alloc_tmp: F,
    ) -> SmallVec<[Self::I; 8]> {
        let mut insts = SmallVec::new();
        // memcpy(dst: a0, src: a1, size: a2)
        let arg0 = Writable::from_reg(x_reg(10));
        let arg1 = Writable::from_reg(x_reg(11));
        let arg2 = Writable::from_reg(x_reg(12));
        let tmp = alloc_tmp(Self::word_type());
        insts.extend(Inst::load_constant_u64(tmp, size as u64).into_iter());
        insts.push(Inst::Call {
            info: Box::new(CallInfo {
                dest: ExternalName::LibCall(LibCall::Memcpy),
                uses: smallvec![
                    CallArgPair {
                        vreg: dst,
                        preg: arg0.to_reg()
                    },
                    CallArgPair {
                        vreg: src,
                        preg: arg1.to_reg()
                    },
                    CallArgPair {
                        vreg: tmp.to_reg(),
                        preg: arg2.to_reg()
                    }
                ],
                defs: smallvec![],
                clobbers: Self::get_regs_clobbered_by_call(call_conv),
                caller_conv: call_conv,
                callee_conv: call_conv,
                callee_pop_size: 0,
            }),
        });
        insts
    }

    fn get_number_of_spillslots_for_value(
        rc: RegClass,
        _target_vector_bytes: u32,
        isa_flags: &RiscvFlags,
    ) -> u32 {
        // We allocate in terms of 8-byte slots.
        match rc {
            RegClass::Int => 1,
            RegClass::Float => 1,
            RegClass::Vector => (isa_flags.min_vec_reg_size() / 8) as u32,
        }
    }

    fn get_machine_env(_flags: &settings::Flags, _call_conv: isa::CallConv) -> &MachineEnv {
        static MACHINE_ENV: OnceLock<MachineEnv> = OnceLock::new();
        MACHINE_ENV.get_or_init(create_reg_environment)
    }

    fn get_regs_clobbered_by_call(_call_conv_of_callee: isa::CallConv) -> PRegSet {
        DEFAULT_CLOBBERS
    }

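    /// Compute the frame layout: which callee-saved registers are clobbered,
    /// the size of the clobber area, and whether an FP/RA setup area is
    /// needed at all.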
    fn compute_frame_layout(
        _call_conv: isa::CallConv,
        flags: &settings::Flags,
        _sig: &Signature,
        regs: &[Writable<RealReg>],
        is_leaf: bool,
        incoming_args_size: u32,
        tail_args_size: u32,
        fixed_frame_storage_size: u32,
        outgoing_args_size: u32,
    ) -> FrameLayout {
        let mut regs: Vec<Writable<RealReg>> = regs
            .iter()
            .cloned()
            .filter(|r| DEFAULT_CALLEE_SAVES.contains(r.to_reg().into()))
            .collect();

        // Sort for deterministic output.
        regs.sort_unstable();

        // Compute clobber size.
        let clobber_size = compute_clobber_size(&regs);

        // Compute linkage frame size.
        let setup_area_size = if flags.preserve_frame_pointers()
            || !is_leaf
            // The function arguments that are passed on the stack are
            // addressed relative to the Frame Pointer.
            || incoming_args_size > 0
            || clobber_size > 0
            || fixed_frame_storage_size > 0
        {
            16 // FP, LR
        } else {
            0
        };

        FrameLayout {
            incoming_args_size,
            tail_args_size,
            setup_area_size,
            clobber_size,
            fixed_frame_storage_size,
            outgoing_args_size,
            clobbered_callee_saves: regs,
        }
    }

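    /// Probe the stack inline, one guard-size region at a time: unrolled for
    /// small frames, via `StackProbeLoop` for larger ones.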
    fn gen_inline_probestack(
        insts: &mut SmallInstVec<Self::I>,
        _call_conv: isa::CallConv,
        frame_size: u32,
        guard_size: u32,
    ) {
        // Unroll at most this many probes before falling back to a loop.
        const PROBE_MAX_UNROLL: u32 = 3;

        // Calculate how many probes we need to perform. Round down, as we
        // only need to probe whole guard_size regions we'd otherwise skip
        // over.
        let probe_count = frame_size / guard_size;
        if probe_count == 0 {
            // No probe necessary.
            return;
        }

        // Must be a caller-saved register that is not an argument.
        let tmp = Writable::from_reg(x_reg(28)); // t3

        if probe_count <= PROBE_MAX_UNROLL {
            Self::gen_probestack_unroll(insts, tmp, guard_size, probe_count)
        } else {
            insts.push(Inst::StackProbeLoop {
                guard_size,
                probe_count,
                tmp,
            });
        }
    }
}

impl Riscv64ABICallSite {
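    /// Emit a tail call, lowering to `ReturnCall` for near destinations and
    /// `ReturnCallInd` for far or register destinations.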
    pub fn emit_return_call(
        mut self,
        ctx: &mut Lower<Inst>,
        args: isle::ValueSlice,
        _backend: &Riscv64Backend,
    ) {
        let new_stack_arg_size =
            u32::try_from(self.sig(ctx.sigs()).sized_stack_arg_space()).unwrap();

        ctx.abi_mut().accumulate_tail_args_size(new_stack_arg_size);

        // Put all arguments in registers and stack slots (within that newly
        // allocated stack space).
        self.emit_args(ctx, args);
        self.emit_stack_ret_arg_for_tail_call(ctx);

        let dest = self.dest().clone();
        let uses = self.take_uses();

        match dest {
            CallDest::ExtName(name, RelocDistance::Near) => {
                let info = Box::new(ReturnCallInfo {
                    dest: name,
                    uses,
                    new_stack_arg_size,
                });
                ctx.emit(Inst::ReturnCall { info });
            }
            CallDest::ExtName(name, RelocDistance::Far) => {
                let callee = ctx.alloc_tmp(ir::types::I64).only_reg().unwrap();
                ctx.emit(Inst::LoadExtName {
                    rd: callee,
                    name: Box::new(name),
                    offset: 0,
                });
                let info = Box::new(ReturnCallInfo {
                    dest: callee.to_reg(),
                    uses,
                    new_stack_arg_size,
                });
                ctx.emit(Inst::ReturnCallInd { info });
            }
            CallDest::Reg(callee) => {
                let info = Box::new(ReturnCallInfo {
                    dest: callee,
                    uses,
                    new_stack_arg_size,
                });
                ctx.emit(Inst::ReturnCallInd { info });
            }
        }
    }
}

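/// Registers that the callee must preserve across a call under the default
/// RISC-V calling conventions. All vector registers are caller-saved.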
const DEFAULT_CALLEE_SAVES: PRegSet = PRegSet::empty()
    // X registers.
    .with(px_reg(2))
    .with(px_reg(8))
    .with(px_reg(9))
    .with(px_reg(18))
    .with(px_reg(19))
    .with(px_reg(20))
    .with(px_reg(21))
    .with(px_reg(22))
    .with(px_reg(23))
    .with(px_reg(24))
    .with(px_reg(25))
    .with(px_reg(26))
    .with(px_reg(27))
    // F registers.
    .with(pf_reg(8))
    .with(pf_reg(18))
    .with(pf_reg(19))
    .with(pf_reg(20))
    .with(pf_reg(21))
    .with(pf_reg(22))
    .with(pf_reg(23))
    .with(pf_reg(24))
    .with(pf_reg(25))
    .with(pf_reg(26))
    .with(pf_reg(27));

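/// Compute the size of the clobber area needed to save the given registers,
/// rounded up to 16-byte alignment.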
fn compute_clobber_size(clobbers: &[Writable<RealReg>]) -> u32 {
    let mut clobbered_size = 0;
    for reg in clobbers {
        match reg.to_reg().class() {
            RegClass::Int => {
                clobbered_size += 8;
            }
            RegClass::Float => {
                clobbered_size += 8;
            }
            RegClass::Vector => unimplemented!("Vector Size Clobbered"),
        }
    }
    align_to(clobbered_size, 16)
}

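/// Registers assumed clobbered by a call: the caller-saved X and F registers
/// (plus f9, which this backend treats as caller-saved), and all V registers.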
const DEFAULT_CLOBBERS: PRegSet = PRegSet::empty()
    // X registers.
    .with(px_reg(1))
    .with(px_reg(5))
    .with(px_reg(6))
    .with(px_reg(7))
    .with(px_reg(10))
    .with(px_reg(11))
    .with(px_reg(12))
    .with(px_reg(13))
    .with(px_reg(14))
    .with(px_reg(15))
    .with(px_reg(16))
    .with(px_reg(17))
    .with(px_reg(28))
    .with(px_reg(29))
    .with(px_reg(30))
    .with(px_reg(31))
    // F registers.
    .with(pf_reg(0))
    .with(pf_reg(1))
    .with(pf_reg(2))
    .with(pf_reg(3))
    .with(pf_reg(4))
    .with(pf_reg(5))
    .with(pf_reg(6))
    .with(pf_reg(7))
    .with(pf_reg(9))
    .with(pf_reg(10))
    .with(pf_reg(11))
    .with(pf_reg(12))
    .with(pf_reg(13))
    .with(pf_reg(14))
    .with(pf_reg(15))
    .with(pf_reg(16))
    .with(pf_reg(17))
    .with(pf_reg(28))
    .with(pf_reg(29))
    .with(pf_reg(30))
    .with(pf_reg(31))
    // V registers.
    .with(pv_reg(0))
    .with(pv_reg(1))
    .with(pv_reg(2))
    .with(pv_reg(3))
    .with(pv_reg(4))
    .with(pv_reg(5))
    .with(pv_reg(6))
    .with(pv_reg(7))
    .with(pv_reg(8))
    .with(pv_reg(9))
    .with(pv_reg(10))
    .with(pv_reg(11))
    .with(pv_reg(12))
    .with(pv_reg(13))
    .with(pv_reg(14))
    .with(pv_reg(15))
    .with(pv_reg(16))
    .with(pv_reg(17))
    .with(pv_reg(18))
    .with(pv_reg(19))
    .with(pv_reg(20))
    .with(pv_reg(21))
    .with(pv_reg(22))
    .with(pv_reg(23))
    .with(pv_reg(24))
    .with(pv_reg(25))
    .with(pv_reg(26))
    .with(pv_reg(27))
    .with(pv_reg(28))
    .with(pv_reg(29))
    .with(pv_reg(30))
    .with(pv_reg(31));

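/// Build the `MachineEnv` that tells regalloc2 which registers it may use
/// and in which order to prefer them.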
fn create_reg_environment() -> MachineEnv {
    // Some compressed (C extension) instructions can only address a subset of
    // the register file: x8-x15, f8-f15, and v8-v15. Prefer those registers
    // since they allow smaller encodings.
    let preferred_regs_by_class: [Vec<PReg>; 3] = {
        let x_registers: Vec<PReg> = (10..=15).map(px_reg).collect();
        let f_registers: Vec<PReg> = (10..=15).map(pf_reg).collect();
        let v_registers: Vec<PReg> = (8..=15).map(pv_reg).collect();

        [x_registers, f_registers, v_registers]
    };

    let non_preferred_regs_by_class: [Vec<PReg>; 3] = {
        // x0-x4 are special registers (zero, ra, sp, gp, tp), x8 is the frame
        // pointer, and x30/x31 are reserved as spill temporaries, so none of
        // them are allocatable.
        //
        // Start with the remaining caller-saved registers.
        let x_registers: Vec<PReg> = (5..=7)
            .chain(16..=17)
            .chain(28..=29)
            // x9 is callee-saved but still allocatable.
            .chain(9..=9)
            // The rest are the callee-saved registers.
            .chain(18..=27)
            .map(px_reg)
            .collect();

        // Prefer the caller-saved F registers, then fall back to the
        // callee-saved ones.
        let f_registers: Vec<PReg> = (0..=7)
            .chain(16..=17)
            .chain(28..=31)
            .chain(8..=9)
            .chain(18..=27)
            .map(pf_reg)
            .collect();

        let v_registers = (0..=7).chain(16..=31).map(pv_reg).collect();

        [x_registers, f_registers, v_registers]
    };

    MachineEnv {
        preferred_regs_by_class,
        non_preferred_regs_by_class,
        fixed_stack_slots: vec![],
        scratch_by_class: [None, None, None],
    }
}

impl Riscv64MachineDeps {
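    /// Emit an unrolled sequence of `probe_count` stack probes, touching one
    /// guard-size region per probe.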
    fn gen_probestack_unroll(
        insts: &mut SmallInstVec<Inst>,
        tmp: Writable<Reg>,
        guard_size: u32,
        probe_count: u32,
    ) {
        // When manually unrolling, adjust the stack pointer and then write a
        // zero to the stack at that point. This generates something like:
        //
        //     li   t3, -guard_size
        //     add  sp, sp, t3
        //     sw   zero, 0(sp)
        //     (repeated probe_count times)
        //
        // followed by restoring SP by guard_size * probe_count.
        insts.extend(Inst::load_constant_u64(tmp, (-(guard_size as i64)) as u64));

        for _ in 0..probe_count {
            insts.push(Inst::AluRRR {
                alu_op: AluOPRRR::Add,
                rd: writable_stack_reg(),
                rs1: stack_reg(),
                rs2: tmp.to_reg(),
            });

            insts.push(Inst::gen_store(
                AMode::SPOffset(0),
                zero_reg(),
                I32,
                MemFlags::trusted(),
            ));
        }

        // Restore the stack pointer to its original value.
        insts.extend(Self::gen_sp_reg_adjust((guard_size * probe_count) as i32));
    }
}
995}