1use crate::ir;
4use crate::ir::types::*;
5
6use crate::isa;
7
8use crate::isa::CallConv;
9use crate::isa::riscv64::inst::*;
10use crate::machinst::*;
11
12use crate::CodegenResult;
13use crate::ir::LibCall;
14use crate::ir::Signature;
15use crate::isa::riscv64::settings::Flags as RiscvFlags;
16use crate::isa::unwind::UnwindInst;
17use crate::settings;
18use alloc::boxed::Box;
19use alloc::vec::Vec;
20use regalloc2::{MachineEnv, PRegSet};
21
22use alloc::borrow::ToOwned;
23use smallvec::{SmallVec, smallvec};
24
/// Riscv64-specialized alias for the shared `Callee` ABI-lowering type.
pub(crate) type Riscv64Callee = Callee<Riscv64MachineDeps>;
27
/// Marker type carrying the riscv64-specific ABI behavior; see the
/// `ABIMachineSpec` impl below.
pub struct Riscv64MachineDeps;
31
// The riscv64 ISA flags need no extra behavior beyond the marker trait.
impl IsaFlags for RiscvFlags {}
33
34impl RiscvFlags {
35 pub(crate) fn min_vec_reg_size(&self) -> u64 {
36 let entries = [
37 (self.has_zvl65536b(), 65536),
38 (self.has_zvl32768b(), 32768),
39 (self.has_zvl16384b(), 16384),
40 (self.has_zvl8192b(), 8192),
41 (self.has_zvl4096b(), 4096),
42 (self.has_zvl2048b(), 2048),
43 (self.has_zvl1024b(), 1024),
44 (self.has_zvl512b(), 512),
45 (self.has_zvl256b(), 256),
46 (self.has_v(), 128),
49 (self.has_zvl128b(), 128),
50 (self.has_zvl64b(), 64),
51 (self.has_zvl32b(), 32),
52 ];
53
54 for (has_flag, size) in entries.into_iter() {
55 if !has_flag {
56 continue;
57 }
58
59 return core::cmp::min(size, 1024);
62 }
63
64 return 0;
65 }
66}
67
68impl ABIMachineSpec for Riscv64MachineDeps {
69 type I = Inst;
70 type F = RiscvFlags;
71
72 const STACK_ARG_RET_SIZE_LIMIT: u32 = 128 * 1024 * 1024;
76
    /// Machine word size in bits: riscv64 is a 64-bit target.
    fn word_bits() -> u32 {
        64
    }
80
    /// Required stack-pointer alignment at calls: 16 bytes for every
    /// calling convention on this backend.
    fn stack_align(_call_conv: isa::CallConv) -> u32 {
        16
    }
85
    /// Assigns locations (registers or stack slots) to the given parameters
    /// or return values according to the riscv64 register conventions.
    ///
    /// Returns the total stack space consumed by stack-passed values
    /// (aligned to the stack alignment) and, when `add_ret_area_ptr` is set,
    /// the index of the synthetic return-area-pointer argument.
    fn compute_arg_locs(
        call_conv: isa::CallConv,
        flags: &settings::Flags,
        params: &[ir::AbiParam],
        args_or_rets: ArgsOrRets,
        add_ret_area_ptr: bool,
        mut args: ArgsAccumulator,
    ) -> CodegenResult<(u32, Option<usize>)> {
        assert_ne!(
            call_conv,
            isa::CallConv::Winch,
            "riscv64 does not support the 'winch' calling convention yet"
        );

        // Inclusive register ranges usable for value passing:
        // x10..=x17 / f10..=f17 for arguments, x10..=x11 / f10..=f11 for
        // return values.
        let (x_start, x_end, f_start, f_end) = match args_or_rets {
            ArgsOrRets::Args => (10, 17, 10, 17),
            ArgsOrRets::Rets => (10, 11, 10, 11),
        };
        let mut next_x_reg = x_start;
        let mut next_f_reg = f_start;
        // Next stack offset (bytes) for values that do not fit in registers.
        let mut next_stack: u32 = 0;

        // The return-area pointer, when requested, occupies the first
        // integer argument register.
        let ret_area_ptr = if add_ret_area_ptr {
            assert!(ArgsOrRets::Args == args_or_rets);
            next_x_reg += 1;
            Some(ABIArg::reg(
                x_reg(x_start).to_real_reg().unwrap(),
                I64,
                ir::ArgumentExtension::None,
                ir::ArgumentPurpose::Normal,
            ))
        } else {
            None
        };

        for param in params {
            if let ir::ArgumentPurpose::StructArgument(_) = param.purpose {
                panic!(
                    "StructArgument parameters are not supported on riscv64. \
                    Use regular pointer arguments instead."
                );
            }

            // One value may require several slots (one per register-class
            // entry reported for its type).
            let (rcs, reg_tys) = Inst::rc_for_type(param.value_type)?;
            let mut slots = ABIArgSlotVec::new();
            for (rc, reg_ty) in rcs.iter().zip(reg_tys.iter()) {
                // Take the next free register of the matching class, if any.
                let next_reg = if (next_x_reg <= x_end) && *rc == RegClass::Int {
                    let x = Some(x_reg(next_x_reg));
                    next_x_reg += 1;
                    x
                } else if (next_f_reg <= f_end) && *rc == RegClass::Float {
                    let x = Some(f_reg(next_f_reg));
                    next_f_reg += 1;
                    x
                } else {
                    None
                };
                if let Some(reg) = next_reg {
                    slots.push(ABIArgSlot::Reg {
                        reg: reg.to_real_reg().unwrap(),
                        ty: *reg_ty,
                        extension: param.extension,
                    });
                } else {
                    // No register left: spill to the stack. Stack-passed
                    // return values are only allowed when the implicit-sret
                    // lowering is enabled.
                    if args_or_rets == ArgsOrRets::Rets && !flags.enable_multi_ret_implicit_sret() {
                        return Err(crate::CodegenError::Unsupported(
                            "Too many return values to fit in registers. \
                            Use a StructReturn argument instead. (#9510)"
                                .to_owned(),
                        ));
                    }

                    // Every slot occupies at least a full 8-byte word and is
                    // naturally aligned to its (power-of-two) size.
                    let size = reg_ty.bits() / 8;
                    let size = core::cmp::max(size, 8);
                    debug_assert!(size.is_power_of_two());
                    next_stack = align_to(next_stack, size);
                    slots.push(ABIArgSlot::Stack {
                        offset: next_stack as i64,
                        ty: *reg_ty,
                        extension: param.extension,
                    });
                    next_stack += size;
                }
            }
            args.push(ABIArg::Slots {
                slots,
                purpose: param.purpose,
            });
        }
        // Append the (non-formal) return-area pointer last and report its
        // index back to the caller.
        let pos = if let Some(ret_area_ptr) = ret_area_ptr {
            args.push_non_formal(ret_area_ptr);
            Some(args.args().len() - 1)
        } else {
            None
        };

        next_stack = align_to(next_stack, Self::stack_align(call_conv));

        Ok((next_stack, pos))
    }
195
    /// Loads a value of type `ty` from the given stack location into
    /// `into_reg`, using trusted memory flags (no trap metadata).
    fn gen_load_stack(mem: StackAMode, into_reg: Writable<Reg>, ty: Type) -> Inst {
        Inst::gen_load(into_reg, mem.into(), ty, MemFlags::trusted())
    }
199
    /// Stores `from_reg` (type `ty`) to the given stack location, using
    /// trusted memory flags (no trap metadata).
    fn gen_store_stack(mem: StackAMode, from_reg: Reg, ty: Type) -> Inst {
        Inst::gen_store(mem.into(), from_reg, ty, MemFlags::trusted())
    }
203
    /// Register-to-register move of a value of type `ty`.
    fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Inst {
        Inst::gen_move(to_reg, from_reg, ty)
    }
207
    /// Emits a widening integer extend of `from_reg` into `to_reg`;
    /// `signed` selects sign- vs zero-extension. `from_bits` must be
    /// strictly smaller than `to_bits`.
    fn gen_extend(
        to_reg: Writable<Reg>,
        from_reg: Reg,
        signed: bool,
        from_bits: u8,
        to_bits: u8,
    ) -> Inst {
        assert!(from_bits < to_bits);
        Inst::Extend {
            rd: to_reg,
            rn: from_reg,
            signed,
            from_bits,
            to_bits,
        }
    }
224
    /// Returns the argument-extension mode to apply: this backend honors
    /// exactly what the signature specified, for every calling convention.
    fn get_ext_mode(
        _call_conv: isa::CallConv,
        specified: ir::ArgumentExtension,
    ) -> ir::ArgumentExtension {
        specified
    }
231
    /// Wraps the incoming-argument pairs in an `Inst::Args` instruction
    /// (defines the function's register arguments at entry).
    fn gen_args(args: Vec<ArgPair>) -> Inst {
        Inst::Args { args }
    }
235
    /// Wraps the return-value pairs in an `Inst::Rets` instruction.
    fn gen_rets(rets: Vec<RetPair>) -> Inst {
        Inst::Rets { rets }
    }
239
    /// Register used to hold the stack-limit value for the stack check;
    /// this backend uses the spill temporary.
    fn get_stacklimit_reg(_call_conv: isa::CallConv) -> Reg {
        spilltmp_reg()
    }
243
244 fn gen_add_imm(
245 _call_conv: isa::CallConv,
246 into_reg: Writable<Reg>,
247 from_reg: Reg,
248 imm: u32,
249 ) -> SmallInstVec<Inst> {
250 let mut insts = SmallInstVec::new();
251 if let Some(imm12) = Imm12::maybe_from_u64(imm as u64) {
252 insts.push(Inst::AluRRImm12 {
253 alu_op: AluOPRRI::Addi,
254 rd: into_reg,
255 rs: from_reg,
256 imm12,
257 });
258 } else {
259 insts.extend(Inst::load_constant_u32(
260 writable_spilltmp_reg2(),
261 imm as u64,
262 ));
263 insts.push(Inst::AluRRR {
264 alu_op: AluOPRRR::Add,
265 rd: into_reg,
266 rs1: spilltmp_reg2(),
267 rs2: from_reg,
268 });
269 }
270 insts
271 }
272
273 fn gen_stack_lower_bound_trap(limit_reg: Reg) -> SmallInstVec<Inst> {
274 let mut insts = SmallVec::new();
275 insts.push(Inst::TrapIf {
276 cc: IntCC::UnsignedLessThan,
277 rs1: stack_reg(),
278 rs2: limit_reg,
279 trap_code: ir::TrapCode::STACK_OVERFLOW,
280 });
281 insts
282 }
283
    /// Computes the address of the given stack location into `into_reg`.
    fn gen_get_stack_addr(mem: StackAMode, into_reg: Writable<Reg>) -> Inst {
        Inst::LoadAddr {
            rd: into_reg,
            mem: mem.into(),
        }
    }
290
    /// Loads `ty` from `base + offset` into `into_reg` (trusted access).
    fn gen_load_base_offset(into_reg: Writable<Reg>, base: Reg, offset: i32, ty: Type) -> Inst {
        let mem = AMode::RegOffset(base, offset as i64);
        Inst::gen_load(into_reg, mem, ty, MemFlags::trusted())
    }
295
    /// Stores `from_reg` (type `ty`) to `base + offset` (trusted access).
    fn gen_store_base_offset(base: Reg, offset: i32, from_reg: Reg, ty: Type) -> Inst {
        let mem = AMode::RegOffset(base, offset as i64);
        Inst::gen_store(mem, from_reg, ty, MemFlags::trusted())
    }
300
301 fn gen_sp_reg_adjust(amount: i32) -> SmallInstVec<Inst> {
302 let mut insts = SmallVec::new();
303
304 if amount == 0 {
305 return insts;
306 }
307
308 if let Some(imm) = Imm12::maybe_from_i64(amount as i64) {
309 insts.push(Inst::AluRRImm12 {
310 alu_op: AluOPRRI::Addi,
311 rd: writable_stack_reg(),
312 rs: stack_reg(),
313 imm12: imm,
314 })
315 } else {
316 let tmp = writable_spilltmp_reg();
317 insts.extend(Inst::load_constant_u64(tmp, amount as i64 as u64));
318 insts.push(Inst::AluRRR {
319 alu_op: AluOPRRR::Add,
320 rd: writable_stack_reg(),
321 rs1: stack_reg(),
322 rs2: tmp.to_reg(),
323 });
324 }
325
326 insts
327 }
328
    /// Emits the prologue frame setup: allocates the 16-byte setup area,
    /// saves the return address (SP+8) and old frame pointer (SP+0), emits
    /// unwind metadata, and points FP at the new save area. Emits nothing
    /// when no setup area is required.
    fn gen_prologue_frame_setup(
        _call_conv: isa::CallConv,
        flags: &settings::Flags,
        _isa_flags: &RiscvFlags,
        frame_layout: &FrameLayout,
    ) -> SmallInstVec<Inst> {
        let mut insts = SmallVec::new();

        if frame_layout.setup_area_size > 0 {
            // sp -= 16, then store ra and fp into the freshly opened slots.
            insts.extend(Self::gen_sp_reg_adjust(-16));
            insts.push(Inst::gen_store(
                AMode::SPOffset(8),
                link_reg(),
                I64,
                MemFlags::trusted(),
            ));
            insts.push(Inst::gen_store(
                AMode::SPOffset(0),
                fp_reg(),
                I64,
                MemFlags::trusted(),
            ));

            if flags.unwind_info() {
                insts.push(Inst::Unwind {
                    inst: UnwindInst::PushFrameRegs {
                        offset_upward_to_caller_sp: frame_layout.setup_area_size,
                    },
                });
            }
            // Establish the new frame pointer at the current SP.
            insts.push(Inst::Mov {
                rd: writable_fp_reg(),
                rm: stack_reg(),
                ty: I64,
            });
        }

        insts
    }
    /// Emits the epilogue frame restore, mirroring the prologue: reloads
    /// ra (SP+8) and fp (SP+0), pops the 16-byte setup area, and — for the
    /// `tail` convention — also pops the tail-call argument area.
    fn gen_epilogue_frame_restore(
        call_conv: isa::CallConv,
        _flags: &settings::Flags,
        _isa_flags: &RiscvFlags,
        frame_layout: &FrameLayout,
    ) -> SmallInstVec<Inst> {
        let mut insts = SmallVec::new();

        if frame_layout.setup_area_size > 0 {
            insts.push(Inst::gen_load(
                writable_link_reg(),
                AMode::SPOffset(8),
                I64,
                MemFlags::trusted(),
            ));
            insts.push(Inst::gen_load(
                writable_fp_reg(),
                AMode::SPOffset(0),
                I64,
                MemFlags::trusted(),
            ));
            insts.extend(Self::gen_sp_reg_adjust(16));
        }

        // Under the `tail` convention the callee deallocates the incoming
        // tail-argument area as well.
        if call_conv == isa::CallConv::Tail && frame_layout.tail_args_size > 0 {
            insts.extend(Self::gen_sp_reg_adjust(
                frame_layout.tail_args_size.try_into().unwrap(),
            ));
        }

        insts
    }
405
    /// Emits a bare return; all restoration happens in the epilogue
    /// sequences above.
    fn gen_return(
        _call_conv: isa::CallConv,
        _isa_flags: &RiscvFlags,
        _frame_layout: &FrameLayout,
    ) -> SmallInstVec<Inst> {
        smallvec![Inst::Ret {}]
    }
413
    /// Emits a call to the `Probestack` libcall to probe `frame_size` bytes
    /// of stack; the size is passed in a0 using the SystemV convention.
    fn gen_probestack(insts: &mut SmallInstVec<Self::I>, frame_size: u32) {
        insts.extend(Inst::load_constant_u32(writable_a0(), frame_size as u64));
        let mut info = CallInfo::empty(
            ExternalName::LibCall(LibCall::Probestack),
            CallConv::SystemV,
        );
        // The probe size in a0 is the call's only argument.
        info.uses.push(CallArgPair {
            vreg: a0(),
            preg: a0(),
        });
        insts.push(Inst::Call {
            info: Box::new(info),
        });
    }
428
    /// Emits the clobber-save sequence that runs after frame setup:
    /// grows the frame for extra tail-call arguments (re-saving ra/fp when a
    /// frame was established), emits unwind metadata, allocates the
    /// clobber + fixed-storage + outgoing-args area, and stores every
    /// clobbered callee-saved register into it.
    fn gen_clobber_save(
        _call_conv: isa::CallConv,
        flags: &settings::Flags,
        frame_layout: &FrameLayout,
    ) -> SmallVec<[Inst; 16]> {
        let mut insts = SmallVec::new();
        let setup_frame = frame_layout.setup_area_size > 0;

        // Extra bytes needed above the incoming arguments for tail-call
        // argument passing.
        let incoming_args_diff = frame_layout.tail_args_size - frame_layout.incoming_args_size;
        if incoming_args_diff > 0 {
            // Grow SP downward to make room for the larger argument area.
            insts.extend(Self::gen_sp_reg_adjust(-(incoming_args_diff as i32)));

            if setup_frame {
                // Re-save ra/fp at the bottom of the enlarged area: the old
                // fp value is reloaded from its previous slot, re-stored at
                // SP+0, and FP is re-pointed at the new save area.
                insts.push(Inst::gen_store(
                    AMode::SPOffset(8),
                    link_reg(),
                    I64,
                    MemFlags::trusted(),
                ));
                insts.push(Inst::gen_load(
                    writable_fp_reg(),
                    AMode::SPOffset(i64::from(incoming_args_diff)),
                    I64,
                    MemFlags::trusted(),
                ));
                insts.push(Inst::gen_store(
                    AMode::SPOffset(0),
                    fp_reg(),
                    I64,
                    MemFlags::trusted(),
                ));

                insts.push(Inst::gen_move(writable_fp_reg(), stack_reg(), I64));
            }
        }

        if flags.unwind_info() && setup_frame {
            // Describe the completed frame to the unwinder.
            insts.push(Inst::Unwind {
                inst: UnwindInst::DefineNewFrame {
                    offset_downward_to_clobbers: frame_layout.clobber_size,
                    offset_upward_to_caller_sp: frame_layout.setup_area_size,
                },
            });
        }

        // Total area below the setup area: clobber saves + fixed frame
        // storage + outgoing argument space.
        let stack_size = frame_layout.clobber_size
            + frame_layout.fixed_frame_storage_size
            + frame_layout.outgoing_args_size;

        if stack_size > 0 {
            insts.extend(Self::gen_sp_reg_adjust(-(stack_size as i32)));

            // Store clobbered callee-saves from the top of the area
            // downward, each naturally aligned for its slot type.
            let mut cur_offset = 0;
            for reg in &frame_layout.clobbered_callee_saves {
                let r_reg = reg.to_reg();
                // Slot type per register class (vectors use 16-byte slots).
                let ty = match r_reg.class() {
                    RegClass::Int => I64,
                    RegClass::Float => F64,
                    RegClass::Vector => I8X16,
                };
                cur_offset = align_to(cur_offset, ty.bytes());
                insts.push(Inst::gen_store(
                    AMode::SPOffset(i64::from(stack_size - cur_offset - ty.bytes())),
                    Reg::from(reg.to_reg()),
                    ty,
                    MemFlags::trusted(),
                ));

                if flags.unwind_info() {
                    insts.push(Inst::Unwind {
                        inst: UnwindInst::SaveReg {
                            clobber_offset: frame_layout.clobber_size - cur_offset - ty.bytes(),
                            reg: r_reg,
                        },
                    });
                }

                cur_offset += ty.bytes();
                assert!(cur_offset <= stack_size);
            }
        }
        insts
    }
522
    /// Emits the clobber-restore sequence: reloads each saved callee-saved
    /// register using exactly the layout produced by `gen_clobber_save`,
    /// then pops the whole clobber/storage/outgoing-args area.
    fn gen_clobber_restore(
        _call_conv: isa::CallConv,
        _flags: &settings::Flags,
        frame_layout: &FrameLayout,
    ) -> SmallVec<[Inst; 16]> {
        let mut insts = SmallVec::new();

        // Must match the size computed in `gen_clobber_save`.
        let stack_size = frame_layout.clobber_size
            + frame_layout.fixed_frame_storage_size
            + frame_layout.outgoing_args_size;
        let mut cur_offset = 0;

        for reg in &frame_layout.clobbered_callee_saves {
            let rreg = reg.to_reg();
            // Slot type per register class (vectors use 16-byte slots).
            let ty = match rreg.class() {
                RegClass::Int => I64,
                RegClass::Float => F64,
                RegClass::Vector => I8X16,
            };
            cur_offset = align_to(cur_offset, ty.bytes());
            insts.push(Inst::gen_load(
                reg.map(Reg::from),
                AMode::SPOffset(i64::from(stack_size - cur_offset - ty.bytes())),
                ty,
                MemFlags::trusted(),
            ));
            cur_offset += ty.bytes();
        }

        if stack_size > 0 {
            insts.extend(Self::gen_sp_reg_adjust(stack_size as i32));
        }

        insts
    }
558
    /// Emits a `memcpy(dst, src, size)` libcall. The three arguments are
    /// pinned to the first integer argument registers (x10..x12); the
    /// constant length is first materialized into a caller-allocated
    /// temporary virtual register.
    fn gen_memcpy<F: FnMut(Type) -> Writable<Reg>>(
        call_conv: isa::CallConv,
        dst: Reg,
        src: Reg,
        size: usize,
        mut alloc_tmp: F,
    ) -> SmallVec<[Self::I; 8]> {
        let mut insts = SmallVec::new();
        let arg0 = Writable::from_reg(x_reg(10));
        let arg1 = Writable::from_reg(x_reg(11));
        let arg2 = Writable::from_reg(x_reg(12));
        // Load the constant byte count into a fresh word-sized temp.
        let tmp = alloc_tmp(Self::word_type());
        insts.extend(Inst::load_constant_u64(tmp, size as u64));
        insts.push(Inst::Call {
            info: Box::new(CallInfo {
                dest: ExternalName::LibCall(LibCall::Memcpy),
                uses: smallvec![
                    CallArgPair {
                        vreg: dst,
                        preg: arg0.to_reg()
                    },
                    CallArgPair {
                        vreg: src,
                        preg: arg1.to_reg()
                    },
                    CallArgPair {
                        vreg: tmp.to_reg(),
                        preg: arg2.to_reg()
                    }
                ],
                defs: smallvec![],
                clobbers: Self::get_regs_clobbered_by_call(call_conv, false),
                caller_conv: call_conv,
                callee_conv: call_conv,
                callee_pop_size: 0,
                try_call_info: None,
                patchable: false,
            }),
        });
        insts
    }
600
601 fn get_number_of_spillslots_for_value(
602 rc: RegClass,
603 _target_vector_bytes: u32,
604 isa_flags: &RiscvFlags,
605 ) -> u32 {
606 match rc {
608 RegClass::Int => 1,
609 RegClass::Float => 1,
610 RegClass::Vector => (isa_flags.min_vec_reg_size() / 8) as u32,
611 }
612 }
613
    /// Returns the shared register-allocator environment; it is the same
    /// for every flags/convention combination on this backend.
    fn get_machine_env(_flags: &settings::Flags, _call_conv: isa::CallConv) -> &MachineEnv {
        // Built once at compile time by the `const fn` below.
        static MACHINE_ENV: MachineEnv = create_reg_environment();
        &MACHINE_ENV
    }
618
    /// Register set clobbered by a call with the given callee convention.
    /// Exceptional (unwind) edges through `tail`/`preserve_all` clobber
    /// everything; ordinary `preserve_all` calls clobber nothing; all other
    /// cases use the default caller-saved set.
    fn get_regs_clobbered_by_call(
        call_conv_of_callee: isa::CallConv,
        is_exception: bool,
    ) -> PRegSet {
        match call_conv_of_callee {
            isa::CallConv::Tail if is_exception => ALL_CLOBBERS,
            isa::CallConv::PreserveAll if is_exception => ALL_CLOBBERS,
            isa::CallConv::PreserveAll => NO_CLOBBERS,
            _ => DEFAULT_CLOBBERS,
        }
    }
635
    /// Computes the frame layout: filters the clobbered registers down to
    /// the callee-saved set for `call_conv`, sorts them for a deterministic
    /// save order, sizes the clobber area, and decides whether the 16-byte
    /// ra/fp setup area is needed.
    fn compute_frame_layout(
        call_conv: isa::CallConv,
        flags: &settings::Flags,
        _sig: &Signature,
        regs: &[Writable<RealReg>],
        function_calls: FunctionCalls,
        incoming_args_size: u32,
        tail_args_size: u32,
        stackslots_size: u32,
        fixed_frame_storage_size: u32,
        outgoing_args_size: u32,
    ) -> FrameLayout {
        // Which clobbered registers must be saved: all of them under
        // `preserve_all`, otherwise only the default callee-saved set.
        let is_callee_saved = |reg: &Writable<RealReg>| match call_conv {
            isa::CallConv::PreserveAll => true,
            _ => DEFAULT_CALLEE_SAVES.contains(reg.to_reg().into()),
        };
        let mut regs: Vec<Writable<RealReg>> =
            regs.iter().cloned().filter(is_callee_saved).collect();

        // Deterministic save/restore order.
        regs.sort_unstable();

        let clobber_size = compute_clobber_size(&regs);

        // The ra/fp setup area is required whenever frame pointers are
        // requested or the function has calls, incoming stack args,
        // clobbers, or fixed frame storage.
        let setup_area_size = if flags.preserve_frame_pointers()
            || function_calls != FunctionCalls::None
            || incoming_args_size > 0
            || clobber_size > 0
            || fixed_frame_storage_size > 0
        {
            16 // Room for the saved fp and ra, 8 bytes each.
        } else {
            0
        };

        FrameLayout {
            word_bytes: 8,
            incoming_args_size,
            tail_args_size,
            setup_area_size,
            clobber_size,
            fixed_frame_storage_size,
            stackslots_size,
            outgoing_args_size,
            clobbered_callee_saves: regs,
            function_calls,
        }
    }
688
689 fn gen_inline_probestack(
690 insts: &mut SmallInstVec<Self::I>,
691 _call_conv: isa::CallConv,
692 frame_size: u32,
693 guard_size: u32,
694 ) {
695 const PROBE_MAX_UNROLL: u32 = 3;
697
698 let probe_count = frame_size / guard_size;
701 if probe_count == 0 {
702 return;
704 }
705
706 let tmp = Writable::from_reg(x_reg(28)); if probe_count <= PROBE_MAX_UNROLL {
710 Self::gen_probestack_unroll(insts, tmp, guard_size, probe_count)
711 } else {
712 insts.push(Inst::StackProbeLoop {
713 guard_size,
714 probe_count,
715 tmp,
716 });
717 }
718 }
719
    /// Fixed temporary register handed to the call lowering for
    /// return-value handling; x12 for every calling convention.
    // NOTE(review): purpose inferred from the trait-method name — confirm
    // against the shared `machinst` call-lowering code.
    fn retval_temp_reg(_call_conv_of_callee: isa::CallConv) -> Writable<Reg> {
        Writable::from_reg(regs::x_reg(12))
    }
725
726 fn exception_payload_regs(call_conv: isa::CallConv) -> &'static [Reg] {
727 const PAYLOAD_REGS: &'static [Reg] = &[regs::a0(), regs::a1()];
728 match call_conv {
729 isa::CallConv::SystemV | isa::CallConv::Tail | isa::CallConv::PreserveAll => {
730 PAYLOAD_REGS
731 }
732 _ => &[],
733 }
734 }
735}
736
/// Registers treated as callee-saved under the default conventions.
/// Integer side: x2 plus x8, x9, x18..=x27; float side: f8 and f18..=f27.
// NOTE(review): f9 is absent here while it appears in `DEFAULT_CLOBBERS`
// below — confirm this matches the intended convention.
const DEFAULT_CALLEE_SAVES: PRegSet = PRegSet::empty()
    // X (integer) registers.
    .with(px_reg(2))
    .with(px_reg(8))
    .with(px_reg(9))
    .with(px_reg(18))
    .with(px_reg(19))
    .with(px_reg(20))
    .with(px_reg(21))
    .with(px_reg(22))
    .with(px_reg(23))
    .with(px_reg(24))
    .with(px_reg(25))
    .with(px_reg(26))
    .with(px_reg(27))
    // F (float) registers.
    .with(pf_reg(8))
    .with(pf_reg(18))
    .with(pf_reg(19))
    .with(pf_reg(20))
    .with(pf_reg(21))
    .with(pf_reg(22))
    .with(pf_reg(23))
    .with(pf_reg(24))
    .with(pf_reg(25))
    .with(pf_reg(26))
    .with(pf_reg(27));
765
766fn compute_clobber_size(clobbers: &[Writable<RealReg>]) -> u32 {
767 let mut clobbered_size = 0;
768 for reg in clobbers {
769 match reg.to_reg().class() {
770 RegClass::Int => {
771 clobbered_size += 8;
772 }
773 RegClass::Float => {
774 clobbered_size += 8;
775 }
776 RegClass::Vector => {
777 clobbered_size = align_to(clobbered_size, 16);
778 clobbered_size += 16;
779 }
780 }
781 }
782 align_to(clobbered_size, 16)
783}
784
/// Caller-saved registers under the default conventions — treated as
/// clobbered across a call. Integer: x1, x5..=x7, x10..=x17, x28..=x31;
/// float: f0..=f7, f9..=f17, f28..=f31; vector: all of v0..=v31.
const DEFAULT_CLOBBERS: PRegSet = PRegSet::empty()
    // X (integer) registers.
    .with(px_reg(1))
    .with(px_reg(5))
    .with(px_reg(6))
    .with(px_reg(7))
    .with(px_reg(10))
    .with(px_reg(11))
    .with(px_reg(12))
    .with(px_reg(13))
    .with(px_reg(14))
    .with(px_reg(15))
    .with(px_reg(16))
    .with(px_reg(17))
    .with(px_reg(28))
    .with(px_reg(29))
    .with(px_reg(30))
    .with(px_reg(31))
    // F (float) registers.
    .with(pf_reg(0))
    .with(pf_reg(1))
    .with(pf_reg(2))
    .with(pf_reg(3))
    .with(pf_reg(4))
    .with(pf_reg(5))
    .with(pf_reg(6))
    .with(pf_reg(7))
    .with(pf_reg(9))
    .with(pf_reg(10))
    .with(pf_reg(11))
    .with(pf_reg(12))
    .with(pf_reg(13))
    .with(pf_reg(14))
    .with(pf_reg(15))
    .with(pf_reg(16))
    .with(pf_reg(17))
    .with(pf_reg(28))
    .with(pf_reg(29))
    .with(pf_reg(30))
    .with(pf_reg(31))
    // V (vector) registers — no vector register is callee-saved.
    .with(pv_reg(0))
    .with(pv_reg(1))
    .with(pv_reg(2))
    .with(pv_reg(3))
    .with(pv_reg(4))
    .with(pv_reg(5))
    .with(pv_reg(6))
    .with(pv_reg(7))
    .with(pv_reg(8))
    .with(pv_reg(9))
    .with(pv_reg(10))
    .with(pv_reg(11))
    .with(pv_reg(12))
    .with(pv_reg(13))
    .with(pv_reg(14))
    .with(pv_reg(15))
    .with(pv_reg(16))
    .with(pv_reg(17))
    .with(pv_reg(18))
    .with(pv_reg(19))
    .with(pv_reg(20))
    .with(pv_reg(21))
    .with(pv_reg(22))
    .with(pv_reg(23))
    .with(pv_reg(24))
    .with(pv_reg(25))
    .with(pv_reg(26))
    .with(pv_reg(27))
    .with(pv_reg(28))
    .with(pv_reg(29))
    .with(pv_reg(30))
    .with(pv_reg(31));
857
/// Every register x3..=x31 plus all f and v registers. Used for the
/// exceptional-return edges of `tail`/`preserve_all` calls (see
/// `get_regs_clobbered_by_call` above), where nothing survives the call.
const ALL_CLOBBERS: PRegSet = PRegSet::empty()
    // X (integer) registers — x0..x2 (zero/ra/sp area) are not listed.
    .with(px_reg(3))
    .with(px_reg(4))
    .with(px_reg(5))
    .with(px_reg(6))
    .with(px_reg(7))
    .with(px_reg(8))
    .with(px_reg(9))
    .with(px_reg(10))
    .with(px_reg(11))
    .with(px_reg(12))
    .with(px_reg(13))
    .with(px_reg(14))
    .with(px_reg(15))
    .with(px_reg(16))
    .with(px_reg(17))
    .with(px_reg(18))
    .with(px_reg(19))
    .with(px_reg(20))
    .with(px_reg(21))
    .with(px_reg(22))
    .with(px_reg(23))
    .with(px_reg(24))
    .with(px_reg(25))
    .with(px_reg(26))
    .with(px_reg(27))
    .with(px_reg(28))
    .with(px_reg(29))
    .with(px_reg(30))
    .with(px_reg(31))
    // F (float) registers.
    .with(pf_reg(0))
    .with(pf_reg(1))
    .with(pf_reg(2))
    .with(pf_reg(3))
    .with(pf_reg(4))
    .with(pf_reg(5))
    .with(pf_reg(6))
    .with(pf_reg(7))
    .with(pf_reg(8))
    .with(pf_reg(9))
    .with(pf_reg(10))
    .with(pf_reg(11))
    .with(pf_reg(12))
    .with(pf_reg(13))
    .with(pf_reg(14))
    .with(pf_reg(15))
    .with(pf_reg(16))
    .with(pf_reg(17))
    .with(pf_reg(18))
    .with(pf_reg(19))
    .with(pf_reg(20))
    .with(pf_reg(21))
    .with(pf_reg(22))
    .with(pf_reg(23))
    .with(pf_reg(24))
    .with(pf_reg(25))
    .with(pf_reg(26))
    .with(pf_reg(27))
    .with(pf_reg(28))
    .with(pf_reg(29))
    .with(pf_reg(30))
    .with(pf_reg(31))
    // V (vector) registers.
    .with(pv_reg(0))
    .with(pv_reg(1))
    .with(pv_reg(2))
    .with(pv_reg(3))
    .with(pv_reg(4))
    .with(pv_reg(5))
    .with(pv_reg(6))
    .with(pv_reg(7))
    .with(pv_reg(8))
    .with(pv_reg(9))
    .with(pv_reg(10))
    .with(pv_reg(11))
    .with(pv_reg(12))
    .with(pv_reg(13))
    .with(pv_reg(14))
    .with(pv_reg(15))
    .with(pv_reg(16))
    .with(pv_reg(17))
    .with(pv_reg(18))
    .with(pv_reg(19))
    .with(pv_reg(20))
    .with(pv_reg(21))
    .with(pv_reg(22))
    .with(pv_reg(23))
    .with(pv_reg(24))
    .with(pv_reg(25))
    .with(pv_reg(26))
    .with(pv_reg(27))
    .with(pv_reg(28))
    .with(pv_reg(29))
    .with(pv_reg(30))
    .with(pv_reg(31));
955
/// Empty clobber set, used for the regular-return path of `preserve_all`
/// calls (see `get_regs_clobbered_by_call`).
const NO_CLOBBERS: PRegSet = PRegSet::empty();
957
/// Builds the register-allocator environment: the physical registers the
/// allocator may use per class (int / float / vector), split into a
/// preferred set (the x10..x15 / f10..f15 / v8..v15 groups) and a
/// non-preferred set holding the remaining allocatable registers.
const fn create_reg_environment() -> MachineEnv {
    let preferred_regs_by_class: [PRegSet; 3] = [
        // Int: x10..x15.
        PRegSet::empty()
            .with(px_reg(10))
            .with(px_reg(11))
            .with(px_reg(12))
            .with(px_reg(13))
            .with(px_reg(14))
            .with(px_reg(15)),
        // Float: f10..f15.
        PRegSet::empty()
            .with(pf_reg(10))
            .with(pf_reg(11))
            .with(pf_reg(12))
            .with(pf_reg(13))
            .with(pf_reg(14))
            .with(pf_reg(15)),
        // Vector: v8..v15.
        PRegSet::empty()
            .with(pv_reg(8))
            .with(pv_reg(9))
            .with(pv_reg(10))
            .with(pv_reg(11))
            .with(pv_reg(12))
            .with(pv_reg(13))
            .with(pv_reg(14))
            .with(pv_reg(15)),
    ];

    let non_preferred_regs_by_class: [PRegSet; 3] = [
        // Int: remaining caller-saved x registers first, then the
        // callee-saved ones (x9, x18..x27). x30/x31 are not allocatable
        // here (used as fixed temporaries elsewhere in this backend).
        PRegSet::empty()
            .with(px_reg(5))
            .with(px_reg(6))
            .with(px_reg(7))
            .with(px_reg(16))
            .with(px_reg(17))
            .with(px_reg(28))
            .with(px_reg(29))
            .with(px_reg(9))
            .with(px_reg(18))
            .with(px_reg(19))
            .with(px_reg(20))
            .with(px_reg(21))
            .with(px_reg(22))
            .with(px_reg(23))
            .with(px_reg(24))
            .with(px_reg(25))
            .with(px_reg(26))
            .with(px_reg(27)),
        // Float: caller-saved f registers first, then the callee-saved
        // ones (f8, f9, f18..f27).
        PRegSet::empty()
            .with(pf_reg(0))
            .with(pf_reg(1))
            .with(pf_reg(2))
            .with(pf_reg(3))
            .with(pf_reg(4))
            .with(pf_reg(5))
            .with(pf_reg(6))
            .with(pf_reg(7))
            .with(pf_reg(16))
            .with(pf_reg(17))
            .with(pf_reg(28))
            .with(pf_reg(29))
            .with(pf_reg(30))
            .with(pf_reg(31))
            .with(pf_reg(8))
            .with(pf_reg(9))
            .with(pf_reg(18))
            .with(pf_reg(19))
            .with(pf_reg(20))
            .with(pf_reg(21))
            .with(pf_reg(22))
            .with(pf_reg(23))
            .with(pf_reg(24))
            .with(pf_reg(25))
            .with(pf_reg(26))
            .with(pf_reg(27)),
        // Vector: everything outside the preferred v8..v15 group.
        PRegSet::empty()
            .with(pv_reg(0))
            .with(pv_reg(1))
            .with(pv_reg(2))
            .with(pv_reg(3))
            .with(pv_reg(4))
            .with(pv_reg(5))
            .with(pv_reg(6))
            .with(pv_reg(7))
            .with(pv_reg(16))
            .with(pv_reg(17))
            .with(pv_reg(18))
            .with(pv_reg(19))
            .with(pv_reg(20))
            .with(pv_reg(21))
            .with(pv_reg(22))
            .with(pv_reg(23))
            .with(pv_reg(24))
            .with(pv_reg(25))
            .with(pv_reg(26))
            .with(pv_reg(27))
            .with(pv_reg(28))
            .with(pv_reg(29))
            .with(pv_reg(30))
            .with(pv_reg(31)),
    ];

    MachineEnv {
        preferred_regs_by_class,
        non_preferred_regs_by_class,
        // No registers are spilled to fixed stack slots on this backend.
        fixed_stack_slots: vec![],
        scratch_by_class: [None, None, None],
    }
}
1085
impl Riscv64MachineDeps {
    /// Emits `probe_count` unrolled stack probes: each step moves SP down
    /// by `guard_size` and stores a zero at the new SP; afterwards SP is
    /// restored to its original value.
    fn gen_probestack_unroll(
        insts: &mut SmallInstVec<Inst>,
        tmp: Writable<Reg>,
        guard_size: u32,
        probe_count: u32,
    ) {
        // tmp = -guard_size, as a 64-bit two's-complement value.
        insts.extend(Inst::load_constant_u64(tmp, (-(guard_size as i64)) as u64));

        for _ in 0..probe_count {
            // sp -= guard_size.
            insts.push(Inst::AluRRR {
                alu_op: AluOPRRR::Add,
                rd: writable_stack_reg(),
                rs1: stack_reg(),
                rs2: tmp.to_reg(),
            });

            // Touch the new page: store zero at the adjusted SP.
            insts.push(Inst::gen_store(
                AMode::SPOffset(0),
                zero_reg(),
                I32,
                MemFlags::trusted(),
            ));
        }

        // Undo all SP adjustments performed above.
        insts.extend(Self::gen_sp_reg_adjust((guard_size * probe_count) as i32));
    }
}