use crate::CodegenError;
use crate::ir::pcc::*;
use crate::ir::{self, Constant, ConstantData, ValueLabel, types};
use crate::ranges::Ranges;
use crate::timing;
use crate::trace;
use crate::{LabelValueLoc, ValueLocRange};
use crate::{machinst::*, trace_log_enabled};
use regalloc2::{
    Edit, Function as RegallocFunction, InstOrEdit, InstPosition, InstRange, Operand,
    OperandConstraint, OperandKind, PRegSet, ProgPoint, RegClass,
};
use rustc_hash::FxHashMap;

use core::cmp::Ordering;
use core::fmt::{self, Write};
use core::mem::take;
use cranelift_entity::{Keys, entity_impl};
use std::collections::HashMap;
use std::collections::hash_map::Entry;

pub type InsnIndex = regalloc2::Inst;

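/// A helper for converting a forward (program-order) instruction index into
/// the corresponding index counted from the end of the function, given the
/// total number of instructions.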
trait ToBackwardsInsnIndex {
    fn to_backwards_insn_index(&self, num_insts: usize) -> BackwardsInsnIndex;
}

impl ToBackwardsInsnIndex for InsnIndex {
    fn to_backwards_insn_index(&self, num_insts: usize) -> BackwardsInsnIndex {
        BackwardsInsnIndex::new(num_insts - self.index() - 1)
    }
}

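/// An index referring to an instruction in the VCode when it is backwards,
/// during VCode construction.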
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "enable-serde",
    derive(::serde::Serialize, ::serde::Deserialize)
)]
pub struct BackwardsInsnIndex(InsnIndex);

impl BackwardsInsnIndex {
    pub fn new(i: usize) -> Self {
        BackwardsInsnIndex(InsnIndex::new(i))
    }
}

/// Index referring to a basic block in VCode.
pub type BlockIndex = regalloc2::Block;

/// VCodeInst wraps the requirements for a MachInst to be in VCode: it must be
/// a `MachInst` and it must be able to emit itself.
pub trait VCodeInst: MachInst + MachInstEmit {}
impl<I: MachInst + MachInstEmit> VCodeInst for I {}

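/// A function in "VCode" (virtualized-register code) form, after lowering.
/// This is a CFG of basic blocks, each of which contains lowered machine
/// instructions that may still refer to virtual registers prior to register
/// allocation.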
pub struct VCode<I: VCodeInst> {
    /// IR-level types of each virtual register.
    vreg_types: Vec<Type>,

    /// Lowered machine instructions, in final (forward) program order.
    insts: Vec<I>,

    /// A sparse map from a backwards instruction index to the user stack map
    /// for that instruction.
    user_stack_maps: FxHashMap<BackwardsInsnIndex, ir::UserStackMap>,

    /// Operands: pre-regalloc references to virtual registers with
    /// constraints, flattened into one array for efficient access by the
    /// register allocator.
    operands: Vec<Operand>,

    /// For each instruction in `insts`, the range in `operands` holding its
    /// operands.
    operand_ranges: Ranges,

    /// Clobbers: a sparse map from instruction indices to clobber masks.
    clobbers: FxHashMap<InsnIndex, PRegSet>,

    /// Source locations for each instruction, parallel to `insts`.
    srclocs: Vec<RelSourceLoc>,

    /// Entry block.
    entry: BlockIndex,

    /// For each block, the range of instruction indices in `insts`.
    block_ranges: Ranges,

    /// For each block, the range of its successors in `block_succs`.
    block_succ_range: Ranges,

    /// Block successor lists, concatenated into one vec.
    block_succs: Vec<regalloc2::Block>,

    /// For each block, the range of its predecessors in `block_preds`.
    block_pred_range: Ranges,

    /// Block predecessor lists, concatenated into one vec.
    block_preds: Vec<regalloc2::Block>,

    /// For each block, the range of its parameters in `block_params`.
    block_params_range: Ranges,

    /// Block parameter lists, concatenated into one vec.
    block_params: Vec<regalloc2::VReg>,

    /// Outgoing block arguments on branch instructions, concatenated into one
    /// list.
    branch_block_args: Vec<regalloc2::VReg>,

    /// For each branch successor, the range of its arguments in
    /// `branch_block_args`.
    branch_block_arg_range: Ranges,

    /// For each block, the range in `branch_block_arg_range` corresponding to
    /// its successors.
    branch_block_arg_succ_range: Ranges,

    /// Block-order information.
    block_order: BlockLoweringOrder,

    /// ABI object.
    pub(crate) abi: Callee<I::ABIMachineSpec>,

    /// Constant information used during code emission.
    emit_info: I::Info,

    /// Constants to be emitted separately by the `MachBuffer`.
    pub(crate) constants: VCodeConstants,

    /// Value labels for debuginfo attached to vregs.
    debug_value_labels: Vec<(VReg, InsnIndex, InsnIndex, u32)>,

    pub(crate) sigs: SigSet,

    /// Facts on VRegs, for proof-carrying code verification.
    facts: Vec<Option<Fact>>,

    log2_min_function_alignment: u8,
}

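/// The result of `VCode::emit`: the final machine code plus metadata gathered
/// during emission.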
pub struct EmitResult {
    /// The buffer containing the emitted machine code.
    pub buffer: MachBufferFinalized<Stencil>,

    /// Offset of each basic block, recorded during emission.
    pub bb_offsets: Vec<CodeOffset>,

    /// Final basic-block edges, as (from, to) code offsets of block starts.
    pub bb_edges: Vec<(CodeOffset, CodeOffset)>,

    /// The pretty-printed disassembly, if requested.
    pub disasm: Option<String>,

    /// Offsets of sized stackslots.
    pub sized_stackslot_offsets: PrimaryMap<StackSlot, u32>,

    /// Offsets of dynamic stackslots.
    pub dynamic_stackslot_offsets: PrimaryMap<DynamicStackSlot, u32>,

    /// Value-label ranges, for debuginfo.
    pub value_labels_ranges: ValueLabelsRanges,

    /// Stack frame size.
    pub frame_size: u32,
}

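/// A builder for a VCode function body. Instructions are appended per block
/// in the order given by the chosen build direction; a backward build is
/// reversed into forward order when `build` is called.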
pub struct VCodeBuilder<I: VCodeInst> {
    /// In-progress VCode.
    pub(crate) vcode: VCode<I>,

    /// In what direction is the build occurring?
    direction: VCodeBuildDirection,

    /// Debug-value label in-progress map, keyed by label. For each label, we
    /// keep disjoint instruction-index ranges mapping to vregs; these are
    /// flattened into `debug_value_labels` when the VCode is finalized.
    debug_info: FxHashMap<ValueLabel, Vec<(InsnIndex, InsnIndex, VReg)>>,
}

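/// Direction in which a VCodeBuilder builds VCode.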
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum VCodeBuildDirection {
    Backward,
}

impl<I: VCodeInst> VCodeBuilder<I> {
    pub fn new(
        sigs: SigSet,
        abi: Callee<I::ABIMachineSpec>,
        emit_info: I::Info,
        block_order: BlockLoweringOrder,
        constants: VCodeConstants,
        direction: VCodeBuildDirection,
        log2_min_function_alignment: u8,
    ) -> Self {
        let vcode = VCode::new(
            sigs,
            abi,
            emit_info,
            block_order,
            constants,
            log2_min_function_alignment,
        );

        VCodeBuilder {
            vcode,
            direction,
            debug_info: FxHashMap::default(),
        }
    }

    pub fn init_retval_area(&mut self, vregs: &mut VRegAllocator<I>) -> CodegenResult<()> {
        self.vcode.abi.init_retval_area(&self.vcode.sigs, vregs)
    }

    pub fn abi(&self) -> &Callee<I::ABIMachineSpec> {
        &self.vcode.abi
    }

    pub fn abi_mut(&mut self) -> &mut Callee<I::ABIMachineSpec> {
        &mut self.vcode.abi
    }

    pub fn sigs(&self) -> &SigSet {
        &self.vcode.sigs
    }

    pub fn sigs_mut(&mut self) -> &mut SigSet {
        &mut self.vcode.sigs
    }

    pub fn block_order(&self) -> &BlockLoweringOrder {
        &self.vcode.block_order
    }

    pub fn set_entry(&mut self, block: BlockIndex) {
        self.vcode.entry = block;
    }

    pub fn end_bb(&mut self) {
        let end_idx = self.vcode.insts.len();
        self.vcode.block_ranges.push_end(end_idx);
        let succ_end = self.vcode.block_succs.len();
        self.vcode.block_succ_range.push_end(succ_end);
        let block_params_end = self.vcode.block_params.len();
        self.vcode.block_params_range.push_end(block_params_end);
        let branch_block_arg_succ_end = self.vcode.branch_block_arg_range.len();
        self.vcode
            .branch_block_arg_succ_range
            .push_end(branch_block_arg_succ_end);
    }

    pub fn add_block_param(&mut self, param: VirtualReg) {
        self.vcode.block_params.push(param.into());
    }

    fn add_branch_args_for_succ(&mut self, args: &[Reg]) {
        self.vcode
            .branch_block_args
            .extend(args.iter().map(|&arg| VReg::from(arg)));
        let end = self.vcode.branch_block_args.len();
        self.vcode.branch_block_arg_range.push_end(end);
    }

    pub fn push(&mut self, insn: I, loc: RelSourceLoc) {
        assert!(!insn.is_low_level_branch()); // These are not meant to be in VCode.
        self.vcode.insts.push(insn);
        self.vcode.srclocs.push(loc);
    }

    pub fn add_succ(&mut self, block: BlockIndex, args: &[Reg]) {
        self.vcode.block_succs.push(block);
        self.add_branch_args_for_succ(args);
    }

    pub fn add_value_label(&mut self, reg: Reg, label: ValueLabel) {
        // Ranges are recorded in build order; `reverse_and_finalize`
        // translates these indices into forward instruction indices when the
        // build direction is backward.
        let next_inst_index = self.vcode.insts.len();
        if next_inst_index == 0 {
            // No instructions have been emitted yet, so there is nothing for
            // this label to cover.
            return;
        }
        let next_inst = InsnIndex::new(next_inst_index);
        let labels = self.debug_info.entry(label).or_insert_with(|| vec![]);
        let last = labels
            .last()
            .map(|(_start, end, _vreg)| *end)
            .unwrap_or(InsnIndex::new(0));
        labels.push((last, next_inst, reg.into()));
    }

    pub fn constants(&mut self) -> &mut VCodeConstants {
        &mut self.vcode.constants
    }

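    /// Compute the per-block predecessor lists from the successor lists, via
    /// a counting pass followed by a scatter of each edge into its
    /// predecessor slot.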
    fn compute_preds_from_succs(&mut self) {
        let mut starts = vec![0u32; self.vcode.num_blocks()];
        for succ in &self.vcode.block_succs {
            starts[succ.index()] += 1;
        }

        self.vcode.block_pred_range.reserve(starts.len());
        let mut end = 0;
        for count in starts.iter_mut() {
            let start = end;
            end += *count;
            *count = start;
            self.vcode.block_pred_range.push_end(end as usize);
        }
        let end = end as usize;
        debug_assert_eq!(end, self.vcode.block_succs.len());

        self.vcode.block_preds.resize(end, BlockIndex::invalid());
        for (pred, range) in self.vcode.block_succ_range.iter() {
            let pred = BlockIndex::new(pred);
            for succ in &self.vcode.block_succs[range] {
                let pos = &mut starts[succ.index()];
                self.vcode.block_preds[*pos as usize] = pred;
                *pos += 1;
            }
        }
        debug_assert!(self.vcode.block_preds.iter().all(|pred| pred.is_valid()));
    }

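    /// Called after a backward build completes: reverse instructions, source
    /// locations, and per-block index ranges into forward order, and
    /// translate the collected debug-value label ranges into forward
    /// instruction indices.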
    fn reverse_and_finalize(&mut self, vregs: &VRegAllocator<I>) {
        let n_insts = self.vcode.insts.len();
        if n_insts == 0 {
            return;
        }

        self.vcode.block_ranges.reverse_index();
        self.vcode.block_ranges.reverse_target(n_insts);
        self.vcode.block_params_range.reverse_index();
        self.vcode.block_succ_range.reverse_index();
        self.vcode.insts.reverse();
        self.vcode.srclocs.reverse();
        self.vcode.branch_block_arg_succ_range.reverse_index();

        let translate = |inst: InsnIndex| InsnIndex::new(n_insts - inst.index());

        for (label, tuples) in &self.debug_info {
            for &(start, end, vreg) in tuples {
                let vreg = vregs.resolve_vreg_alias(vreg);
                let fwd_start = translate(end);
                let fwd_end = translate(start);
                self.vcode
                    .debug_value_labels
                    .push((vreg, fwd_start, fwd_end, label.as_u32()));
            }
        }

        self.vcode
            .debug_value_labels
            .sort_unstable_by_key(|(vreg, _, _, _)| *vreg);
    }

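    /// Collect the operands of each instruction into the flattened `operands`
    /// array, resolving vreg aliases on the way, and record per-instruction
    /// clobber sets.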
    fn collect_operands(&mut self, vregs: &VRegAllocator<I>) {
        let allocatable = PRegSet::from(self.vcode.abi.machine_env());
        for (i, insn) in self.vcode.insts.iter_mut().enumerate() {
            let mut op_collector =
                OperandCollector::new(&mut self.vcode.operands, allocatable, |vreg| {
                    vregs.resolve_vreg_alias(vreg)
                });
            insn.get_operands(&mut op_collector);
            let (ops, clobbers) = op_collector.finish();
            self.vcode.operand_ranges.push_end(ops);

            if clobbers != PRegSet::default() {
                self.vcode.clobbers.insert(InsnIndex::new(i), clobbers);
            }

            if let Some((dst, src)) = insn.is_move() {
                assert!(
                    src.is_virtual(),
                    "the real register {src:?} was used as the source of a move instruction"
                );
                assert!(
                    dst.to_reg().is_virtual(),
                    "the real register {:?} was used as the destination of a move instruction",
                    dst.to_reg()
                );
            }
        }

        for arg in &mut self.vcode.branch_block_args {
            let new_arg = vregs.resolve_vreg_alias(*arg);
            trace!("operandcollector: block arg {:?} -> {:?}", arg, new_arg);
            *arg = new_arg;
        }
    }

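    /// Build the final VCode, consuming this builder and the vreg allocator
    /// that was used during lowering.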
    pub fn build(mut self, mut vregs: VRegAllocator<I>) -> VCode<I> {
        self.vcode.vreg_types = take(&mut vregs.vreg_types);
        self.vcode.facts = take(&mut vregs.facts);

        if self.direction == VCodeBuildDirection::Backward {
            self.reverse_and_finalize(&vregs);
        }
        self.collect_operands(&vregs);

        self.compute_preds_from_succs();
        self.vcode.debug_value_labels.sort_unstable();

        vregs.debug_assert_no_vreg_aliases(self.vcode.operands.iter().map(|op| op.vreg()));
        vregs.debug_assert_no_vreg_aliases(self.vcode.block_params.iter().copied());
        vregs.debug_assert_no_vreg_aliases(self.vcode.branch_block_args.iter().copied());
        vregs.debug_assert_no_vreg_aliases(
            self.vcode.debug_value_labels.iter().map(|&(vreg, ..)| vreg),
        );
        vregs.debug_assert_no_vreg_aliases(
            self.vcode
                .facts
                .iter()
                .zip(&vregs.vreg_types)
                .enumerate()
                .filter(|(_, (fact, _))| fact.is_some())
                .map(|(vreg, (_, &ty))| {
                    let (regclasses, _) = I::rc_for_type(ty).unwrap();
                    VReg::new(vreg, regclasses[0])
                }),
        );

        self.vcode
    }

    pub fn add_user_stack_map(
        &mut self,
        inst: BackwardsInsnIndex,
        entries: &[ir::UserStackMapEntry],
    ) {
        let stack_map = ir::UserStackMap::new(entries, self.vcode.abi.sized_stackslot_offsets());
        let old_entry = self.vcode.user_stack_maps.insert(inst, stack_map);
        debug_assert!(old_entry.is_none());
    }
}

const NO_INST_OFFSET: CodeOffset = u32::MAX;

impl<I: VCodeInst> VCode<I> {
    fn new(
        sigs: SigSet,
        abi: Callee<I::ABIMachineSpec>,
        emit_info: I::Info,
        block_order: BlockLoweringOrder,
        constants: VCodeConstants,
        log2_min_function_alignment: u8,
    ) -> Self {
        let n_blocks = block_order.lowered_order().len();
        VCode {
            sigs,
            vreg_types: vec![],
            insts: Vec::with_capacity(10 * n_blocks),
            user_stack_maps: FxHashMap::default(),
            operands: Vec::with_capacity(30 * n_blocks),
            operand_ranges: Ranges::with_capacity(10 * n_blocks),
            clobbers: FxHashMap::default(),
            srclocs: Vec::with_capacity(10 * n_blocks),
            entry: BlockIndex::new(0),
            block_ranges: Ranges::with_capacity(n_blocks),
            block_succ_range: Ranges::with_capacity(n_blocks),
            block_succs: Vec::with_capacity(n_blocks),
            block_pred_range: Ranges::default(),
            block_preds: Vec::new(),
            block_params_range: Ranges::with_capacity(n_blocks),
            block_params: Vec::with_capacity(5 * n_blocks),
            branch_block_args: Vec::with_capacity(10 * n_blocks),
            branch_block_arg_range: Ranges::with_capacity(2 * n_blocks),
            branch_block_arg_succ_range: Ranges::with_capacity(n_blocks),
            block_order,
            abi,
            emit_info,
            constants,
            debug_value_labels: vec![],
            facts: vec![],
            log2_min_function_alignment,
        }
    }

    pub fn num_blocks(&self) -> usize {
        self.block_ranges.len()
    }

    pub fn num_insts(&self) -> usize {
        self.insts.len()
    }

    fn compute_clobbers_and_function_calls(
        &self,
        regalloc: &regalloc2::Output,
    ) -> (Vec<Writable<RealReg>>, FunctionCalls) {
        let mut clobbered = PRegSet::default();
        let mut function_calls = FunctionCalls::None;

        for (_, Edit::Move { to, .. }) in &regalloc.edits {
            if let Some(preg) = to.as_reg() {
                clobbered.add(preg);
            }
        }

        for (i, range) in self.operand_ranges.iter() {
            let operands = &self.operands[range.clone()];
            let allocs = &regalloc.allocs[range];
            for (operand, alloc) in operands.iter().zip(allocs.iter()) {
                if operand.kind() == OperandKind::Def {
                    if let Some(preg) = alloc.as_reg() {
                        clobbered.add(preg);
                    }
                }
            }

            function_calls.update(self.insts[i].call_type());

            if self.insts[i].is_included_in_clobbers() {
                if let Some(&inst_clobbered) = self.clobbers.get(&InsnIndex::new(i)) {
                    clobbered.union_from(inst_clobbered);
                }
            }
        }

        let clobbered_regs = clobbered
            .into_iter()
            .map(|preg| Writable::from_reg(RealReg::from(preg)))
            .collect();

        (clobbered_regs, function_calls)
    }

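    /// Emit the instructions to a `MachBuffer`, containing fixed-up code and
    /// external reloc/trap/etc. records ready for use, given the results of
    /// register allocation.
    ///
    /// Returns the machine code itself, plus optional disassembly and layout
    /// metadata, as an `EmitResult`.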
    pub fn emit(
        mut self,
        regalloc: &regalloc2::Output,
        want_disasm: bool,
        flags: &settings::Flags,
        ctrl_plane: &mut ControlPlane,
    ) -> EmitResult
    where
        I: VCodeInst,
    {
        let _tt = timing::vcode_emit();
        let mut buffer = MachBuffer::new();
        buffer.set_log2_min_function_alignment(self.log2_min_function_alignment);
        let mut bb_starts: Vec<Option<CodeOffset>> = vec![];

        buffer.reserve_labels_for_blocks(self.num_blocks());

        buffer.register_constants(&self.constants);

        let mut final_order: SmallVec<[BlockIndex; 16]> = smallvec![];
        let mut cold_blocks: SmallVec<[BlockIndex; 16]> = smallvec![];
        for block in 0..self.num_blocks() {
            let block = BlockIndex::new(block);
            if self.block_order.is_cold(block) {
                cold_blocks.push(block);
            } else {
                final_order.push(block);
            }
        }
        final_order.extend(cold_blocks.clone());

        let (clobbers, function_calls) = self.compute_clobbers_and_function_calls(regalloc);
        self.abi.compute_frame_layout(
            &self.sigs,
            regalloc.num_spillslots,
            clobbers,
            function_calls,
        );

        let mut cur_srcloc = None;
        let mut last_offset = None;
        let mut inst_offsets = vec![];
        let mut state = I::State::new(&self.abi, std::mem::take(ctrl_plane));

        let mut disasm = String::new();

        if !self.debug_value_labels.is_empty() {
            inst_offsets.resize(self.insts.len(), NO_INST_OFFSET);
        }

        let mut ra_edits_per_block: SmallVec<[u32; 64]> = smallvec![];
        let mut edit_idx = 0;
        for block in 0..self.num_blocks() {
            let end_inst = InsnIndex::new(self.block_ranges.get(block).end);
            let start_edit_idx = edit_idx;
            while edit_idx < regalloc.edits.len() && regalloc.edits[edit_idx].0.inst() < end_inst {
                edit_idx += 1;
            }
            let end_edit_idx = edit_idx;
            ra_edits_per_block.push((end_edit_idx - start_edit_idx) as u32);
        }

        let is_forward_edge_cfi_enabled = self.abi.is_forward_edge_cfi_enabled();
        let mut bb_padding = match flags.bb_padding_log2_minus_one() {
            0 => Vec::new(),
            n => vec![0; 1 << (n - 1)],
        };
        let mut total_bb_padding = 0;

        for (block_order_idx, &block) in final_order.iter().enumerate() {
            trace!("emitting block {:?}", block);

            state.on_new_block();

            let new_offset = I::align_basic_block(buffer.cur_offset());
            while new_offset > buffer.cur_offset() {
                let nop = I::gen_nop((new_offset - buffer.cur_offset()) as usize);
                nop.emit(&mut buffer, &self.emit_info, &mut Default::default());
            }
            assert_eq!(buffer.cur_offset(), new_offset);

            let do_emit = |inst: &I,
                           disasm: &mut String,
                           buffer: &mut MachBuffer<I>,
                           state: &mut I::State| {
                if want_disasm && !inst.is_args() {
                    let mut s = state.clone();
                    writeln!(disasm, " {}", inst.pretty_print_inst(&mut s)).unwrap();
                }
                inst.emit(buffer, &self.emit_info, state);
            };

            if block == self.entry {
                trace!(" -> entry block");
                buffer.start_srcloc(Default::default());
                for inst in &self.abi.gen_prologue() {
                    do_emit(&inst, &mut disasm, &mut buffer, &mut state);
                }
                buffer.end_srcloc();
            }

            buffer.bind_label(MachLabel::from_block(block), state.ctrl_plane_mut());

            if want_disasm {
                writeln!(&mut disasm, "block{}:", block.index()).unwrap();
            }

            if flags.machine_code_cfg_info() {
                let cur_offset = buffer.cur_offset();
                if last_offset.is_some() && cur_offset <= last_offset.unwrap() {
                    for i in (0..bb_starts.len()).rev() {
                        if bb_starts[i].is_some() && cur_offset > bb_starts[i].unwrap() {
                            break;
                        }
                        bb_starts[i] = None;
                    }
                }
                bb_starts.push(Some(cur_offset));
                last_offset = Some(cur_offset);
            }

            if let Some(block_start) = I::gen_block_start(
                self.block_order.is_indirect_branch_target(block),
                is_forward_edge_cfi_enabled,
            ) {
                do_emit(&block_start, &mut disasm, &mut buffer, &mut state);
            }

            for inst_or_edit in regalloc.block_insts_and_edits(&self, block) {
                match inst_or_edit {
                    InstOrEdit::Inst(iix) => {
                        if !self.debug_value_labels.is_empty() {
                            if !self.block_order.is_cold(block) {
                                inst_offsets[iix.index()] = buffer.cur_offset();
                            }
                        }

                        let srcloc = self.srclocs[iix.index()];
                        if cur_srcloc != Some(srcloc) {
                            if cur_srcloc.is_some() {
                                buffer.end_srcloc();
                            }
                            buffer.start_srcloc(srcloc);
                            cur_srcloc = Some(srcloc);
                        }

                        let stack_map_disasm = if self.insts[iix.index()].is_safepoint() {
                            let (user_stack_map, user_stack_map_disasm) = {
                                let index = iix.to_backwards_insn_index(self.num_insts());
                                let user_stack_map = self.user_stack_maps.remove(&index);
                                let user_stack_map_disasm =
                                    user_stack_map.as_ref().map(|m| format!(" ; {m:?}"));
                                (user_stack_map, user_stack_map_disasm)
                            };

                            state.pre_safepoint(user_stack_map);

                            user_stack_map_disasm
                        } else {
                            None
                        };

                        if self.insts[iix.index()].is_term() == MachTerminator::Ret {
                            for inst in self.abi.gen_epilogue() {
                                do_emit(&inst, &mut disasm, &mut buffer, &mut state);
                            }
                        } else {
                            let mut allocs = regalloc.inst_allocs(iix).iter();
                            self.insts[iix.index()].get_operands(
                                &mut |reg: &mut Reg, constraint, _kind, _pos| {
                                    let alloc =
                                        allocs.next().expect("enough allocations for all operands");

                                    if let Some(alloc) = alloc.as_reg() {
                                        let alloc: Reg = alloc.into();
                                        if let OperandConstraint::FixedReg(rreg) = constraint {
                                            debug_assert_eq!(Reg::from(rreg), alloc);
                                        }
                                        *reg = alloc;
                                    } else if let Some(alloc) = alloc.as_stack() {
                                        let alloc: Reg = alloc.into();
                                        *reg = alloc;
                                    }
                                },
                            );
                            debug_assert!(allocs.next().is_none());

                            do_emit(
                                &self.insts[iix.index()],
                                &mut disasm,
                                &mut buffer,
                                &mut state,
                            );
                            if let Some(stack_map_disasm) = stack_map_disasm {
                                disasm.push_str(&stack_map_disasm);
                                disasm.push('\n');
                            }
                        }
                    }

                    InstOrEdit::Edit(Edit::Move { from, to }) => {
                        match (from.as_reg(), to.as_reg()) {
                            (Some(from), Some(to)) => {
                                let from_rreg = Reg::from(from);
                                let to_rreg = Writable::from_reg(Reg::from(to));
                                debug_assert_eq!(from.class(), to.class());
                                let ty = I::canonical_type_for_rc(from.class());
                                let mv = I::gen_move(to_rreg, from_rreg, ty);
                                do_emit(&mv, &mut disasm, &mut buffer, &mut state);
                            }
                            (Some(from), None) => {
                                let to = to.as_stack().unwrap();
                                let from_rreg = RealReg::from(from);
                                let spill = self.abi.gen_spill(to, from_rreg);
                                do_emit(&spill, &mut disasm, &mut buffer, &mut state);
                            }
                            (None, Some(to)) => {
                                let from = from.as_stack().unwrap();
                                let to_rreg = Writable::from_reg(RealReg::from(to));
                                let reload = self.abi.gen_reload(to_rreg, from);
                                do_emit(&reload, &mut disasm, &mut buffer, &mut state);
                            }
                            (None, None) => {
                                panic!("regalloc2 should have eliminated stack-to-stack moves!");
                            }
                        }
                    }
                }
            }

            if cur_srcloc.is_some() {
                buffer.end_srcloc();
                cur_srcloc = None;
            }

            let worst_case_next_bb = if block_order_idx < final_order.len() - 1 {
                let next_block = final_order[block_order_idx + 1];
                let next_block_range = self.block_ranges.get(next_block.index());
                let next_block_size = next_block_range.len() as u32;
                let next_block_ra_insertions = ra_edits_per_block[next_block.index()];
                I::worst_case_size() * (next_block_size + next_block_ra_insertions)
            } else {
                0
            };
            let padding = if bb_padding.is_empty() {
                0
            } else {
                bb_padding.len() as u32 + I::LabelUse::ALIGN - 1
            };
            if buffer.island_needed(padding + worst_case_next_bb) {
                buffer.emit_island(padding + worst_case_next_bb, ctrl_plane);
            }

            if !bb_padding.is_empty() {
                buffer.put_data(&bb_padding);
                buffer.align_to(I::LabelUse::ALIGN);
                total_bb_padding += bb_padding.len();
                if total_bb_padding > (150 << 20) {
                    bb_padding = Vec::new();
                }
            }
        }

        debug_assert!(
            self.user_stack_maps.is_empty(),
            "any stack maps should have been consumed by instruction emission, still have: {:#?}",
            self.user_stack_maps,
        );

        buffer.optimize_branches(ctrl_plane);

        *ctrl_plane = state.take_ctrl_plane();

        let func_body_len = buffer.cur_offset();

        let mut bb_edges = vec![];
        let mut bb_offsets = vec![];
        if flags.machine_code_cfg_info() {
            for block in 0..self.num_blocks() {
                if bb_starts[block].is_none() {
                    continue;
                }
                let from = bb_starts[block].unwrap();

                bb_offsets.push(from);
                let succs = self.block_succs(BlockIndex::new(block));
                for &succ in succs.iter() {
                    let to = buffer.resolve_label_offset(MachLabel::from_block(succ));
                    bb_edges.push((from, to));
                }
            }
        }

        self.monotonize_inst_offsets(&mut inst_offsets[..], func_body_len);
        let value_labels_ranges =
            self.compute_value_labels_ranges(regalloc, &inst_offsets[..], func_body_len);
        let frame_size = self.abi.frame_size();

        EmitResult {
            buffer: buffer.finish(&self.constants, ctrl_plane),
            bb_offsets,
            bb_edges,
            disasm: if want_disasm { Some(disasm) } else { None },
            sized_stackslot_offsets: self.abi.sized_stackslot_offsets().clone(),
            dynamic_stackslot_offsets: self.abi.dynamic_stackslot_offsets().clone(),
            value_labels_ranges,
            frame_size,
        }
    }

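    /// Make the per-instruction code offsets monotonically non-decreasing: an
    /// instruction whose recorded offset lies past the following
    /// instruction's offset (e.g. because it was removed during branch
    /// optimization) is clamped to the next recorded offset.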
    fn monotonize_inst_offsets(&self, inst_offsets: &mut [CodeOffset], func_body_len: u32) {
        if self.debug_value_labels.is_empty() {
            return;
        }

        let mut next_offset = func_body_len;
        for inst_index in (0..(inst_offsets.len() - 1)).rev() {
            let inst_offset = inst_offsets[inst_index];

            if inst_offset == NO_INST_OFFSET {
                continue;
            }

            if inst_offset > next_offset {
                trace!(
                    "Fixing code offset of the removed Inst {}: {} -> {}",
                    inst_index, inst_offset, next_offset
                );
                inst_offsets[inst_index] = next_offset;
                continue;
            }

            next_offset = inst_offset;
        }
    }

    fn compute_value_labels_ranges(
        &self,
        regalloc: &regalloc2::Output,
        inst_offsets: &[CodeOffset],
        func_body_len: u32,
    ) -> ValueLabelsRanges {
        if self.debug_value_labels.is_empty() {
            return ValueLabelsRanges::default();
        }

        if trace_log_enabled!() {
            self.log_value_labels_ranges(regalloc, inst_offsets);
        }

        let mut value_labels_ranges: ValueLabelsRanges = HashMap::new();
        for &(label, from, to, alloc) in &regalloc.debug_locations {
            let label = ValueLabel::from_u32(label);
            let ranges = value_labels_ranges.entry(label).or_insert_with(|| vec![]);
            let prog_point_to_inst = |prog_point: ProgPoint| {
                let mut inst = prog_point.inst();
                if prog_point.pos() == InstPosition::After {
                    inst = inst.next();
                }
                inst.index()
            };
            let from_inst_index = prog_point_to_inst(from);
            let to_inst_index = prog_point_to_inst(to);
            let from_offset = inst_offsets[from_inst_index];
            let to_offset = if to_inst_index == inst_offsets.len() {
                func_body_len
            } else {
                inst_offsets[to_inst_index]
            };

            if from_offset == NO_INST_OFFSET
                || to_offset == NO_INST_OFFSET
                || from_offset == to_offset
            {
                continue;
            }

            let loc = if let Some(preg) = alloc.as_reg() {
                LabelValueLoc::Reg(Reg::from(preg))
            } else {
                let slot = alloc.as_stack().unwrap();
                let slot_offset = self.abi.get_spillslot_offset(slot);
                let slot_base_to_caller_sp_offset = self.abi.slot_base_to_caller_sp_offset();
                let caller_sp_to_cfa_offset =
                    crate::isa::unwind::systemv::caller_sp_to_cfa_offset();
                let cfa_to_sp_offset =
                    -((slot_base_to_caller_sp_offset + caller_sp_to_cfa_offset) as i64);
                LabelValueLoc::CFAOffset(cfa_to_sp_offset + slot_offset)
            };

            if let Some(last_loc_range) = ranges.last_mut() {
                if last_loc_range.loc == loc && last_loc_range.end == from_offset {
                    trace!(
                        "Extending debug range for {:?} in {:?} to Inst {} ({})",
                        label, loc, to_inst_index, to_offset
                    );
                    last_loc_range.end = to_offset;
                    continue;
                }
            }

            trace!(
                "Recording debug range for {:?} in {:?}: [Inst {}..Inst {}) [{}..{})",
                label, loc, from_inst_index, to_inst_index, from_offset, to_offset
            );

            ranges.push(ValueLocRange {
                loc,
                start: from_offset,
                end: to_offset,
            });
        }

        value_labels_ranges
    }

    fn log_value_labels_ranges(&self, regalloc: &regalloc2::Output, inst_offsets: &[CodeOffset]) {
        debug_assert!(trace_log_enabled!());

        let mut labels = vec![];
        for &(label, _, _, _) in &regalloc.debug_locations {
            if Some(&label) == labels.last() {
                continue;
            }
            labels.push(label);
        }

        let mut vregs = vec![];
        for &(vreg, start, end, label) in &self.debug_value_labels {
            if matches!(labels.binary_search(&label), Ok(_)) {
                vregs.push((label, start, end, vreg));
            }
        }
        vregs.sort_unstable_by(
            |(l_label, l_start, _, _), (r_label, r_start, _, _)| match l_label.cmp(r_label) {
                Ordering::Equal => l_start.cmp(r_start),
                cmp => cmp,
            },
        );

        #[derive(PartialEq)]
        enum Mode {
            Measure,
            Emit,
        }
        #[derive(PartialEq)]
        enum Row {
            Head,
            Line,
            Inst(usize, usize),
        }

        let mut widths = vec![0; 3 + 2 * labels.len()];
        let mut row = String::new();
        let mut output_row = |row_kind: Row, mode: Mode| {
            let mut column_index = 0;
            row.clear();

            macro_rules! output_cell_impl {
                ($fill:literal, $span:literal, $($cell_fmt:tt)*) => {
                    let column_start = row.len();
                    {
                        row.push('|');
                        write!(row, $($cell_fmt)*).unwrap();
                    }

                    let next_column_index = column_index + $span;
                    let expected_width: usize = widths[column_index..next_column_index].iter().sum();
                    if mode == Mode::Measure {
                        let actual_width = row.len() - column_start;
                        if actual_width > expected_width {
                            widths[next_column_index - 1] += actual_width - expected_width;
                        }
                    } else {
                        let column_end = column_start + expected_width;
                        while row.len() != column_end {
                            row.push($fill);
                        }
                    }
                    column_index = next_column_index;
                };
            }
            macro_rules! output_cell {
                ($($cell_fmt:tt)*) => {
                    output_cell_impl!(' ', 1, $($cell_fmt)*);
                };
            }

            match row_kind {
                Row::Head => {
                    output_cell!("BB");
                    output_cell!("Inst");
                    output_cell!("IP");
                    for label in &labels {
                        output_cell_impl!(' ', 2, "{:?}", ValueLabel::from_u32(*label));
                    }
                }
                Row::Line => {
                    debug_assert!(mode == Mode::Emit);
                    for _ in 0..3 {
                        output_cell_impl!('-', 1, "");
                    }
                    for _ in &labels {
                        output_cell_impl!('-', 2, "");
                    }
                }
                Row::Inst(block_index, inst_index) => {
                    debug_assert!(inst_index < self.num_insts());
                    if self.block_ranges.get(block_index).start == inst_index {
                        output_cell!("B{}", block_index);
                    } else {
                        output_cell!("");
                    }
                    output_cell!("Inst {inst_index} ");
                    output_cell!("{} ", inst_offsets[inst_index]);

                    for label in &labels {
                        use regalloc2::Inst;
                        let vreg_cmp = |inst: usize,
                                        vreg_label: &u32,
                                        range_start: &Inst,
                                        range_end: &Inst| {
                            match vreg_label.cmp(&label) {
                                Ordering::Equal => {
                                    if range_end.index() <= inst {
                                        Ordering::Less
                                    } else if range_start.index() > inst {
                                        Ordering::Greater
                                    } else {
                                        Ordering::Equal
                                    }
                                }
                                cmp => cmp,
                            }
                        };
                        let vreg_index =
                            vregs.binary_search_by(|(l, s, e, _)| vreg_cmp(inst_index, l, s, e));
                        if let Ok(vreg_index) = vreg_index {
                            let mut prev_vreg = None;
                            if inst_index > 0 {
                                let prev_vreg_index = vregs.binary_search_by(|(l, s, e, _)| {
                                    vreg_cmp(inst_index - 1, l, s, e)
                                });
                                if let Ok(prev_vreg_index) = prev_vreg_index {
                                    prev_vreg = Some(vregs[prev_vreg_index].3);
                                }
                            }

                            let vreg = vregs[vreg_index].3;
                            if Some(vreg) == prev_vreg {
                                output_cell!("*");
                            } else {
                                output_cell!("{}", vreg);
                            }
                        } else {
                            output_cell!("");
                        }

                        let inst_prog_point = ProgPoint::before(Inst::new(inst_index));
                        let range_index = regalloc.debug_locations.binary_search_by(
                            |(range_label, range_start, range_end, _)| match range_label.cmp(label)
                            {
                                Ordering::Equal => {
                                    if *range_end <= inst_prog_point {
                                        Ordering::Less
                                    } else if *range_start > inst_prog_point {
                                        Ordering::Greater
                                    } else {
                                        Ordering::Equal
                                    }
                                }
                                cmp => cmp,
                            },
                        );
                        if let Ok(range_index) = range_index {
                            if let Some(reg) = regalloc.debug_locations[range_index].3.as_reg() {
                                output_cell!("{:?}", Reg::from(reg));
                            } else {
                                output_cell!("Stk");
                            }
                        } else {
                            output_cell!("");
                        }
                    }
                }
            }
            row.push('|');

            if mode == Mode::Emit {
                trace!("{}", row.as_str());
            }
        };

        for block_index in 0..self.num_blocks() {
            for inst_index in self.block_ranges.get(block_index) {
                output_row(Row::Inst(block_index, inst_index), Mode::Measure);
            }
        }
        output_row(Row::Head, Mode::Measure);

        output_row(Row::Head, Mode::Emit);
        output_row(Row::Line, Mode::Emit);
        for block_index in 0..self.num_blocks() {
            for inst_index in self.block_ranges.get(block_index) {
                output_row(Row::Inst(block_index, inst_index), Mode::Emit);
            }
        }
    }

    /// Get the IR block corresponding to a lowered block index, if any.
    pub fn bindex_to_bb(&self, block: BlockIndex) -> Option<ir::Block> {
        self.block_order.lowered_order()[block.index()].orig_block()
    }

    /// Get the type of a VReg.
    pub fn vreg_type(&self, vreg: VReg) -> Type {
        self.vreg_types[vreg.vreg()]
    }

    /// Get the fact, if any, for a given VReg.
    pub fn vreg_fact(&self, vreg: VReg) -> Option<&Fact> {
        self.facts[vreg.vreg()].as_ref()
    }

    /// Set the fact for a given VReg.
    pub fn set_vreg_fact(&mut self, vreg: VReg, fact: Fact) {
        trace!("set fact on {}: {:?}", vreg, fact);
        self.facts[vreg.vreg()] = Some(fact);
    }

    /// Does a given instruction define any facts?
    pub fn inst_defines_facts(&self, inst: InsnIndex) -> bool {
        self.inst_operands(inst)
            .iter()
            .filter(|o| o.kind() == OperandKind::Def)
            .map(|o| o.vreg())
            .any(|vreg| self.facts[vreg.vreg()].is_some())
    }

    /// Get the user stack map, if any, associated with a forward instruction index.
    pub fn get_user_stack_map(&self, inst: InsnIndex) -> Option<&ir::UserStackMap> {
        let index = inst.to_backwards_insn_index(self.num_insts());
        self.user_stack_maps.get(&index)
    }
}

impl<I: VCodeInst> std::ops::Index<InsnIndex> for VCode<I> {
    type Output = I;
    fn index(&self, idx: InsnIndex) -> &Self::Output {
        &self.insts[idx.index()]
    }
}

impl<I: VCodeInst> RegallocFunction for VCode<I> {
    fn num_insts(&self) -> usize {
        self.insts.len()
    }

    fn num_blocks(&self) -> usize {
        self.block_ranges.len()
    }

    fn entry_block(&self) -> BlockIndex {
        self.entry
    }

    fn block_insns(&self, block: BlockIndex) -> InstRange {
        let range = self.block_ranges.get(block.index());
        InstRange::new(InsnIndex::new(range.start), InsnIndex::new(range.end))
    }

    fn block_succs(&self, block: BlockIndex) -> &[BlockIndex] {
        let range = self.block_succ_range.get(block.index());
        &self.block_succs[range]
    }

    fn block_preds(&self, block: BlockIndex) -> &[BlockIndex] {
        let range = self.block_pred_range.get(block.index());
        &self.block_preds[range]
    }

    fn block_params(&self, block: BlockIndex) -> &[VReg] {
        if block == self.entry {
            return &[];
        }

        let range = self.block_params_range.get(block.index());
        &self.block_params[range]
    }

    fn branch_blockparams(&self, block: BlockIndex, _insn: InsnIndex, succ_idx: usize) -> &[VReg] {
        let succ_range = self.branch_block_arg_succ_range.get(block.index());
        debug_assert!(succ_idx < succ_range.len());
        let branch_block_args = self.branch_block_arg_range.get(succ_range.start + succ_idx);
        &self.branch_block_args[branch_block_args]
    }

    fn is_ret(&self, insn: InsnIndex) -> bool {
        match self.insts[insn.index()].is_term() {
            MachTerminator::None => self.insts[insn.index()].is_trap(),
            MachTerminator::Ret | MachTerminator::RetCall => true,
            MachTerminator::Branch => false,
        }
    }

    fn is_branch(&self, insn: InsnIndex) -> bool {
        match self.insts[insn.index()].is_term() {
            MachTerminator::Branch => true,
            _ => false,
        }
    }

    fn inst_operands(&self, insn: InsnIndex) -> &[Operand] {
        let range = self.operand_ranges.get(insn.index());
        &self.operands[range]
    }

    fn inst_clobbers(&self, insn: InsnIndex) -> PRegSet {
        self.clobbers.get(&insn).cloned().unwrap_or_default()
    }

    fn num_vregs(&self) -> usize {
        self.vreg_types.len()
    }

    fn debug_value_labels(&self) -> &[(VReg, InsnIndex, InsnIndex, u32)] {
        &self.debug_value_labels
    }

    fn spillslot_size(&self, regclass: RegClass) -> usize {
        self.abi.get_spillslot_size(regclass) as usize
    }

    fn allow_multiple_vreg_defs(&self) -> bool {
        true
    }
}

impl<I: VCodeInst> Debug for VRegAllocator<I> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "VRegAllocator {{")?;

        let mut alias_keys = self.vreg_aliases.keys().cloned().collect::<Vec<_>>();
        alias_keys.sort_unstable();
        for key in alias_keys {
            let dest = self.vreg_aliases.get(&key).unwrap();
            writeln!(f, " {:?} := {:?}", Reg::from(key), Reg::from(*dest))?;
        }

        for (vreg, fact) in self.facts.iter().enumerate() {
            if let Some(fact) = fact {
                writeln!(f, " v{vreg} ! {fact}")?;
            }
        }

        writeln!(f, "}}")
    }
}

impl<I: VCodeInst> fmt::Debug for VCode<I> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "VCode {{")?;
        writeln!(f, " Entry block: {}", self.entry.index())?;

        let mut state = Default::default();

        for block in 0..self.num_blocks() {
            let block = BlockIndex::new(block);
            writeln!(
                f,
                "Block {}({:?}):",
                block.index(),
                self.block_params(block)
            )?;
            if let Some(bb) = self.bindex_to_bb(block) {
                writeln!(f, " (original IR block: {bb})")?;
            }
            for (succ_idx, succ) in self.block_succs(block).iter().enumerate() {
                writeln!(
                    f,
                    " (successor: Block {}({:?}))",
                    succ.index(),
                    self.branch_blockparams(block, InsnIndex::new(0), succ_idx)
                )?;
            }
            for inst in self.block_ranges.get(block.index()) {
                writeln!(
                    f,
                    " Inst {}: {}",
                    inst,
                    self.insts[inst].pretty_print_inst(&mut state)
                )?;
                if !self.operands.is_empty() {
                    for operand in self.inst_operands(InsnIndex::new(inst)) {
                        if operand.kind() == OperandKind::Def {
                            if let Some(fact) = &self.facts[operand.vreg().vreg()] {
                                writeln!(f, " v{} ! {}", operand.vreg().vreg(), fact)?;
                            }
                        }
                    }
                }
                if let Some(user_stack_map) = self.get_user_stack_map(InsnIndex::new(inst)) {
                    writeln!(f, " {user_stack_map:?}")?;
                }
            }
        }

        writeln!(f, "}}")?;
        Ok(())
    }
}

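/// This structure manages the allocation of virtual registers during
/// lowering, tracking their types, any aliases between them, and any
/// proof-carrying-code facts attached to them.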
pub struct VRegAllocator<I> {
    /// VReg IR-level types.
    vreg_types: Vec<Type>,

    /// VReg aliases. When the final VCode is built we rewrite all uses of the
    /// table keys with their corresponding values.
    vreg_aliases: FxHashMap<regalloc2::VReg, regalloc2::VReg>,

    /// A deferred error, to be bubbled up to the top level of the lowering
    /// algorithm.
    deferred_error: Option<CodegenError>,

    /// Facts on VRegs, for proof-carrying code.
    facts: Vec<Option<Fact>>,

    /// The type of instruction that this allocator makes registers for.
    _inst: core::marker::PhantomData<I>,
}

impl<I: VCodeInst> VRegAllocator<I> {
    pub fn with_capacity(capacity: usize) -> Self {
        let capacity = first_user_vreg_index() + capacity;
        let mut vreg_types = Vec::with_capacity(capacity);
        vreg_types.resize(first_user_vreg_index(), types::INVALID);
        Self {
            vreg_types,
            vreg_aliases: FxHashMap::with_capacity_and_hasher(capacity, Default::default()),
            deferred_error: None,
            facts: Vec::with_capacity(capacity),
            _inst: core::marker::PhantomData::default(),
        }
    }

    pub fn alloc(&mut self, ty: Type) -> CodegenResult<ValueRegs<Reg>> {
        if self.deferred_error.is_some() {
            return Err(CodegenError::CodeTooLarge);
        }
        let v = self.vreg_types.len();
        let (regclasses, tys) = I::rc_for_type(ty)?;
        if v + regclasses.len() >= VReg::MAX {
            return Err(CodegenError::CodeTooLarge);
        }

        let regs: ValueRegs<Reg> = match regclasses {
            &[rc0] => ValueRegs::one(VReg::new(v, rc0).into()),
            &[rc0, rc1] => ValueRegs::two(VReg::new(v, rc0).into(), VReg::new(v + 1, rc1).into()),
            _ => panic!("Value must reside in 1 or 2 registers"),
        };
        for (&reg_ty, &reg) in tys.iter().zip(regs.regs().iter()) {
            let vreg = reg.to_virtual_reg().unwrap();
            debug_assert_eq!(self.vreg_types.len(), vreg.index());
            self.vreg_types.push(reg_ty);
        }

        self.facts.resize(self.vreg_types.len(), None);

        Ok(regs)
    }

    pub fn alloc_with_deferred_error(&mut self, ty: Type) -> ValueRegs<Reg> {
        match self.alloc(ty) {
            Ok(x) => x,
            Err(e) => {
                self.deferred_error = Some(e);
                self.bogus_for_deferred_error(ty)
            }
        }
    }

    pub fn take_deferred_error(&mut self) -> Option<CodegenError> {
        self.deferred_error.take()
    }

    fn bogus_for_deferred_error(&self, ty: Type) -> ValueRegs<Reg> {
        let (regclasses, _tys) = I::rc_for_type(ty).expect("must have valid type");
        match regclasses {
            &[rc0] => ValueRegs::one(VReg::new(0, rc0).into()),
            &[rc0, rc1] => ValueRegs::two(VReg::new(0, rc0).into(), VReg::new(1, rc1).into()),
            _ => panic!("Value must reside in 1 or 2 registers"),
        }
    }

    pub fn set_vreg_alias(&mut self, from: Reg, to: Reg) {
        let from = from.into();
        let resolved_to = self.resolve_vreg_alias(to.into());
        assert_ne!(resolved_to, from);

        if let Some(fact) = self.facts[from.vreg()].take() {
            self.set_fact(resolved_to, fact);
        }

        let old_alias = self.vreg_aliases.insert(from, resolved_to);
        debug_assert_eq!(old_alias, None);
    }

    fn resolve_vreg_alias(&self, mut vreg: regalloc2::VReg) -> regalloc2::VReg {
        while let Some(to) = self.vreg_aliases.get(&vreg) {
            vreg = *to;
        }
        vreg
    }

    #[inline]
    fn debug_assert_no_vreg_aliases(&self, mut list: impl Iterator<Item = VReg>) {
        debug_assert!(list.all(|vreg| !self.vreg_aliases.contains_key(&vreg)));
    }

    fn set_fact(&mut self, vreg: regalloc2::VReg, fact: Fact) -> Option<Fact> {
        trace!("vreg {:?} has fact: {:?}", vreg, fact);
        debug_assert!(!self.vreg_aliases.contains_key(&vreg));
        self.facts[vreg.vreg()].replace(fact)
    }

    pub fn set_fact_if_missing(&mut self, vreg: VirtualReg, fact: Fact) {
        let vreg = self.resolve_vreg_alias(vreg.into());
        if self.facts[vreg.vreg()].is_none() {
            self.set_fact(vreg, fact);
        }
    }

    pub fn alloc_with_maybe_fact(
        &mut self,
        ty: Type,
        fact: Option<Fact>,
    ) -> CodegenResult<ValueRegs<Reg>> {
        let result = self.alloc(ty)?;

        assert!(result.len() == 1 || fact.is_none());
        if let Some(fact) = fact {
            self.set_fact(result.regs()[0].into(), fact);
        }

        Ok(result)
    }
}

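/// This structure tracks the large constants used in VCode that will be
/// emitted separately by the `MachBuffer`.
///
/// During lowering, constants are registered with `VCodeConstants::insert`,
/// which hands back a `VCodeConstant` handle for use in lowered instructions
/// and de-duplicates pool, well-known, and u64 constants where possible.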
#[derive(Default)]
pub struct VCodeConstants {
    constants: PrimaryMap<VCodeConstant, VCodeConstantData>,
    pool_uses: HashMap<Constant, VCodeConstant>,
    well_known_uses: HashMap<*const [u8], VCodeConstant>,
    u64s: HashMap<[u8; 8], VCodeConstant>,
}
impl VCodeConstants {
    pub fn with_capacity(expected_num_constants: usize) -> Self {
        Self {
            constants: PrimaryMap::with_capacity(expected_num_constants),
            pool_uses: HashMap::with_capacity(expected_num_constants),
            well_known_uses: HashMap::new(),
            u64s: HashMap::new(),
        }
    }

    pub fn insert(&mut self, data: VCodeConstantData) -> VCodeConstant {
        match data {
            VCodeConstantData::Generated(_) => self.constants.push(data),
            VCodeConstantData::Pool(constant, _) => match self.pool_uses.get(&constant) {
                None => {
                    let vcode_constant = self.constants.push(data);
                    self.pool_uses.insert(constant, vcode_constant);
                    vcode_constant
                }
                Some(&vcode_constant) => vcode_constant,
            },
            VCodeConstantData::WellKnown(data_ref) => {
                match self.well_known_uses.entry(data_ref as *const [u8]) {
                    Entry::Vacant(v) => {
                        let vcode_constant = self.constants.push(data);
                        v.insert(vcode_constant);
                        vcode_constant
                    }
                    Entry::Occupied(o) => *o.get(),
                }
            }
            VCodeConstantData::U64(value) => match self.u64s.entry(value) {
                Entry::Vacant(v) => {
                    let vcode_constant = self.constants.push(data);
                    v.insert(vcode_constant);
                    vcode_constant
                }
                Entry::Occupied(o) => *o.get(),
            },
        }
    }

    pub fn len(&self) -> usize {
        self.constants.len()
    }

    pub fn keys(&self) -> Keys<VCodeConstant> {
        self.constants.keys()
    }

    pub fn iter(&self) -> impl Iterator<Item = (VCodeConstant, &VCodeConstantData)> {
        self.constants.iter()
    }

    pub fn get(&self, c: VCodeConstant) -> &VCodeConstantData {
        &self.constants[c]
    }

    pub fn pool_uses(&self, constant: &VCodeConstantData) -> bool {
        match constant {
            VCodeConstantData::Pool(c, _) => self.pool_uses.contains_key(c),
            _ => false,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct VCodeConstant(u32);
entity_impl!(VCodeConstant);

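/// Identify the different types of constant that can be inserted into
/// `VCodeConstants`.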
pub enum VCodeConstantData {
    /// A constant already present in the Cranelift IR constant pool, along
    /// with its data.
    Pool(Constant, ConstantData),
    /// A reference to a well-known constant value that is statically encoded
    /// within the compiler.
    WellKnown(&'static [u8]),
    /// A constant value generated during lowering.
    Generated(ConstantData),
    /// A constant of at most 64 bits, stored as an 8-byte array.
    U64([u8; 8]),
}
impl VCodeConstantData {
    pub fn as_slice(&self) -> &[u8] {
        match self {
            VCodeConstantData::Pool(_, d) | VCodeConstantData::Generated(d) => d.as_slice(),
            VCodeConstantData::WellKnown(d) => d,
            VCodeConstantData::U64(value) => &value[..],
        }
    }

    pub fn alignment(&self) -> u32 {
        if self.as_slice().len() <= 8 { 8 } else { 16 }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use std::mem::size_of;

    #[test]
    fn size_of_constant_structs() {
        assert_eq!(size_of::<Constant>(), 4);
        assert_eq!(size_of::<VCodeConstant>(), 4);
        assert_eq!(size_of::<ConstantData>(), 3 * size_of::<usize>());
        assert_eq!(size_of::<VCodeConstantData>(), 4 * size_of::<usize>());
        assert_eq!(
            size_of::<PrimaryMap<VCodeConstant, VCodeConstantData>>(),
            3 * size_of::<usize>()
        );
    }
}