use crate::ir::pcc::*;
use crate::ir::{self, types, Constant, ConstantData, ValueLabel};
use crate::ranges::Ranges;
use crate::timing;
use crate::trace;
use crate::CodegenError;
use crate::{machinst::*, trace_log_enabled};
use crate::{LabelValueLoc, ValueLocRange};
use regalloc2::{
    Edit, Function as RegallocFunction, InstOrEdit, InstPosition, InstRange, MachineEnv, Operand,
    OperandConstraint, OperandKind, PRegSet, ProgPoint, RegClass,
};
use rustc_hash::FxHashMap;

use core::cmp::Ordering;
use core::fmt::{self, Write};
use core::mem::take;
use cranelift_entity::{entity_impl, Keys};
use std::collections::hash_map::Entry;
use std::collections::HashMap;

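/// Index referring to an instruction in VCode.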
pub type InsnIndex = regalloc2::Inst;

trait ToBackwardsInsnIndex {
    fn to_backwards_insn_index(&self, num_insts: usize) -> BackwardsInsnIndex;
}

impl ToBackwardsInsnIndex for InsnIndex {
    fn to_backwards_insn_index(&self, num_insts: usize) -> BackwardsInsnIndex {
        BackwardsInsnIndex::new(num_insts - self.index() - 1)
    }
}

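/// An index referring to an instruction in the VCode while it is still stored
/// in backwards order, i.e. during VCode construction.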
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "enable-serde",
    derive(::serde::Serialize, ::serde::Deserialize)
)]
pub struct BackwardsInsnIndex(InsnIndex);

impl BackwardsInsnIndex {
    pub fn new(i: usize) -> Self {
        BackwardsInsnIndex(InsnIndex::new(i))
    }
}

pub type BlockIndex = regalloc2::Block;

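/// VCodeInst wraps the requirements for an instruction type to appear in
/// VCode: it must be a lowered machine instruction that also knows how to
/// emit itself into a `MachBuffer`.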
pub trait VCodeInst: MachInst + MachInstEmit {}
impl<I: MachInst + MachInstEmit> VCodeInst for I {}

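/// A function body in "VCode" (virtual-register code) form, after lowering.
/// This is essentially a CFG of basic blocks in lowered order, where each
/// block consists of machine instructions that refer to virtual registers
/// until register allocation assigns physical registers and spill slots.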
pub struct VCode<I: VCodeInst> {
    vreg_types: Vec<Type>,
    insts: Vec<I>,
    user_stack_maps: FxHashMap<BackwardsInsnIndex, ir::UserStackMap>,
    operands: Vec<Operand>,
    operand_ranges: Ranges,
    clobbers: FxHashMap<InsnIndex, PRegSet>,
    srclocs: Vec<RelSourceLoc>,
    entry: BlockIndex,
    block_ranges: Ranges,
    block_succ_range: Ranges,
    block_succs: Vec<regalloc2::Block>,
    block_pred_range: Ranges,
    block_preds: Vec<regalloc2::Block>,
    block_params_range: Ranges,
    block_params: Vec<regalloc2::VReg>,
    branch_block_args: Vec<regalloc2::VReg>,
    branch_block_arg_range: Ranges,
    branch_block_arg_succ_range: Ranges,
    block_order: BlockLoweringOrder,
    pub(crate) abi: Callee<I::ABIMachineSpec>,
    emit_info: I::Info,
    pub(crate) constants: VCodeConstants,
    debug_value_labels: Vec<(VReg, InsnIndex, InsnIndex, u32)>,
    pub(crate) sigs: SigSet,
    facts: Vec<Option<Fact>>,
    log2_min_function_alignment: u8,
}

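/// The result of `VCode::emit`: the finalized machine-code buffer plus the
/// metadata produced alongside it (basic-block offsets and edges, function
/// body length, optional disassembly, stack-slot offsets, value-label debug
/// ranges, and the final frame size).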
pub struct EmitResult {
    pub buffer: MachBufferFinalized<Stencil>,
    pub bb_offsets: Vec<CodeOffset>,
    pub bb_edges: Vec<(CodeOffset, CodeOffset)>,
    pub func_body_len: CodeOffset,
    pub disasm: Option<String>,
    pub sized_stackslot_offsets: PrimaryMap<StackSlot, u32>,
    pub dynamic_stackslot_offsets: PrimaryMap<DynamicStackSlot, u32>,
    pub value_labels_ranges: ValueLabelsRanges,
    pub frame_size: u32,
}

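/// A builder for a `VCode` function body. Accumulates instructions, block
/// boundaries, block parameters, and value-label debug info (possibly in
/// reverse order, see `VCodeBuildDirection`), and produces the finished
/// `VCode` via `build`.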
pub struct VCodeBuilder<I: VCodeInst> {
    pub(crate) vcode: VCode<I>,
    direction: VCodeBuildDirection,
    debug_info: FxHashMap<ValueLabel, Vec<(InsnIndex, InsnIndex, VReg)>>,
}

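/// The direction in which a `VCodeBuilder` builds VCode. Only backward
/// building (instructions pushed in reverse order and re-reversed at `build`
/// time) is represented by this enum.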
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum VCodeBuildDirection {
    Backward,
}

impl<I: VCodeInst> VCodeBuilder<I> {
    pub fn new(
        sigs: SigSet,
        abi: Callee<I::ABIMachineSpec>,
        emit_info: I::Info,
        block_order: BlockLoweringOrder,
        constants: VCodeConstants,
        direction: VCodeBuildDirection,
        log2_min_function_alignment: u8,
    ) -> Self {
        let vcode = VCode::new(
            sigs,
            abi,
            emit_info,
            block_order,
            constants,
            log2_min_function_alignment,
        );

        VCodeBuilder {
            vcode,
            direction,
            debug_info: FxHashMap::default(),
        }
    }

    pub fn init_retval_area(&mut self, vregs: &mut VRegAllocator<I>) -> CodegenResult<()> {
        self.vcode.abi.init_retval_area(&self.vcode.sigs, vregs)
    }

    pub fn abi(&self) -> &Callee<I::ABIMachineSpec> {
        &self.vcode.abi
    }

    pub fn abi_mut(&mut self) -> &mut Callee<I::ABIMachineSpec> {
        &mut self.vcode.abi
    }

    pub fn sigs(&self) -> &SigSet {
        &self.vcode.sigs
    }

    pub fn sigs_mut(&mut self) -> &mut SigSet {
        &mut self.vcode.sigs
    }

    pub fn block_order(&self) -> &BlockLoweringOrder {
        &self.vcode.block_order
    }

    pub fn set_entry(&mut self, block: BlockIndex) {
        self.vcode.entry = block;
    }

    pub fn end_bb(&mut self) {
        let end_idx = self.vcode.insts.len();
        self.vcode.block_ranges.push_end(end_idx);
        let succ_end = self.vcode.block_succs.len();
        self.vcode.block_succ_range.push_end(succ_end);
        let block_params_end = self.vcode.block_params.len();
        self.vcode.block_params_range.push_end(block_params_end);
        let branch_block_arg_succ_end = self.vcode.branch_block_arg_range.len();
        self.vcode
            .branch_block_arg_succ_range
            .push_end(branch_block_arg_succ_end);
    }

    pub fn add_block_param(&mut self, param: VirtualReg) {
        self.vcode.block_params.push(param.into());
    }

    fn add_branch_args_for_succ(&mut self, args: &[Reg]) {
        self.vcode
            .branch_block_args
            .extend(args.iter().map(|&arg| VReg::from(arg)));
        let end = self.vcode.branch_block_args.len();
        self.vcode.branch_block_arg_range.push_end(end);
    }

    pub fn push(&mut self, insn: I, loc: RelSourceLoc) {
        assert!(!insn.is_low_level_branch());
        self.vcode.insts.push(insn);
        self.vcode.srclocs.push(loc);
    }

    pub fn add_succ(&mut self, block: BlockIndex, args: &[Reg]) {
        self.vcode.block_succs.push(block);
        self.add_branch_args_for_succ(args);
    }

    pub fn add_value_label(&mut self, reg: Reg, label: ValueLabel) {
        let next_inst_index = self.vcode.insts.len();
        if next_inst_index == 0 {
            return;
        }
        let next_inst = InsnIndex::new(next_inst_index);
        let labels = self.debug_info.entry(label).or_insert_with(|| vec![]);
        let last = labels
            .last()
            .map(|(_start, end, _vreg)| *end)
            .unwrap_or(InsnIndex::new(0));
        labels.push((last, next_inst, reg.into()));
    }

    pub fn constants(&mut self) -> &mut VCodeConstants {
        &mut self.vcode.constants
    }
    fn compute_preds_from_succs(&mut self) {
        let mut starts = vec![0u32; self.vcode.num_blocks()];
        for succ in &self.vcode.block_succs {
            starts[succ.index()] += 1;
        }

        self.vcode.block_pred_range.reserve(starts.len());
        let mut end = 0;
        for count in starts.iter_mut() {
            let start = end;
            end += *count;
            *count = start;
            self.vcode.block_pred_range.push_end(end as usize);
        }
        let end = end as usize;
        debug_assert_eq!(end, self.vcode.block_succs.len());

        self.vcode.block_preds.resize(end, BlockIndex::invalid());
        for (pred, range) in self.vcode.block_succ_range.iter() {
            let pred = BlockIndex::new(pred);
            for succ in &self.vcode.block_succs[range] {
                let pos = &mut starts[succ.index()];
                self.vcode.block_preds[*pos as usize] = pred;
                *pos += 1;
            }
        }
        debug_assert!(self.vcode.block_preds.iter().all(|pred| pred.is_valid()));
    }

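    /// Called when a backward-built function body is complete: reverses the
    /// instruction order and the per-block index ranges so that they describe
    /// the final forward order, and translates the collected value-label
    /// debug info into forward instruction indices.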
    fn reverse_and_finalize(&mut self, vregs: &VRegAllocator<I>) {
        let n_insts = self.vcode.insts.len();
        if n_insts == 0 {
            return;
        }

        self.vcode.block_ranges.reverse_index();
        self.vcode.block_ranges.reverse_target(n_insts);
        self.vcode.block_params_range.reverse_index();
        self.vcode.block_succ_range.reverse_index();
        self.vcode.insts.reverse();
        self.vcode.srclocs.reverse();
        self.vcode.branch_block_arg_succ_range.reverse_index();

        let translate = |inst: InsnIndex| InsnIndex::new(n_insts - inst.index());

        for (label, tuples) in &self.debug_info {
            for &(start, end, vreg) in tuples {
                let vreg = vregs.resolve_vreg_alias(vreg);
                let fwd_start = translate(end);
                let fwd_end = translate(start);
                self.vcode
                    .debug_value_labels
                    .push((vreg, fwd_start, fwd_end, label.as_u32()));
            }
        }

        self.vcode
            .debug_value_labels
            .sort_unstable_by_key(|(vreg, _, _, _)| *vreg);
    }

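    /// Walk every instruction, gather its register operands (with vreg
    /// aliases resolved) into the flat `operands` array, and record per-inst
    /// operand ranges and clobber sets for the register allocator.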
    fn collect_operands(&mut self, vregs: &VRegAllocator<I>) {
        let allocatable = PRegSet::from(self.vcode.machine_env());
        for (i, insn) in self.vcode.insts.iter_mut().enumerate() {
            let mut op_collector =
                OperandCollector::new(&mut self.vcode.operands, allocatable, |vreg| {
                    vregs.resolve_vreg_alias(vreg)
                });
            insn.get_operands(&mut op_collector);
            let (ops, clobbers) = op_collector.finish();
            self.vcode.operand_ranges.push_end(ops);

            if clobbers != PRegSet::default() {
                self.vcode.clobbers.insert(InsnIndex::new(i), clobbers);
            }

            if let Some((dst, src)) = insn.is_move() {
                assert!(
                    src.is_virtual(),
                    "the real register {src:?} was used as the source of a move instruction"
                );
                assert!(
                    dst.to_reg().is_virtual(),
                    "the real register {:?} was used as the destination of a move instruction",
                    dst.to_reg()
                );
            }
        }

        for arg in &mut self.vcode.branch_block_args {
            let new_arg = vregs.resolve_vreg_alias(*arg);
            trace!("operandcollector: block arg {:?} -> {:?}", arg, new_arg);
            *arg = new_arg;
        }
    }

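    /// Consume the builder and produce the final `VCode`: take ownership of
    /// the vreg type and fact tables, reverse the body if it was built
    /// backward, collect operands, and compute the predecessor lists.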
    pub fn build(mut self, mut vregs: VRegAllocator<I>) -> VCode<I> {
        self.vcode.vreg_types = take(&mut vregs.vreg_types);
        self.vcode.facts = take(&mut vregs.facts);

        if self.direction == VCodeBuildDirection::Backward {
            self.reverse_and_finalize(&vregs);
        }
        self.collect_operands(&vregs);

        self.compute_preds_from_succs();
        self.vcode.debug_value_labels.sort_unstable();

        vregs.debug_assert_no_vreg_aliases(self.vcode.operands.iter().map(|op| op.vreg()));
        vregs.debug_assert_no_vreg_aliases(self.vcode.block_params.iter().copied());
        vregs.debug_assert_no_vreg_aliases(self.vcode.branch_block_args.iter().copied());
        vregs.debug_assert_no_vreg_aliases(
            self.vcode.debug_value_labels.iter().map(|&(vreg, ..)| vreg),
        );
        vregs.debug_assert_no_vreg_aliases(
            self.vcode
                .facts
                .iter()
                .zip(&vregs.vreg_types)
                .enumerate()
                .filter(|(_, (fact, _))| fact.is_some())
                .map(|(vreg, (_, &ty))| {
                    let (regclasses, _) = I::rc_for_type(ty).unwrap();
                    VReg::new(vreg, regclasses[0])
                }),
        );

        self.vcode
    }

    pub fn add_user_stack_map(
        &mut self,
        inst: BackwardsInsnIndex,
        entries: &[ir::UserStackMapEntry],
    ) {
        let stack_map = ir::UserStackMap::new(entries, self.vcode.abi.sized_stackslot_offsets());
        let old_entry = self.vcode.user_stack_maps.insert(inst, stack_map);
        debug_assert!(old_entry.is_none());
    }
}

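/// Sentinel code offset used in `inst_offsets` for instructions whose offset
/// has not been (or will never be) recorded.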
const NO_INST_OFFSET: CodeOffset = u32::MAX;

impl<I: VCodeInst> VCode<I> {
    fn new(
        sigs: SigSet,
        abi: Callee<I::ABIMachineSpec>,
        emit_info: I::Info,
        block_order: BlockLoweringOrder,
        constants: VCodeConstants,
        log2_min_function_alignment: u8,
    ) -> Self {
        let n_blocks = block_order.lowered_order().len();
        VCode {
            sigs,
            vreg_types: vec![],
            insts: Vec::with_capacity(10 * n_blocks),
            user_stack_maps: FxHashMap::default(),
            operands: Vec::with_capacity(30 * n_blocks),
            operand_ranges: Ranges::with_capacity(10 * n_blocks),
            clobbers: FxHashMap::default(),
            srclocs: Vec::with_capacity(10 * n_blocks),
            entry: BlockIndex::new(0),
            block_ranges: Ranges::with_capacity(n_blocks),
            block_succ_range: Ranges::with_capacity(n_blocks),
            block_succs: Vec::with_capacity(n_blocks),
            block_pred_range: Ranges::default(),
            block_preds: Vec::new(),
            block_params_range: Ranges::with_capacity(n_blocks),
            block_params: Vec::with_capacity(5 * n_blocks),
            branch_block_args: Vec::with_capacity(10 * n_blocks),
            branch_block_arg_range: Ranges::with_capacity(2 * n_blocks),
            branch_block_arg_succ_range: Ranges::with_capacity(n_blocks),
            block_order,
            abi,
            emit_info,
            constants,
            debug_value_labels: vec![],
            facts: vec![],
            log2_min_function_alignment,
        }
    }

    pub fn machine_env(&self) -> &MachineEnv {
        self.abi.machine_env(&self.sigs)
    }

    pub fn num_blocks(&self) -> usize {
        self.block_ranges.len()
    }

    pub fn num_insts(&self) -> usize {
        self.insts.len()
    }

    fn compute_clobbers(&self, regalloc: &regalloc2::Output) -> Vec<Writable<RealReg>> {
        let mut clobbered = PRegSet::default();

        for (_, Edit::Move { to, .. }) in &regalloc.edits {
            if let Some(preg) = to.as_reg() {
                clobbered.add(preg);
            }
        }

        for (i, range) in self.operand_ranges.iter() {
            let operands = &self.operands[range.clone()];
            let allocs = &regalloc.allocs[range];
            for (operand, alloc) in operands.iter().zip(allocs.iter()) {
                if operand.kind() == OperandKind::Def {
                    if let Some(preg) = alloc.as_reg() {
                        clobbered.add(preg);
                    }
                }
            }

            if self.insts[i].is_included_in_clobbers() {
                if let Some(&inst_clobbered) = self.clobbers.get(&InsnIndex::new(i)) {
                    clobbered.union_from(inst_clobbered);
                }
            }
        }

        clobbered
            .into_iter()
            .map(|preg| Writable::from_reg(RealReg::from(preg)))
            .collect()
    }

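    /// Emit the instructions (with the register allocator's results applied)
    /// into a `MachBuffer`: insert the prologue/epilogue, regalloc
    /// move/spill/reload edits, islands, and optional block padding, and
    /// collect per-block offsets, CFG edge metadata, and value-label debug
    /// ranges into an `EmitResult`.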
    pub fn emit(
        mut self,
        regalloc: &regalloc2::Output,
        want_disasm: bool,
        flags: &settings::Flags,
        ctrl_plane: &mut ControlPlane,
    ) -> EmitResult
    where
        I: VCodeInst,
    {
        let _tt = timing::vcode_emit();
        let mut buffer = MachBuffer::new();
        buffer.set_log2_min_function_alignment(self.log2_min_function_alignment);
        let mut bb_starts: Vec<Option<CodeOffset>> = vec![];

        buffer.reserve_labels_for_blocks(self.num_blocks());

        buffer.register_constants(&self.constants);

        let mut final_order: SmallVec<[BlockIndex; 16]> = smallvec![];
        let mut cold_blocks: SmallVec<[BlockIndex; 16]> = smallvec![];
        for block in 0..self.num_blocks() {
            let block = BlockIndex::new(block);
            if self.block_order.is_cold(block) {
                cold_blocks.push(block);
            } else {
                final_order.push(block);
            }
        }
        final_order.extend(cold_blocks.clone());

        let clobbers = self.compute_clobbers(regalloc);
        self.abi
            .compute_frame_layout(&self.sigs, regalloc.num_spillslots, clobbers);

        let mut cur_srcloc = None;
        let mut last_offset = None;
        let mut inst_offsets = vec![];
        let mut state = I::State::new(&self.abi, std::mem::take(ctrl_plane));

        let mut disasm = String::new();

        if !self.debug_value_labels.is_empty() {
            inst_offsets.resize(self.insts.len(), NO_INST_OFFSET);
        }

        let mut ra_edits_per_block: SmallVec<[u32; 64]> = smallvec![];
        let mut edit_idx = 0;
        for block in 0..self.num_blocks() {
            let end_inst = InsnIndex::new(self.block_ranges.get(block).end);
            let start_edit_idx = edit_idx;
            while edit_idx < regalloc.edits.len() && regalloc.edits[edit_idx].0.inst() < end_inst {
                edit_idx += 1;
            }
            let end_edit_idx = edit_idx;
            ra_edits_per_block.push((end_edit_idx - start_edit_idx) as u32);
        }

        let is_forward_edge_cfi_enabled = self.abi.is_forward_edge_cfi_enabled();
        let mut bb_padding = match flags.bb_padding_log2_minus_one() {
            0 => Vec::new(),
            n => vec![0; 1 << (n - 1)],
        };
        let mut total_bb_padding = 0;

        for (block_order_idx, &block) in final_order.iter().enumerate() {
            trace!("emitting block {:?}", block);

            state.on_new_block();

            let new_offset = I::align_basic_block(buffer.cur_offset());
            while new_offset > buffer.cur_offset() {
                let nop = I::gen_nop((new_offset - buffer.cur_offset()) as usize);
                nop.emit(&mut buffer, &self.emit_info, &mut Default::default());
            }
            assert_eq!(buffer.cur_offset(), new_offset);

            let do_emit = |inst: &I,
                           disasm: &mut String,
                           buffer: &mut MachBuffer<I>,
                           state: &mut I::State| {
                if want_disasm && !inst.is_args() {
                    let mut s = state.clone();
                    writeln!(disasm, " {}", inst.pretty_print_inst(&mut s)).unwrap();
                }
                inst.emit(buffer, &self.emit_info, state);
            };

            if block == self.entry {
                trace!(" -> entry block");
                buffer.start_srcloc(Default::default());
                for inst in &self.abi.gen_prologue() {
                    do_emit(&inst, &mut disasm, &mut buffer, &mut state);
                }
                buffer.end_srcloc();
            }

            buffer.bind_label(MachLabel::from_block(block), state.ctrl_plane_mut());

            if want_disasm {
                writeln!(&mut disasm, "block{}:", block.index()).unwrap();
            }

            if flags.machine_code_cfg_info() {
                let cur_offset = buffer.cur_offset();
                if last_offset.is_some() && cur_offset <= last_offset.unwrap() {
                    for i in (0..bb_starts.len()).rev() {
                        if bb_starts[i].is_some() && cur_offset > bb_starts[i].unwrap() {
                            break;
                        }
                        bb_starts[i] = None;
                    }
                }
                bb_starts.push(Some(cur_offset));
                last_offset = Some(cur_offset);
            }

            if let Some(block_start) = I::gen_block_start(
                self.block_order.is_indirect_branch_target(block),
                is_forward_edge_cfi_enabled,
            ) {
                do_emit(&block_start, &mut disasm, &mut buffer, &mut state);
            }

            for inst_or_edit in regalloc.block_insts_and_edits(&self, block) {
                match inst_or_edit {
                    InstOrEdit::Inst(iix) => {
                        if !self.debug_value_labels.is_empty() {
                            if !self.block_order.is_cold(block) {
                                inst_offsets[iix.index()] = buffer.cur_offset();
                            }
                        }

                        let srcloc = self.srclocs[iix.index()];
                        if cur_srcloc != Some(srcloc) {
                            if cur_srcloc.is_some() {
                                buffer.end_srcloc();
                            }
                            buffer.start_srcloc(srcloc);
                            cur_srcloc = Some(srcloc);
                        }

                        let stack_map_disasm = if self.insts[iix.index()].is_safepoint() {
                            let (user_stack_map, user_stack_map_disasm) = {
                                let index = iix.to_backwards_insn_index(self.num_insts());
                                let user_stack_map = self.user_stack_maps.remove(&index);
                                let user_stack_map_disasm =
                                    user_stack_map.as_ref().map(|m| format!(" ; {m:?}"));
                                (user_stack_map, user_stack_map_disasm)
                            };

                            state.pre_safepoint(user_stack_map);

                            user_stack_map_disasm
                        } else {
                            None
                        };

                        if self.insts[iix.index()].is_term() == MachTerminator::Ret {
                            for inst in self.abi.gen_epilogue() {
                                do_emit(&inst, &mut disasm, &mut buffer, &mut state);
                            }
                        } else {
                            let mut allocs = regalloc.inst_allocs(iix).iter();
                            self.insts[iix.index()].get_operands(
                                &mut |reg: &mut Reg, constraint, _kind, _pos| {
                                    let alloc =
                                        allocs.next().expect("enough allocations for all operands");

                                    if let Some(alloc) = alloc.as_reg() {
                                        let alloc: Reg = alloc.into();
                                        if let OperandConstraint::FixedReg(rreg) = constraint {
                                            debug_assert_eq!(Reg::from(rreg), alloc);
                                        }
                                        *reg = alloc;
                                    } else if let Some(alloc) = alloc.as_stack() {
                                        let alloc: Reg = alloc.into();
                                        *reg = alloc;
                                    }
                                },
                            );
                            debug_assert!(allocs.next().is_none());

                            do_emit(
                                &self.insts[iix.index()],
                                &mut disasm,
                                &mut buffer,
                                &mut state,
                            );
                            if let Some(stack_map_disasm) = stack_map_disasm {
                                disasm.push_str(&stack_map_disasm);
                                disasm.push('\n');
                            }
                        }
                    }

                    InstOrEdit::Edit(Edit::Move { from, to }) => {
                        match (from.as_reg(), to.as_reg()) {
                            (Some(from), Some(to)) => {
                                let from_rreg = Reg::from(from);
                                let to_rreg = Writable::from_reg(Reg::from(to));
                                debug_assert_eq!(from.class(), to.class());
                                let ty = I::canonical_type_for_rc(from.class());
                                let mv = I::gen_move(to_rreg, from_rreg, ty);
                                do_emit(&mv, &mut disasm, &mut buffer, &mut state);
                            }
                            (Some(from), None) => {
                                let to = to.as_stack().unwrap();
                                let from_rreg = RealReg::from(from);
                                let spill = self.abi.gen_spill(to, from_rreg);
                                do_emit(&spill, &mut disasm, &mut buffer, &mut state);
                            }
                            (None, Some(to)) => {
                                let from = from.as_stack().unwrap();
                                let to_rreg = Writable::from_reg(RealReg::from(to));
                                let reload = self.abi.gen_reload(to_rreg, from);
                                do_emit(&reload, &mut disasm, &mut buffer, &mut state);
                            }
                            (None, None) => {
                                panic!("regalloc2 should have eliminated stack-to-stack moves!");
                            }
                        }
                    }
                }
            }

            if cur_srcloc.is_some() {
                buffer.end_srcloc();
                cur_srcloc = None;
            }

            let worst_case_next_bb = if block_order_idx < final_order.len() - 1 {
                let next_block = final_order[block_order_idx + 1];
                let next_block_range = self.block_ranges.get(next_block.index());
                let next_block_size = next_block_range.len() as u32;
                let next_block_ra_insertions = ra_edits_per_block[next_block.index()];
                I::worst_case_size() * (next_block_size + next_block_ra_insertions)
            } else {
                0
            };
            let padding = if bb_padding.is_empty() {
                0
            } else {
                bb_padding.len() as u32 + I::LabelUse::ALIGN - 1
            };
            if buffer.island_needed(padding + worst_case_next_bb) {
                buffer.emit_island(padding + worst_case_next_bb, ctrl_plane);
            }

            if !bb_padding.is_empty() {
                buffer.put_data(&bb_padding);
                buffer.align_to(I::LabelUse::ALIGN);
                total_bb_padding += bb_padding.len();
                if total_bb_padding > (150 << 20) {
                    bb_padding = Vec::new();
                }
            }
        }

        debug_assert!(
            self.user_stack_maps.is_empty(),
            "any stack maps should have been consumed by instruction emission, still have: {:#?}",
            self.user_stack_maps,
        );

        buffer.optimize_branches(ctrl_plane);

        *ctrl_plane = state.take_ctrl_plane();

        let func_body_len = buffer.cur_offset();

        let mut bb_edges = vec![];
        let mut bb_offsets = vec![];
        if flags.machine_code_cfg_info() {
            for block in 0..self.num_blocks() {
                if bb_starts[block].is_none() {
                    continue;
                }
                let from = bb_starts[block].unwrap();

                bb_offsets.push(from);
                let succs = self.block_succs(BlockIndex::new(block));
                for &succ in succs.iter() {
                    let to = buffer.resolve_label_offset(MachLabel::from_block(succ));
                    bb_edges.push((from, to));
                }
            }
        }

        self.monotonize_inst_offsets(&mut inst_offsets[..], func_body_len);
        let value_labels_ranges =
            self.compute_value_labels_ranges(regalloc, &inst_offsets[..], func_body_len);
        let frame_size = self.abi.frame_size();

        EmitResult {
            buffer: buffer.finish(&self.constants, ctrl_plane),
            bb_offsets,
            bb_edges,
            func_body_len,
            disasm: if want_disasm { Some(disasm) } else { None },
            sized_stackslot_offsets: self.abi.sized_stackslot_offsets().clone(),
            dynamic_stackslot_offsets: self.abi.dynamic_stackslot_offsets().clone(),
            value_labels_ranges,
            frame_size,
        }
    }

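    /// Ensure the recorded instruction offsets are monotonically
    /// non-decreasing by scanning backward and clamping any offset that
    /// exceeds the offset of a later instruction (which can happen when
    /// instructions are removed during emission).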
    fn monotonize_inst_offsets(&self, inst_offsets: &mut [CodeOffset], func_body_len: u32) {
        if self.debug_value_labels.is_empty() {
            return;
        }

        let mut next_offset = func_body_len;
        for inst_index in (0..(inst_offsets.len() - 1)).rev() {
            let inst_offset = inst_offsets[inst_index];

            if inst_offset == NO_INST_OFFSET {
                continue;
            }

            if inst_offset > next_offset {
                trace!(
                    "Fixing code offset of the removed Inst {}: {} -> {}",
                    inst_index,
                    inst_offset,
                    next_offset
                );
                inst_offsets[inst_index] = next_offset;
                continue;
            }

            next_offset = inst_offset;
        }
    }

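    /// Translate regalloc2's per-label debug locations (given as program
    /// points and allocations) into `ValueLabelsRanges` keyed by code offset,
    /// merging adjacent ranges that refer to the same location.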
    fn compute_value_labels_ranges(
        &self,
        regalloc: &regalloc2::Output,
        inst_offsets: &[CodeOffset],
        func_body_len: u32,
    ) -> ValueLabelsRanges {
        if self.debug_value_labels.is_empty() {
            return ValueLabelsRanges::default();
        }

        if trace_log_enabled!() {
            self.log_value_labels_ranges(regalloc, inst_offsets);
        }

        let mut value_labels_ranges: ValueLabelsRanges = HashMap::new();
        for &(label, from, to, alloc) in &regalloc.debug_locations {
            let label = ValueLabel::from_u32(label);
            let ranges = value_labels_ranges.entry(label).or_insert_with(|| vec![]);
            let prog_point_to_inst = |prog_point: ProgPoint| {
                let mut inst = prog_point.inst();
                if prog_point.pos() == InstPosition::After {
                    inst = inst.next();
                }
                inst.index()
            };
            let from_inst_index = prog_point_to_inst(from);
            let to_inst_index = prog_point_to_inst(to);
            let from_offset = inst_offsets[from_inst_index];
            let to_offset = if to_inst_index == inst_offsets.len() {
                func_body_len
            } else {
                inst_offsets[to_inst_index]
            };

            if from_offset == NO_INST_OFFSET
                || to_offset == NO_INST_OFFSET
                || from_offset == to_offset
            {
                continue;
            }

            let loc = if let Some(preg) = alloc.as_reg() {
                LabelValueLoc::Reg(Reg::from(preg))
            } else {
                let slot = alloc.as_stack().unwrap();
                let slot_offset = self.abi.get_spillslot_offset(slot);
                let slot_base_to_caller_sp_offset = self.abi.slot_base_to_caller_sp_offset();
                let caller_sp_to_cfa_offset =
                    crate::isa::unwind::systemv::caller_sp_to_cfa_offset();
                let cfa_to_sp_offset =
                    -((slot_base_to_caller_sp_offset + caller_sp_to_cfa_offset) as i64);
                LabelValueLoc::CFAOffset(cfa_to_sp_offset + slot_offset)
            };

            if let Some(last_loc_range) = ranges.last_mut() {
                if last_loc_range.loc == loc && last_loc_range.end == from_offset {
                    trace!(
                        "Extending debug range for {:?} in {:?} to Inst {} ({})",
                        label,
                        loc,
                        to_inst_index,
                        to_offset
                    );
                    last_loc_range.end = to_offset;
                    continue;
                }
            }

            trace!(
                "Recording debug range for {:?} in {:?}: [Inst {}..Inst {}) [{}..{})",
                label,
                loc,
                from_inst_index,
                to_inst_index,
                from_offset,
                to_offset
            );

            ranges.push(ValueLocRange {
                loc,
                start: from_offset,
                end: to_offset,
            });
        }

        value_labels_ranges
    }

    fn log_value_labels_ranges(&self, regalloc: &regalloc2::Output, inst_offsets: &[CodeOffset]) {
        debug_assert!(trace_log_enabled!());

        let mut labels = vec![];
        for &(label, _, _, _) in &regalloc.debug_locations {
            if Some(&label) == labels.last() {
                continue;
            }
            labels.push(label);
        }

        let mut vregs = vec![];
        for &(vreg, start, end, label) in &self.debug_value_labels {
            if matches!(labels.binary_search(&label), Ok(_)) {
                vregs.push((label, start, end, vreg));
            }
        }
        vregs.sort_unstable_by(
            |(l_label, l_start, _, _), (r_label, r_start, _, _)| match l_label.cmp(r_label) {
                Ordering::Equal => l_start.cmp(r_start),
                cmp => cmp,
            },
        );

        #[derive(PartialEq)]
        enum Mode {
            Measure,
            Emit,
        }
        #[derive(PartialEq)]
        enum Row {
            Head,
            Line,
            Inst(usize, usize),
        }

        let mut widths = vec![0; 3 + 2 * labels.len()];
        let mut row = String::new();
        let mut output_row = |row_kind: Row, mode: Mode| {
            let mut column_index = 0;
            row.clear();

            macro_rules! output_cell_impl {
                ($fill:literal, $span:literal, $($cell_fmt:tt)*) => {
                    let column_start = row.len();
                    {
                        row.push('|');
                        write!(row, $($cell_fmt)*).unwrap();
                    }

                    let next_column_index = column_index + $span;
                    let expected_width: usize = widths[column_index..next_column_index].iter().sum();
                    if mode == Mode::Measure {
                        let actual_width = row.len() - column_start;
                        if actual_width > expected_width {
                            widths[next_column_index - 1] += actual_width - expected_width;
                        }
                    } else {
                        let column_end = column_start + expected_width;
                        while row.len() != column_end {
                            row.push($fill);
                        }
                    }
                    column_index = next_column_index;
                };
            }
            macro_rules! output_cell {
                ($($cell_fmt:tt)*) => {
                    output_cell_impl!(' ', 1, $($cell_fmt)*);
                };
            }

            match row_kind {
                Row::Head => {
                    output_cell!("BB");
                    output_cell!("Inst");
                    output_cell!("IP");
                    for label in &labels {
                        output_cell_impl!(' ', 2, "{:?}", ValueLabel::from_u32(*label));
                    }
                }
                Row::Line => {
                    debug_assert!(mode == Mode::Emit);
                    for _ in 0..3 {
                        output_cell_impl!('-', 1, "");
                    }
                    for _ in &labels {
                        output_cell_impl!('-', 2, "");
                    }
                }
                Row::Inst(block_index, inst_index) => {
                    debug_assert!(inst_index < self.num_insts());
                    if self.block_ranges.get(block_index).start == inst_index {
                        output_cell!("B{}", block_index);
                    } else {
                        output_cell!("");
                    }
                    output_cell!("Inst {inst_index} ");
                    output_cell!("{} ", inst_offsets[inst_index]);

                    for label in &labels {
                        use regalloc2::Inst;
                        let vreg_cmp = |inst: usize,
                                        vreg_label: &u32,
                                        range_start: &Inst,
                                        range_end: &Inst| {
                            match vreg_label.cmp(&label) {
                                Ordering::Equal => {
                                    if range_end.index() <= inst {
                                        Ordering::Less
                                    } else if range_start.index() > inst {
                                        Ordering::Greater
                                    } else {
                                        Ordering::Equal
                                    }
                                }
                                cmp => cmp,
                            }
                        };
                        let vreg_index =
                            vregs.binary_search_by(|(l, s, e, _)| vreg_cmp(inst_index, l, s, e));
                        if let Ok(vreg_index) = vreg_index {
                            let mut prev_vreg = None;
                            if inst_index > 0 {
                                let prev_vreg_index = vregs.binary_search_by(|(l, s, e, _)| {
                                    vreg_cmp(inst_index - 1, l, s, e)
                                });
                                if let Ok(prev_vreg_index) = prev_vreg_index {
                                    prev_vreg = Some(vregs[prev_vreg_index].3);
                                }
                            }

                            let vreg = vregs[vreg_index].3;
                            if Some(vreg) == prev_vreg {
                                output_cell!("*");
                            } else {
                                output_cell!("{}", vreg);
                            }
                        } else {
                            output_cell!("");
                        }

                        let inst_prog_point = ProgPoint::before(Inst::new(inst_index));
                        let range_index = regalloc.debug_locations.binary_search_by(
                            |(range_label, range_start, range_end, _)| match range_label.cmp(label)
                            {
                                Ordering::Equal => {
                                    if *range_end <= inst_prog_point {
                                        Ordering::Less
                                    } else if *range_start > inst_prog_point {
                                        Ordering::Greater
                                    } else {
                                        Ordering::Equal
                                    }
                                }
                                cmp => cmp,
                            },
                        );
                        if let Ok(range_index) = range_index {
                            if let Some(reg) = regalloc.debug_locations[range_index].3.as_reg() {
                                output_cell!("{:?}", Reg::from(reg));
                            } else {
                                output_cell!("Stk");
                            }
                        } else {
                            output_cell!("");
                        }
                    }
                }
            }
            row.push('|');

            if mode == Mode::Emit {
                trace!("{}", row.as_str());
            }
        };

        for block_index in 0..self.num_blocks() {
            for inst_index in self.block_ranges.get(block_index) {
                output_row(Row::Inst(block_index, inst_index), Mode::Measure);
            }
        }
        output_row(Row::Head, Mode::Measure);

        output_row(Row::Head, Mode::Emit);
        output_row(Row::Line, Mode::Emit);
        for block_index in 0..self.num_blocks() {
            for inst_index in self.block_ranges.get(block_index) {
                output_row(Row::Inst(block_index, inst_index), Mode::Emit);
            }
        }
    }

    pub fn bindex_to_bb(&self, block: BlockIndex) -> Option<ir::Block> {
        self.block_order.lowered_order()[block.index()].orig_block()
    }

    pub fn vreg_type(&self, vreg: VReg) -> Type {
        self.vreg_types[vreg.vreg()]
    }

    pub fn vreg_fact(&self, vreg: VReg) -> Option<&Fact> {
        self.facts[vreg.vreg()].as_ref()
    }

    pub fn set_vreg_fact(&mut self, vreg: VReg, fact: Fact) {
        trace!("set fact on {}: {:?}", vreg, fact);
        self.facts[vreg.vreg()] = Some(fact);
    }

    pub fn inst_defines_facts(&self, inst: InsnIndex) -> bool {
        self.inst_operands(inst)
            .iter()
            .filter(|o| o.kind() == OperandKind::Def)
            .map(|o| o.vreg())
            .any(|vreg| self.facts[vreg.vreg()].is_some())
    }

    pub fn get_user_stack_map(&self, inst: InsnIndex) -> Option<&ir::UserStackMap> {
        let index = inst.to_backwards_insn_index(self.num_insts());
        self.user_stack_maps.get(&index)
    }
}

impl<I: VCodeInst> std::ops::Index<InsnIndex> for VCode<I> {
    type Output = I;
    fn index(&self, idx: InsnIndex) -> &Self::Output {
        &self.insts[idx.index()]
    }
}

impl<I: VCodeInst> RegallocFunction for VCode<I> {
    fn num_insts(&self) -> usize {
        self.insts.len()
    }

    fn num_blocks(&self) -> usize {
        self.block_ranges.len()
    }

    fn entry_block(&self) -> BlockIndex {
        self.entry
    }

    fn block_insns(&self, block: BlockIndex) -> InstRange {
        let range = self.block_ranges.get(block.index());
        InstRange::new(InsnIndex::new(range.start), InsnIndex::new(range.end))
    }

    fn block_succs(&self, block: BlockIndex) -> &[BlockIndex] {
        let range = self.block_succ_range.get(block.index());
        &self.block_succs[range]
    }

    fn block_preds(&self, block: BlockIndex) -> &[BlockIndex] {
        let range = self.block_pred_range.get(block.index());
        &self.block_preds[range]
    }

    fn block_params(&self, block: BlockIndex) -> &[VReg] {
        if block == self.entry {
            return &[];
        }

        let range = self.block_params_range.get(block.index());
        &self.block_params[range]
    }

    fn branch_blockparams(&self, block: BlockIndex, _insn: InsnIndex, succ_idx: usize) -> &[VReg] {
        let succ_range = self.branch_block_arg_succ_range.get(block.index());
        debug_assert!(succ_idx < succ_range.len());
        let branch_block_args = self.branch_block_arg_range.get(succ_range.start + succ_idx);
        &self.branch_block_args[branch_block_args]
    }

    fn is_ret(&self, insn: InsnIndex) -> bool {
        match self.insts[insn.index()].is_term() {
            MachTerminator::None => self.insts[insn.index()].is_trap(),
            MachTerminator::Ret | MachTerminator::RetCall => true,
            MachTerminator::Branch => false,
        }
    }

    fn is_branch(&self, insn: InsnIndex) -> bool {
        match self.insts[insn.index()].is_term() {
            MachTerminator::Branch => true,
            _ => false,
        }
    }

    fn inst_operands(&self, insn: InsnIndex) -> &[Operand] {
        let range = self.operand_ranges.get(insn.index());
        &self.operands[range]
    }

    fn inst_clobbers(&self, insn: InsnIndex) -> PRegSet {
        self.clobbers.get(&insn).cloned().unwrap_or_default()
    }

    fn num_vregs(&self) -> usize {
        self.vreg_types.len()
    }

    fn debug_value_labels(&self) -> &[(VReg, InsnIndex, InsnIndex, u32)] {
        &self.debug_value_labels
    }

    fn spillslot_size(&self, regclass: RegClass) -> usize {
        self.abi.get_spillslot_size(regclass) as usize
    }

    fn allow_multiple_vreg_defs(&self) -> bool {
        true
    }
}

impl<I: VCodeInst> Debug for VRegAllocator<I> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "VRegAllocator {{")?;

        let mut alias_keys = self.vreg_aliases.keys().cloned().collect::<Vec<_>>();
        alias_keys.sort_unstable();
        for key in alias_keys {
            let dest = self.vreg_aliases.get(&key).unwrap();
            writeln!(f, " {:?} := {:?}", Reg::from(key), Reg::from(*dest))?;
        }

        for (vreg, fact) in self.facts.iter().enumerate() {
            if let Some(fact) = fact {
                writeln!(f, " v{vreg} ! {fact}")?;
            }
        }

        writeln!(f, "}}")
    }
}

impl<I: VCodeInst> fmt::Debug for VCode<I> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "VCode {{")?;
        writeln!(f, " Entry block: {}", self.entry.index())?;

        let mut state = Default::default();

        for block in 0..self.num_blocks() {
            let block = BlockIndex::new(block);
            writeln!(
                f,
                "Block {}({:?}):",
                block.index(),
                self.block_params(block)
            )?;
            if let Some(bb) = self.bindex_to_bb(block) {
                writeln!(f, " (original IR block: {bb})")?;
            }
            for (succ_idx, succ) in self.block_succs(block).iter().enumerate() {
                writeln!(
                    f,
                    " (successor: Block {}({:?}))",
                    succ.index(),
                    self.branch_blockparams(block, InsnIndex::new(0), succ_idx)
                )?;
            }
            for inst in self.block_ranges.get(block.index()) {
                writeln!(
                    f,
                    " Inst {}: {}",
                    inst,
                    self.insts[inst].pretty_print_inst(&mut state)
                )?;
                if !self.operands.is_empty() {
                    for operand in self.inst_operands(InsnIndex::new(inst)) {
                        if operand.kind() == OperandKind::Def {
                            if let Some(fact) = &self.facts[operand.vreg().vreg()] {
                                writeln!(f, " v{} ! {}", operand.vreg().vreg(), fact)?;
                            }
                        }
                    }
                }
                if let Some(user_stack_map) = self.get_user_stack_map(InsnIndex::new(inst)) {
                    writeln!(f, " {user_stack_map:?}")?;
                }
            }
        }

        writeln!(f, "}}")?;
        Ok(())
    }
}

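/// Allocator of virtual registers during lowering. Tracks each vreg's type,
/// its optional proof-carrying-code fact, and aliases from one vreg to
/// another, and can record a deferred allocation error to be reported once
/// lowering finishes.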
pub struct VRegAllocator<I> {
    vreg_types: Vec<Type>,
    vreg_aliases: FxHashMap<regalloc2::VReg, regalloc2::VReg>,
    deferred_error: Option<CodegenError>,
    facts: Vec<Option<Fact>>,
    _inst: core::marker::PhantomData<I>,
}

impl<I: VCodeInst> VRegAllocator<I> {
    pub fn with_capacity(capacity: usize) -> Self {
        let capacity = first_user_vreg_index() + capacity;
        let mut vreg_types = Vec::with_capacity(capacity);
        vreg_types.resize(first_user_vreg_index(), types::INVALID);
        Self {
            vreg_types,
            vreg_aliases: FxHashMap::with_capacity_and_hasher(capacity, Default::default()),
            deferred_error: None,
            facts: Vec::with_capacity(capacity),
            _inst: core::marker::PhantomData::default(),
        }
    }

    pub fn alloc(&mut self, ty: Type) -> CodegenResult<ValueRegs<Reg>> {
        if self.deferred_error.is_some() {
            return Err(CodegenError::CodeTooLarge);
        }
        let v = self.vreg_types.len();
        let (regclasses, tys) = I::rc_for_type(ty)?;
        if v + regclasses.len() >= VReg::MAX {
            return Err(CodegenError::CodeTooLarge);
        }

        let regs: ValueRegs<Reg> = match regclasses {
            &[rc0] => ValueRegs::one(VReg::new(v, rc0).into()),
            &[rc0, rc1] => ValueRegs::two(VReg::new(v, rc0).into(), VReg::new(v + 1, rc1).into()),
            _ => panic!("Value must reside in 1 or 2 registers"),
        };
        for (&reg_ty, &reg) in tys.iter().zip(regs.regs().iter()) {
            let vreg = reg.to_virtual_reg().unwrap();
            debug_assert_eq!(self.vreg_types.len(), vreg.index());
            self.vreg_types.push(reg_ty);
        }

        self.facts.resize(self.vreg_types.len(), None);

        Ok(regs)
    }

    pub fn alloc_with_deferred_error(&mut self, ty: Type) -> ValueRegs<Reg> {
        match self.alloc(ty) {
            Ok(x) => x,
            Err(e) => {
                self.deferred_error = Some(e);
                self.bogus_for_deferred_error(ty)
            }
        }
    }

    pub fn take_deferred_error(&mut self) -> Option<CodegenError> {
        self.deferred_error.take()
    }

    fn bogus_for_deferred_error(&self, ty: Type) -> ValueRegs<Reg> {
        let (regclasses, _tys) = I::rc_for_type(ty).expect("must have valid type");
        match regclasses {
            &[rc0] => ValueRegs::one(VReg::new(0, rc0).into()),
            &[rc0, rc1] => ValueRegs::two(VReg::new(0, rc0).into(), VReg::new(1, rc1).into()),
            _ => panic!("Value must reside in 1 or 2 registers"),
        }
    }

    pub fn set_vreg_alias(&mut self, from: Reg, to: Reg) {
        let from = from.into();
        let resolved_to = self.resolve_vreg_alias(to.into());
        assert_ne!(resolved_to, from);

        if let Some(fact) = self.facts[from.vreg()].take() {
            self.set_fact(resolved_to, fact);
        }

        let old_alias = self.vreg_aliases.insert(from, resolved_to);
        debug_assert_eq!(old_alias, None);
    }

    fn resolve_vreg_alias(&self, mut vreg: regalloc2::VReg) -> regalloc2::VReg {
        while let Some(to) = self.vreg_aliases.get(&vreg) {
            vreg = *to;
        }
        vreg
    }

    #[inline]
    fn debug_assert_no_vreg_aliases(&self, mut list: impl Iterator<Item = VReg>) {
        debug_assert!(list.all(|vreg| !self.vreg_aliases.contains_key(&vreg)));
    }

    fn set_fact(&mut self, vreg: regalloc2::VReg, fact: Fact) -> Option<Fact> {
        trace!("vreg {:?} has fact: {:?}", vreg, fact);
        debug_assert!(!self.vreg_aliases.contains_key(&vreg));
        self.facts[vreg.vreg()].replace(fact)
    }

    pub fn set_fact_if_missing(&mut self, vreg: VirtualReg, fact: Fact) {
        let vreg = self.resolve_vreg_alias(vreg.into());
        if self.facts[vreg.vreg()].is_none() {
            self.set_fact(vreg, fact);
        }
    }

    pub fn alloc_with_maybe_fact(
        &mut self,
        ty: Type,
        fact: Option<Fact>,
    ) -> CodegenResult<ValueRegs<Reg>> {
        let result = self.alloc(ty)?;

        assert!(result.len() == 1 || fact.is_none());
        if let Some(fact) = fact {
            self.set_fact(result.regs()[0].into(), fact);
        }

        Ok(result)
    }
}

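/// A pool of constants gathered during VCode generation. Deduplicates
/// constants that come from the IR constant pool, from well-known static
/// data, or from u64 immediates, so each distinct constant is stored only
/// once.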
#[derive(Default)]
pub struct VCodeConstants {
    constants: PrimaryMap<VCodeConstant, VCodeConstantData>,
    pool_uses: HashMap<Constant, VCodeConstant>,
    well_known_uses: HashMap<*const [u8], VCodeConstant>,
    u64s: HashMap<[u8; 8], VCodeConstant>,
}

impl VCodeConstants {
    pub fn with_capacity(expected_num_constants: usize) -> Self {
        Self {
            constants: PrimaryMap::with_capacity(expected_num_constants),
            pool_uses: HashMap::with_capacity(expected_num_constants),
            well_known_uses: HashMap::new(),
            u64s: HashMap::new(),
        }
    }

    pub fn insert(&mut self, data: VCodeConstantData) -> VCodeConstant {
        match data {
            VCodeConstantData::Generated(_) => self.constants.push(data),
            VCodeConstantData::Pool(constant, _) => match self.pool_uses.get(&constant) {
                None => {
                    let vcode_constant = self.constants.push(data);
                    self.pool_uses.insert(constant, vcode_constant);
                    vcode_constant
                }
                Some(&vcode_constant) => vcode_constant,
            },
            VCodeConstantData::WellKnown(data_ref) => {
                match self.well_known_uses.entry(data_ref as *const [u8]) {
                    Entry::Vacant(v) => {
                        let vcode_constant = self.constants.push(data);
                        v.insert(vcode_constant);
                        vcode_constant
                    }
                    Entry::Occupied(o) => *o.get(),
                }
            }
            VCodeConstantData::U64(value) => match self.u64s.entry(value) {
                Entry::Vacant(v) => {
                    let vcode_constant = self.constants.push(data);
                    v.insert(vcode_constant);
                    vcode_constant
                }
                Entry::Occupied(o) => *o.get(),
            },
        }
    }

    pub fn len(&self) -> usize {
        self.constants.len()
    }

    pub fn keys(&self) -> Keys<VCodeConstant> {
        self.constants.keys()
    }

    pub fn iter(&self) -> impl Iterator<Item = (VCodeConstant, &VCodeConstantData)> {
        self.constants.iter()
    }

    pub fn get(&self, c: VCodeConstant) -> &VCodeConstantData {
        &self.constants[c]
    }

    pub fn pool_uses(&self, constant: &VCodeConstantData) -> bool {
        match constant {
            VCodeConstantData::Pool(c, _) => self.pool_uses.contains_key(c),
            _ => false,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct VCodeConstant(u32);
entity_impl!(VCodeConstant);

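/// The source of a constant stored in `VCodeConstants`: an entry from the IR
/// constant pool, a well-known static byte slice, data generated during
/// lowering, or a raw u64 value stored as its 8 bytes.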
pub enum VCodeConstantData {
    Pool(Constant, ConstantData),
    WellKnown(&'static [u8]),
    Generated(ConstantData),
    U64([u8; 8]),
}

impl VCodeConstantData {
    pub fn as_slice(&self) -> &[u8] {
        match self {
            VCodeConstantData::Pool(_, d) | VCodeConstantData::Generated(d) => d.as_slice(),
            VCodeConstantData::WellKnown(d) => d,
            VCodeConstantData::U64(value) => &value[..],
        }
    }

    pub fn alignment(&self) -> u32 {
        if self.as_slice().len() <= 8 {
            8
        } else {
            16
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use std::mem::size_of;

    #[test]
    fn size_of_constant_structs() {
        assert_eq!(size_of::<Constant>(), 4);
        assert_eq!(size_of::<VCodeConstant>(), 4);
        assert_eq!(size_of::<ConstantData>(), 3 * size_of::<usize>());
        assert_eq!(size_of::<VCodeConstantData>(), 4 * size_of::<usize>());
        assert_eq!(
            size_of::<PrimaryMap<VCodeConstant, VCodeConstantData>>(),
            3 * size_of::<usize>()
        );
    }
}