//! The `VCode` container: lowered, machine-specific code in a form suitable
//! for register allocation and final binary emission.

use crate::CodegenError;
use crate::ir::pcc::*;
use crate::ir::{self, Constant, ConstantData, ValueLabel, types};
use crate::ranges::Ranges;
use crate::timing;
use crate::trace;
use crate::{LabelValueLoc, ValueLocRange};
use crate::{machinst::*, trace_log_enabled};
use regalloc2::{
    Edit, Function as RegallocFunction, InstOrEdit, InstPosition, InstRange, Operand,
    OperandConstraint, OperandKind, PRegSet, ProgPoint, RegClass,
};
use rustc_hash::FxHashMap;

use core::cmp::Ordering;
use core::fmt::{self, Write};
use core::mem::take;
use core::ops::Range;
use cranelift_entity::{Keys, entity_impl};
use std::collections::HashMap;
use std::collections::hash_map::Entry;

/// Index referring to an instruction in VCode.
pub type InsnIndex = regalloc2::Inst;

/// Extension trait to convert a forward instruction index into a
/// `BackwardsInsnIndex`.
trait ToBackwardsInsnIndex {
    fn to_backwards_insn_index(&self, num_insts: usize) -> BackwardsInsnIndex;
}

impl ToBackwardsInsnIndex for InsnIndex {
    fn to_backwards_insn_index(&self, num_insts: usize) -> BackwardsInsnIndex {
        BackwardsInsnIndex::new(num_insts - self.index() - 1)
    }
}

/// An index referring to an instruction in the VCode while it is still in
/// backwards (construction) order.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "enable-serde",
    derive(::serde::Serialize, ::serde::Deserialize)
)]
pub struct BackwardsInsnIndex(InsnIndex);

impl BackwardsInsnIndex {
    pub fn new(i: usize) -> Self {
        BackwardsInsnIndex(InsnIndex::new(i))
    }
}

/// Index referring to a basic block in VCode.
pub type BlockIndex = regalloc2::Block;

/// VCodeInst bundles all requirements for an instruction to live in VCode: it
/// must be a lowered machine instruction that also knows how to emit itself.
pub trait VCodeInst: MachInst + MachInstEmit {}
impl<I: MachInst + MachInstEmit> VCodeInst for I {}
79
/// A function in "VCode" (virtualized-register code) form, after lowering.
/// This is essentially a standard CFG of basic blocks, where each basic block
/// consists of lowered machine instructions produced by the machine backend.
pub struct VCode<I: VCodeInst> {
    /// IR-level types of the virtual registers.
    vreg_types: Vec<Type>,

    /// Lowered machine instructions, in final (forward) order.
    insts: Vec<I>,

    /// Sparse map from instruction (indexed from the end of the function) to
    /// its user stack map, for safepoints.
    user_stack_maps: FxHashMap<BackwardsInsnIndex, ir::UserStackMap>,

    /// Sparse map from instruction (indexed from the end of the function) to a
    /// range of debug tags in `debug_tag_pool`.
    debug_tags: FxHashMap<BackwardsInsnIndex, Range<u32>>,

    /// Pool of debug tags referenced by `debug_tags`.
    debug_tag_pool: Vec<ir::DebugTag>,

    /// All operands of all instructions, flattened into one array for
    /// efficient access by the register allocator.
    operands: Vec<Operand>,

    /// For each instruction, the range of its operands within `operands`.
    operand_ranges: Ranges,

    /// Sparse map from instruction to its clobbered physical registers.
    clobbers: FxHashMap<InsnIndex, PRegSet>,

    /// Source location for each instruction.
    srclocs: Vec<RelSourceLoc>,

    /// Entry block.
    entry: BlockIndex,

    /// For each block, the range of its instructions within `insts`.
    block_ranges: Ranges,

    /// For each block, the range of its successors within `block_succs`.
    block_succ_range: Ranges,

    /// Block successor lists, concatenated into one vector.
    block_succs: Vec<regalloc2::Block>,

    /// For each block, the range of its predecessors within `block_preds`.
    block_pred_range: Ranges,

    /// Block predecessor lists, concatenated into one vector.
    block_preds: Vec<regalloc2::Block>,

    /// For each block, the range of its parameters within `block_params`.
    block_params_range: Ranges,

    /// Block parameter lists, concatenated into one vector.
    block_params: Vec<regalloc2::VReg>,

    /// Outgoing block arguments on branch instructions, concatenated into one
    /// vector.
    branch_block_args: Vec<regalloc2::VReg>,

    /// For each branch successor, the range of its arguments within
    /// `branch_block_args`.
    branch_block_arg_range: Ranges,

    /// For each block, the range of entries in `branch_block_arg_range`
    /// covering all of its successors.
    branch_block_arg_succ_range: Ranges,

    /// Block-lowering order information.
    block_order: BlockLoweringOrder,

    /// ABI object for the callee (this function).
    pub(crate) abi: Callee<I::ABIMachineSpec>,

    /// Constant information used during code emission.
    emit_info: I::Info,

    /// Constants referenced by instructions in this function.
    pub(crate) constants: VCodeConstants,

    /// Value labels for debuginfo attached to vregs, with their ranges.
    debug_value_labels: Vec<(VReg, InsnIndex, InsnIndex, u32)>,

    /// ABI signatures referenced by this function.
    pub(crate) sigs: SigSet,

    /// Proof-carrying-code facts attached to vregs, if any.
    facts: Vec<Option<Fact>>,

    /// Minimum function alignment, as log2 of bytes.
    log2_min_function_alignment: u8,
}
212
/// The result of `VCode::emit`: the finished machine code for one function,
/// plus side tables produced during emission.
pub struct EmitResult {
    /// The machine code itself, with relocations, traps, and other records.
    pub buffer: MachBufferFinalized<Stencil>,

    /// Offset of each basic block; filled in only if the
    /// `machine_code_cfg_info` flag is enabled.
    pub bb_offsets: Vec<CodeOffset>,

    /// Basic-block edges as (from-offset, to-offset) pairs; filled in only if
    /// the `machine_code_cfg_info` flag is enabled.
    pub bb_edges: Vec<(CodeOffset, CodeOffset)>,

    /// The pretty-printed disassembly, if requested.
    pub disasm: Option<String>,

    /// Value-label ranges for debuginfo: where each labeled value lives over
    /// which code-offset ranges.
    pub value_labels_ranges: ValueLabelsRanges,
}
237
/// A builder for a VCode function body.
///
/// Lowering emits instructions bottom-up within each basic block, so the
/// builder accepts code in reverse order (`VCodeBuildDirection::Backward`)
/// and reverses everything into final forward order in `build()`.
pub struct VCodeBuilder<I: VCodeInst> {
    /// In-progress VCode.
    pub(crate) vcode: VCode<I>,

    /// In what direction is the build occurring?
    direction: VCodeBuildDirection,

    /// Debug-info label tracking: for each label, the (start, end, vreg)
    /// tuples recorded by `add_value_label()`; flattened into
    /// `debug_value_labels` when the build completes.
    debug_info: FxHashMap<ValueLabel, Vec<(InsnIndex, InsnIndex, VReg)>>,
}

/// Direction in which a VCodeBuilder builds VCode.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum VCodeBuildDirection {
    /// Backward-build pass: the producer calls `push()` with instructions in
    /// reverse program order within each block.
    Backward,
}
275
276impl<I: VCodeInst> VCodeBuilder<I> {
277 pub fn new(
279 sigs: SigSet,
280 abi: Callee<I::ABIMachineSpec>,
281 emit_info: I::Info,
282 block_order: BlockLoweringOrder,
283 constants: VCodeConstants,
284 direction: VCodeBuildDirection,
285 log2_min_function_alignment: u8,
286 ) -> Self {
287 let vcode = VCode::new(
288 sigs,
289 abi,
290 emit_info,
291 block_order,
292 constants,
293 log2_min_function_alignment,
294 );
295
296 VCodeBuilder {
297 vcode,
298 direction,
299 debug_info: FxHashMap::default(),
300 }
301 }
302
303 pub fn init_retval_area(&mut self, vregs: &mut VRegAllocator<I>) -> CodegenResult<()> {
304 self.vcode.abi.init_retval_area(&self.vcode.sigs, vregs)
305 }
306
307 pub fn abi(&self) -> &Callee<I::ABIMachineSpec> {
309 &self.vcode.abi
310 }
311
312 pub fn abi_mut(&mut self) -> &mut Callee<I::ABIMachineSpec> {
314 &mut self.vcode.abi
315 }
316
317 pub fn sigs(&self) -> &SigSet {
318 &self.vcode.sigs
319 }
320
321 pub fn sigs_mut(&mut self) -> &mut SigSet {
322 &mut self.vcode.sigs
323 }
324
325 pub fn block_order(&self) -> &BlockLoweringOrder {
327 &self.vcode.block_order
328 }
329
330 pub fn set_entry(&mut self, block: BlockIndex) {
332 self.vcode.entry = block;
333 }
334
335 pub fn end_bb(&mut self) {
338 let end_idx = self.vcode.insts.len();
339 self.vcode.block_ranges.push_end(end_idx);
341 let succ_end = self.vcode.block_succs.len();
343 self.vcode.block_succ_range.push_end(succ_end);
344 let block_params_end = self.vcode.block_params.len();
346 self.vcode.block_params_range.push_end(block_params_end);
347 let branch_block_arg_succ_end = self.vcode.branch_block_arg_range.len();
349 self.vcode
350 .branch_block_arg_succ_range
351 .push_end(branch_block_arg_succ_end);
352 }
353
354 pub fn add_block_param(&mut self, param: VirtualReg) {
355 self.vcode.block_params.push(param.into());
356 }
357
358 fn add_branch_args_for_succ(&mut self, args: &[Reg]) {
359 self.vcode
360 .branch_block_args
361 .extend(args.iter().map(|&arg| VReg::from(arg)));
362 let end = self.vcode.branch_block_args.len();
363 self.vcode.branch_block_arg_range.push_end(end);
364 }
365
    pub fn push(&mut self, insn: I, loc: RelSourceLoc) {
        assert!(!insn.is_low_level_branch()); // These are not meant to be in VCode.
        self.vcode.insts.push(insn);
        self.vcode.srclocs.push(loc);
    }
373
374 pub fn add_succ(&mut self, block: BlockIndex, args: &[Reg]) {
376 self.vcode.block_succs.push(block);
377 self.add_branch_args_for_succ(args);
378 }
379
380 pub fn add_value_label(&mut self, reg: Reg, label: ValueLabel) {
382 let next_inst_index = self.vcode.insts.len();
402 if next_inst_index == 0 {
403 return;
405 }
406 let next_inst = InsnIndex::new(next_inst_index);
407 let labels = self.debug_info.entry(label).or_insert_with(|| vec![]);
408 let last = labels
409 .last()
410 .map(|(_start, end, _vreg)| *end)
411 .unwrap_or(InsnIndex::new(0));
412 labels.push((last, next_inst, reg.into()));
413 }
414
415 pub fn constants(&mut self) -> &mut VCodeConstants {
417 &mut self.vcode.constants
418 }
419
    fn compute_preds_from_succs(&mut self) {
        // Compute predecessor lists with a linear-time counting sort: first
        // count how many times each block appears as a successor.
        let mut starts = vec![0u32; self.vcode.num_blocks()];
        for succ in &self.vcode.block_succs {
            starts[succ.index()] += 1;
        }

        // Turn the counts into exclusive prefix sums: after this loop,
        // `starts[b]` is the index at which block `b`'s predecessor list
        // begins, and `block_pred_range` records each block's end index.
        self.vcode.block_pred_range.reserve(starts.len());
        let mut end = 0;
        for count in starts.iter_mut() {
            let start = end;
            end += *count;
            *count = start;
            self.vcode.block_pred_range.push_end(end as usize);
        }
        let end = end as usize;
        debug_assert_eq!(end, self.vcode.block_succs.len());

        // Walk the successor lists again, placing each predecessor into its
        // slot and bumping the per-block cursor.
        self.vcode.block_preds.resize(end, BlockIndex::invalid());
        for (pred, range) in self.vcode.block_succ_range.iter() {
            let pred = BlockIndex::new(pred);
            for succ in &self.vcode.block_succs[range] {
                let pos = &mut starts[succ.index()];
                self.vcode.block_preds[*pos as usize] = pred;
                *pos += 1;
            }
        }
        debug_assert!(self.vcode.block_preds.iter().all(|pred| pred.is_valid()));
    }
458
    fn reverse_and_finalize(&mut self, vregs: &VRegAllocator<I>) {
        let n_insts = self.vcode.insts.len();
        if n_insts == 0 {
            return;
        }

        // Reverse the per-block instruction ranges and the various per-block
        // index tables so they refer to the final (forward) instruction
        // order, then reverse the instruction and source-location arrays
        // themselves.
        self.vcode.block_ranges.reverse_index();
        self.vcode.block_ranges.reverse_target(n_insts);
        self.vcode.block_params_range.reverse_index();
        self.vcode.block_succ_range.reverse_index();
        self.vcode.insts.reverse();
        self.vcode.srclocs.reverse();
        self.vcode.branch_block_arg_succ_range.reverse_index();

        // Translate an instruction index recorded during the backward build
        // into its forward-order equivalent. These indices denote range
        // endpoints, so exclusive ends become inclusive starts and vice
        // versa: `fwd_start`/`fwd_end` below come from `end`/`start`
        // respectively, and no `- 1` adjustment is applied.
        let translate = |inst: InsnIndex| InsnIndex::new(n_insts - inst.index());

        for (label, tuples) in &self.debug_info {
            for &(start, end, vreg) in tuples {
                let vreg = vregs.resolve_vreg_alias(vreg);
                let fwd_start = translate(end);
                let fwd_end = translate(start);
                self.vcode
                    .debug_value_labels
                    .push((vreg, fwd_start, fwd_end, label.as_u32()));
            }
        }

        // Sort the debug-value labels by vreg, as expected by the register
        // allocator's debug-location machinery.
        self.vcode
            .debug_value_labels
            .sort_unstable_by_key(|(vreg, _, _, _)| *vreg);
    }
516
    fn collect_operands(&mut self, vregs: &VRegAllocator<I>) {
        let allocatable = PRegSet::from(self.vcode.abi.machine_env());
        for (i, insn) in self.vcode.insts.iter_mut().enumerate() {
            // Collect this instruction's operands into the flattened
            // `operands` array, resolving any vreg aliases as we go, and
            // record the operand range for the instruction.
            let mut op_collector =
                OperandCollector::new(&mut self.vcode.operands, allocatable, |vreg| {
                    vregs.resolve_vreg_alias(vreg)
                });
            insn.get_operands(&mut op_collector);
            let (ops, clobbers) = op_collector.finish();
            self.vcode.operand_ranges.push_end(ops);

            if clobbers != PRegSet::default() {
                self.vcode.clobbers.insert(InsnIndex::new(i), clobbers);
            }

            if let Some((dst, src)) = insn.is_move() {
                // Register-to-register moves should only ever involve virtual
                // registers at this point.
                assert!(
                    src.is_virtual(),
                    "the real register {src:?} was used as the source of a move instruction"
                );
                assert!(
                    dst.to_reg().is_virtual(),
                    "the real register {:?} was used as the destination of a move instruction",
                    dst.to_reg()
                );
            }
        }

        // Resolve vreg aliases in outgoing branch arguments as well.
        for arg in &mut self.vcode.branch_block_args {
            let new_arg = vregs.resolve_vreg_alias(*arg);
            trace!("operandcollector: block arg {:?} -> {:?}", arg, new_arg);
            *arg = new_arg;
        }
    }
565
    /// Called once the builder has received all instructions: takes ownership
    /// of the vreg metadata, reverses backward-built code into forward order,
    /// collects operands, and returns the finished `VCode`.
    pub fn build(mut self, mut vregs: VRegAllocator<I>) -> VCode<I> {
        self.vcode.vreg_types = take(&mut vregs.vreg_types);
        self.vcode.facts = take(&mut vregs.facts);

        if self.direction == VCodeBuildDirection::Backward {
            self.reverse_and_finalize(&vregs);
        }
        self.collect_operands(&vregs);

        self.compute_preds_from_succs();
        self.vcode.debug_value_labels.sort_unstable();

        // At this point, nothing in the vcode should mention an unresolved
        // vreg alias: everything should have been resolved above.
        vregs.debug_assert_no_vreg_aliases(self.vcode.operands.iter().map(|op| op.vreg()));
        vregs.debug_assert_no_vreg_aliases(self.vcode.block_params.iter().copied());
        vregs.debug_assert_no_vreg_aliases(self.vcode.branch_block_args.iter().copied());
        vregs.debug_assert_no_vreg_aliases(
            self.vcode.debug_value_labels.iter().map(|&(vreg, ..)| vreg),
        );
        vregs.debug_assert_no_vreg_aliases(
            self.vcode
                .facts
                .iter()
                .zip(&vregs.vreg_types)
                .enumerate()
                .filter(|(_, (fact, _))| fact.is_some())
                .map(|(vreg, (_, &ty))| {
                    let (regclasses, _) = I::rc_for_type(ty).unwrap();
                    VReg::new(vreg, regclasses[0])
                }),
        );

        self.vcode
    }
610
    /// Add a user stack map for the associated instruction.
    pub fn add_user_stack_map(
        &mut self,
        inst: BackwardsInsnIndex,
        entries: &[ir::UserStackMapEntry],
    ) {
        let stack_map = ir::UserStackMap::new(entries, self.vcode.abi.sized_stackslot_offsets());
        let old_entry = self.vcode.user_stack_maps.insert(inst, stack_map);
        debug_assert!(old_entry.is_none());
    }

    /// Attach debug tags to the associated instruction; the tags are stored in
    /// a shared pool with a per-instruction range.
    pub fn add_debug_tags(&mut self, inst: BackwardsInsnIndex, entries: &[ir::DebugTag]) {
        let start = u32::try_from(self.vcode.debug_tag_pool.len()).unwrap();
        self.vcode.debug_tag_pool.extend(entries.iter().cloned());
        let end = u32::try_from(self.vcode.debug_tag_pool.len()).unwrap();
        self.vcode.debug_tags.insert(inst, start..end);
    }
}
630
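// Sentinel offset meaning "no code offset was recorded for this instruction"
// in the per-instruction offset table used for debug info.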
631const NO_INST_OFFSET: CodeOffset = u32::MAX;
632
633impl<I: VCodeInst> VCode<I> {
634 fn new(
636 sigs: SigSet,
637 abi: Callee<I::ABIMachineSpec>,
638 emit_info: I::Info,
639 block_order: BlockLoweringOrder,
640 constants: VCodeConstants,
641 log2_min_function_alignment: u8,
642 ) -> Self {
643 let n_blocks = block_order.lowered_order().len();
644 VCode {
645 sigs,
646 vreg_types: vec![],
647 insts: Vec::with_capacity(10 * n_blocks),
648 user_stack_maps: FxHashMap::default(),
649 debug_tags: FxHashMap::default(),
650 debug_tag_pool: vec![],
651 operands: Vec::with_capacity(30 * n_blocks),
652 operand_ranges: Ranges::with_capacity(10 * n_blocks),
653 clobbers: FxHashMap::default(),
654 srclocs: Vec::with_capacity(10 * n_blocks),
655 entry: BlockIndex::new(0),
656 block_ranges: Ranges::with_capacity(n_blocks),
657 block_succ_range: Ranges::with_capacity(n_blocks),
658 block_succs: Vec::with_capacity(n_blocks),
659 block_pred_range: Ranges::default(),
660 block_preds: Vec::new(),
661 block_params_range: Ranges::with_capacity(n_blocks),
662 block_params: Vec::with_capacity(5 * n_blocks),
663 branch_block_args: Vec::with_capacity(10 * n_blocks),
664 branch_block_arg_range: Ranges::with_capacity(2 * n_blocks),
665 branch_block_arg_succ_range: Ranges::with_capacity(n_blocks),
666 block_order,
667 abi,
668 emit_info,
669 constants,
670 debug_value_labels: vec![],
671 facts: vec![],
672 log2_min_function_alignment,
673 }
674 }
675
676 pub fn num_blocks(&self) -> usize {
679 self.block_ranges.len()
680 }
681
682 pub fn num_insts(&self) -> usize {
684 self.insts.len()
685 }
686
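    /// Compute the set of physical registers written anywhere in the function
    /// body after register allocation (instruction defs, regalloc move edits,
    /// and per-instruction clobber sets), together with the most demanding
    /// kind of call the body contains. Both feed into frame-layout
    /// computation.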
687 fn compute_clobbers_and_function_calls(
688 &self,
689 regalloc: ®alloc2::Output,
690 ) -> (Vec<Writable<RealReg>>, FunctionCalls) {
691 let mut clobbered = PRegSet::default();
692 let mut function_calls = FunctionCalls::None;
693
694 for (_, Edit::Move { to, .. }) in ®alloc.edits {
696 if let Some(preg) = to.as_reg() {
697 clobbered.add(preg);
698 }
699 }
700
701 for (i, range) in self.operand_ranges.iter() {
702 let operands = &self.operands[range.clone()];
703 let allocs = ®alloc.allocs[range];
704 for (operand, alloc) in operands.iter().zip(allocs.iter()) {
705 if operand.kind() == OperandKind::Def {
706 if let Some(preg) = alloc.as_reg() {
707 clobbered.add(preg);
708 }
709 }
710 }
711
712 function_calls.update(self.insts[i].call_type());
713
714 if self.insts[i].is_included_in_clobbers() {
734 if let Some(&inst_clobbered) = self.clobbers.get(&InsnIndex::new(i)) {
735 clobbered.union_from(inst_clobbered);
736 }
737 }
738 }
739
740 let clobbered_regs = clobbered
741 .into_iter()
742 .map(|preg| Writable::from_reg(RealReg::from(preg)))
743 .collect();
744
745 (clobbered_regs, function_calls)
746 }
747
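    /// Emit the instructions to a `MachBuffer`, producing fixed-up code with
    /// relocation, trap, and other records ready for use. Takes the register
    /// allocator's output as input, emits the prologue and epilogue, orders
    /// cold blocks last, and collects the side tables returned in
    /// `EmitResult` (block offsets/edges, disassembly, value-label ranges).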
748 pub fn emit(
756 mut self,
757 regalloc: ®alloc2::Output,
758 want_disasm: bool,
759 flags: &settings::Flags,
760 ctrl_plane: &mut ControlPlane,
761 ) -> EmitResult
762 where
763 I: VCodeInst,
764 {
765 let _tt = timing::vcode_emit();
766 let mut buffer = MachBuffer::new();
767 buffer.set_log2_min_function_alignment(self.log2_min_function_alignment);
768 let mut bb_starts: Vec<Option<CodeOffset>> = vec![];
769
770 buffer.reserve_labels_for_blocks(self.num_blocks());
772
773 buffer.register_constants(&self.constants);
777
778 let mut final_order: SmallVec<[BlockIndex; 16]> = smallvec![];
780 let mut cold_blocks: SmallVec<[BlockIndex; 16]> = smallvec![];
781 for block in 0..self.num_blocks() {
782 let block = BlockIndex::new(block);
783 if self.block_order.is_cold(block) {
784 cold_blocks.push(block);
785 } else {
786 final_order.push(block);
787 }
788 }
789 final_order.extend(cold_blocks.clone());
790
791 let (clobbers, function_calls) = self.compute_clobbers_and_function_calls(regalloc);
800 self.abi.compute_frame_layout(
801 &self.sigs,
802 regalloc.num_spillslots,
803 clobbers,
804 function_calls,
805 );
806
807 let mut cur_srcloc = None;
809 let mut last_offset = None;
810 let mut inst_offsets = vec![];
811 let mut state = I::State::new(&self.abi, std::mem::take(ctrl_plane));
812
813 let mut disasm = String::new();
814
815 if !self.debug_value_labels.is_empty() {
816 inst_offsets.resize(self.insts.len(), NO_INST_OFFSET);
817 }
818
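        // Count how many regalloc edits (moves/spills/reloads) land in each
        // block, so that below we can bound the worst-case size of the next
        // block when deciding whether to emit a constant/veneer island.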
819 let mut ra_edits_per_block: SmallVec<[u32; 64]> = smallvec![];
824 let mut edit_idx = 0;
825 for block in 0..self.num_blocks() {
826 let end_inst = InsnIndex::new(self.block_ranges.get(block).end);
827 let start_edit_idx = edit_idx;
828 while edit_idx < regalloc.edits.len() && regalloc.edits[edit_idx].0.inst() < end_inst {
829 edit_idx += 1;
830 }
831 let end_edit_idx = edit_idx;
832 ra_edits_per_block.push((end_edit_idx - start_edit_idx) as u32);
833 }
834
835 let is_forward_edge_cfi_enabled = self.abi.is_forward_edge_cfi_enabled();
836 let mut bb_padding = match flags.bb_padding_log2_minus_one() {
837 0 => Vec::new(),
838 n => vec![0; 1 << (n - 1)],
839 };
840 let mut total_bb_padding = 0;
841
842 for (block_order_idx, &block) in final_order.iter().enumerate() {
843 trace!("emitting block {:?}", block);
844
845 state.on_new_block();
847
848 let new_offset = I::align_basic_block(buffer.cur_offset());
850 while new_offset > buffer.cur_offset() {
851 let nop = I::gen_nop((new_offset - buffer.cur_offset()) as usize);
853 nop.emit(&mut buffer, &self.emit_info, &mut Default::default());
854 }
855 assert_eq!(buffer.cur_offset(), new_offset);
856
857 let do_emit = |inst: &I,
858 disasm: &mut String,
859 buffer: &mut MachBuffer<I>,
860 state: &mut I::State| {
861 if want_disasm && !inst.is_args() {
862 let mut s = state.clone();
863 writeln!(disasm, " {}", inst.pretty_print_inst(&mut s)).unwrap();
864 }
865 inst.emit(buffer, &self.emit_info, state);
866 };
867
868 if block == self.entry {
870 trace!(" -> entry block");
871 buffer.start_srcloc(Default::default());
872 for inst in &self.abi.gen_prologue() {
873 do_emit(&inst, &mut disasm, &mut buffer, &mut state);
874 }
875 buffer.end_srcloc();
876 }
877
878 buffer.bind_label(MachLabel::from_block(block), state.ctrl_plane_mut());
881
882 if want_disasm {
883 writeln!(&mut disasm, "block{}:", block.index()).unwrap();
884 }
885
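            // Track basic-block starts for CFG metadata. If the buffer has
            // backed up due to MachBuffer branch optimizations, invalidate any
            // previously recorded block starts that are no longer behind the
            // current offset.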
886 if flags.machine_code_cfg_info() {
887 let cur_offset = buffer.cur_offset();
890 if last_offset.is_some() && cur_offset <= last_offset.unwrap() {
891 for i in (0..bb_starts.len()).rev() {
892 if bb_starts[i].is_some() && cur_offset > bb_starts[i].unwrap() {
893 break;
894 }
895 bb_starts[i] = None;
896 }
897 }
898 bb_starts.push(Some(cur_offset));
899 last_offset = Some(cur_offset);
900 }
901
902 if let Some(block_start) = I::gen_block_start(
903 self.block_order.is_indirect_branch_target(block),
904 is_forward_edge_cfi_enabled,
905 ) {
906 do_emit(&block_start, &mut disasm, &mut buffer, &mut state);
907 }
908
909 for inst_or_edit in regalloc.block_insts_and_edits(&self, block) {
910 match inst_or_edit {
911 InstOrEdit::Inst(iix) => {
912 if !self.debug_value_labels.is_empty() {
913 if !self.block_order.is_cold(block) {
926 inst_offsets[iix.index()] = buffer.cur_offset();
927 }
928 }
929
930 let srcloc = self.srclocs[iix.index()];
932 if cur_srcloc != Some(srcloc) {
933 if cur_srcloc.is_some() {
934 buffer.end_srcloc();
935 }
936 buffer.start_srcloc(srcloc);
937 cur_srcloc = Some(srcloc);
938 }
939
940 let stack_map_disasm = if self.insts[iix.index()].is_safepoint() {
943 let (user_stack_map, user_stack_map_disasm) = {
944 let index = iix.to_backwards_insn_index(self.num_insts());
951 let user_stack_map = self.user_stack_maps.remove(&index);
952 let user_stack_map_disasm = if want_disasm {
953 user_stack_map.as_ref().map(|m| format!(" ; {m:?}"))
954 } else {
955 None
956 };
957 (user_stack_map, user_stack_map_disasm)
958 };
959
960 state.pre_safepoint(user_stack_map);
961
962 user_stack_map_disasm
963 } else {
964 None
965 };
966
967 let mut debug_tag_disasm = None;
974 let mut place_debug_tags =
975 |this: &VCode<I>, pos: MachDebugTagPos, buffer: &mut MachBuffer<I>| {
976 let debug_tag_range = {
979 let index = iix.to_backwards_insn_index(this.num_insts());
980 this.debug_tags.get(&index)
981 };
982 if let Some(range) = debug_tag_range {
983 let start = usize::try_from(range.start).unwrap();
984 let end = usize::try_from(range.end).unwrap();
985 let tags = &this.debug_tag_pool[start..end];
986
987 if want_disasm {
988 debug_tag_disasm =
989 Some(format!(" ; ^-- debug @ {pos:?}: {tags:?}"));
990 }
991 buffer.push_debug_tags(pos, tags);
992 }
993 };
994 let debug_tag_pos =
995 if self.insts[iix.index()].call_type() == CallType::Regular {
996 MachDebugTagPos::Post
997 } else {
998 MachDebugTagPos::Pre
999 };
1000
1001 if debug_tag_pos == MachDebugTagPos::Pre {
1002 place_debug_tags(&self, debug_tag_pos, &mut buffer);
1003 }
1004
1005 if self.insts[iix.index()].is_term() == MachTerminator::Ret {
1010 log::trace!("emitting epilogue");
1011 for inst in self.abi.gen_epilogue() {
1012 do_emit(&inst, &mut disasm, &mut buffer, &mut state);
1013 }
1014 } else {
1015 let mut allocs = regalloc.inst_allocs(iix).iter();
1018 self.insts[iix.index()].get_operands(
1019 &mut |reg: &mut Reg, constraint, _kind, _pos| {
1020 let alloc =
1021 allocs.next().expect("enough allocations for all operands");
1022
1023 if let Some(alloc) = alloc.as_reg() {
1024 let alloc: Reg = alloc.into();
1025 if let OperandConstraint::FixedReg(rreg) = constraint {
1026 debug_assert_eq!(Reg::from(rreg), alloc);
1027 }
1028 *reg = alloc;
1029 } else if let Some(alloc) = alloc.as_stack() {
1030 let alloc: Reg = alloc.into();
1031 *reg = alloc;
1032 }
1033 },
1034 );
1035 debug_assert!(allocs.next().is_none());
1036
1037 log::trace!("emitting: {:?}", self.insts[iix.index()]);
1038
1039 do_emit(
1041 &self.insts[iix.index()],
1042 &mut disasm,
1043 &mut buffer,
1044 &mut state,
1045 );
1046
1047 if debug_tag_pos == MachDebugTagPos::Post {
1048 place_debug_tags(&self, debug_tag_pos, &mut buffer);
1049 }
1050
1051 if let Some(stack_map_disasm) = stack_map_disasm {
1052 disasm.push_str(&stack_map_disasm);
1053 disasm.push('\n');
1054 }
1055 if let Some(debug_tag_disasm) = debug_tag_disasm {
1056 disasm.push_str(&debug_tag_disasm);
1057 disasm.push('\n');
1058 }
1059 }
1060 }
1061
1062 InstOrEdit::Edit(Edit::Move { from, to }) => {
1063 match (from.as_reg(), to.as_reg()) {
1066 (Some(from), Some(to)) => {
1067 let from_rreg = Reg::from(from);
1069 let to_rreg = Writable::from_reg(Reg::from(to));
1070 debug_assert_eq!(from.class(), to.class());
1071 let ty = I::canonical_type_for_rc(from.class());
1072 let mv = I::gen_move(to_rreg, from_rreg, ty);
1073 do_emit(&mv, &mut disasm, &mut buffer, &mut state);
1074 }
1075 (Some(from), None) => {
1076 let to = to.as_stack().unwrap();
1078 let from_rreg = RealReg::from(from);
1079 let spill = self.abi.gen_spill(to, from_rreg);
1080 do_emit(&spill, &mut disasm, &mut buffer, &mut state);
1081 }
1082 (None, Some(to)) => {
1083 let from = from.as_stack().unwrap();
1085 let to_rreg = Writable::from_reg(RealReg::from(to));
1086 let reload = self.abi.gen_reload(to_rreg, from);
1087 do_emit(&reload, &mut disasm, &mut buffer, &mut state);
1088 }
1089 (None, None) => {
1090 panic!("regalloc2 should have eliminated stack-to-stack moves!");
1091 }
1092 }
1093 }
1094 }
1095 }
1096
1097 if cur_srcloc.is_some() {
1098 buffer.end_srcloc();
1099 cur_srcloc = None;
1100 }
1101
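            // Decide whether to emit an island now: estimate the worst-case
            // size of the next block (including regalloc-inserted edits) plus
            // any requested padding, and emit an island if the buffer reports
            // one would become necessary within that distance.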
1102 let worst_case_next_bb = if block_order_idx < final_order.len() - 1 {
1106 let next_block = final_order[block_order_idx + 1];
1107 let next_block_range = self.block_ranges.get(next_block.index());
1108 let next_block_size = next_block_range.len() as u32;
1109 let next_block_ra_insertions = ra_edits_per_block[next_block.index()];
1110 I::worst_case_size() * (next_block_size + next_block_ra_insertions)
1111 } else {
1112 0
1113 };
1114 let padding = if bb_padding.is_empty() {
1115 0
1116 } else {
1117 bb_padding.len() as u32 + I::LabelUse::ALIGN - 1
1118 };
1119 if buffer.island_needed(padding + worst_case_next_bb) {
1120 buffer.emit_island(padding + worst_case_next_bb, ctrl_plane);
1121 }
1122
1123 if !bb_padding.is_empty() {
1132 buffer.put_data(&bb_padding);
1133 buffer.align_to(I::LabelUse::ALIGN);
1134 total_bb_padding += bb_padding.len();
1135 if total_bb_padding > (150 << 20) {
1136 bb_padding = Vec::new();
1137 }
1138 }
1139 }
1140
1141 debug_assert!(
1142 self.user_stack_maps.is_empty(),
1143 "any stack maps should have been consumed by instruction emission, still have: {:#?}",
1144 self.user_stack_maps,
1145 );
1146
1147 buffer.optimize_branches(ctrl_plane);
1150
1151 *ctrl_plane = state.take_ctrl_plane();
1153
1154 let func_body_len = buffer.cur_offset();
1155
1156 let mut bb_edges = vec![];
1158 let mut bb_offsets = vec![];
1159 if flags.machine_code_cfg_info() {
1160 for block in 0..self.num_blocks() {
1161 if bb_starts[block].is_none() {
1162 continue;
1164 }
1165 let from = bb_starts[block].unwrap();
1166
1167 bb_offsets.push(from);
1168 let succs = self.block_succs(BlockIndex::new(block));
1170 for &succ in succs.iter() {
1171 let to = buffer.resolve_label_offset(MachLabel::from_block(succ));
1172 bb_edges.push((from, to));
1173 }
1174 }
1175 }
1176
1177 self.monotonize_inst_offsets(&mut inst_offsets[..], func_body_len);
1178 let value_labels_ranges =
1179 self.compute_value_labels_ranges(regalloc, &inst_offsets[..], func_body_len);
1180
1181 buffer.set_frame_layout(self.abi.frame_slot_metadata());
1183
1184 EmitResult {
1185 buffer: buffer.finish(&self.constants, ctrl_plane),
1186 bb_offsets,
1187 bb_edges,
1188 disasm: if want_disasm { Some(disasm) } else { None },
1189 value_labels_ranges,
1190 }
1191 }
1192
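    /// Post-process the per-instruction offset table so that offsets are
    /// monotonically non-decreasing: instructions elided by MachBuffer branch
    /// optimizations can otherwise retain stale offsets that point past their
    /// successors, which would yield invalid value-label ranges.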
1193 fn monotonize_inst_offsets(&self, inst_offsets: &mut [CodeOffset], func_body_len: u32) {
1194 if self.debug_value_labels.is_empty() {
1195 return;
1196 }
1197
1198 let mut next_offset = func_body_len;
1210 for inst_index in (0..(inst_offsets.len() - 1)).rev() {
1211 let inst_offset = inst_offsets[inst_index];
1212
1213 if inst_offset == NO_INST_OFFSET {
1215 continue;
1216 }
1217
1218 if inst_offset > next_offset {
1219 trace!(
1220 "Fixing code offset of the removed Inst {}: {} -> {}",
1221 inst_index, inst_offset, next_offset
1222 );
1223 inst_offsets[inst_index] = next_offset;
1224 continue;
1225 }
1226
1227 next_offset = inst_offset;
1228 }
1229 }
1230
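    /// Convert the register allocator's debug locations (per value label, a
    /// program-point range plus an allocation) into `ValueLabelsRanges`: for
    /// each label, a list of code-offset ranges with the register or
    /// CFA-relative stack location holding the value, merging adjacent ranges
    /// where possible.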
1231 fn compute_value_labels_ranges(
1232 &self,
1233 regalloc: ®alloc2::Output,
1234 inst_offsets: &[CodeOffset],
1235 func_body_len: u32,
1236 ) -> ValueLabelsRanges {
1237 if self.debug_value_labels.is_empty() {
1238 return ValueLabelsRanges::default();
1239 }
1240
1241 if trace_log_enabled!() {
1242 self.log_value_labels_ranges(regalloc, inst_offsets);
1243 }
1244
1245 let mut value_labels_ranges: ValueLabelsRanges = HashMap::new();
1246 for &(label, from, to, alloc) in ®alloc.debug_locations {
1247 let label = ValueLabel::from_u32(label);
1248 let ranges = value_labels_ranges.entry(label).or_insert_with(|| vec![]);
1249 let prog_point_to_inst = |prog_point: ProgPoint| {
1250 let mut inst = prog_point.inst();
1251 if prog_point.pos() == InstPosition::After {
1252 inst = inst.next();
1253 }
1254 inst.index()
1255 };
1256 let from_inst_index = prog_point_to_inst(from);
1257 let to_inst_index = prog_point_to_inst(to);
1258 let from_offset = inst_offsets[from_inst_index];
1259 let to_offset = if to_inst_index == inst_offsets.len() {
1260 func_body_len
1261 } else {
1262 inst_offsets[to_inst_index]
1263 };
1264
1265 if from_offset == NO_INST_OFFSET
1268 || to_offset == NO_INST_OFFSET
1269 || from_offset == to_offset
1270 {
1271 continue;
1272 }
1273
1274 let loc = if let Some(preg) = alloc.as_reg() {
1275 LabelValueLoc::Reg(Reg::from(preg))
1276 } else {
1277 let slot = alloc.as_stack().unwrap();
1278 let slot_offset = self.abi.get_spillslot_offset(slot);
1279 let slot_base_to_caller_sp_offset = self.abi.slot_base_to_caller_sp_offset();
1280 let caller_sp_to_cfa_offset =
1281 crate::isa::unwind::systemv::caller_sp_to_cfa_offset();
1282 let cfa_to_sp_offset =
1284 -((slot_base_to_caller_sp_offset + caller_sp_to_cfa_offset) as i64);
1285 LabelValueLoc::CFAOffset(cfa_to_sp_offset + slot_offset)
1286 };
1287
1288 if let Some(last_loc_range) = ranges.last_mut() {
1291 if last_loc_range.loc == loc && last_loc_range.end == from_offset {
1292 trace!(
1293 "Extending debug range for {:?} in {:?} to Inst {} ({})",
1294 label, loc, to_inst_index, to_offset
1295 );
1296 last_loc_range.end = to_offset;
1297 continue;
1298 }
1299 }
1300
1301 trace!(
1302 "Recording debug range for {:?} in {:?}: [Inst {}..Inst {}) [{}..{})",
1303 label, loc, from_inst_index, to_inst_index, from_offset, to_offset
1304 );
1305
1306 ranges.push(ValueLocRange {
1307 loc,
1308 start: from_offset,
1309 end: to_offset,
1310 });
1311 }
1312
1313 value_labels_ranges
1314 }
1315
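    /// Trace-logging helper: print a table showing, for each instruction, the
    /// vreg and allocated location associated with each value label. Only used
    /// when trace logging is enabled.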
1316 fn log_value_labels_ranges(&self, regalloc: ®alloc2::Output, inst_offsets: &[CodeOffset]) {
1317 debug_assert!(trace_log_enabled!());
1318
1319 let mut labels = vec![];
1322 for &(label, _, _, _) in ®alloc.debug_locations {
1323 if Some(&label) == labels.last() {
1324 continue;
1325 }
1326 labels.push(label);
1327 }
1328
1329 let mut vregs = vec![];
1333 for &(vreg, start, end, label) in &self.debug_value_labels {
1334 if matches!(labels.binary_search(&label), Ok(_)) {
1335 vregs.push((label, start, end, vreg));
1336 }
1337 }
1338 vregs.sort_unstable_by(
1339 |(l_label, l_start, _, _), (r_label, r_start, _, _)| match l_label.cmp(r_label) {
1340 Ordering::Equal => l_start.cmp(r_start),
1341 cmp => cmp,
1342 },
1343 );
1344
1345 #[derive(PartialEq)]
1346 enum Mode {
1347 Measure,
1348 Emit,
1349 }
1350 #[derive(PartialEq)]
1351 enum Row {
1352 Head,
1353 Line,
1354 Inst(usize, usize),
1355 }
1356
1357 let mut widths = vec![0; 3 + 2 * labels.len()];
1358 let mut row = String::new();
1359 let mut output_row = |row_kind: Row, mode: Mode| {
1360 let mut column_index = 0;
1361 row.clear();
1362
1363 macro_rules! output_cell_impl {
1364 ($fill:literal, $span:literal, $($cell_fmt:tt)*) => {
1365 let column_start = row.len();
1366 {
1367 row.push('|');
1368 write!(row, $($cell_fmt)*).unwrap();
1369 }
1370
1371 let next_column_index = column_index + $span;
1372 let expected_width: usize = widths[column_index..next_column_index].iter().sum();
1373 if mode == Mode::Measure {
1374 let actual_width = row.len() - column_start;
1375 if actual_width > expected_width {
1376 widths[next_column_index - 1] += actual_width - expected_width;
1377 }
1378 } else {
1379 let column_end = column_start + expected_width;
1380 while row.len() != column_end {
1381 row.push($fill);
1382 }
1383 }
1384 column_index = next_column_index;
1385 };
1386 }
1387 macro_rules! output_cell {
1388 ($($cell_fmt:tt)*) => {
1389 output_cell_impl!(' ', 1, $($cell_fmt)*);
1390 };
1391 }
1392
1393 match row_kind {
1394 Row::Head => {
1395 output_cell!("BB");
1396 output_cell!("Inst");
1397 output_cell!("IP");
1398 for label in &labels {
1399 output_cell_impl!(' ', 2, "{:?}", ValueLabel::from_u32(*label));
1400 }
1401 }
1402 Row::Line => {
1403 debug_assert!(mode == Mode::Emit);
1404 for _ in 0..3 {
1405 output_cell_impl!('-', 1, "");
1406 }
1407 for _ in &labels {
1408 output_cell_impl!('-', 2, "");
1409 }
1410 }
1411 Row::Inst(block_index, inst_index) => {
1412 debug_assert!(inst_index < self.num_insts());
1413 if self.block_ranges.get(block_index).start == inst_index {
1414 output_cell!("B{}", block_index);
1415 } else {
1416 output_cell!("");
1417 }
1418 output_cell!("Inst {inst_index} ");
1419 output_cell!("{} ", inst_offsets[inst_index]);
1420
1421 for label in &labels {
1422 use regalloc2::Inst;
1424 let vreg_cmp = |inst: usize,
1425 vreg_label: &u32,
1426 range_start: &Inst,
1427 range_end: &Inst| {
1428 match vreg_label.cmp(&label) {
1429 Ordering::Equal => {
1430 if range_end.index() <= inst {
1431 Ordering::Less
1432 } else if range_start.index() > inst {
1433 Ordering::Greater
1434 } else {
1435 Ordering::Equal
1436 }
1437 }
1438 cmp => cmp,
1439 }
1440 };
1441 let vreg_index =
1442 vregs.binary_search_by(|(l, s, e, _)| vreg_cmp(inst_index, l, s, e));
1443 if let Ok(vreg_index) = vreg_index {
1444 let mut prev_vreg = None;
1445 if inst_index > 0 {
1446 let prev_vreg_index = vregs.binary_search_by(|(l, s, e, _)| {
1447 vreg_cmp(inst_index - 1, l, s, e)
1448 });
1449 if let Ok(prev_vreg_index) = prev_vreg_index {
1450 prev_vreg = Some(vregs[prev_vreg_index].3);
1451 }
1452 }
1453
1454 let vreg = vregs[vreg_index].3;
1455 if Some(vreg) == prev_vreg {
1456 output_cell!("*");
1457 } else {
1458 output_cell!("{}", vreg);
1459 }
1460 } else {
1461 output_cell!("");
1462 }
1463
1464 let inst_prog_point = ProgPoint::before(Inst::new(inst_index));
1466 let range_index = regalloc.debug_locations.binary_search_by(
1467 |(range_label, range_start, range_end, _)| match range_label.cmp(label)
1468 {
1469 Ordering::Equal => {
1470 if *range_end <= inst_prog_point {
1471 Ordering::Less
1472 } else if *range_start > inst_prog_point {
1473 Ordering::Greater
1474 } else {
1475 Ordering::Equal
1476 }
1477 }
1478 cmp => cmp,
1479 },
1480 );
1481 if let Ok(range_index) = range_index {
1482 if let Some(reg) = regalloc.debug_locations[range_index].3.as_reg() {
1484 output_cell!("{:?}", Reg::from(reg));
1485 } else {
1486 output_cell!("Stk");
1487 }
1488 } else {
1489 output_cell!("");
1491 }
1492 }
1493 }
1494 }
1495 row.push('|');
1496
1497 if mode == Mode::Emit {
1498 trace!("{}", row.as_str());
1499 }
1500 };
1501
1502 for block_index in 0..self.num_blocks() {
1503 for inst_index in self.block_ranges.get(block_index) {
1504 output_row(Row::Inst(block_index, inst_index), Mode::Measure);
1505 }
1506 }
1507 output_row(Row::Head, Mode::Measure);
1508
1509 output_row(Row::Head, Mode::Emit);
1510 output_row(Row::Line, Mode::Emit);
1511 for block_index in 0..self.num_blocks() {
1512 for inst_index in self.block_ranges.get(block_index) {
1513 output_row(Row::Inst(block_index, inst_index), Mode::Emit);
1514 }
1515 }
1516 }
1517
1518 pub fn bindex_to_bb(&self, block: BlockIndex) -> Option<ir::Block> {
1520 self.block_order.lowered_order()[block.index()].orig_block()
1521 }
1522
1523 pub fn vreg_type(&self, vreg: VReg) -> Type {
1525 self.vreg_types[vreg.vreg()]
1526 }
1527
1528 pub fn vreg_fact(&self, vreg: VReg) -> Option<&Fact> {
1530 self.facts[vreg.vreg()].as_ref()
1531 }
1532
1533 pub fn set_vreg_fact(&mut self, vreg: VReg, fact: Fact) {
1535 trace!("set fact on {}: {:?}", vreg, fact);
1536 self.facts[vreg.vreg()] = Some(fact);
1537 }
1538
1539 pub fn inst_defines_facts(&self, inst: InsnIndex) -> bool {
1541 self.inst_operands(inst)
1542 .iter()
1543 .filter(|o| o.kind() == OperandKind::Def)
1544 .map(|o| o.vreg())
1545 .any(|vreg| self.facts[vreg.vreg()].is_some())
1546 }
1547
1548 pub fn get_user_stack_map(&self, inst: InsnIndex) -> Option<&ir::UserStackMap> {
1550 let index = inst.to_backwards_insn_index(self.num_insts());
1551 self.user_stack_maps.get(&index)
1552 }
1553}
1554
1555impl<I: VCodeInst> std::ops::Index<InsnIndex> for VCode<I> {
1556 type Output = I;
1557 fn index(&self, idx: InsnIndex) -> &Self::Output {
1558 &self.insts[idx.index()]
1559 }
1560}
1561
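// This impl exposes the VCode to regalloc2 as its input "function": the CFG
// (entry, successors, predecessors, block params), per-instruction operands
// and clobbers, and the debug value-label table.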
1562impl<I: VCodeInst> RegallocFunction for VCode<I> {
1563 fn num_insts(&self) -> usize {
1564 self.insts.len()
1565 }
1566
1567 fn num_blocks(&self) -> usize {
1568 self.block_ranges.len()
1569 }
1570
1571 fn entry_block(&self) -> BlockIndex {
1572 self.entry
1573 }
1574
1575 fn block_insns(&self, block: BlockIndex) -> InstRange {
1576 let range = self.block_ranges.get(block.index());
1577 InstRange::new(InsnIndex::new(range.start), InsnIndex::new(range.end))
1578 }
1579
1580 fn block_succs(&self, block: BlockIndex) -> &[BlockIndex] {
1581 let range = self.block_succ_range.get(block.index());
1582 &self.block_succs[range]
1583 }
1584
1585 fn block_preds(&self, block: BlockIndex) -> &[BlockIndex] {
1586 let range = self.block_pred_range.get(block.index());
1587 &self.block_preds[range]
1588 }
1589
1590 fn block_params(&self, block: BlockIndex) -> &[VReg] {
1591 if block == self.entry {
1594 return &[];
1595 }
1596
1597 let range = self.block_params_range.get(block.index());
1598 &self.block_params[range]
1599 }
1600
1601 fn branch_blockparams(&self, block: BlockIndex, _insn: InsnIndex, succ_idx: usize) -> &[VReg] {
1602 let succ_range = self.branch_block_arg_succ_range.get(block.index());
1603 debug_assert!(succ_idx < succ_range.len());
1604 let branch_block_args = self.branch_block_arg_range.get(succ_range.start + succ_idx);
1605 &self.branch_block_args[branch_block_args]
1606 }
1607
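    // A block ending in an unconditional trap has no successors; we report its
    // terminator as a "ret" so that regalloc2 treats it as a function exit.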
1608 fn is_ret(&self, insn: InsnIndex) -> bool {
1609 match self.insts[insn.index()].is_term() {
1610 MachTerminator::None => self.insts[insn.index()].is_trap(),
1612 MachTerminator::Ret | MachTerminator::RetCall => true,
1613 MachTerminator::Branch => false,
1614 }
1615 }
1616
1617 fn is_branch(&self, insn: InsnIndex) -> bool {
1618 match self.insts[insn.index()].is_term() {
1619 MachTerminator::Branch => true,
1620 _ => false,
1621 }
1622 }
1623
1624 fn inst_operands(&self, insn: InsnIndex) -> &[Operand] {
1625 let range = self.operand_ranges.get(insn.index());
1626 &self.operands[range]
1627 }
1628
1629 fn inst_clobbers(&self, insn: InsnIndex) -> PRegSet {
1630 self.clobbers.get(&insn).cloned().unwrap_or_default()
1631 }
1632
1633 fn num_vregs(&self) -> usize {
1634 self.vreg_types.len()
1635 }
1636
1637 fn debug_value_labels(&self) -> &[(VReg, InsnIndex, InsnIndex, u32)] {
1638 &self.debug_value_labels
1639 }
1640
1641 fn spillslot_size(&self, regclass: RegClass) -> usize {
1642 self.abi.get_spillslot_size(regclass) as usize
1643 }
1644
1645 fn allow_multiple_vreg_defs(&self) -> bool {
1646 true
1650 }
1651}
1652
1653impl<I: VCodeInst> Debug for VRegAllocator<I> {
1654 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1655 writeln!(f, "VRegAllocator {{")?;
1656
1657 let mut alias_keys = self.vreg_aliases.keys().cloned().collect::<Vec<_>>();
1658 alias_keys.sort_unstable();
1659 for key in alias_keys {
1660 let dest = self.vreg_aliases.get(&key).unwrap();
1661 writeln!(f, " {:?} := {:?}", Reg::from(key), Reg::from(*dest))?;
1662 }
1663
1664 for (vreg, fact) in self.facts.iter().enumerate() {
1665 if let Some(fact) = fact {
1666 writeln!(f, " v{vreg} ! {fact}")?;
1667 }
1668 }
1669
1670 writeln!(f, "}}")
1671 }
1672}
1673
1674impl<I: VCodeInst> fmt::Debug for VCode<I> {
1675 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1676 writeln!(f, "VCode {{")?;
1677 writeln!(f, " Entry block: {}", self.entry.index())?;
1678
1679 let mut state = Default::default();
1680
1681 for block in 0..self.num_blocks() {
1682 let block = BlockIndex::new(block);
1683 writeln!(
1684 f,
1685 "Block {}({:?}):",
1686 block.index(),
1687 self.block_params(block)
1688 )?;
1689 if let Some(bb) = self.bindex_to_bb(block) {
1690 writeln!(f, " (original IR block: {bb})")?;
1691 }
1692 for (succ_idx, succ) in self.block_succs(block).iter().enumerate() {
1693 writeln!(
1694 f,
1695 " (successor: Block {}({:?}))",
1696 succ.index(),
                    self.branch_blockparams(block, InsnIndex::new(0), succ_idx)
1698 )?;
1699 }
1700 for inst in self.block_ranges.get(block.index()) {
1701 writeln!(
1702 f,
1703 " Inst {}: {}",
1704 inst,
1705 self.insts[inst].pretty_print_inst(&mut state)
1706 )?;
1707 if !self.operands.is_empty() {
1708 for operand in self.inst_operands(InsnIndex::new(inst)) {
1709 if operand.kind() == OperandKind::Def {
1710 if let Some(fact) = &self.facts[operand.vreg().vreg()] {
1711 writeln!(f, " v{} ! {}", operand.vreg().vreg(), fact)?;
1712 }
1713 }
1714 }
1715 }
1716 if let Some(user_stack_map) = self.get_user_stack_map(InsnIndex::new(inst)) {
1717 writeln!(f, " {user_stack_map:?}")?;
1718 }
1719 }
1720 }
1721
1722 writeln!(f, "}}")?;
1723 Ok(())
1724 }
1725}
1726
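/// Tracks the state of virtual registers during lowering: the type of each
/// vreg, aliases established between vregs, proof-carrying-code facts attached
/// to vregs, and an error deferred from a failed allocation so lowering can
/// finish before reporting it.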
1727pub struct VRegAllocator<I> {
1729 vreg_types: Vec<Type>,
1731
1732 vreg_aliases: FxHashMap<regalloc2::VReg, regalloc2::VReg>,
1739
1740 deferred_error: Option<CodegenError>,
1745
1746 facts: Vec<Option<Fact>>,
1748
1749 _inst: core::marker::PhantomData<I>,
1751}
1752
1753impl<I: VCodeInst> VRegAllocator<I> {
1754 pub fn with_capacity(capacity: usize) -> Self {
1756 let capacity = first_user_vreg_index() + capacity;
1757 let mut vreg_types = Vec::with_capacity(capacity);
1758 vreg_types.resize(first_user_vreg_index(), types::INVALID);
1759 Self {
1760 vreg_types,
1761 vreg_aliases: FxHashMap::with_capacity_and_hasher(capacity, Default::default()),
1762 deferred_error: None,
1763 facts: Vec::with_capacity(capacity),
1764 _inst: core::marker::PhantomData::default(),
1765 }
1766 }
1767
1768 pub fn alloc(&mut self, ty: Type) -> CodegenResult<ValueRegs<Reg>> {
1770 if self.deferred_error.is_some() {
1771 return Err(CodegenError::CodeTooLarge);
1772 }
1773 let v = self.vreg_types.len();
1774 let (regclasses, tys) = I::rc_for_type(ty)?;
1775 if v + regclasses.len() >= VReg::MAX {
1776 return Err(CodegenError::CodeTooLarge);
1777 }
1778
1779 let regs: ValueRegs<Reg> = match regclasses {
1780 &[rc0] => ValueRegs::one(VReg::new(v, rc0).into()),
1781 &[rc0, rc1] => ValueRegs::two(VReg::new(v, rc0).into(), VReg::new(v + 1, rc1).into()),
1782 _ => panic!("Value must reside in 1 or 2 registers"),
1786 };
1787 for (®_ty, ®) in tys.iter().zip(regs.regs().iter()) {
1788 let vreg = reg.to_virtual_reg().unwrap();
1789 debug_assert_eq!(self.vreg_types.len(), vreg.index());
1790 self.vreg_types.push(reg_ty);
1791 }
1792
1793 self.facts.resize(self.vreg_types.len(), None);
1795
1796 Ok(regs)
1797 }
1798
1799 pub fn alloc_with_deferred_error(&mut self, ty: Type) -> ValueRegs<Reg> {
1805 match self.alloc(ty) {
1806 Ok(x) => x,
1807 Err(e) => {
1808 self.deferred_error = Some(e);
1809 self.bogus_for_deferred_error(ty)
1810 }
1811 }
1812 }
1813
1814 pub fn take_deferred_error(&mut self) -> Option<CodegenError> {
1816 self.deferred_error.take()
1817 }
1818
1819 fn bogus_for_deferred_error(&self, ty: Type) -> ValueRegs<Reg> {
1823 let (regclasses, _tys) = I::rc_for_type(ty).expect("must have valid type");
1824 match regclasses {
1825 &[rc0] => ValueRegs::one(VReg::new(0, rc0).into()),
1826 &[rc0, rc1] => ValueRegs::two(VReg::new(0, rc0).into(), VReg::new(1, rc1).into()),
1827 _ => panic!("Value must reside in 1 or 2 registers"),
1828 }
1829 }
1830
1831 pub fn set_vreg_alias(&mut self, from: Reg, to: Reg) {
1833 let from = from.into();
1834 let resolved_to = self.resolve_vreg_alias(to.into());
1835 assert_ne!(resolved_to, from);
1837
1838 if let Some(fact) = self.facts[from.vreg()].take() {
1842 self.set_fact(resolved_to, fact);
1843 }
1844
1845 let old_alias = self.vreg_aliases.insert(from, resolved_to);
1846 debug_assert_eq!(old_alias, None);
1847 }
1848
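    // Aliases may form chains (v1 -> v2 -> v3), so resolution follows the
    // chain until it reaches a vreg with no further alias.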
1849 fn resolve_vreg_alias(&self, mut vreg: regalloc2::VReg) -> regalloc2::VReg {
1850 while let Some(to) = self.vreg_aliases.get(&vreg) {
1860 vreg = *to;
1861 }
1862 vreg
1863 }
1864
1865 #[inline]
1866 fn debug_assert_no_vreg_aliases(&self, mut list: impl Iterator<Item = VReg>) {
1867 debug_assert!(list.all(|vreg| !self.vreg_aliases.contains_key(&vreg)));
1868 }
1869
1870 fn set_fact(&mut self, vreg: regalloc2::VReg, fact: Fact) -> Option<Fact> {
1874 trace!("vreg {:?} has fact: {:?}", vreg, fact);
1875 debug_assert!(!self.vreg_aliases.contains_key(&vreg));
1876 self.facts[vreg.vreg()].replace(fact)
1877 }
1878
1879 pub fn set_fact_if_missing(&mut self, vreg: VirtualReg, fact: Fact) {
1881 let vreg = self.resolve_vreg_alias(vreg.into());
1882 if self.facts[vreg.vreg()].is_none() {
1883 self.set_fact(vreg, fact);
1884 }
1885 }
1886
1887 pub fn alloc_with_maybe_fact(
1890 &mut self,
1891 ty: Type,
1892 fact: Option<Fact>,
1893 ) -> CodegenResult<ValueRegs<Reg>> {
1894 let result = self.alloc(ty)?;
1895
1896 assert!(result.len() == 1 || fact.is_none());
1899 if let Some(fact) = fact {
1900 self.set_fact(result.regs()[0].into(), fact);
1901 }
1902
1903 Ok(result)
1904 }
1905}
1906
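/// Manages the constants used by a function being emitted: constants are
/// registered here during lowering and deduplicated where possible (by IR
/// constant handle, by static-data pointer, or by value for small constants),
/// and each is assigned a `VCodeConstant` handle for use by instructions that
/// reference the constant pool.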
1907#[derive(Default)]
1919pub struct VCodeConstants {
1920 constants: PrimaryMap<VCodeConstant, VCodeConstantData>,
1921 pool_uses: HashMap<Constant, VCodeConstant>,
1922 well_known_uses: HashMap<*const [u8], VCodeConstant>,
1923 u64s: HashMap<[u8; 8], VCodeConstant>,
1924}
1925impl VCodeConstants {
1926 pub fn with_capacity(expected_num_constants: usize) -> Self {
1928 Self {
1929 constants: PrimaryMap::with_capacity(expected_num_constants),
1930 pool_uses: HashMap::with_capacity(expected_num_constants),
1931 well_known_uses: HashMap::new(),
1932 u64s: HashMap::new(),
1933 }
1934 }
1935
1936 pub fn insert(&mut self, data: VCodeConstantData) -> VCodeConstant {
1941 match data {
1942 VCodeConstantData::Generated(_) => self.constants.push(data),
1943 VCodeConstantData::Pool(constant, _) => match self.pool_uses.get(&constant) {
1944 None => {
1945 let vcode_constant = self.constants.push(data);
1946 self.pool_uses.insert(constant, vcode_constant);
1947 vcode_constant
1948 }
1949 Some(&vcode_constant) => vcode_constant,
1950 },
1951 VCodeConstantData::WellKnown(data_ref) => {
1952 match self.well_known_uses.entry(data_ref as *const [u8]) {
1953 Entry::Vacant(v) => {
1954 let vcode_constant = self.constants.push(data);
1955 v.insert(vcode_constant);
1956 vcode_constant
1957 }
1958 Entry::Occupied(o) => *o.get(),
1959 }
1960 }
1961 VCodeConstantData::U64(value) => match self.u64s.entry(value) {
1962 Entry::Vacant(v) => {
1963 let vcode_constant = self.constants.push(data);
1964 v.insert(vcode_constant);
1965 vcode_constant
1966 }
1967 Entry::Occupied(o) => *o.get(),
1968 },
1969 }
1970 }
1971
1972 pub fn len(&self) -> usize {
1974 self.constants.len()
1975 }
1976
1977 pub fn keys(&self) -> Keys<VCodeConstant> {
1979 self.constants.keys()
1980 }
1981
1982 pub fn iter(&self) -> impl Iterator<Item = (VCodeConstant, &VCodeConstantData)> {
1985 self.constants.iter()
1986 }
1987
1988 pub fn get(&self, c: VCodeConstant) -> &VCodeConstantData {
1990 &self.constants[c]
1991 }
1992
1993 pub fn pool_uses(&self, constant: &VCodeConstantData) -> bool {
1996 match constant {
1997 VCodeConstantData::Pool(c, _) => self.pool_uses.contains_key(c),
1998 _ => false,
1999 }
2000 }
2001}
2002
/// A use of a constant by one or more VCode instructions; see
/// [`VCodeConstants`].
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct VCodeConstant(u32);
entity_impl!(VCodeConstant);

/// The different sources of constant data that can be registered in
/// [`VCodeConstants`].
pub enum VCodeConstantData {
    /// A constant from the IR constant pool, identified by its handle, along
    /// with its data.
    Pool(Constant, ConstantData),
    /// A reference to a static byte slice baked into the compiler;
    /// deduplicated by pointer identity.
    WellKnown(&'static [u8]),
    /// A constant generated during lowering; never deduplicated.
    Generated(ConstantData),
    /// An 8-byte constant stored by value; deduplicated by its bytes.
    U64([u8; 8]),
}
2024impl VCodeConstantData {
2025 pub fn as_slice(&self) -> &[u8] {
2027 match self {
2028 VCodeConstantData::Pool(_, d) | VCodeConstantData::Generated(d) => d.as_slice(),
2029 VCodeConstantData::WellKnown(d) => d,
2030 VCodeConstantData::U64(value) => &value[..],
2031 }
2032 }
2033
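    /// Compute the alignment of the constant data: constants larger than 8
    /// bytes (e.g. vector constants) are aligned to 16 bytes, smaller ones
    /// to 8.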
2034 pub fn alignment(&self) -> u32 {
2036 if self.as_slice().len() <= 8 { 8 } else { 16 }
2037 }
2038}
2039
2040#[cfg(test)]
2041mod test {
2042 use super::*;
2043 use std::mem::size_of;
2044
2045 #[test]
2046 fn size_of_constant_structs() {
2047 assert_eq!(size_of::<Constant>(), 4);
2048 assert_eq!(size_of::<VCodeConstant>(), 4);
2049 assert_eq!(size_of::<ConstantData>(), 3 * size_of::<usize>());
2050 assert_eq!(size_of::<VCodeConstantData>(), 4 * size_of::<usize>());
2051 assert_eq!(
2052 size_of::<PrimaryMap<VCodeConstant, VCodeConstantData>>(),
2053 3 * size_of::<usize>()
2054 );
2055 }
2059}