use crate::cursor::{Cursor as _, FuncCursor};
use crate::ir::{self, ExceptionTableData, ExceptionTableItem, InstBuilder as _};
use crate::result::CodegenResult;
use crate::trace;
use crate::traversals::Dfs;
use alloc::borrow::Cow;
use alloc::vec::Vec;
use cranelift_entity::{SecondaryMap, packed_option::PackedOption};
use smallvec::SmallVec;

type SmallValueVec = SmallVec<[ir::Value; 8]>;
type SmallBlockArgVec = SmallVec<[ir::BlockArg; 8]>;
type SmallBlockCallVec = SmallVec<[ir::BlockCall; 8]>;

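/// The command that an `Inline` implementation gives back to the inlining
/// driver for a particular call site.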
pub enum InlineCommand<'a> {
    /// Do not inline this call; leave the call instruction in place.
    KeepCall,
    /// Inline the call, replacing it with the body of the given callee.
    Inline(Cow<'a, ir::Function>),
}

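/// A callback that decides, call site by call site, whether to inline a
/// callee into its caller.
///
/// Implementations are given the caller, the call instruction, its opcode,
/// the callee's `FuncRef`, and the call's arguments, and reply with an
/// `InlineCommand`.
///
/// # Example
///
/// A minimal sketch of a size-based heuristic. The `bodies` map and
/// `max_insts` threshold are illustrative, not part of this crate; a real
/// implementation would resolve `caller.dfg.ext_funcs[callee].name` against
/// its own store of compiled functions:
///
/// ```ignore
/// use alloc::borrow::Cow;
/// use std::collections::HashMap;
///
/// struct SizeThresholdInliner {
///     /// Hypothetical store of pre-parsed callee bodies, keyed by name.
///     bodies: HashMap<ir::ExternalName, ir::Function>,
///     /// Only inline callees with at most this many instructions.
///     max_insts: usize,
/// }
///
/// impl Inline for SizeThresholdInliner {
///     fn inline(
///         &mut self,
///         caller: &ir::Function,
///         _call_inst: ir::Inst,
///         _call_opcode: ir::Opcode,
///         callee: ir::FuncRef,
///         _call_args: &[ir::Value],
///     ) -> InlineCommand<'_> {
///         let name = &caller.dfg.ext_funcs[callee].name;
///         match self.bodies.get(name) {
///             Some(body) if body.dfg.num_insts() <= self.max_insts => {
///                 InlineCommand::Inline(Cow::Borrowed(body))
///             }
///             _ => InlineCommand::KeepCall,
///         }
///     }
/// }
/// ```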
pub trait Inline {
    fn inline(
        &mut self,
        caller: &ir::Function,
        call_inst: ir::Inst,
        call_opcode: ir::Opcode,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
    ) -> InlineCommand<'_>;
}

impl<'a, T> Inline for &'a mut T
where
    T: Inline,
{
    fn inline(
        &mut self,
        caller: &ir::Function,
        inst: ir::Inst,
        opcode: ir::Opcode,
        callee: ir::FuncRef,
        args: &[ir::Value],
    ) -> InlineCommand<'_> {
        (*self).inline(caller, inst, opcode, callee, args)
    }
}

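/// Walk the function and, at each `call`, `return_call`, or `try_call` site,
/// ask the given `Inline` implementation whether to inline the callee.
/// Returns `Ok(true)` if at least one call site was inlined.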
pub(crate) fn do_inlining(
    func: &mut ir::Function,
    mut inliner: impl Inline,
) -> CodegenResult<bool> {
    trace!("function {} before inlining: {}", func.name, func);

    let mut inlined_any = false;
    let mut allocs = InliningAllocs::default();

    let mut cursor = FuncCursor::new(func);
    while let Some(block) = cursor.next_block() {
        // Remember the position just before each instruction so that, after
        // inlining a call, we can rewind the cursor and continue from the
        // start of the newly inlined code.
        let mut prev_pos;

        while let Some(inst) = {
            prev_pos = cursor.position();
            cursor.next_inst()
        } {
            match cursor.func.dfg.insts[inst] {
                ir::InstructionData::Call {
                    opcode: opcode @ ir::Opcode::Call | opcode @ ir::Opcode::ReturnCall,
                    args: _,
                    func_ref,
                } => {
                    let args = cursor.func.dfg.inst_args(inst);
                    match inliner.inline(&cursor.func, inst, opcode, func_ref, args) {
                        InlineCommand::KeepCall => continue,
                        InlineCommand::Inline(callee) => {
                            inline_one(
                                &mut allocs,
                                cursor.func,
                                func_ref,
                                block,
                                inst,
                                opcode,
                                &callee,
                                None,
                            );
                            inlined_any = true;
                            cursor.set_position(prev_pos);
                        }
                    }
                }
                ir::InstructionData::TryCall {
                    opcode: opcode @ ir::Opcode::TryCall,
                    args: _,
                    func_ref,
                    exception,
                } => {
                    let args = cursor.func.dfg.inst_args(inst);
                    match inliner.inline(&cursor.func, inst, opcode, func_ref, args) {
                        InlineCommand::KeepCall => continue,
                        InlineCommand::Inline(callee) => {
                            inline_one(
                                &mut allocs,
                                cursor.func,
                                func_ref,
                                block,
                                inst,
                                opcode,
                                &callee,
                                Some(exception),
                            );
                            inlined_any = true;
                            cursor.set_position(prev_pos);
                        }
                    }
                }
                _ => continue,
            }
        }
    }

    if inlined_any {
        trace!("function {} after inlining: {}", func.name, func);
    } else {
        trace!("function {} did not have any callees inlined", func.name);
    }

    Ok(inlined_any)
}

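/// Scratch allocations that are reused across each inlined call site, to
/// avoid repeatedly allocating while inlining many calls into one function.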
#[derive(Default)]
struct InliningAllocs {
    /// Map from a callee value to its inlined value in the caller.
    values: SecondaryMap<ir::Value, PackedOption<ir::Value>>,

    /// Map from a callee constant to its inlined constant in the caller.
    constants: SecondaryMap<ir::Constant, PackedOption<ir::Constant>>,

    /// Inlined call instructions that still need the original `try_call`
    /// site's exception table merged into their own.
    calls_needing_exception_table_fixup: Vec<ir::Inst>,
}

impl InliningAllocs {
    fn reset(&mut self, callee: &ir::Function) {
        let InliningAllocs {
            values,
            constants,
            calls_needing_exception_table_fixup,
        } = self;

        values.clear();
        values.resize(callee.dfg.len_values());

        constants.clear();
        constants.resize(callee.dfg.constants.len());

        calls_needing_exception_table_fixup.clear();
    }

    fn set_inlined_value(
        &mut self,
        callee: &ir::Function,
        callee_val: ir::Value,
        inlined_val: ir::Value,
    ) {
        trace!(" --> callee {callee_val:?} = inlined {inlined_val:?}");
        debug_assert!(self.values[callee_val].is_none());
        let resolved_callee_val = callee.dfg.resolve_aliases(callee_val);
        debug_assert!(self.values[resolved_callee_val].is_none());
        self.values[resolved_callee_val] = Some(inlined_val).into();
    }

    fn get_inlined_value(&self, callee: &ir::Function, callee_val: ir::Value) -> Option<ir::Value> {
        let resolved_callee_val = callee.dfg.resolve_aliases(callee_val);
        self.values[resolved_callee_val].expand()
    }
}

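/// Inline one call site: translate the callee's entities, blocks, and
/// instructions into the caller, replace the call with a jump to the inlined
/// entry block, and route the callee's returns to a return block (merging
/// exception tables when inlining into a `try_call` site).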
fn inline_one(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    callee_func_ref: ir::FuncRef,
    call_block: ir::Block,
    call_inst: ir::Inst,
    call_opcode: ir::Opcode,
    callee: &ir::Function,
    call_exception_table: Option<ir::ExceptionTable>,
) {
    trace!(
        "Inlining call {call_inst:?}: {}\n\
         with callee = {callee:?}",
        func.dfg.display_inst(call_inst)
    );

    let expected_callee_sig = func.dfg.ext_funcs[callee_func_ref].signature;
    let expected_callee_sig = &func.dfg.signatures[expected_callee_sig];
    assert_eq!(expected_callee_sig, &callee.signature);

    allocs.reset(callee);

    // Translate the callee's entities (blocks, signatures, stack slots, and
    // so on) into the caller.
    let entity_map = create_entities(allocs, func, callee);

    // Find the block where control rejoins after the inlined body, then
    // replace the call itself with a jump into the inlined entry block.
    let return_block = split_off_return_block(func, call_inst, call_opcode, callee);
    let call_stack_map = replace_call_with_jump(allocs, func, call_inst, callee, &entity_map);

    // Splice the inlined blocks into the caller's layout, just after the
    // block containing the (former) call.
    inline_block_layout(func, call_block, callee, &entity_map);

    // Copy each reachable callee instruction into its inlined block,
    // remapping all entity references as we go.
    for callee_block in Dfs::new().pre_order_iter(callee) {
        let inlined_block = entity_map.inlined_block(callee_block);
        trace!(
            "Processing instructions in callee block {callee_block:?} (inlined block {inlined_block:?})"
        );

        let mut next_callee_inst = callee.layout.first_inst(callee_block);
        while let Some(callee_inst) = next_callee_inst {
            trace!(
                "Processing callee instruction {callee_inst:?}: {}",
                callee.dfg.display_inst(callee_inst)
            );

            assert_ne!(
                callee.dfg.insts[callee_inst].opcode(),
                ir::Opcode::GlobalValue,
                "callee must already be legalized, we shouldn't see any `global_value` \
                 instructions when inlining; found {callee_inst:?}: {}",
                callee.dfg.display_inst(callee_inst)
            );

            let inlined_inst_data = callee.dfg.insts[callee_inst].map(InliningInstRemapper {
                allocs: &allocs,
                func,
                callee,
                entity_map: &entity_map,
            });
            let inlined_inst = func.dfg.make_inst(inlined_inst_data);
            func.layout.append_inst(inlined_inst, inlined_block);

            let opcode = callee.dfg.insts[callee_inst].opcode();
            if opcode.is_return() {
                if let Some(return_block) = return_block {
                    fixup_inst_that_returns(
                        allocs,
                        func,
                        callee,
                        &entity_map,
                        call_opcode,
                        inlined_inst,
                        callee_inst,
                        return_block,
                        call_stack_map.as_deref(),
                    );
                } else {
                    debug_assert_eq!(call_opcode, ir::Opcode::ReturnCall);
                }
            } else {
                let ctrl_typevar = callee.dfg.ctrl_typevar(callee_inst);
                func.dfg.make_inst_results(inlined_inst, ctrl_typevar);

                let callee_results = callee.dfg.inst_results(callee_inst);
                let inlined_results = func.dfg.inst_results(inlined_inst);
                debug_assert_eq!(callee_results.len(), inlined_results.len());
                for (callee_val, inlined_val) in callee_results.iter().zip(inlined_results) {
                    allocs.set_inlined_value(callee, *callee_val, *inlined_val);
                }

                if opcode.is_call() {
                    append_stack_map_entries(
                        func,
                        callee,
                        &entity_map,
                        call_stack_map.as_deref(),
                        inlined_inst,
                        callee_inst,
                    );

                    debug_assert_eq!(
                        call_opcode == ir::Opcode::TryCall,
                        call_exception_table.is_some()
                    );
                    if call_opcode == ir::Opcode::TryCall {
                        allocs
                            .calls_needing_exception_table_fixup
                            .push(inlined_inst);
                    }
                }
            }

            trace!(
                " --> inserted inlined instruction {inlined_inst:?}: {}",
                func.dfg.display_inst(inlined_inst)
            );

            next_callee_inst = callee.layout.next_inst(callee_inst);
        }
    }

    // Remove any inlined blocks that ended up empty; these correspond to
    // callee blocks that were unreachable and therefore never visited above.
    for block in entity_map.iter_inlined_blocks(func) {
        if func.layout.first_inst(block).is_none() {
            func.layout.remove_block(block);
        }
    }

    debug_assert!(
        allocs.calls_needing_exception_table_fixup.is_empty() || call_exception_table.is_some()
    );
    debug_assert_eq!(
        call_opcode == ir::Opcode::TryCall,
        call_exception_table.is_some()
    );
    if let Some(call_exception_table) = call_exception_table {
        fixup_inlined_call_exception_tables(allocs, func, call_exception_table);
    }
}

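/// Append both the original call site's stack map entries and the callee's
/// own (slot-remapped) entries to an inlined call instruction.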
fn append_stack_map_entries(
    func: &mut ir::Function,
    callee: &ir::Function,
    entity_map: &EntityMap,
    call_stack_map: Option<&[ir::UserStackMapEntry]>,
    inlined_inst: ir::Inst,
    callee_inst: ir::Inst,
) {
    // Entries inherited from the original call site.
    func.dfg.append_user_stack_map_entries(
        inlined_inst,
        call_stack_map
            .iter()
            .flat_map(|entries| entries.iter().cloned()),
    );

    // The callee's own entries, with stack slots remapped into the caller.
    func.dfg.append_user_stack_map_entries(
        inlined_inst,
        callee
            .dfg
            .user_stack_map_entries(callee_inst)
            .iter()
            .flat_map(|entries| entries.iter())
            .map(|entry| ir::UserStackMapEntry {
                ty: entry.ty,
                slot: entity_map.inlined_stack_slot(entry.slot),
                offset: entry.offset,
            }),
    );
}

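/// After inlining into a `try_call` site, calls copied from the callee must
/// still unwind to the original call site's exception handlers: plain
/// `call`s become `try_call`s with a clone of the original exception table,
/// and existing `try_call`s get the original table's entries appended to
/// their own.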
fn fixup_inlined_call_exception_tables(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    call_exception_table: ir::ExceptionTable,
) {
    // Split the block after a (non-terminator) call so the call can become a
    // block-terminating `try_call`; the call's old results become parameters
    // of the new block.
    let split_block_for_new_try_call = |func: &mut ir::Function, inst: ir::Inst| -> ir::Block {
        debug_assert!(func.dfg.insts[inst].opcode().is_call());
        debug_assert!(!func.dfg.insts[inst].opcode().is_terminator());

        let next_inst = func
            .layout
            .next_inst(inst)
            .expect("inst is not a terminator, should have a successor");
        let new_block = func.dfg.blocks.add();
        func.layout.split_block(new_block, next_inst);

        let old_results = SmallValueVec::from_iter(func.dfg.inst_results(inst).iter().copied());
        func.dfg.detach_inst_results(inst);
        for old_result in old_results {
            let ty = func.dfg.value_type(old_result);
            let new_block_param = func.dfg.append_block_param(new_block, ty);
            func.dfg.change_to_alias(old_result, new_block_param);
        }

        new_block
    };

    // Clone the original call site's exception table for this call, updating
    // its signature and pointing its normal return at `new_block`.
    let clone_exception_table_for_this_call = |func: &mut ir::Function,
                                               signature: ir::SigRef,
                                               new_block: ir::Block|
     -> ir::ExceptionTable {
        let mut exception = func.stencil.dfg.exception_tables[call_exception_table]
            .deep_clone(&mut func.stencil.dfg.value_lists);

        *exception.signature_mut() = signature;

        let returns_len = func.dfg.signatures[signature].returns.len();
        let returns_len = u32::try_from(returns_len).unwrap();

        *exception.normal_return_mut() = ir::BlockCall::new(
            new_block,
            (0..returns_len).map(ir::BlockArg::TryCallRet),
            &mut func.dfg.value_lists,
        );

        func.dfg.exception_tables.push(exception)
    };

    for inst in allocs.calls_needing_exception_table_fixup.drain(..) {
        debug_assert!(func.dfg.insts[inst].opcode().is_call());
        debug_assert!(!func.dfg.insts[inst].opcode().is_return());
        match func.dfg.insts[inst] {
            // `call` becomes `try_call`.
            ir::InstructionData::Call {
                opcode: ir::Opcode::Call,
                args,
                func_ref,
            } => {
                let new_block = split_block_for_new_try_call(func, inst);
                let signature = func.dfg.ext_funcs[func_ref].signature;
                let exception = clone_exception_table_for_this_call(func, signature, new_block);
                func.dfg.insts[inst] = ir::InstructionData::TryCall {
                    opcode: ir::Opcode::TryCall,
                    args,
                    func_ref,
                    exception,
                };
            }

            // `call_indirect` becomes `try_call_indirect`.
            ir::InstructionData::CallIndirect {
                opcode: ir::Opcode::CallIndirect,
                args,
                sig_ref,
            } => {
                let new_block = split_block_for_new_try_call(func, inst);
                let exception = clone_exception_table_for_this_call(func, sig_ref, new_block);
                func.dfg.insts[inst] = ir::InstructionData::TryCallIndirect {
                    opcode: ir::Opcode::TryCallIndirect,
                    args,
                    exception,
                };
            }

            // Existing `try_call`s keep their own table but additionally
            // chain on the original call site's entries.
            ir::InstructionData::TryCall {
                opcode: ir::Opcode::TryCall,
                exception,
                ..
            }
            | ir::InstructionData::TryCallIndirect {
                opcode: ir::Opcode::TryCallIndirect,
                exception,
                ..
            } => {
                let sig = func.dfg.exception_tables[exception].signature();
                let normal_return = *func.dfg.exception_tables[exception].normal_return();
                let exception_data = ExceptionTableData::new(
                    sig,
                    normal_return,
                    func.dfg.exception_tables[exception]
                        .items()
                        .chain(func.dfg.exception_tables[call_exception_table].items()),
                )
                .deep_clone(&mut func.dfg.value_lists);

                func.dfg.exception_tables[exception] = exception_data;
            }

            otherwise => unreachable!("unknown non-return call instruction: {otherwise:?}"),
        }
    }
}

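/// Rewrite an inlined instruction that returns from the callee so that it
/// instead transfers control to `return_block`: a `return` becomes a `jump`,
/// and a `return_call[_indirect]` becomes a plain `call[_indirect]` followed
/// by a `jump` that carries the call's results.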
fn fixup_inst_that_returns(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    callee: &ir::Function,
    entity_map: &EntityMap,
    call_opcode: ir::Opcode,
    inlined_inst: ir::Inst,
    callee_inst: ir::Inst,
    return_block: ir::Block,
    call_stack_map: Option<&[ir::UserStackMapEntry]>,
) {
    debug_assert!(func.dfg.insts[inlined_inst].opcode().is_return());
    match func.dfg.insts[inlined_inst] {
        // A `return` becomes a `jump` to the return block, passing the
        // returned values as block arguments.
        ir::InstructionData::MultiAry {
            opcode: ir::Opcode::Return,
            args,
        } => {
            let rets = SmallBlockArgVec::from_iter(
                args.as_slice(&func.dfg.value_lists)
                    .iter()
                    .copied()
                    .map(|v| v.into()),
            );
            func.dfg.replace(inlined_inst).jump(return_block, &rets);
        }

        // A `return_call` becomes a plain `call` followed by a `jump` to the
        // return block that forwards the call's results.
        ir::InstructionData::Call {
            opcode: ir::Opcode::ReturnCall,
            args,
            func_ref,
        } => {
            func.dfg.insts[inlined_inst] = ir::InstructionData::Call {
                opcode: ir::Opcode::Call,
                args,
                func_ref,
            };
            func.dfg.make_inst_results(inlined_inst, ir::types::INVALID);

            append_stack_map_entries(
                func,
                callee,
                entity_map,
                call_stack_map,
                inlined_inst,
                callee_inst,
            );

            let rets = SmallBlockArgVec::from_iter(
                func.dfg
                    .inst_results(inlined_inst)
                    .iter()
                    .copied()
                    .map(|v| v.into()),
            );
            let mut cursor = FuncCursor::new(func);
            cursor.goto_after_inst(inlined_inst);
            cursor.ins().jump(return_block, &rets);

            // The new, no-longer-tail call needs its exception table fixed up
            // if we are inlining into a `try_call` site.
            if call_opcode == ir::Opcode::TryCall {
                allocs
                    .calls_needing_exception_table_fixup
                    .push(inlined_inst);
            }
        }

        // Same as above, but for `return_call_indirect`.
        ir::InstructionData::CallIndirect {
            opcode: ir::Opcode::ReturnCallIndirect,
            args,
            sig_ref,
        } => {
            func.dfg.insts[inlined_inst] = ir::InstructionData::CallIndirect {
                opcode: ir::Opcode::CallIndirect,
                args,
                sig_ref,
            };
            func.dfg.make_inst_results(inlined_inst, ir::types::INVALID);

            append_stack_map_entries(
                func,
                callee,
                entity_map,
                call_stack_map,
                inlined_inst,
                callee_inst,
            );

            let rets = SmallBlockArgVec::from_iter(
                func.dfg
                    .inst_results(inlined_inst)
                    .iter()
                    .copied()
                    .map(|v| v.into()),
            );
            let mut cursor = FuncCursor::new(func);
            cursor.goto_after_inst(inlined_inst);
            cursor.ins().jump(return_block, &rets);

            if call_opcode == ir::Opcode::TryCall {
                allocs
                    .calls_needing_exception_table_fixup
                    .push(inlined_inst);
            }
        }

        inst_data => unreachable!(
            "should have handled all `is_return() == true` instructions above; \
             got {inst_data:?}"
        ),
    }
}

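/// An `InstructionMapper` that rewrites a callee instruction's operands and
/// entity references into their inlined equivalents in the caller.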
struct InliningInstRemapper<'a> {
    allocs: &'a InliningAllocs,
    func: &'a mut ir::Function,
    callee: &'a ir::Function,
    entity_map: &'a EntityMap,
}

impl<'a> ir::instructions::InstructionMapper for InliningInstRemapper<'a> {
    fn map_value(&mut self, value: ir::Value) -> ir::Value {
        self.allocs.get_inlined_value(self.callee, value).expect(
            "defs come before uses; we should have already inlined all values \
             used by an instruction",
        )
    }

    fn map_value_list(&mut self, value_list: ir::ValueList) -> ir::ValueList {
        let mut inlined_list = ir::ValueList::new();
        for callee_val in value_list.as_slice(&self.callee.dfg.value_lists) {
            let inlined_val = self.map_value(*callee_val);
            inlined_list.push(inlined_val, &mut self.func.dfg.value_lists);
        }
        inlined_list
    }

    fn map_global_value(&mut self, global_value: ir::GlobalValue) -> ir::GlobalValue {
        self.entity_map.inlined_global_value(global_value)
    }

    fn map_jump_table(&mut self, jump_table: ir::JumpTable) -> ir::JumpTable {
        let inlined_default =
            self.map_block_call(self.callee.dfg.jump_tables[jump_table].default_block());
        let inlined_table = self.callee.dfg.jump_tables[jump_table]
            .as_slice()
            .iter()
            .map(|callee_block_call| self.map_block_call(*callee_block_call))
            .collect::<SmallBlockCallVec>();
        self.func
            .dfg
            .jump_tables
            .push(ir::JumpTableData::new(inlined_default, &inlined_table))
    }

    fn map_exception_table(&mut self, exception_table: ir::ExceptionTable) -> ir::ExceptionTable {
        let exception_table = &self.callee.dfg.exception_tables[exception_table];
        let inlined_sig_ref = self.map_sig_ref(exception_table.signature());
        let inlined_normal_return = self.map_block_call(*exception_table.normal_return());
        let inlined_table = exception_table
            .items()
            .map(|item| match item {
                ExceptionTableItem::Tag(tag, block_call) => {
                    ExceptionTableItem::Tag(tag, self.map_block_call(block_call))
                }
                ExceptionTableItem::Default(block_call) => {
                    ExceptionTableItem::Default(self.map_block_call(block_call))
                }
                ExceptionTableItem::Context(value) => {
                    ExceptionTableItem::Context(self.map_value(value))
                }
            })
            .collect::<SmallVec<[_; 8]>>();
        self.func
            .dfg
            .exception_tables
            .push(ir::ExceptionTableData::new(
                inlined_sig_ref,
                inlined_normal_return,
                inlined_table,
            ))
    }

    fn map_block_call(&mut self, block_call: ir::BlockCall) -> ir::BlockCall {
        let callee_block = block_call.block(&self.callee.dfg.value_lists);
        let inlined_block = self.entity_map.inlined_block(callee_block);
        let args = block_call
            .args(&self.callee.dfg.value_lists)
            .map(|arg| match arg {
                ir::BlockArg::Value(value) => self.map_value(value).into(),
                ir::BlockArg::TryCallRet(_) | ir::BlockArg::TryCallExn(_) => arg,
            })
            .collect::<SmallBlockArgVec>();
        ir::BlockCall::new(inlined_block, args, &mut self.func.dfg.value_lists)
    }

    fn map_func_ref(&mut self, func_ref: ir::FuncRef) -> ir::FuncRef {
        self.entity_map.inlined_func_ref(func_ref)
    }

    fn map_sig_ref(&mut self, sig_ref: ir::SigRef) -> ir::SigRef {
        self.entity_map.inlined_sig_ref(sig_ref)
    }

    fn map_stack_slot(&mut self, stack_slot: ir::StackSlot) -> ir::StackSlot {
        self.entity_map.inlined_stack_slot(stack_slot)
    }

    fn map_dynamic_stack_slot(
        &mut self,
        dynamic_stack_slot: ir::DynamicStackSlot,
    ) -> ir::DynamicStackSlot {
        self.entity_map
            .inlined_dynamic_stack_slot(dynamic_stack_slot)
    }

    fn map_constant(&mut self, constant: ir::Constant) -> ir::Constant {
        self.allocs
            .constants
            .get(constant)
            .and_then(|o| o.expand())
            .expect("should have inlined all callee constants")
    }

    fn map_immediate(&mut self, immediate: ir::Immediate) -> ir::Immediate {
        self.entity_map.inlined_immediate(immediate)
    }
}

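/// Insert the inlined copies of the callee's blocks into the caller's
/// layout, preserving the callee's block order, immediately after the block
/// containing the call.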
fn inline_block_layout(
    func: &mut ir::Function,
    call_block: ir::Block,
    callee: &ir::Function,
    entity_map: &EntityMap,
) {
    let mut prev_inlined_block = call_block;
    let mut next_callee_block = callee.layout.entry_block();
    while let Some(callee_block) = next_callee_block {
        let inlined_block = entity_map.inlined_block(callee_block);
        func.layout
            .insert_block_after(inlined_block, prev_inlined_block);

        prev_inlined_block = inlined_block;
        next_callee_block = callee.layout.next_block(callee_block);
    }
}

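/// Find or create the block to which inlined returns should jump.
///
/// For a plain `call`, split the caller's block just after the call. A
/// `return_call` has no return block (`None`). For a `try_call`, reuse the
/// normal-return successor when its arguments are exactly the `try_call`'s
/// results in order; otherwise synthesize an intermediate block that
/// forwards its parameters along the original normal-return edge.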
fn split_off_return_block(
    func: &mut ir::Function,
    call_inst: ir::Inst,
    opcode: ir::Opcode,
    callee: &ir::Function,
) -> Option<ir::Block> {
    // For non-terminator calls, split the block after the call; the call's
    // old results become block parameters of the new return block.
    let return_block = func.layout.next_inst(call_inst).map(|next_inst| {
        let return_block = func.dfg.blocks.add();
        func.layout.split_block(return_block, next_inst);

        let old_results =
            SmallValueVec::from_iter(func.dfg.inst_results(call_inst).iter().copied());
        debug_assert_eq!(old_results.len(), callee.signature.returns.len());
        func.dfg.detach_inst_results(call_inst);
        for (abi, old_val) in callee.signature.returns.iter().zip(old_results) {
            debug_assert_eq!(abi.value_type, func.dfg.value_type(old_val));
            let ret_param = func.dfg.append_block_param(return_block, abi.value_type);
            func.dfg.change_to_alias(old_val, ret_param);
        }

        return_block
    });

    // Only block-terminating calls lack a next instruction, and therefore a
    // split-off return block.
    debug_assert_eq!(
        return_block.is_none(),
        opcode == ir::Opcode::ReturnCall || opcode == ir::Opcode::TryCall,
    );
    return_block.or_else(|| match func.dfg.insts[call_inst] {
        ir::InstructionData::TryCall {
            opcode: ir::Opcode::TryCall,
            args: _,
            func_ref: _,
            exception,
        } => {
            let normal_return = func.dfg.exception_tables[exception].normal_return();
            let normal_return_block = normal_return.block(&func.dfg.value_lists);

            // Fast path: reuse the normal-return block directly when its
            // arguments are exactly the `try_call`'s results, in order.
            {
                let normal_return_args = normal_return.args(&func.dfg.value_lists);
                if normal_return_args.len() == callee.signature.returns.len()
                    && normal_return_args.enumerate().all(|(i, arg)| {
                        let i = u32::try_from(i).unwrap();
                        arg == ir::BlockArg::TryCallRet(i)
                    })
                {
                    return Some(normal_return_block);
                }
            }

            // Otherwise, synthesize a block that receives the return values
            // and forwards them along the original normal-return edge.
            let return_block = func.dfg.blocks.add();
            func.layout.insert_block(return_block, normal_return_block);

            let return_block_params = callee
                .signature
                .returns
                .iter()
                .map(|abi| func.dfg.append_block_param(return_block, abi.value_type))
                .collect::<SmallValueVec>();

            let normal_return_args = func.dfg.exception_tables[exception]
                .normal_return()
                .args(&func.dfg.value_lists)
                .collect::<SmallBlockArgVec>();
            let jump_args = normal_return_args
                .into_iter()
                .map(|arg| match arg {
                    ir::BlockArg::Value(value) => ir::BlockArg::Value(value),
                    ir::BlockArg::TryCallRet(i) => {
                        let i = usize::try_from(i).unwrap();
                        ir::BlockArg::Value(return_block_params[i])
                    }
                    ir::BlockArg::TryCallExn(_) => {
                        unreachable!("normal-return edges cannot use exceptional results")
                    }
                })
                .collect::<SmallBlockArgVec>();

            let mut cursor = FuncCursor::new(func);
            cursor.goto_first_insertion_point(return_block);
            cursor.ins().jump(normal_return_block, &jump_args);

            Some(return_block)
        }
        _ => None,
    })
}

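/// Replace the call instruction with a jump to the inlined entry block,
/// mapping the caller's argument values to the callee's entry-block
/// parameters. Returns the call's user stack map entries, if any, so they
/// can be propagated to the calls inlined from the callee.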
fn replace_call_with_jump(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    call_inst: ir::Inst,
    callee: &ir::Function,
    entity_map: &EntityMap,
) -> Option<ir::UserStackMapEntryVec> {
    trace!("Replacing `call` with `jump`");
    trace!(
        " --> call instruction: {call_inst:?}: {}",
        func.dfg.display_inst(call_inst)
    );

    // Map each caller argument to the corresponding callee entry-block
    // parameter, checking ABI types along the way.
    let callee_entry_block = callee
        .layout
        .entry_block()
        .expect("callee function should have an entry block");
    let callee_param_values = callee.dfg.block_params(callee_entry_block);
    let caller_arg_values = SmallValueVec::from_iter(func.dfg.inst_args(call_inst).iter().copied());
    debug_assert_eq!(callee_param_values.len(), caller_arg_values.len());
    debug_assert_eq!(callee_param_values.len(), callee.signature.params.len());
    for (abi, (callee_param_value, caller_arg_value)) in callee
        .signature
        .params
        .iter()
        .zip(callee_param_values.into_iter().zip(caller_arg_values))
    {
        debug_assert_eq!(abi.value_type, callee.dfg.value_type(*callee_param_value));
        debug_assert_eq!(abi.value_type, func.dfg.value_type(caller_arg_value));
        allocs.set_inlined_value(callee, *callee_param_value, caller_arg_value);
    }

    // Because the arguments are mapped directly to the entry block's
    // parameters, the jump itself carries no block arguments.
    let inlined_entry_block = entity_map.inlined_block(callee_entry_block);
    func.dfg.replace(call_inst).jump(inlined_entry_block, &[]);
    trace!(
        " --> replaced with jump instruction: {call_inst:?}: {}",
        func.dfg.display_inst(call_inst)
    );

    func.dfg.take_user_stack_map_entries(call_inst)
}

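/// Maps callee entities to their inlined counterparts in the caller.
///
/// Because each kind of entity is copied into the caller in one contiguous
/// batch, the map is just a per-entity-kind offset; `None` means the
/// entities of that kind have not been created yet.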
#[derive(Default)]
struct EntityMap {
    block_offset: Option<u32>,
    global_value_offset: Option<u32>,
    sig_ref_offset: Option<u32>,
    func_ref_offset: Option<u32>,
    stack_slot_offset: Option<u32>,
    dynamic_type_offset: Option<u32>,
    dynamic_stack_slot_offset: Option<u32>,
    immediate_offset: Option<u32>,
}

impl EntityMap {
    fn inlined_block(&self, callee_block: ir::Block) -> ir::Block {
        let offset = self
            .block_offset
            .expect("must create inlined `ir::Block`s before calling `EntityMap::inlined_block`");
        ir::Block::from_u32(offset + callee_block.as_u32())
    }

    fn iter_inlined_blocks(&self, func: &ir::Function) -> impl Iterator<Item = ir::Block> + use<> {
        let start = self.block_offset.expect(
            "must create inlined `ir::Block`s before calling `EntityMap::iter_inlined_blocks`",
        );

        let end = func.dfg.blocks.len();
        let end = u32::try_from(end).unwrap();

        (start..end).map(ir::Block::from_u32)
    }

    fn inlined_global_value(&self, callee_global_value: ir::GlobalValue) -> ir::GlobalValue {
        let offset = self
            .global_value_offset
            .expect("must create inlined `ir::GlobalValue`s before calling `EntityMap::inlined_global_value`");
        ir::GlobalValue::from_u32(offset + callee_global_value.as_u32())
    }

    fn inlined_sig_ref(&self, callee_sig_ref: ir::SigRef) -> ir::SigRef {
        let offset = self.sig_ref_offset.expect(
            "must create inlined `ir::SigRef`s before calling `EntityMap::inlined_sig_ref`",
        );
        ir::SigRef::from_u32(offset + callee_sig_ref.as_u32())
    }

    fn inlined_func_ref(&self, callee_func_ref: ir::FuncRef) -> ir::FuncRef {
        let offset = self.func_ref_offset.expect(
            "must create inlined `ir::FuncRef`s before calling `EntityMap::inlined_func_ref`",
        );
        ir::FuncRef::from_u32(offset + callee_func_ref.as_u32())
    }

    fn inlined_stack_slot(&self, callee_stack_slot: ir::StackSlot) -> ir::StackSlot {
        let offset = self.stack_slot_offset.expect(
            "must create inlined `ir::StackSlot`s before calling `EntityMap::inlined_stack_slot`",
        );
        ir::StackSlot::from_u32(offset + callee_stack_slot.as_u32())
    }

    fn inlined_dynamic_type(&self, callee_dynamic_type: ir::DynamicType) -> ir::DynamicType {
        let offset = self.dynamic_type_offset.expect(
            "must create inlined `ir::DynamicType`s before calling `EntityMap::inlined_dynamic_type`",
        );
        ir::DynamicType::from_u32(offset + callee_dynamic_type.as_u32())
    }

    fn inlined_dynamic_stack_slot(
        &self,
        callee_dynamic_stack_slot: ir::DynamicStackSlot,
    ) -> ir::DynamicStackSlot {
        let offset = self.dynamic_stack_slot_offset.expect(
            "must create inlined `ir::DynamicStackSlot`s before calling `EntityMap::inlined_dynamic_stack_slot`",
        );
        ir::DynamicStackSlot::from_u32(offset + callee_dynamic_stack_slot.as_u32())
    }

    fn inlined_immediate(&self, callee_immediate: ir::Immediate) -> ir::Immediate {
        let offset = self.immediate_offset.expect(
            "must create inlined `ir::Immediate`s before calling `EntityMap::inlined_immediate`",
        );
        ir::Immediate::from_u32(offset + callee_immediate.as_u32())
    }
}

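/// Copy all of the callee's entities (blocks, global values, signatures,
/// external functions, stack slots, dynamic types, immediates, and
/// constants) into the caller, recording the offset at which each batch was
/// inserted.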
fn create_entities(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    callee: &ir::Function,
) -> EntityMap {
    let mut entity_map = EntityMap::default();

    entity_map.block_offset = Some(create_blocks(allocs, func, callee));
    entity_map.global_value_offset = Some(create_global_values(func, callee));
    entity_map.sig_ref_offset = Some(create_sig_refs(func, callee));
    entity_map.func_ref_offset = Some(create_func_refs(func, callee, &entity_map));
    entity_map.stack_slot_offset = Some(create_stack_slots(func, callee));
    entity_map.dynamic_type_offset = Some(create_dynamic_types(func, callee, &entity_map));
    entity_map.dynamic_stack_slot_offset =
        Some(create_dynamic_stack_slots(func, callee, &entity_map));
    entity_map.immediate_offset = Some(create_immediates(func, callee));

    // Constants are deduplicated on insertion, so they are tracked in
    // `allocs.constants` rather than via an offset in the `EntityMap`.
    create_constants(allocs, func, callee);

    entity_map
}

fn create_blocks(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    callee: &ir::Function,
) -> u32 {
    let offset = func.dfg.blocks.len();
    let offset = u32::try_from(offset).unwrap();

    func.dfg.blocks.reserve(callee.dfg.blocks.len());
    for callee_block in callee.dfg.blocks.iter() {
        let caller_block = func.dfg.blocks.add();
        trace!("Callee {callee_block:?} = inlined {caller_block:?}");

        if callee.layout.is_cold(callee_block) {
            func.layout.set_cold(caller_block);
        }

        // The entry block's parameters are mapped directly from the call's
        // arguments in `replace_call_with_jump`, so only non-entry blocks
        // get inlined block parameters here.
        if callee.layout.entry_block() != Some(callee_block) {
            for callee_param in callee.dfg.blocks[callee_block].params(&callee.dfg.value_lists) {
                let ty = callee.dfg.value_type(*callee_param);
                let caller_param = func.dfg.append_block_param(caller_block, ty);

                allocs.set_inlined_value(callee, *callee_param, caller_param);
            }
        }
    }

    offset
}

fn create_global_values(func: &mut ir::Function, callee: &ir::Function) -> u32 {
    let gv_offset = func.global_values.len();
    let gv_offset = u32::try_from(gv_offset).unwrap();

    func.global_values.reserve(callee.global_values.len());
    for gv in callee.global_values.values() {
        func.global_values.push(match gv {
            // Rebase references to other global values by our offset.
            ir::GlobalValueData::Load {
                base,
                offset,
                global_type,
                flags,
            } => ir::GlobalValueData::Load {
                base: ir::GlobalValue::from_u32(base.as_u32() + gv_offset),
                offset: *offset,
                global_type: *global_type,
                flags: *flags,
            },
            ir::GlobalValueData::IAddImm {
                base,
                offset,
                global_type,
            } => ir::GlobalValueData::IAddImm {
                base: ir::GlobalValue::from_u32(base.as_u32() + gv_offset),
                offset: *offset,
                global_type: *global_type,
            },

            // These variants do not reference other global values.
            ir::GlobalValueData::VMContext
            | ir::GlobalValueData::Symbol { .. }
            | ir::GlobalValueData::DynScaleTargetConst { .. } => gv.clone(),
        });
    }

    gv_offset
}

fn create_sig_refs(func: &mut ir::Function, callee: &ir::Function) -> u32 {
    let offset = func.dfg.signatures.len();
    let offset = u32::try_from(offset).unwrap();

    func.dfg.signatures.reserve(callee.dfg.signatures.len());
    for sig in callee.dfg.signatures.values() {
        func.dfg.signatures.push(sig.clone());
    }

    offset
}

fn create_func_refs(func: &mut ir::Function, callee: &ir::Function, entity_map: &EntityMap) -> u32 {
    let offset = func.dfg.ext_funcs.len();
    let offset = u32::try_from(offset).unwrap();

    func.dfg.ext_funcs.reserve(callee.dfg.ext_funcs.len());
    for ir::ExtFuncData {
        name,
        signature,
        colocated,
    } in callee.dfg.ext_funcs.values()
    {
        func.dfg.ext_funcs.push(ir::ExtFuncData {
            name: name.clone(),
            signature: entity_map.inlined_sig_ref(*signature),
            colocated: *colocated,
        });
    }

    offset
}

fn create_stack_slots(func: &mut ir::Function, callee: &ir::Function) -> u32 {
    let offset = func.sized_stack_slots.len();
    let offset = u32::try_from(offset).unwrap();

    func.sized_stack_slots
        .reserve(callee.sized_stack_slots.len());
    for slot in callee.sized_stack_slots.values() {
        func.sized_stack_slots.push(slot.clone());
    }

    offset
}

fn create_dynamic_types(
    func: &mut ir::Function,
    callee: &ir::Function,
    entity_map: &EntityMap,
) -> u32 {
    // Note: the offset must come from the caller's `dynamic_types`, the
    // entity space we are appending to.
    let offset = func.dfg.dynamic_types.len();
    let offset = u32::try_from(offset).unwrap();

    func.dfg
        .dynamic_types
        .reserve(callee.dfg.dynamic_types.len());
    for ir::DynamicTypeData {
        base_vector_ty,
        dynamic_scale,
    } in callee.dfg.dynamic_types.values()
    {
        func.dfg.dynamic_types.push(ir::DynamicTypeData {
            base_vector_ty: *base_vector_ty,
            dynamic_scale: entity_map.inlined_global_value(*dynamic_scale),
        });
    }

    offset
}

fn create_dynamic_stack_slots(
    func: &mut ir::Function,
    callee: &ir::Function,
    entity_map: &EntityMap,
) -> u32 {
    let offset = func.dynamic_stack_slots.len();
    let offset = u32::try_from(offset).unwrap();

    func.dynamic_stack_slots
        .reserve(callee.dynamic_stack_slots.len());
    for ir::DynamicStackSlotData { kind, dyn_ty } in callee.dynamic_stack_slots.values() {
        func.dynamic_stack_slots.push(ir::DynamicStackSlotData {
            kind: *kind,
            dyn_ty: entity_map.inlined_dynamic_type(*dyn_ty),
        });
    }

    offset
}

fn create_immediates(func: &mut ir::Function, callee: &ir::Function) -> u32 {
    let offset = func.dfg.immediates.len();
    let offset = u32::try_from(offset).unwrap();

    func.dfg.immediates.reserve(callee.dfg.immediates.len());
    for imm in callee.dfg.immediates.values() {
        func.dfg.immediates.push(imm.clone());
    }

    offset
}

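/// Unlike the other entities, constants are deduplicated by the constant
/// pool, so each callee constant is inserted individually and its resulting
/// handle recorded in `allocs.constants`.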
fn create_constants(allocs: &mut InliningAllocs, func: &mut ir::Function, callee: &ir::Function) {
    for (callee_constant, data) in callee.dfg.constants.iter() {
        let inlined_constant = func.dfg.constants.insert(data.clone());
        allocs.constants[*callee_constant] = Some(inlined_constant).into();
    }
}