use core::marker::PhantomData;

use crate::binemit::{Addend, CodeOffset, Reloc};
use crate::ir::types::{self, F32, F64, I8, I8X16, I16, I32, I64, I128};
use crate::ir::{self, MemFlags, Type};
use crate::isa::FunctionAlignment;
use crate::isa::pulley_shared::abi::PulleyMachineDeps;
use crate::{CodegenError, CodegenResult, settings};
use crate::{machinst::*, trace};
use alloc::string::{String, ToString};
use regalloc2::RegClass;
use smallvec::SmallVec;

pub mod regs;
pub use self::regs::*;
pub mod args;
pub use self::args::*;
pub mod emit;
pub use self::emit::*;

pub use crate::isa::pulley_shared::lower::isle::generated_code::MInst as Inst;
pub use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;

impl From<RawInst> for Inst {
    fn from(raw: RawInst) -> Inst {
        Inst::Raw { raw }
    }
}

use super::PulleyTargetKind;

mod generated {
    use super::*;
    use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;

    include!(concat!(env!("OUT_DIR"), "/pulley_inst_gen.rs"));
}

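/// Out-of-line information for a return call (tail call), carried by
/// `Inst::ReturnCall` and `Inst::ReturnIndirectCall`.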
#[derive(Clone, Debug)]
pub struct ReturnCallInfo<T> {
    /// The destination of the return call.
    pub dest: T,

    /// The size, in bytes, of the stack-argument area of the new frame.
    pub new_stack_arg_size: u32,

    /// The arguments to the call, each paired with the fixed physical
    /// register it must be passed in.
    pub uses: CallArgList,
}

impl Inst {
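    /// Generic constructor for a load of `ty` from `mem` into `dst`, selecting
    /// `XLoad`, `FLoad`, or `VLoad` based on the type's register class.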
    pub fn gen_load(dst: Writable<Reg>, mem: Amode, ty: Type, flags: MemFlags) -> Inst {
        if ty.is_vector() {
            assert_eq!(ty.bytes(), 16);
            Inst::VLoad {
                dst: dst.map(|r| VReg::new(r).unwrap()),
                mem,
                ty,
                flags,
            }
        } else if ty.is_int() {
            assert!(ty.bytes() <= 8);
            Inst::XLoad {
                dst: dst.map(|r| XReg::new(r).unwrap()),
                mem,
                ty,
                flags,
            }
        } else {
            Inst::FLoad {
                dst: dst.map(|r| FReg::new(r).unwrap()),
                mem,
                ty,
                flags,
            }
        }
    }

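    /// Generic constructor for a store of `from_reg` to `mem`, selecting
    /// `XStore`, `FStore`, or `VStore` based on the type's register class.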
    pub fn gen_store(mem: Amode, from_reg: Reg, ty: Type, flags: MemFlags) -> Inst {
        if ty.is_vector() {
            assert_eq!(ty.bytes(), 16);
            Inst::VStore {
                mem,
                src: VReg::new(from_reg).unwrap(),
                ty,
                flags,
            }
        } else if ty.is_int() {
            assert!(ty.bytes() <= 8);
            Inst::XStore {
                mem,
                src: XReg::new(from_reg).unwrap(),
                ty,
                flags,
            }
        } else {
            Inst::FStore {
                mem,
                src: FReg::new(from_reg).unwrap(),
                ty,
                flags,
            }
        }
    }
}

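/// Report the register operands of `inst` (uses, defs, fixed-register
/// constraints, and clobbers) to the register allocator's operand collector.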
fn pulley_get_operands(inst: &mut Inst, collector: &mut impl OperandVisitor) {
    match inst {
        Inst::Args { args } => {
            for ArgPair { vreg, preg } in args {
                collector.reg_fixed_def(vreg, *preg);
            }
        }
        Inst::Rets { rets } => {
            for RetPair { vreg, preg } in rets {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::DummyUse { reg } => {
            collector.reg_use(reg);
        }

        Inst::Nop => {}

        Inst::TrapIf { cond, code: _ } => {
            cond.get_operands(collector);
        }

        Inst::GetSpecial { dst, reg } => {
            collector.reg_def(dst);
            // `reg` is a special, non-allocatable register, so it is asserted
            // on rather than reported as an operand.
            assert!(reg.is_special());
        }

        Inst::LoadExtNameNear { dst, .. } | Inst::LoadExtNameFar { dst, .. } => {
            collector.reg_def(dst);
        }

        Inst::Call { info } => {
            let CallInfo {
                uses,
                defs,
                dest,
                try_call_info,
                clobbers,
                ..
            } = &mut **info;

            let PulleyCall { args, .. } = dest;
            for arg in args {
                collector.reg_use(arg);
            }

            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(*clobbers);
            if let Some(try_call_info) = try_call_info {
                try_call_info.collect_operands(collector);
            }
        }
        Inst::IndirectCallHost { info } => {
            let CallInfo {
                uses,
                defs,
                try_call_info,
                clobbers,
                ..
            } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(*clobbers);
            if let Some(try_call_info) = try_call_info {
                try_call_info.collect_operands(collector);
            }
        }
        Inst::IndirectCall { info } => {
            collector.reg_use(&mut info.dest);
            let CallInfo {
                uses,
                defs,
                try_call_info,
                clobbers,
                ..
            } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(*clobbers);
            if let Some(try_call_info) = try_call_info {
                try_call_info.collect_operands(collector);
            }
        }
        Inst::ReturnCall { info } => {
            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }
        Inst::ReturnIndirectCall { info } => {
            // The return-call target is pinned to a fixed register (x15)
            // rather than left to the register allocator; the argument
            // registers are constrained separately below.
            collector.reg_fixed_use(&mut info.dest, regs::x15());

            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::Jump { .. } => {}

        Inst::BrIf {
            cond,
            taken: _,
            not_taken: _,
        } => {
            cond.get_operands(collector);
        }

        Inst::LoadAddr { dst, mem } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::XLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::XStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::FLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::FStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::VLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::VStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::BrTable { idx, .. } => {
            collector.reg_use(idx);
        }

        Inst::Raw { raw } => generated::get_operands(raw, collector),

        Inst::EmitIsland { .. } => {}

        Inst::LabelAddress { dst, label: _ } => {
            collector.reg_def(dst);
        }
    }
}

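/// A Pulley instruction tagged, at the type level, with the target kind `P`
/// it is being emitted for. The tag is a zero-sized `PhantomData`, so it only
/// prevents instructions for different Pulley targets from being mixed.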
#[derive(Clone, Debug)]
pub struct InstAndKind<P>
where
    P: PulleyTargetKind,
{
    inst: Inst,
    kind: PhantomData<P>,
}

impl<P> From<Inst> for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    fn from(inst: Inst) -> Self {
        Self {
            inst,
            kind: PhantomData,
        }
    }
}

impl<P> From<RawInst> for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    fn from(inst: RawInst) -> Self {
        Self {
            inst: inst.into(),
            kind: PhantomData,
        }
    }
}

impl<P> From<InstAndKind<P>> for Inst
where
    P: PulleyTargetKind,
{
    fn from(inst: InstAndKind<P>) -> Self {
        inst.inst
    }
}

impl<P> core::ops::Deref for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    type Target = Inst;

    fn deref(&self) -> &Self::Target {
        &self.inst
    }
}

impl<P> core::ops::DerefMut for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inst
    }
}

impl<P> MachInst for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    type LabelUse = LabelUse;
    type ABIMachineSpec = PulleyMachineDeps<P>;

    const TRAP_OPCODE: &'static [u8] = TRAP_OPCODE;

    fn gen_dummy_use(reg: Reg) -> Self {
        Inst::DummyUse { reg }.into()
    }

    fn canonical_type_for_rc(rc: RegClass) -> Type {
        match rc {
            regalloc2::RegClass::Int => I64,
            regalloc2::RegClass::Float => F64,
            regalloc2::RegClass::Vector => I8X16,
        }
    }

    fn is_safepoint(&self) -> bool {
        match self.inst {
            Inst::Raw {
                raw: RawInst::Trap { .. },
            }
            | Inst::Call { .. }
            | Inst::IndirectCall { .. }
            | Inst::IndirectCallHost { .. } => true,
            _ => false,
        }
    }

    fn get_operands(&mut self, collector: &mut impl OperandVisitor) {
        pulley_get_operands(self, collector);
    }

    fn is_move(&self) -> Option<(Writable<Reg>, Reg)> {
        match self.inst {
            Inst::Raw {
                raw: RawInst::Xmov { dst, src },
            } => Some((Writable::from_reg(*dst.to_reg()), *src)),
            _ => None,
        }
    }

    fn is_included_in_clobbers(&self) -> bool {
        !self.is_args()
    }

    fn is_trap(&self) -> bool {
        match self.inst {
            Inst::Raw {
                raw: RawInst::Trap { .. },
            } => true,
            _ => false,
        }
    }

    fn is_args(&self) -> bool {
        match self.inst {
            Inst::Args { .. } => true,
            _ => false,
        }
    }

    fn is_term(&self) -> MachTerminator {
        match &self.inst {
            Inst::Raw {
                raw: RawInst::Ret { .. },
            }
            | Inst::Rets { .. } => MachTerminator::Ret,
            Inst::Jump { .. } => MachTerminator::Branch,
            Inst::BrIf { .. } => MachTerminator::Branch,
            Inst::BrTable { .. } => MachTerminator::Branch,
            Inst::ReturnCall { .. } | Inst::ReturnIndirectCall { .. } => MachTerminator::RetCall,
            Inst::Call { info } if info.try_call_info.is_some() => MachTerminator::Branch,
            Inst::IndirectCall { info } if info.try_call_info.is_some() => MachTerminator::Branch,
            Inst::IndirectCallHost { info } if info.try_call_info.is_some() => {
                MachTerminator::Branch
            }
            _ => MachTerminator::None,
        }
    }

    fn is_mem_access(&self) -> bool {
        todo!()
    }

    fn call_type(&self) -> CallType {
        match &self.inst {
            Inst::Call { .. } | Inst::IndirectCall { .. } | Inst::IndirectCallHost { .. } => {
                CallType::Regular
            }

            Inst::ReturnCall { .. } | Inst::ReturnIndirectCall { .. } => CallType::TailCall,

            _ => CallType::None,
        }
    }

    fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Self {
        match ty {
            ir::types::I8 | ir::types::I16 | ir::types::I32 | ir::types::I64 => RawInst::Xmov {
                dst: WritableXReg::try_from(to_reg).unwrap(),
                src: XReg::new(from_reg).unwrap(),
            }
            .into(),
            ir::types::F32 | ir::types::F64 => RawInst::Fmov {
                dst: WritableFReg::try_from(to_reg).unwrap(),
                src: FReg::new(from_reg).unwrap(),
            }
            .into(),
            _ if ty.is_vector() => RawInst::Vmov {
                dst: WritableVReg::try_from(to_reg).unwrap(),
                src: VReg::new(from_reg).unwrap(),
            }
            .into(),
            _ => panic!("don't know how to generate a move for type {ty}"),
        }
    }

    fn gen_nop(_preferred_size: usize) -> Self {
        todo!()
    }

    fn rc_for_type(ty: Type) -> CodegenResult<(&'static [RegClass], &'static [Type])> {
        match ty {
            I8 => Ok((&[RegClass::Int], &[I8])),
            I16 => Ok((&[RegClass::Int], &[I16])),
            I32 => Ok((&[RegClass::Int], &[I32])),
            I64 => Ok((&[RegClass::Int], &[I64])),
            F32 => Ok((&[RegClass::Float], &[F32])),
            F64 => Ok((&[RegClass::Float], &[F64])),
            I128 => Ok((&[RegClass::Int, RegClass::Int], &[I64, I64])),
            _ if ty.is_vector() => {
                debug_assert!(ty.bits() <= 512);

                // One representative vector type per power-of-two byte width
                // (2, 4, 8, 16, 32, and 64 bytes), indexed by `log2(bytes) - 1`
                // below.
                const SIMD_TYPES: [[Type; 1]; 6] = [
                    [types::I8X2],
                    [types::I8X4],
                    [types::I8X8],
                    [types::I8X16],
                    [types::I16X16],
                    [types::I32X16],
                ];
                let idx = (ty.bytes().ilog2() - 1) as usize;
                let ty = &SIMD_TYPES[idx][..];

                Ok((&[RegClass::Vector], ty))
            }
            _ => Err(CodegenError::Unsupported(format!(
                "Unexpected SSA-value type: {ty}"
            ))),
        }
    }

    fn gen_jump(label: MachLabel) -> Self {
        Inst::Jump { label }.into()
    }

    fn worst_case_size() -> CodeOffset {
        // A conservative upper bound on the encoded size of any single Pulley
        // instruction.
        22
    }

    fn ref_type_regclass(_settings: &settings::Flags) -> RegClass {
        RegClass::Int
    }

    fn function_alignment() -> FunctionAlignment {
        FunctionAlignment {
            minimum: 1,
            preferred: 1,
        }
    }
}

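/// The encoding of a Pulley `trap` instruction: the `ExtendedOp` opcode
/// followed by the 16-bit `Trap` extended opcode in little-endian byte order.
/// The test below checks that this matches the Pulley encoder.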
const TRAP_OPCODE: &'static [u8] = &[
    pulley_interpreter::opcode::Opcode::ExtendedOp as u8,
    ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 0) as u8,
    ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 8) as u8,
];

#[test]
fn test_trap_encoding() {
    let mut dst = std::vec::Vec::new();
    pulley_interpreter::encode::trap(&mut dst);
    assert_eq!(dst, TRAP_OPCODE);
}

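/// Return a human-readable name for `reg`: the special integer registers with
/// hardware encodings 63 down to 59 print as `sp`, `lr`, `fp`, `tmp0`, and
/// `tmp1`; other real registers print as `x<n>`, `f<n>`, or `v<n>` according
/// to their class; virtual registers fall back to their `Debug` form.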
pub fn reg_name(reg: Reg) -> String {
    match reg.to_real_reg() {
        Some(real) => {
            let n = real.hw_enc();
            match (real.class(), n) {
                (RegClass::Int, 63) => format!("sp"),
                (RegClass::Int, 62) => format!("lr"),
                (RegClass::Int, 61) => format!("fp"),
                (RegClass::Int, 60) => format!("tmp0"),
                (RegClass::Int, 59) => format!("tmp1"),

                (RegClass::Int, _) => format!("x{n}"),
                (RegClass::Float, _) => format!("f{n}"),
                (RegClass::Vector, _) => format!("v{n}"),
            }
        }
        None => {
            format!("{reg:?}")
        }
    }
}

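/// Render the exception-handling suffix of a try-call: the normal-return
/// continuation followed by the catch destinations.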
fn pretty_print_try_call(info: &TryCallInfo) -> String {
    format!(
        "; jump {:?}; catch [{}]",
        info.continuation,
        info.pretty_print_dests()
    )
}

impl Inst {
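    /// Pretty-print this instruction as a human-readable string.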
    fn print_with_state<P>(&self, _state: &mut EmitState<P>) -> String
    where
        P: PulleyTargetKind,
    {
        use core::fmt::Write;

        let format_reg = |reg: Reg| -> String { reg_name(reg) };

        match self {
            Inst::Args { args } => {
                let mut s = "args".to_string();
                for arg in args {
                    let preg = format_reg(arg.preg);
                    let def = format_reg(arg.vreg.to_reg());
                    write!(&mut s, " {def}={preg}").unwrap();
                }
                s
            }
            Inst::Rets { rets } => {
                let mut s = "rets".to_string();
                for ret in rets {
                    let preg = format_reg(ret.preg);
                    let vreg = format_reg(ret.vreg);
                    write!(&mut s, " {vreg}={preg}").unwrap();
                }
                s
            }

            Inst::DummyUse { reg } => {
                let reg = format_reg(*reg);
                format!("dummy_use {reg}")
            }

            Inst::TrapIf { cond, code } => {
                format!("trap_{cond} // code = {code:?}")
            }

            Inst::Nop => format!("nop"),

            Inst::GetSpecial { dst, reg } => {
                let dst = format_reg(*dst.to_reg());
                let reg = format_reg(**reg);
                format!("xmov {dst}, {reg}")
            }

            Inst::LoadExtNameNear { dst, name, offset } => {
                let dst = format_reg(*dst.to_reg());
                format!("{dst} = load_ext_name_near {name:?}, {offset}")
            }

            Inst::LoadExtNameFar { dst, name, offset } => {
                let dst = format_reg(*dst.to_reg());
                format!("{dst} = load_ext_name_far {name:?}, {offset}")
            }

            Inst::Call { info } => {
                let try_call = info
                    .try_call_info
                    .as_ref()
                    .map(|tci| pretty_print_try_call(tci))
                    .unwrap_or_default();
                format!("call {info:?}{try_call}")
            }

            Inst::IndirectCall { info } => {
                let callee = format_reg(*info.dest);
                let try_call = info
                    .try_call_info
                    .as_ref()
                    .map(|tci| pretty_print_try_call(tci))
                    .unwrap_or_default();
                format!("indirect_call {callee}, {info:?}{try_call}")
            }

            Inst::ReturnCall { info } => {
                format!("return_call {info:?}")
            }

            Inst::ReturnIndirectCall { info } => {
                let callee = format_reg(*info.dest);
                format!("return_indirect_call {callee}, {info:?}")
            }

            Inst::IndirectCallHost { info } => {
                let try_call = info
                    .try_call_info
                    .as_ref()
                    .map(|tci| pretty_print_try_call(tci))
                    .unwrap_or_default();
                format!("indirect_call_host {info:?}{try_call}")
            }

            Inst::Jump { label } => format!("jump {}", label.to_string()),

            Inst::BrIf {
                cond,
                taken,
                not_taken,
            } => {
                let taken = taken.to_string();
                let not_taken = not_taken.to_string();
                format!("br_{cond}, {taken}; jump {not_taken}")
            }

            Inst::LoadAddr { dst, mem } => {
                let dst = format_reg(*dst.to_reg());
                let mem = mem.to_string();
                format!("{dst} = load_addr {mem}")
            }

            Inst::XLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = xload{ty} {mem} // flags = {flags}")
            }

            Inst::XStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("xstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::FLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = fload{ty} {mem} // flags = {flags}")
            }

            Inst::FStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("fstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::VLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = vload{ty} {mem} // flags = {flags}")
            }

            Inst::VStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("vstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::BrTable {
                idx,
                default,
                targets,
            } => {
                let idx = format_reg(**idx);
                format!("br_table {idx} {default:?} {targets:?}")
            }
            Inst::Raw { raw } => generated::print(raw),

            Inst::EmitIsland { space_needed } => format!("emit_island {space_needed}"),

            Inst::LabelAddress { dst, label } => {
                let dst = format_reg(dst.to_reg().to_reg());
                format!("label_address {dst}, {label:?}")
            }
        }
    }
}

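/// A reference to a label within the Pulley backend, describing how the
/// resolved offset is patched into the emitted code.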
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LabelUse {
    /// A 32-bit, little-endian, PC-relative offset, added to whatever addend
    /// is already stored at the patch site.
    PcRel,
}

impl MachInstLabelUse for LabelUse {
    // Pulley bytecode is byte-aligned, so label uses have no alignment
    // requirement.
    const ALIGN: CodeOffset = 1;

    fn max_pos_range(self) -> CodeOffset {
        match self {
            Self::PcRel => 0x7fff_ffff,
        }
    }

    fn max_neg_range(self) -> CodeOffset {
        match self {
            Self::PcRel => 0x8000_0000,
        }
    }

    fn patch_size(self) -> CodeOffset {
        match self {
            Self::PcRel => 4,
        }
    }

    fn patch(self, buffer: &mut [u8], use_offset: CodeOffset, label_offset: CodeOffset) {
        let use_relative = (label_offset as i64) - (use_offset as i64);
        debug_assert!(use_relative <= self.max_pos_range() as i64);
        debug_assert!(use_relative >= -(self.max_neg_range() as i64));
        let pc_rel = i32::try_from(use_relative).unwrap() as u32;
        match self {
            Self::PcRel => {
                let buf: &mut [u8; 4] = buffer.try_into().unwrap();
                let addend = u32::from_le_bytes(*buf);
                trace!(
                    "patching label use @ {use_offset:#x} \
                     to label {label_offset:#x} via \
                     PC-relative offset {pc_rel:#x} \
                     adding in {addend:#x}"
                );
                let value = pc_rel.wrapping_add(addend);
                *buf = value.to_le_bytes();
            }
        }
    }

    fn supports_veneer(self) -> bool {
        match self {
            Self::PcRel => false,
        }
    }

    fn veneer_size(self) -> CodeOffset {
        match self {
            Self::PcRel => 0,
        }
    }

    fn worst_case_veneer_size() -> CodeOffset {
        0
    }

    fn generate_veneer(
        self,
        _buffer: &mut [u8],
        _veneer_offset: CodeOffset,
    ) -> (CodeOffset, LabelUse) {
        match self {
            Self::PcRel => panic!("veneer not supported for {self:?}"),
        }
    }

    fn from_reloc(reloc: Reloc, addend: Addend) -> Option<LabelUse> {
        match (reloc, addend) {
            (Reloc::PulleyPcRel, 0) => Some(LabelUse::PcRel),
            _ => None,
        }
    }
}