1use core::marker::PhantomData;
4
5use crate::binemit::{Addend, CodeOffset, Reloc};
6use crate::ir::types::{self, F32, F64, I8, I8X16, I16, I32, I64, I128};
7use crate::ir::{self, MemFlags, Type};
8use crate::isa::FunctionAlignment;
9use crate::isa::pulley_shared::abi::PulleyMachineDeps;
10use crate::{CodegenError, CodegenResult, settings};
11use crate::{machinst::*, trace};
12use alloc::string::{String, ToString};
13use regalloc2::RegClass;
14use smallvec::SmallVec;
15
16pub mod regs;
17pub use self::regs::*;
18pub mod args;
19pub use self::args::*;
20pub mod emit;
21pub use self::emit::*;
22
23pub use crate::isa::pulley_shared::lower::isle::generated_code::MInst as Inst;
27pub use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;
28
29impl From<RawInst> for Inst {
30 fn from(raw: RawInst) -> Inst {
31 Inst::Raw { raw }
32 }
33}
34
35use super::PulleyTargetKind;
36
// Emission, pretty-printing, and operand-collection routines for the `Raw`
// instruction variant are mechanically generated by the build script and
// spliced in from `$OUT_DIR/pulley_inst_gen.rs`.
mod generated {
    use super::*;
    use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;

    include!(concat!(env!("OUT_DIR"), "/pulley_inst_gen.rs"));
}
43
/// Out-of-line information about a return-call (tail call), generic over the
/// destination `T` (e.g. a symbolic name for direct calls or a register for
/// indirect ones).
#[derive(Clone, Debug)]
pub struct ReturnCallInfo<T> {
    /// Where this tail call transfers control to.
    pub dest: T,

    /// Size of the stack-argument area for the callee frame, in bytes.
    /// NOTE(review): presumably this may differ from the caller's incoming
    /// argument area — confirm against the ABI implementation.
    pub new_stack_arg_size: u32,

    /// Argument operands, each constrained to a fixed physical register
    /// (see how `pulley_get_operands` visits them with `reg_fixed_use`).
    pub uses: CallArgList,
}
57
58impl Inst {
59 pub fn gen_load(dst: Writable<Reg>, mem: Amode, ty: Type, flags: MemFlags) -> Inst {
61 if ty.is_vector() {
62 assert_eq!(ty.bytes(), 16);
63 Inst::VLoad {
64 dst: dst.map(|r| VReg::new(r).unwrap()),
65 mem,
66 ty,
67 flags,
68 }
69 } else if ty.is_int() {
70 assert!(ty.bytes() <= 8);
71 Inst::XLoad {
72 dst: dst.map(|r| XReg::new(r).unwrap()),
73 mem,
74 ty,
75 flags,
76 }
77 } else {
78 Inst::FLoad {
79 dst: dst.map(|r| FReg::new(r).unwrap()),
80 mem,
81 ty,
82 flags,
83 }
84 }
85 }
86
87 pub fn gen_store(mem: Amode, from_reg: Reg, ty: Type, flags: MemFlags) -> Inst {
89 if ty.is_vector() {
90 assert_eq!(ty.bytes(), 16);
91 Inst::VStore {
92 mem,
93 src: VReg::new(from_reg).unwrap(),
94 ty,
95 flags,
96 }
97 } else if ty.is_int() {
98 assert!(ty.bytes() <= 8);
99 Inst::XStore {
100 mem,
101 src: XReg::new(from_reg).unwrap(),
102 ty,
103 flags,
104 }
105 } else {
106 Inst::FStore {
107 mem,
108 src: FReg::new(from_reg).unwrap(),
109 ty,
110 flags,
111 }
112 }
113 }
114}
115
/// Visit every register operand of `inst` with `collector`, reporting uses,
/// defs, fixed-register constraints, and clobbers to the register allocator.
///
/// NOTE(review): for several arms the visitation order (e.g. address operands
/// vs. value operands, or a call's `dest` before its other operands) appears
/// deliberate — preserve it when editing.
fn pulley_get_operands(inst: &mut Inst, collector: &mut impl OperandVisitor) {
    match inst {
        // Incoming function arguments: each vreg is defined in the fixed
        // physical register the ABI placed it in.
        Inst::Args { args } => {
            for ArgPair { vreg, preg } in args {
                collector.reg_fixed_def(vreg, *preg);
            }
        }
        // Outgoing return values: each vreg must be in its fixed physical
        // return register at this point.
        Inst::Rets { rets } => {
            for RetPair { vreg, preg } in rets {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::DummyUse { reg } => {
            collector.reg_use(reg);
        }

        Inst::Nop => {}

        Inst::TrapIf { cond, code: _ } => {
            cond.get_operands(collector);
        }

        Inst::GetSpecial { dst, reg } => {
            collector.reg_def(dst);
            // `reg` must be a special, non-allocatable register, so it is
            // asserted rather than reported to the allocator.
            assert!(reg.is_special());
        }

        Inst::LoadExtNameNear { dst, .. } | Inst::LoadExtNameFar { dst, .. } => {
            collector.reg_def(dst);
        }

        // Direct call: the `PulleyCall` destination carries explicit
        // register arguments in addition to the ABI-constrained
        // uses/defs/clobbers shared by all call shapes below.
        Inst::Call { info } => {
            let CallInfo {
                uses,
                defs,
                dest,
                try_call_info,
                clobbers,
                ..
            } = &mut **info;

            let PulleyCall { args, .. } = dest;
            for arg in args {
                collector.reg_use(arg);
            }

            // Arguments pinned to fixed physical registers.
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            // Return values: fixed registers, or an unconstrained def for
            // values returned on the stack.
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(*clobbers);
            if let Some(try_call_info) = try_call_info {
                try_call_info.collect_operands(collector);
            }
        }
        // Host call through a table: same shape as `Call` minus the
        // destination's explicit register arguments.
        Inst::IndirectCallHost { info } => {
            let CallInfo {
                uses,
                defs,
                try_call_info,
                clobbers,
                ..
            } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(*clobbers);
            if let Some(try_call_info) = try_call_info {
                try_call_info.collect_operands(collector);
            }
        }
        // Indirect call: the callee register is visited first, then the
        // usual uses/defs/clobbers.
        Inst::IndirectCall { info } => {
            collector.reg_use(&mut info.dest);
            let CallInfo {
                uses,
                defs,
                try_call_info,
                clobbers,
                ..
            } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(*clobbers);
            if let Some(try_call_info) = try_call_info {
                try_call_info.collect_operands(collector);
            }
        }
        // Tail calls: no defs/clobbers since control never returns here.
        Inst::ReturnCall { info } => {
            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }
        Inst::ReturnIndirectCall { info } => {
            // The callee address is pinned to x15. NOTE(review): presumably
            // chosen because it survives the tail-call epilogue — confirm
            // against the ABI code.
            collector.reg_fixed_use(&mut info.dest, regs::x15());

            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::Jump { .. } => {}

        Inst::BrIf {
            cond,
            taken: _,
            not_taken: _,
        } => {
            cond.get_operands(collector);
        }

        Inst::LoadAddr { dst, mem } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        // Loads: destination def, then the address-mode operands.
        Inst::XLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        // Stores: address-mode operands, then the stored value.
        Inst::XStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::FLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::FStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::VLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::VStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::BrTable { idx, .. } => {
            collector.reg_use(idx);
        }

        // Operand info for `Raw` instructions comes from the build script.
        Inst::Raw { raw } => generated::get_operands(raw, collector),

        Inst::EmitIsland { .. } => {}

        Inst::LabelAddress { dst, label: _ } => {
            collector.reg_def(dst);
        }

        Inst::SequencePoint { .. } => {}
    }
}
342
/// A Pulley instruction tagged, at the type level only, with the target
/// flavor `P` it is being compiled for. The tag costs nothing at runtime;
/// it selects which `PulleyTargetKind` instantiation of the `MachInst`
/// impl below applies.
#[derive(Clone, Debug)]
pub struct InstAndKind<P>
where
    P: PulleyTargetKind,
{
    // The wrapped instruction, reachable through `Deref`/`DerefMut`.
    inst: Inst,
    // Zero-sized marker tying this value to a particular target kind.
    kind: PhantomData<P>,
}
356
357impl<P> From<Inst> for InstAndKind<P>
358where
359 P: PulleyTargetKind,
360{
361 fn from(inst: Inst) -> Self {
362 Self {
363 inst,
364 kind: PhantomData,
365 }
366 }
367}
368
369impl<P> From<RawInst> for InstAndKind<P>
370where
371 P: PulleyTargetKind,
372{
373 fn from(inst: RawInst) -> Self {
374 Self {
375 inst: inst.into(),
376 kind: PhantomData,
377 }
378 }
379}
380
381impl<P> From<InstAndKind<P>> for Inst
382where
383 P: PulleyTargetKind,
384{
385 fn from(inst: InstAndKind<P>) -> Self {
386 inst.inst
387 }
388}
389
390impl<P> core::ops::Deref for InstAndKind<P>
391where
392 P: PulleyTargetKind,
393{
394 type Target = Inst;
395
396 fn deref(&self) -> &Self::Target {
397 &self.inst
398 }
399}
400
401impl<P> core::ops::DerefMut for InstAndKind<P>
402where
403 P: PulleyTargetKind,
404{
405 fn deref_mut(&mut self) -> &mut Self::Target {
406 &mut self.inst
407 }
408}
409
410impl<P> MachInst for InstAndKind<P>
411where
412 P: PulleyTargetKind,
413{
414 type LabelUse = LabelUse;
415 type ABIMachineSpec = PulleyMachineDeps<P>;
416
417 const TRAP_OPCODE: &'static [u8] = TRAP_OPCODE;
418
419 fn gen_dummy_use(reg: Reg) -> Self {
420 Inst::DummyUse { reg }.into()
421 }
422
423 fn canonical_type_for_rc(rc: RegClass) -> Type {
424 match rc {
425 regalloc2::RegClass::Int => I64,
426 regalloc2::RegClass::Float => F64,
427 regalloc2::RegClass::Vector => I8X16,
428 }
429 }
430
431 fn is_safepoint(&self) -> bool {
432 match self.inst {
433 Inst::Raw {
434 raw: RawInst::Trap { .. },
435 }
436 | Inst::Call { .. }
437 | Inst::IndirectCall { .. }
438 | Inst::IndirectCallHost { .. } => true,
439 _ => false,
440 }
441 }
442
443 fn get_operands(&mut self, collector: &mut impl OperandVisitor) {
444 pulley_get_operands(self, collector);
445 }
446
447 fn is_move(&self) -> Option<(Writable<Reg>, Reg)> {
448 match self.inst {
449 Inst::Raw {
450 raw: RawInst::Xmov { dst, src },
451 } => Some((Writable::from_reg(*dst.to_reg()), *src)),
452 _ => None,
453 }
454 }
455
456 fn is_included_in_clobbers(&self) -> bool {
457 !self.is_args()
458 }
459
460 fn is_trap(&self) -> bool {
461 match self.inst {
462 Inst::Raw {
463 raw: RawInst::Trap { .. },
464 } => true,
465 _ => false,
466 }
467 }
468
469 fn is_args(&self) -> bool {
470 match self.inst {
471 Inst::Args { .. } => true,
472 _ => false,
473 }
474 }
475
476 fn is_term(&self) -> MachTerminator {
477 match &self.inst {
478 Inst::Raw {
479 raw: RawInst::Ret { .. },
480 }
481 | Inst::Rets { .. } => MachTerminator::Ret,
482 Inst::Jump { .. } => MachTerminator::Branch,
483 Inst::BrIf { .. } => MachTerminator::Branch,
484 Inst::BrTable { .. } => MachTerminator::Branch,
485 Inst::ReturnCall { .. } | Inst::ReturnIndirectCall { .. } => MachTerminator::RetCall,
486 Inst::Call { info } if info.try_call_info.is_some() => MachTerminator::Branch,
487 Inst::IndirectCall { info } if info.try_call_info.is_some() => MachTerminator::Branch,
488 Inst::IndirectCallHost { info } if info.try_call_info.is_some() => {
489 MachTerminator::Branch
490 }
491 _ => MachTerminator::None,
492 }
493 }
494
495 fn is_mem_access(&self) -> bool {
496 todo!()
497 }
498
499 fn call_type(&self) -> CallType {
500 match &self.inst {
501 Inst::Call { .. } | Inst::IndirectCall { .. } | Inst::IndirectCallHost { .. } => {
502 CallType::Regular
503 }
504
505 Inst::ReturnCall { .. } | Inst::ReturnIndirectCall { .. } => CallType::TailCall,
506
507 _ => CallType::None,
508 }
509 }
510
511 fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Self {
512 match ty {
513 ir::types::I8 | ir::types::I16 | ir::types::I32 | ir::types::I64 => RawInst::Xmov {
514 dst: WritableXReg::try_from(to_reg).unwrap(),
515 src: XReg::new(from_reg).unwrap(),
516 }
517 .into(),
518 ir::types::F32 | ir::types::F64 => RawInst::Fmov {
519 dst: WritableFReg::try_from(to_reg).unwrap(),
520 src: FReg::new(from_reg).unwrap(),
521 }
522 .into(),
523 _ if ty.is_vector() => RawInst::Vmov {
524 dst: WritableVReg::try_from(to_reg).unwrap(),
525 src: VReg::new(from_reg).unwrap(),
526 }
527 .into(),
528 _ => panic!("don't know how to generate a move for type {ty}"),
529 }
530 }
531
532 fn gen_nop(_preferred_size: usize) -> Self {
533 todo!()
534 }
535
536 fn rc_for_type(ty: Type) -> CodegenResult<(&'static [RegClass], &'static [Type])> {
537 match ty {
538 I8 => Ok((&[RegClass::Int], &[I8])),
539 I16 => Ok((&[RegClass::Int], &[I16])),
540 I32 => Ok((&[RegClass::Int], &[I32])),
541 I64 => Ok((&[RegClass::Int], &[I64])),
542 F32 => Ok((&[RegClass::Float], &[F32])),
543 F64 => Ok((&[RegClass::Float], &[F64])),
544 I128 => Ok((&[RegClass::Int, RegClass::Int], &[I64, I64])),
545 _ if ty.is_vector() => {
546 debug_assert!(ty.bits() <= 512);
547
548 const SIMD_TYPES: [[Type; 1]; 6] = [
552 [types::I8X2],
553 [types::I8X4],
554 [types::I8X8],
555 [types::I8X16],
556 [types::I16X16],
557 [types::I32X16],
558 ];
559 let idx = (ty.bytes().ilog2() - 1) as usize;
560 let ty = &SIMD_TYPES[idx][..];
561
562 Ok((&[RegClass::Vector], ty))
563 }
564 _ => Err(CodegenError::Unsupported(format!(
565 "Unexpected SSA-value type: {ty}"
566 ))),
567 }
568 }
569
570 fn gen_jump(label: MachLabel) -> Self {
571 Inst::Jump { label }.into()
572 }
573
574 fn worst_case_size() -> CodeOffset {
575 22
580 }
581
582 fn ref_type_regclass(_settings: &settings::Flags) -> RegClass {
583 RegClass::Int
584 }
585
586 fn function_alignment() -> FunctionAlignment {
587 FunctionAlignment {
588 minimum: 1,
589 preferred: 1,
590 }
591 }
592}
593
594const TRAP_OPCODE: &'static [u8] = &[
595 pulley_interpreter::opcode::Opcode::ExtendedOp as u8,
596 ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 0) as u8,
597 ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 8) as u8,
598];
599
/// Double-check that the hand-written `TRAP_OPCODE` byte sequence matches
/// what the Pulley encoder actually emits for a `trap` instruction.
#[test]
fn test_trap_encoding() {
    let mut encoded = std::vec::Vec::new();
    pulley_interpreter::encode::trap(&mut encoded);
    assert_eq!(encoded, TRAP_OPCODE);
}
606
607pub fn reg_name(reg: Reg) -> String {
611 match reg.to_real_reg() {
612 Some(real) => {
613 let n = real.hw_enc();
614 match (real.class(), n) {
615 (RegClass::Int, 63) => format!("sp"),
616 (RegClass::Int, 62) => format!("lr"),
617 (RegClass::Int, 61) => format!("fp"),
618 (RegClass::Int, 60) => format!("tmp0"),
619 (RegClass::Int, 59) => format!("tmp1"),
620
621 (RegClass::Int, _) => format!("x{n}"),
622 (RegClass::Float, _) => format!("f{n}"),
623 (RegClass::Vector, _) => format!("v{n}"),
624 }
625 }
626 None => {
627 format!("{reg:?}")
628 }
629 }
630}
631
632fn pretty_print_try_call(info: &TryCallInfo) -> String {
633 format!(
634 "; jump {:?}; catch [{}]",
635 info.continuation,
636 info.pretty_print_dests()
637 )
638}
639
640impl Inst {
641 fn print_with_state<P>(&self, _state: &mut EmitState<P>) -> String
642 where
643 P: PulleyTargetKind,
644 {
645 use core::fmt::Write;
646
647 let format_reg = |reg: Reg| -> String { reg_name(reg) };
648
649 match self {
650 Inst::Args { args } => {
651 let mut s = "args".to_string();
652 for arg in args {
653 let preg = format_reg(arg.preg);
654 let def = format_reg(arg.vreg.to_reg());
655 write!(&mut s, " {def}={preg}").unwrap();
656 }
657 s
658 }
659 Inst::Rets { rets } => {
660 let mut s = "rets".to_string();
661 for ret in rets {
662 let preg = format_reg(ret.preg);
663 let vreg = format_reg(ret.vreg);
664 write!(&mut s, " {vreg}={preg}").unwrap();
665 }
666 s
667 }
668
669 Inst::DummyUse { reg } => {
670 let reg = format_reg(*reg);
671 format!("dummy_use {reg}")
672 }
673
674 Inst::TrapIf { cond, code } => {
675 format!("trap_{cond} // code = {code:?}")
676 }
677
678 Inst::Nop => format!("nop"),
679
680 Inst::GetSpecial { dst, reg } => {
681 let dst = format_reg(*dst.to_reg());
682 let reg = format_reg(**reg);
683 format!("xmov {dst}, {reg}")
684 }
685
686 Inst::LoadExtNameNear { dst, name, offset } => {
687 let dst = format_reg(*dst.to_reg());
688 format!("{dst} = load_ext_name_near {name:?}, {offset}")
689 }
690
691 Inst::LoadExtNameFar { dst, name, offset } => {
692 let dst = format_reg(*dst.to_reg());
693 format!("{dst} = load_ext_name_far {name:?}, {offset}")
694 }
695
696 Inst::Call { info } => {
697 let try_call = info
698 .try_call_info
699 .as_ref()
700 .map(|tci| pretty_print_try_call(tci))
701 .unwrap_or_default();
702 format!("call {info:?}{try_call}")
703 }
704
705 Inst::IndirectCall { info } => {
706 let callee = format_reg(*info.dest);
707 let try_call = info
708 .try_call_info
709 .as_ref()
710 .map(|tci| pretty_print_try_call(tci))
711 .unwrap_or_default();
712 format!("indirect_call {callee}, {info:?}{try_call}")
713 }
714
715 Inst::ReturnCall { info } => {
716 format!("return_call {info:?}")
717 }
718
719 Inst::ReturnIndirectCall { info } => {
720 let callee = format_reg(*info.dest);
721 format!("return_indirect_call {callee}, {info:?}")
722 }
723
724 Inst::IndirectCallHost { info } => {
725 let try_call = info
726 .try_call_info
727 .as_ref()
728 .map(|tci| pretty_print_try_call(tci))
729 .unwrap_or_default();
730 format!("indirect_call_host {info:?}{try_call}")
731 }
732
733 Inst::Jump { label } => format!("jump {}", label.to_string()),
734
735 Inst::BrIf {
736 cond,
737 taken,
738 not_taken,
739 } => {
740 let taken = taken.to_string();
741 let not_taken = not_taken.to_string();
742 format!("br_{cond}, {taken}; jump {not_taken}")
743 }
744
745 Inst::LoadAddr { dst, mem } => {
746 let dst = format_reg(*dst.to_reg());
747 let mem = mem.to_string();
748 format!("{dst} = load_addr {mem}")
749 }
750
751 Inst::XLoad {
752 dst,
753 mem,
754 ty,
755 flags,
756 } => {
757 let dst = format_reg(*dst.to_reg());
758 let ty = ty.bits();
759 let mem = mem.to_string();
760 format!("{dst} = xload{ty} {mem} // flags ={flags}")
761 }
762
763 Inst::XStore {
764 mem,
765 src,
766 ty,
767 flags,
768 } => {
769 let ty = ty.bits();
770 let mem = mem.to_string();
771 let src = format_reg(**src);
772 format!("xstore{ty} {mem}, {src} // flags = {flags}")
773 }
774
775 Inst::FLoad {
776 dst,
777 mem,
778 ty,
779 flags,
780 } => {
781 let dst = format_reg(*dst.to_reg());
782 let ty = ty.bits();
783 let mem = mem.to_string();
784 format!("{dst} = fload{ty} {mem} // flags ={flags}")
785 }
786
787 Inst::FStore {
788 mem,
789 src,
790 ty,
791 flags,
792 } => {
793 let ty = ty.bits();
794 let mem = mem.to_string();
795 let src = format_reg(**src);
796 format!("fstore{ty} {mem}, {src} // flags = {flags}")
797 }
798
799 Inst::VLoad {
800 dst,
801 mem,
802 ty,
803 flags,
804 } => {
805 let dst = format_reg(*dst.to_reg());
806 let ty = ty.bits();
807 let mem = mem.to_string();
808 format!("{dst} = vload{ty} {mem} // flags ={flags}")
809 }
810
811 Inst::VStore {
812 mem,
813 src,
814 ty,
815 flags,
816 } => {
817 let ty = ty.bits();
818 let mem = mem.to_string();
819 let src = format_reg(**src);
820 format!("vstore{ty} {mem}, {src} // flags = {flags}")
821 }
822
823 Inst::BrTable {
824 idx,
825 default,
826 targets,
827 } => {
828 let idx = format_reg(**idx);
829 format!("br_table {idx} {default:?} {targets:?}")
830 }
831 Inst::Raw { raw } => generated::print(raw),
832
833 Inst::EmitIsland { space_needed } => format!("emit_island {space_needed}"),
834
835 Inst::LabelAddress { dst, label } => {
836 let dst = format_reg(dst.to_reg().to_reg());
837 format!("label_address {dst}, {label:?}")
838 }
839
840 Inst::SequencePoint {} => {
841 format!("sequence_point")
842 }
843 }
844 }
845}
846
/// The kinds of label references (fixups against not-yet-resolved code
/// offsets) that Pulley code can contain.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LabelUse {
    /// A 32-bit PC-relative offset, stored little-endian; the bytes already
    /// present at the patch site act as an addend (see `patch` below).
    PcRel,
}
857
858impl MachInstLabelUse for LabelUse {
859 const ALIGN: CodeOffset = 1;
862
863 fn max_pos_range(self) -> CodeOffset {
865 match self {
866 Self::PcRel => 0x7fff_ffff,
867 }
868 }
869
870 fn max_neg_range(self) -> CodeOffset {
872 match self {
873 Self::PcRel => 0x8000_0000,
874 }
875 }
876
877 fn patch_size(self) -> CodeOffset {
879 match self {
880 Self::PcRel => 4,
881 }
882 }
883
884 fn patch(self, buffer: &mut [u8], use_offset: CodeOffset, label_offset: CodeOffset) {
886 let use_relative = (label_offset as i64) - (use_offset as i64);
887 debug_assert!(use_relative <= self.max_pos_range() as i64);
888 debug_assert!(use_relative >= -(self.max_neg_range() as i64));
889 let pc_rel = i32::try_from(use_relative).unwrap() as u32;
890 match self {
891 Self::PcRel => {
892 let buf: &mut [u8; 4] = buffer.try_into().unwrap();
893 let addend = u32::from_le_bytes(*buf);
894 trace!(
895 "patching label use @ {use_offset:#x} \
896 to label {label_offset:#x} via \
897 PC-relative offset {pc_rel:#x} \
898 adding in {addend:#x}"
899 );
900 let value = pc_rel.wrapping_add(addend);
901 *buf = value.to_le_bytes();
902 }
903 }
904 }
905
906 fn supports_veneer(self) -> bool {
908 match self {
909 Self::PcRel => false,
910 }
911 }
912
913 fn veneer_size(self) -> CodeOffset {
915 match self {
916 Self::PcRel => 0,
917 }
918 }
919
920 fn worst_case_veneer_size() -> CodeOffset {
921 0
922 }
923
924 fn generate_veneer(
927 self,
928 _buffer: &mut [u8],
929 _veneer_offset: CodeOffset,
930 ) -> (CodeOffset, LabelUse) {
931 match self {
932 Self::PcRel => panic!("veneer not supported for {self:?}"),
933 }
934 }
935
936 fn from_reloc(reloc: Reloc, addend: Addend) -> Option<LabelUse> {
937 match (reloc, addend) {
938 (Reloc::PulleyPcRel, 0) => Some(LabelUse::PcRel),
939 _ => None,
940 }
941 }
942}