// cranelift_jit/compiled_blob.rs

use std::ptr;

use cranelift_codegen::binemit::Reloc;
use cranelift_module::{ModuleError, ModuleReloc, ModuleRelocTarget, ModuleResult};

use crate::JITMemoryProvider;
use crate::memory::JITMemoryKind;

/// Bytes reserved after the code for each potential `Arm64Call` veneer
/// (an `ldr`/`br` pair plus an 8-byte target address).
const VENEER_SIZE: usize = 24;

/// Reads the 32-bit instruction at `iptr`, and writes it back after it
/// has been altered by `modifier`.
unsafe fn modify_inst32(iptr: *mut u32, modifier: impl FnOnce(u32) -> u32) {
    let inst = iptr.read_unaligned();
    let new_inst = modifier(inst);
    iptr.write_unaligned(new_inst);
}

#[derive(Clone)]
pub(crate) struct CompiledBlob {
    ptr: *mut u8,
    size: usize,
    relocs: Vec<ModuleReloc>,
    veneer_count: usize,
    #[cfg(feature = "wasmtime-unwinder")]
    wasmtime_exception_data: Option<Vec<u8>>,
}

unsafe impl Send for CompiledBlob {}

impl CompiledBlob {
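    /// Allocates `data.len()` bytes (plus veneer space) from `memory`,
    /// copies `data` in, and records `relocs` for later patching.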
    pub(crate) fn new(
        memory: &mut dyn JITMemoryProvider,
        data: &[u8],
        align: u64,
        relocs: Vec<ModuleReloc>,
        #[cfg(feature = "wasmtime-unwinder")] wasmtime_exception_data: Option<Vec<u8>>,
        kind: JITMemoryKind,
    ) -> ModuleResult<Self> {
        let mut veneer_count = 0;
        // Worst case: every `Arm64Call` target ends up out of direct-branch
        // range and needs its own veneer.
        for reloc in &relocs {
            match reloc.kind {
                Reloc::Arm64Call => veneer_count += 1,
                _ => {}
            }
        }

        let ptr = memory
            .allocate(data.len() + veneer_count * VENEER_SIZE, align, kind)
            .map_err(|e| ModuleError::Allocation { err: e })?;

        unsafe {
            ptr::copy_nonoverlapping(data.as_ptr(), ptr, data.len());
        }

        Ok(CompiledBlob {
            ptr,
            size: data.len(),
            relocs,
            veneer_count,
            #[cfg(feature = "wasmtime-unwinder")]
            wasmtime_exception_data,
        })
    }
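
    /// Like `new`, but allocates a zero-filled region of `size` bytes
    /// instead of copying code bytes in.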
    pub(crate) fn new_zeroed(
        memory: &mut dyn JITMemoryProvider,
        size: usize,
        align: u64,
        relocs: Vec<ModuleReloc>,
        #[cfg(feature = "wasmtime-unwinder")] wasmtime_exception_data: Option<Vec<u8>>,
        kind: JITMemoryKind,
    ) -> ModuleResult<Self> {
        let ptr = memory
            .allocate(size, align, kind)
            .map_err(|e| ModuleError::Allocation { err: e })?;

        unsafe { ptr::write_bytes(ptr, 0, size) };

        Ok(CompiledBlob {
            ptr,
            size,
            relocs,
            veneer_count: 0,
            #[cfg(feature = "wasmtime-unwinder")]
            wasmtime_exception_data,
        })
    }

    pub(crate) fn ptr(&self) -> *const u8 {
        self.ptr
    }

    pub(crate) fn size(&self) -> usize {
        self.size
    }

    #[cfg(feature = "wasmtime-unwinder")]
    pub(crate) fn wasmtime_exception_data(&self) -> Option<&[u8]> {
        self.wasmtime_exception_data.as_deref()
    }
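
    /// Applies every relocation record to the blob, resolving symbol
    /// addresses through `get_address`; the memory must still be writable.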
    pub(crate) fn perform_relocations(
        &self,
        get_address: impl Fn(&ModuleRelocTarget) -> *const u8,
    ) {
        use std::ptr::write_unaligned;

        let mut next_veneer_idx = 0;

        for (
            i,
            &ModuleReloc {
                kind,
                offset,
                ref name,
                addend,
            },
        ) in self.relocs.iter().enumerate()
        {
            debug_assert!((offset as usize) < self.size);
            let at = unsafe { self.ptr.offset(isize::try_from(offset).unwrap()) };
            match kind {
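                // Absolute relocations: write the resolved address into the
                // code verbatim.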
                Reloc::Abs4 => {
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    unsafe {
                        write_unaligned(at as *mut u32, u32::try_from(what as usize).unwrap())
                    };
                }
                Reloc::Abs8 => {
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    unsafe {
                        write_unaligned(at as *mut u64, u64::try_from(what as usize).unwrap())
                    };
                }
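                // 32-bit pc-relative displacement, as used by x86-64
                // call/jmp and RIP-relative addressing.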
                Reloc::X86PCRel4 | Reloc::X86CallPCRel4 => {
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    let pcrel = i32::try_from((what as isize) - (at as isize)).unwrap();
                    unsafe { write_unaligned(at as *mut i32, pcrel) };
                }
                Reloc::X86GOTPCRel4 => {
                    panic!("GOT relocation shouldn't be generated when !is_pic");
                }
                Reloc::X86CallPLTRel4 => {
                    panic!("PLT relocation shouldn't be generated when !is_pic");
                }
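                // s390x counts pc-relative offsets in 2-byte halfwords,
                // hence the extra shift by one.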
                Reloc::S390xPCRel32Dbl | Reloc::S390xPLTRel32Dbl => {
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    let pcrel = i32::try_from(((what as isize) - (at as isize)) >> 1).unwrap();
                    unsafe { write_unaligned(at as *mut i32, pcrel) };
                }
                Reloc::Arm64Call => {
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    // The `bl` instruction is 32 bits wide.
                    let iptr = at as *mut u32;

                    // The offset encoded in `bl` is a count of 32-bit words.
                    let diff = ((what as isize) - (at as isize)) >> 2;
                    // A sign-propagating right shift leaves all sign bits or
                    // zero exactly when the offset fits the signed 26-bit
                    // immediate field.
                    if (diff >> 25 == -1) || (diff >> 25 == 0) {
                        // In range: splice the word offset into the low 26
                        // bits of the instruction.
                        let chop = 32 - 26;
                        let imm26 = (diff as u32) << chop >> chop;
                        unsafe { modify_inst32(iptr, |inst| inst | imm26) };
                    } else {
                        // Out of range: route the call through the next free
                        // veneer slot reserved after the function body.
                        let veneer_idx = next_veneer_idx;
                        next_veneer_idx += 1;
                        assert!(veneer_idx < self.veneer_count);
                        let veneer =
                            unsafe { self.ptr.byte_add(self.size + veneer_idx * VENEER_SIZE) };

                        unsafe {
                            // ldr x16, #8 -- load the literal stored below.
                            write_unaligned(
                                veneer.cast::<u32>(),
                                0x58000050,
                            );
                            // br x16 -- jump to the loaded address.
                            write_unaligned(
                                veneer.byte_add(4).cast::<u32>(),
                                0xd61f0200,
                            );
                            // The absolute target address for the `ldr` above.
                            write_unaligned(veneer.byte_add(8).cast::<u64>(), what.addr() as u64);
                        };

                        // Point the original `bl` at the veneer, which is
                        // nearby and therefore guaranteed to be in range.
                        let diff = ((veneer as isize) - (at as isize)) >> 2;
                        assert!((diff >> 25 == -1) || (diff >> 25 == 0));
                        let chop = 32 - 26;
                        let imm26 = (diff as u32) << chop >> chop;
                        unsafe { modify_inst32(iptr, |inst| inst | imm26) };
                    }
                }
                Reloc::Aarch64AdrGotPage21 => {
                    panic!("GOT relocation shouldn't be generated when !is_pic");
                }
                Reloc::Aarch64Ld64GotLo12Nc => {
                    panic!("GOT relocation shouldn't be generated when !is_pic");
                }
                Reloc::Aarch64AdrPrelPgHi21 => {
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    // `adrp` materializes the 4 KiB page address of the
                    // target relative to the instruction's own page; the
                    // 21-bit page offset is split into `immlo` (bits 30:29)
                    // and `immhi` (bits 23:5).
                    let get_page = |x| x & (!0xfff);
                    let pcrel =
                        i32::try_from(get_page(what as isize) - get_page(at as isize)).unwrap();
                    let iptr = at as *mut u32;
                    let hi21 = (pcrel >> 12).cast_unsigned();
                    let lo = (hi21 & 0x3) << 29;
                    let hi = (hi21 & 0x1ffffc) << 3;
                    unsafe { modify_inst32(iptr, |inst| inst | lo | hi) };
                }
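                // `add` (immediate): the low 12 bits of the absolute address
                // fill the imm12 field at bits 21:10, complementing the
                // `adrp` page address above.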
                Reloc::Aarch64AddAbsLo12Nc => {
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    let iptr = at as *mut u32;
                    let imm12 = (what.addr() as u32 & 0xfff) << 10;
                    unsafe { modify_inst32(iptr, |inst| inst | imm12) };
                }
                Reloc::RiscvCallPlt => {
                    // RISC-V calls are emitted as an `auipc`+`jalr` pair;
                    // the backend uses the *_PLT relocation even for
                    // non-PIC code.
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    let pcrel = i32::try_from((what as isize) - (at as isize)).unwrap() as u32;

                    // `jalr` sign-extends its 12-bit immediate, so bias the
                    // upper 20 bits by 0x800; the two parts then sum back to
                    // `pcrel`.
                    let hi20 = pcrel.wrapping_add(0x800) & 0xFFFFF000;
                    let lo12 = pcrel.wrapping_sub(hi20) & 0xFFF;

                    unsafe {
                        let auipc_addr = at as *mut u32;
                        // `auipc`: imm[31:12] lives in instruction bits 31:12.
                        modify_inst32(auipc_addr, |auipc| (auipc & 0xFFF) | hi20);

                        let jalr_addr = at.offset(4) as *mut u32;
                        // `jalr`: imm[11:0] lives in instruction bits 31:20.
                        modify_inst32(jalr_addr, |jalr| (jalr & 0xFFFFF) | (lo12 << 20));
                    }
                }
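                // Pulley (the portable interpreter bytecode) stores a 32-bit
                // pc-relative field whose existing value acts as the addend,
                // so the offset is added rather than written outright.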
                Reloc::PulleyPcRel => {
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    let pcrel = i32::try_from((what as isize) - (at as isize)).unwrap();
                    let at = at as *mut i32;
                    unsafe {
                        at.write_unaligned(at.read_unaligned().wrapping_add(pcrel));
                    }
                }

                Reloc::RiscvPCRelHi20 => {
                    // Upper 20 bits of a pc-relative offset, biased by 0x800
                    // to compensate for the sign extension of the paired
                    // lo12 relocation below.
                    let base = get_address(name);
                    let what = unsafe { base.offset(isize::try_from(addend).unwrap()) };
                    let pcrel = i32::try_from((what as isize) - (at as isize) + 0x800)
                        .unwrap()
                        .cast_unsigned();
                    let at = at as *mut u32;
                    unsafe {
                        modify_inst32(at, |i| i | (pcrel & 0xfffff000));
                    }
                }

                Reloc::RiscvPCRelLo12I => {
                    // This relocation's own symbol points at the paired
                    // `pcrel_hi20` instruction rather than the final target;
                    // the offset is recomputed from that relocation, which
                    // must immediately precede this one in `self.relocs`.
                    let prev_reloc = &self.relocs[i - 1];
                    assert_eq!(prev_reloc.kind, Reloc::RiscvPCRelHi20);
                    let lo_target = get_address(name);
                    let hi_address =
                        unsafe { self.ptr.offset(isize::try_from(prev_reloc.offset).unwrap()) };
                    assert_eq!(lo_target, hi_address);
                    let hi_target = get_address(&prev_reloc.name);
                    let pcrel = i32::try_from((hi_target as isize) - (hi_address as isize))
                        .unwrap()
                        .cast_unsigned();
                    let at = at as *mut u32;
                    unsafe {
                        // I-type immediate: low 12 bits go in bits 31:20.
                        modify_inst32(at, |i| i | ((pcrel & 0xfff) << 20));
                    }
                }

                other => unimplemented!("unimplemented reloc {other:?}"),
            }
        }
    }
}