1use crate::debug::DwarfSectionRelocTarget;
2use crate::func_environ::FuncEnvironment;
3use crate::translate::FuncTranslator;
4use crate::TRAP_INTERNAL_ASSERT;
5use crate::{array_call_signature, CompiledFunction, ModuleTextBuilder};
6use crate::{builder::LinkOptions, wasm_call_signature, BuiltinFunctionSignatures};
7use anyhow::{Context as _, Result};
8use cranelift_codegen::binemit::CodeOffset;
9use cranelift_codegen::bitset::CompoundBitSet;
10use cranelift_codegen::ir::condcodes::IntCC;
11use cranelift_codegen::ir::{self, InstBuilder, MemFlags, UserExternalName, UserFuncName, Value};
12use cranelift_codegen::isa::{
13 unwind::{UnwindInfo, UnwindInfoKind},
14 OwnedTargetIsa, TargetIsa,
15};
16use cranelift_codegen::print_errors::pretty_error;
17use cranelift_codegen::{CompiledCode, Context};
18use cranelift_entity::PrimaryMap;
19use cranelift_frontend::FunctionBuilder;
20use object::write::{Object, StandardSegment, SymbolId};
21use object::{RelocationEncoding, RelocationFlags, RelocationKind, SectionKind};
22use std::any::Any;
23use std::cmp;
24use std::collections::HashMap;
25use std::mem;
26use std::path;
27use std::sync::{Arc, Mutex};
28use wasmparser::{FuncValidatorAllocations, FunctionBody};
29use wasmtime_environ::{
30 AddressMapSection, BuiltinFunctionIndex, CacheStore, CompileError, DefinedFuncIndex, FlagValue,
31 FunctionBodyData, FunctionLoc, HostCall, ModuleTranslation, ModuleTypesBuilder, PtrSize,
32 RelocationTarget, StackMapInformation, StaticModuleIndex, TrapEncodingBuilder, TrapSentinel,
33 TripleExt, Tunables, VMOffsets, WasmFuncType, WasmFunctionInfo, WasmValType,
34};
35
36#[cfg(feature = "component-model")]
37mod component;
38
/// Per-context state for Cranelift's incremental compilation cache along
/// with hit/insert statistics, aggregated and logged when the owning
/// `Compiler` is dropped.
struct IncrementalCacheContext {
    #[cfg(feature = "incremental-cache")]
    cache_store: Arc<dyn CacheStore>,
    // Number of compilations served from the cache.
    num_hits: usize,
    // Number of compilations performed and then inserted into the cache.
    num_cached: usize,
}
45
/// Reusable per-function compilation state, pooled on `Compiler` so that
/// translator/codegen/validator allocations are recycled across functions.
struct CompilerContext {
    func_translator: FuncTranslator,
    codegen_context: Context,
    // `Some` only when incremental caching is configured (see
    // `Compiler::function_compiler`).
    incremental_cache_ctx: Option<IncrementalCacheContext>,
    validator_allocations: FuncValidatorAllocations,
}
52
53impl Default for CompilerContext {
54 fn default() -> Self {
55 Self {
56 func_translator: FuncTranslator::new(),
57 codegen_context: Context::new(),
58 incremental_cache_ctx: None,
59 validator_allocations: Default::default(),
60 }
61 }
62}
63
/// A Cranelift-based compiler for wasm functions and their supporting
/// trampolines.
pub struct Compiler {
    tunables: Tunables,
    // Pool of reusable per-function compilation contexts; the mutex allows
    // checking contexts in/out from multiple compilation threads.
    contexts: Mutex<Vec<CompilerContext>>,
    isa: OwnedTargetIsa,
    linkopts: LinkOptions,
    // When present, incremental-compilation caching is enabled.
    cache_store: Option<Arc<dyn CacheStore>>,
    // When present, directory into which each function's CLIF is dumped.
    clif_dir: Option<path::PathBuf>,
    #[cfg(feature = "wmemcheck")]
    pub(crate) wmemcheck: bool,
}
76
77impl Drop for Compiler {
78 fn drop(&mut self) {
79 if self.cache_store.is_none() {
80 return;
81 }
82
83 let mut num_hits = 0;
84 let mut num_cached = 0;
85 for ctx in self.contexts.lock().unwrap().iter() {
86 if let Some(ref cache_ctx) = ctx.incremental_cache_ctx {
87 num_hits += cache_ctx.num_hits;
88 num_cached += cache_ctx.num_cached;
89 }
90 }
91
92 let total = num_hits + num_cached;
93 if num_hits + num_cached > 0 {
94 log::trace!(
95 "Incremental compilation cache stats: {}/{} = {}% (hits/lookup)\ncached: {}",
96 num_hits,
97 total,
98 (num_hits as f32) / (total as f32) * 100.0,
99 num_cached
100 );
101 }
102 }
103}
104
105impl Compiler {
106 pub fn new(
107 tunables: Tunables,
108 isa: OwnedTargetIsa,
109 cache_store: Option<Arc<dyn CacheStore>>,
110 linkopts: LinkOptions,
111 clif_dir: Option<path::PathBuf>,
112 wmemcheck: bool,
113 ) -> Compiler {
114 let _ = wmemcheck;
115 Compiler {
116 contexts: Default::default(),
117 tunables,
118 isa,
119 linkopts,
120 cache_store,
121 clif_dir,
122 #[cfg(feature = "wmemcheck")]
123 wmemcheck,
124 }
125 }
126
127 fn call_indirect_host(
136 &self,
137 builder: &mut FunctionBuilder<'_>,
138 hostcall: impl Into<HostCall>,
139 sig: ir::SigRef,
140 addr: Value,
141 args: &[Value],
142 ) -> ir::Inst {
143 let signature = &builder.func.dfg.signatures[sig];
144
145 assert_eq!(signature.call_conv, self.isa.default_call_conv());
149
150 if self.isa.triple().is_pulley() {
156 let mut new_signature = signature.clone();
157 new_signature
158 .params
159 .insert(0, ir::AbiParam::new(self.isa.pointer_type()));
160 let new_sig = builder.func.import_signature(new_signature);
161 let name = ir::ExternalName::User(builder.func.declare_imported_user_function(
162 ir::UserExternalName {
163 namespace: crate::NS_PULLEY_HOSTCALL,
164 index: hostcall.into().index(),
165 },
166 ));
167 let func = builder.func.import_function(ir::ExtFuncData {
168 name,
169 signature: new_sig,
170 colocated: false,
173 });
174 let mut raw_args = vec![addr];
175 raw_args.extend_from_slice(args);
176 return builder.ins().call(func, &raw_args);
177 }
178
179 builder.ins().call_indirect(sig, addr, args)
180 }
181}
182
183impl wasmtime_environ::Compiler for Compiler {
    /// Compiles the wasm function `func_index` of `translation` into
    /// native code, returning its metadata and the boxed
    /// `CompiledFunction`.
    fn compile_function(
        &self,
        translation: &ModuleTranslation<'_>,
        func_index: DefinedFuncIndex,
        input: FunctionBodyData<'_>,
        types: &ModuleTypesBuilder,
    ) -> Result<(WasmFunctionInfo, Box<dyn Any + Send>), CompileError> {
        let isa = &*self.isa;
        let module = &translation.module;
        let func_index = module.func_index(func_index);
        let sig = translation.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        let wasm_func_ty = types[sig].unwrap_func();

        // Check a pooled compilation context out of the pool.
        let mut compiler = self.function_compiler();

        // Name the function in the wasm-function namespace so relocations
        // against it can be resolved later.
        let context = &mut compiler.cx.codegen_context;
        context.func.signature = wasm_call_signature(isa, wasm_func_ty, &self.tunables);
        context.func.name = UserFuncName::User(UserExternalName {
            namespace: crate::NS_WASM_FUNC,
            index: func_index.as_u32(),
        });

        if self.tunables.generate_native_debuginfo {
            context.func.collect_debug_info();
        }

        let mut func_env = FuncEnvironment::new(self, translation, types, wasm_func_ty);

        // Wire up stack-overflow checking for native targets. The stack
        // limit is loaded out of the store context reachable via vmctx.
        if !isa.triple().is_pulley() {
            let vmctx = context
                .func
                .create_global_value(ir::GlobalValueData::VMContext);
            let interrupts_ptr = context.func.create_global_value(ir::GlobalValueData::Load {
                base: vmctx,
                offset: i32::from(func_env.offsets.ptr.vmctx_runtime_limits()).into(),
                global_type: isa.pointer_type(),
                flags: MemFlags::trusted().with_readonly(),
            });
            let stack_limit = context.func.create_global_value(ir::GlobalValueData::Load {
                base: interrupts_ptr,
                offset: i32::from(func_env.offsets.ptr.vmstore_context_stack_limit()).into(),
                global_type: isa.pointer_type(),
                flags: MemFlags::trusted(),
            });
            // With signals-based traps Cranelift itself emits the
            // prologue check; otherwise the function environment emits
            // explicit checks at function entry.
            if self.tunables.signals_based_traps {
                context.func.stack_limit = Some(stack_limit);
            } else {
                func_env.stack_limit_at_function_entry = Some(stack_limit);
            }
        }
        // Validate and translate the wasm body into CLIF, reusing pooled
        // validator allocations.
        let FunctionBodyData { validator, body } = input;
        let mut validator =
            validator.into_validator(mem::take(&mut compiler.cx.validator_allocations));
        compiler.cx.func_translator.translate_body(
            &mut validator,
            body.clone(),
            &mut context.func,
            &mut func_env,
        )?;

        let (info, func) = compiler.finish_with_info(
            Some((&body, &self.tunables)),
            &format!("wasm_func_{}", func_index.as_u32()),
        )?;

        let timing = cranelift_codegen::timing::take_current();
        log::debug!("{:?} translated in {:?}", func_index, timing.total());
        log::trace!("{:?} timing info\n{}", func_index, timing);

        Ok((info, Box::new(func)))
    }
289
    /// Builds a trampoline with the "array" calling convention
    /// (vmctx, caller_vmctx, values_ptr, values_len) which forwards to
    /// the wasm-calling-convention function `def_func_index`.
    fn compile_array_to_wasm_trampoline(
        &self,
        translation: &ModuleTranslation<'_>,
        types: &ModuleTypesBuilder,
        def_func_index: DefinedFuncIndex,
    ) -> Result<Box<dyn Any + Send>, CompileError> {
        let func_index = translation.module.func_index(def_func_index);
        let sig = translation.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        let wasm_func_ty = types[sig].unwrap_func();

        let isa = &*self.isa;
        let pointer_type = isa.pointer_type();
        let wasm_call_sig = wasm_call_signature(isa, wasm_func_ty, &self.tunables);
        let array_call_sig = array_call_signature(isa);

        let mut compiler = self.function_compiler();
        let func = ir::Function::with_name_signature(Default::default(), array_call_sig);
        let (mut builder, block0) = compiler.builder(func);

        // The array-call ABI passes exactly these four arguments.
        let (vmctx, caller_vmctx, values_vec_ptr, values_vec_len) = {
            let params = builder.func.dfg.block_params(block0);
            (params[0], params[1], params[2], params[3])
        };

        // Unpack the wasm arguments from the values array and prepend the
        // two vmctx arguments required by the wasm calling convention.
        let mut args = self.load_values_from_array(
            wasm_func_ty.params(),
            &mut builder,
            values_vec_ptr,
            values_vec_len,
        );
        args.insert(0, caller_vmctx);
        args.insert(0, vmctx);

        debug_assert_vmctx_kind(isa, &mut builder, vmctx, wasmtime_environ::VMCONTEXT_MAGIC);
        let offsets = VMOffsets::new(isa.pointer_bytes(), &translation.module);
        let vm_runtime_limits_offset = offsets.ptr.vmctx_runtime_limits();
        // Record this trampoline's frame pointer as the host-to-wasm
        // entry point for later stack walking.
        save_last_wasm_entry_fp(
            &mut builder,
            pointer_type,
            &offsets.ptr,
            vm_runtime_limits_offset.into(),
            vmctx,
        );

        let call = declare_and_call(&mut builder, wasm_call_sig, func_index.as_u32(), &args);
        let results = builder.func.dfg.inst_results(call).to_vec();

        // Spill results back into the same values array the arguments
        // came from.
        self.store_values_to_array(
            &mut builder,
            wasm_func_ty.returns(),
            &results,
            values_vec_ptr,
            values_vec_len,
        );

        // The array-call ABI returns a "succeeded" flag; reaching this
        // point means the callee didn't trap, so return `true`.
        let true_return = builder.ins().iconst(ir::types::I8, 1);
        builder.ins().return_(&[true_return]);
        builder.finalize();

        Ok(Box::new(compiler.finish(&format!(
            "array_to_wasm_{}",
            func_index.as_u32(),
        ))?))
    }
367
    /// Builds a trampoline from the wasm calling convention to the
    /// "array" calling convention used for host functions: arguments are
    /// spilled into an on-stack array, the host callee is invoked, and
    /// results are loaded back out of the same array.
    fn compile_wasm_to_array_trampoline(
        &self,
        wasm_func_ty: &WasmFuncType,
    ) -> Result<Box<dyn Any + Send>, CompileError> {
        let isa = &*self.isa;
        let pointer_type = isa.pointer_type();
        let wasm_call_sig = wasm_call_signature(isa, wasm_func_ty, &self.tunables);
        let array_call_sig = array_call_signature(isa);

        let mut compiler = self.function_compiler();
        let func = ir::Function::with_name_signature(Default::default(), wasm_call_sig);
        let (mut builder, block0) = compiler.builder(func);

        let args = builder.func.dfg.block_params(block0).to_vec();
        let callee_vmctx = args[0];
        let caller_vmctx = args[1];

        // The caller is wasm, so its vmctx must carry the magic value.
        debug_assert_vmctx_kind(
            isa,
            &mut builder,
            caller_vmctx,
            wasmtime_environ::VMCONTEXT_MAGIC,
        );
        // Record exit FP/PC so the host can walk the wasm stack while the
        // host function runs.
        let ptr = isa.pointer_bytes();
        let vm_store_context = builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            caller_vmctx,
            i32::from(ptr.vmcontext_store_context()),
        );
        save_last_wasm_exit_fp_and_pc(&mut builder, pointer_type, &ptr, vm_store_context);

        // Spill all wasm arguments (beyond the two leading vmctx params)
        // into the stack array mandated by the array calling convention.
        let (args_base, args_len) =
            self.allocate_stack_array_and_spill_args(wasm_func_ty, &mut builder, &args[2..]);
        let args_len = builder.ins().iconst(pointer_type, i64::from(args_len));

        // Load the host function pointer out of the callee's host-func
        // context (via its embedded func-ref's array-call slot).
        let ptr_size = isa.pointer_bytes();
        let callee = builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            callee_vmctx,
            ptr_size.vmarray_call_host_func_context_func_ref() + ptr_size.vm_func_ref_array_call(),
        );

        let callee_signature = builder.func.import_signature(array_call_sig);
        let call = self.call_indirect_host(
            &mut builder,
            HostCall::ArrayCall,
            callee_signature,
            callee,
            &[callee_vmctx, caller_vmctx, args_base, args_len],
        );
        // The host returns a flag indicating whether it trapped; raise
        // the trap on the wasm side if so.
        let succeeded = builder.func.dfg.inst_results(call)[0];
        self.raise_if_host_trapped(&mut builder, caller_vmctx, succeeded);
        let results =
            self.load_values_from_array(wasm_func_ty.returns(), &mut builder, args_base, args_len);
        builder.ins().return_(&results);
        builder.finalize();

        Ok(Box::new(compiler.finish(&format!(
            "wasm_to_array_trampoline_{wasm_func_ty}"
        ))?))
    }
439
440 fn append_code(
441 &self,
442 obj: &mut Object<'static>,
443 funcs: &[(String, Box<dyn Any + Send>)],
444 resolve_reloc: &dyn Fn(usize, RelocationTarget) -> usize,
445 ) -> Result<Vec<(SymbolId, FunctionLoc)>> {
446 let mut builder =
447 ModuleTextBuilder::new(obj, self, self.isa.text_section_builder(funcs.len()));
448 if self.linkopts.force_jump_veneers {
449 builder.force_veneers();
450 }
451 let mut addrs = AddressMapSection::default();
452 let mut traps = TrapEncodingBuilder::default();
453
454 let mut ret = Vec::with_capacity(funcs.len());
455 for (i, (sym, func)) in funcs.iter().enumerate() {
456 let func = func.downcast_ref::<CompiledFunction>().unwrap();
457 let (sym, range) = builder.append_func(&sym, func, |idx| resolve_reloc(i, idx));
458 if self.tunables.generate_address_map {
459 let addr = func.address_map();
460 addrs.push(range.clone(), &addr.instructions);
461 }
462 traps.push(range.clone(), &func.traps().collect::<Vec<_>>());
463 builder.append_padding(self.linkopts.padding_between_functions);
464 let info = FunctionLoc {
465 start: u32::try_from(range.start).unwrap(),
466 length: u32::try_from(range.end - range.start).unwrap(),
467 };
468 ret.push((sym, info));
469 }
470
471 builder.finish();
472
473 if self.tunables.generate_address_map {
474 addrs.append_to(obj);
475 }
476 traps.append_to(obj);
477
478 Ok(ret)
479 }
480
    /// Returns the target triple code is being generated for.
    fn triple(&self) -> &target_lexicon::Triple {
        self.isa.triple()
    }
484
    /// Returns the shared Cranelift flag settings, converted to
    /// Wasmtime's `FlagValue` representation.
    fn flags(&self) -> Vec<(&'static str, FlagValue<'static>)> {
        crate::clif_flags_to_wasmtime(self.isa.flags().iter())
    }
488
    /// Returns the ISA-specific flag settings, converted to Wasmtime's
    /// `FlagValue` representation.
    fn isa_flags(&self) -> Vec<(&'static str, FlagValue<'static>)> {
        crate::clif_flags_to_wasmtime(self.isa.isa_flags())
    }
492
    /// Delegates to the ISA's branch-protection query.
    fn is_branch_protection_enabled(&self) -> bool {
        self.isa.is_branch_protection_enabled()
    }
496
    /// Exposes this compiler's component-model compilation support
    /// (implemented on `Compiler` itself in the `component` module).
    #[cfg(feature = "component-model")]
    fn component_compiler(&self) -> &dyn wasmtime_environ::component::ComponentCompiler {
        self
    }
501
    /// Emits DWARF debug sections for all compiled functions into `obj`,
    /// including relocations from the debug sections to the function
    /// symbols and sections they reference.
    fn append_dwarf<'a>(
        &self,
        obj: &mut Object<'_>,
        translations: &'a PrimaryMap<StaticModuleIndex, ModuleTranslation<'a>>,
        get_func: &'a dyn Fn(
            StaticModuleIndex,
            DefinedFuncIndex,
        ) -> (SymbolId, &'a (dyn Any + Send)),
        dwarf_package_bytes: Option<&'a [u8]>,
        tunables: &'a Tunables,
    ) -> Result<()> {
        // Adapt the caller's lookup to yield compiled-function metadata
        // instead of the opaque `Any`.
        let get_func = move |m, f| {
            let (sym, any) = get_func(m, f);
            (
                sym,
                any.downcast_ref::<CompiledFunction>().unwrap().metadata(),
            )
        };
        let mut compilation = crate::debug::Compilation::new(
            &*self.isa,
            translations,
            &get_func,
            dwarf_package_bytes,
            tunables,
        );
        let dwarf_sections = crate::debug::emit_dwarf(&*self.isa, &mut compilation)
            .with_context(|| "failed to emit DWARF debug information")?;

        // First pass: create every section and append its body, so the
        // relocation pass below can reference any section by id.
        let (debug_bodies, debug_relocs): (Vec<_>, Vec<_>) = dwarf_sections
            .iter()
            .map(|s| ((s.name, &s.body), (s.name, &s.relocs)))
            .unzip();
        let mut dwarf_sections_ids = HashMap::new();
        for (name, body) in debug_bodies {
            let segment = obj.segment_name(StandardSegment::Debug).to_vec();
            let section_id = obj.add_section(segment, name.as_bytes().to_vec(), SectionKind::Debug);
            dwarf_sections_ids.insert(name, section_id);
            obj.append_section_data(section_id, &body, 1);
        }

        // Second pass: emit absolute relocations, targeting either a
        // compiled function's symbol or another debug section's symbol.
        for (name, relocs) in debug_relocs {
            let section_id = *dwarf_sections_ids.get(name).unwrap();
            for reloc in relocs {
                let target_symbol = match reloc.target {
                    DwarfSectionRelocTarget::Func(id) => compilation.symbol_id(id),
                    DwarfSectionRelocTarget::Section(name) => {
                        obj.section_symbol(dwarf_sections_ids[name])
                    }
                };
                obj.add_relocation(
                    section_id,
                    object::write::Relocation {
                        offset: u64::from(reloc.offset),
                        symbol: target_symbol,
                        addend: i64::from(reloc.addend),
                        flags: RelocationFlags::Generic {
                            // `reloc.size` is in bytes; the flag is in bits.
                            size: reloc.size << 3,
                            kind: RelocationKind::Absolute,
                            encoding: RelocationEncoding::Generic,
                        },
                    },
                )?;
            }
        }

        Ok(())
    }
570
    /// Returns the SystemV common information entry for unwinding, if the
    /// target ISA supports one.
    fn create_systemv_cie(&self) -> Option<gimli::write::CommonInformationEntry> {
        self.isa.create_systemv_cie()
    }
574
    /// Builds the wasm-to-host trampoline for builtin `index`: records
    /// the exit FP/PC, calls the host builtin, and translates the
    /// builtin's sentinel return value (if any) into a raised trap.
    fn compile_wasm_to_builtin(
        &self,
        index: BuiltinFunctionIndex,
    ) -> Result<Box<dyn Any + Send>, CompileError> {
        let isa = &*self.isa;
        let ptr_size = isa.pointer_bytes();
        let pointer_type = isa.pointer_type();
        let sigs = BuiltinFunctionSignatures::new(self);
        let wasm_sig = sigs.wasm_signature(index);
        let host_sig = sigs.host_signature(index);

        let mut compiler = self.function_compiler();
        let func = ir::Function::with_name_signature(Default::default(), wasm_sig.clone());
        let (mut builder, block0) = compiler.builder(func);
        let vmctx = builder.block_params(block0)[0];

        // Record exit FP/PC so the host can walk the wasm stack while the
        // builtin executes.
        debug_assert_vmctx_kind(isa, &mut builder, vmctx, wasmtime_environ::VMCONTEXT_MAGIC);
        let vm_store_context = builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            vmctx,
            ptr_size.vmcontext_store_context(),
        );
        save_last_wasm_exit_fp_and_pc(&mut builder, pointer_type, &ptr_size, vm_store_context);

        // Forward all wasm-level arguments to the host builtin as-is.
        let args = builder.block_params(block0).to_vec();
        let call = self.call_builtin(&mut builder, vmctx, &args, index, host_sig);
        let results = builder.func.dfg.inst_results(call).to_vec();

        // Builtins signal traps through sentinel return values; turn each
        // convention into a "succeeded" flag checked on the wasm side.
        match index.trap_sentinel() {
            Some(TrapSentinel::Falsy) => {
                // A zero/false result indicates a trap.
                self.raise_if_host_trapped(&mut builder, vmctx, results[0]);
            }
            Some(TrapSentinel::NegativeTwo) => {
                // A result of exactly -2 indicates a trap.
                let ty = builder.func.dfg.value_type(results[0]);
                let trapped = builder.ins().iconst(ty, -2);
                let succeeded = builder.ins().icmp(IntCC::NotEqual, results[0], trapped);
                self.raise_if_host_trapped(&mut builder, vmctx, succeeded);
            }
            Some(TrapSentinel::Negative) => {
                // Any negative result indicates a trap.
                let ty = builder.func.dfg.value_type(results[0]);
                let zero = builder.ins().iconst(ty, 0);
                let succeeded =
                    builder
                        .ins()
                        .icmp(IntCC::SignedGreaterThanOrEqual, results[0], zero);
                self.raise_if_host_trapped(&mut builder, vmctx, succeeded);
            }
            Some(TrapSentinel::NegativeOne) => {
                // A result of exactly -1 indicates a trap.
                let ty = builder.func.dfg.value_type(results[0]);
                let minus_one = builder.ins().iconst(ty, -1);
                let succeeded = builder.ins().icmp(IntCC::NotEqual, results[0], minus_one);
                self.raise_if_host_trapped(&mut builder, vmctx, succeeded);
            }
            None => {}
        }

        builder.ins().return_(&results);
        builder.finalize();

        Ok(Box::new(
            compiler.finish(&format!("wasm_to_builtin_{}", index.name()))?,
        ))
    }
651
652 fn compiled_function_relocation_targets<'a>(
653 &'a self,
654 func: &'a dyn Any,
655 ) -> Box<dyn Iterator<Item = RelocationTarget> + 'a> {
656 let func = func.downcast_ref::<CompiledFunction>().unwrap();
657 Box::new(func.relocations().map(|r| r.reloc_target))
658 }
659}
660
#[cfg(feature = "incremental-cache")]
mod incremental_cache {
    use super::*;

    /// Adapter implementing Cranelift's cache key/value-store trait on
    /// top of Wasmtime's `CacheStore`.
    struct CraneliftCacheStore(Arc<dyn CacheStore>);

    impl cranelift_codegen::incremental_cache::CacheKvStore for CraneliftCacheStore {
        fn get(&self, key: &[u8]) -> Option<std::borrow::Cow<[u8]>> {
            self.0.get(key)
        }
        fn insert(&mut self, key: &[u8], val: Vec<u8>) {
            self.0.insert(key, val);
        }
    }

    /// Compiles `context`, consulting the incremental cache when
    /// `cache_ctx` is provided and updating its hit/insert statistics.
    pub(super) fn compile_maybe_cached<'a>(
        context: &'a mut Context,
        isa: &dyn TargetIsa,
        cache_ctx: Option<&mut IncrementalCacheContext>,
    ) -> Result<CompiledCode, CompileError> {
        // No cache configured for this context: plain compilation.
        let cache_ctx = match cache_ctx {
            Some(ctx) => ctx,
            None => return compile_uncached(context, isa),
        };

        let mut cache_store = CraneliftCacheStore(cache_ctx.cache_store.clone());
        let (_compiled_code, from_cache) = context
            .compile_with_cache(isa, &mut cache_store, &mut Default::default())
            .map_err(|error| CompileError::Codegen(pretty_error(&error.func, error.inner)))?;

        if from_cache {
            cache_ctx.num_hits += 1;
        } else {
            cache_ctx.num_cached += 1;
        }

        Ok(context.take_compiled_code().unwrap())
    }
}
700
701#[cfg(feature = "incremental-cache")]
702use incremental_cache::*;
703
/// Fallback used when the `incremental-cache` feature is disabled:
/// always compiles from scratch, ignoring the cache context.
#[cfg(not(feature = "incremental-cache"))]
fn compile_maybe_cached<'a>(
    context: &'a mut Context,
    isa: &dyn TargetIsa,
    _cache_ctx: Option<&mut IncrementalCacheContext>,
) -> Result<CompiledCode, CompileError> {
    compile_uncached(context, isa)
}
712
/// Runs Cranelift compilation on `context`, converting any codegen error
/// into a `CompileError` with the pretty-printed function attached.
fn compile_uncached<'a>(
    context: &'a mut Context,
    isa: &dyn TargetIsa,
) -> Result<CompiledCode, CompileError> {
    context
        .compile(isa, &mut Default::default())
        .map_err(|error| CompileError::Codegen(pretty_error(&error.func, error.inner)))?;
    Ok(context.take_compiled_code().unwrap())
}
722
impl Compiler {
    /// Allocates a stack slot big enough for the array-call values array
    /// of `ty` (max of param/return counts, 16 bytes per value), spills
    /// `args` into it, and returns the slot's address plus the array's
    /// capacity in values.
    fn allocate_stack_array_and_spill_args(
        &self,
        ty: &WasmFuncType,
        builder: &mut FunctionBuilder,
        args: &[ir::Value],
    ) -> (Value, u32) {
        let isa = &*self.isa;
        let pointer_type = isa.pointer_type();

        // Every value occupies a 16-byte (u128) slot so any wasm value
        // type fits; the same array is reused for params and returns.
        let value_size = mem::size_of::<u128>();
        let values_vec_len = cmp::max(ty.params().len(), ty.returns().len());
        let values_vec_byte_size = u32::try_from(value_size * values_vec_len).unwrap();
        let values_vec_len = u32::try_from(values_vec_len).unwrap();

        // NOTE(review): the third argument is presumably `align_shift`,
        // i.e. 2^4 = 16-byte alignment matching the slot size — confirm
        // against `StackSlotData::new`.
        let slot = builder.func.create_sized_stack_slot(ir::StackSlotData::new(
            ir::StackSlotKind::ExplicitSlot,
            values_vec_byte_size,
            4,
        ));
        let values_vec_ptr = builder.ins().stack_addr(pointer_type, slot, 0);

        {
            let values_vec_len = builder
                .ins()
                .iconst(ir::types::I32, i64::from(values_vec_len));
            self.store_values_to_array(builder, ty.params(), args, values_vec_ptr, values_vec_len);
        }

        (values_vec_ptr, values_vec_len)
    }

    /// Stores `values` of the given `types` into consecutive 16-byte
    /// slots starting at `values_vec_ptr`. In debug builds this also
    /// emits a runtime capacity check.
    fn store_values_to_array(
        &self,
        builder: &mut FunctionBuilder,
        types: &[WasmValType],
        values: &[Value],
        values_vec_ptr: Value,
        values_vec_capacity: Value,
    ) {
        debug_assert_eq!(types.len(), values.len());
        debug_assert_enough_capacity_for_length(builder, types.len(), values_vec_capacity);

        // Stores are explicitly little-endian and non-trapping, keeping
        // the in-memory layout of the array host-independent.
        let flags = ir::MemFlags::new()
            .with_notrap()
            .with_endianness(ir::Endianness::Little);

        let value_size = mem::size_of::<u128>();
        for (i, val) in values.iter().copied().enumerate() {
            crate::unbarriered_store_type_at_offset(
                &mut builder.cursor(),
                flags,
                values_vec_ptr,
                i32::try_from(i * value_size).unwrap(),
                val,
            );
        }
    }

    /// Loads one value per entry of `types` from consecutive 16-byte
    /// slots starting at `values_vec_ptr` — the inverse of
    /// `store_values_to_array`.
    fn load_values_from_array(
        &self,
        types: &[WasmValType],
        builder: &mut FunctionBuilder,
        values_vec_ptr: Value,
        values_vec_capacity: Value,
    ) -> Vec<ir::Value> {
        let isa = &*self.isa;
        let value_size = mem::size_of::<u128>();

        debug_assert_enough_capacity_for_length(builder, types.len(), values_vec_capacity);

        // Must match the flags used by `store_values_to_array`.
        let flags = MemFlags::new()
            .with_notrap()
            .with_endianness(ir::Endianness::Little);

        let mut results = Vec::new();
        for (i, ty) in types.iter().enumerate() {
            results.push(crate::unbarriered_load_type_at_offset(
                isa,
                &mut builder.cursor(),
                *ty,
                flags,
                values_vec_ptr,
                i32::try_from(i * value_size).unwrap(),
            ));
        }
        results
    }

    /// Checks a pooled `CompilerContext` out (creating one if the pool is
    /// empty) and pairs it with `self`; the context is returned to the
    /// pool by `FunctionCompiler::finish_with_info`.
    fn function_compiler(&self) -> FunctionCompiler<'_> {
        let saved_context = self.contexts.lock().unwrap().pop();
        FunctionCompiler {
            compiler: self,
            cx: saved_context
                .map(|mut ctx| {
                    // Reuse pooled allocations, reset per-function state.
                    ctx.codegen_context.clear();
                    ctx
                })
                .unwrap_or_else(|| CompilerContext {
                    // Cache state only exists when a cache store is
                    // configured; without the feature the field defaults
                    // to `None` via `..Default::default()`.
                    #[cfg(feature = "incremental-cache")]
                    incremental_cache_ctx: self.cache_store.as_ref().map(|cache_store| {
                        IncrementalCacheContext {
                            cache_store: cache_store.clone(),
                            num_hits: 0,
                            num_cached: 0,
                        }
                    }),
                    ..Default::default()
                }),
        }
    }

    /// Emits a branch on `succeeded`: the failure path (marked cold)
    /// calls the `raise` builtin with `vmctx`; the success path continues
    /// in a new block which the builder is left positioned at.
    pub fn raise_if_host_trapped(
        &self,
        builder: &mut FunctionBuilder<'_>,
        vmctx: ir::Value,
        succeeded: ir::Value,
    ) {
        let trapped_block = builder.create_block();
        let continuation_block = builder.create_block();
        builder.set_cold_block(trapped_block);
        builder
            .ins()
            .brif(succeeded, continuation_block, &[], trapped_block, &[]);

        // Both blocks have all their predecessors known already.
        builder.seal_block(trapped_block);
        builder.seal_block(continuation_block);

        builder.switch_to_block(trapped_block);
        let sigs = BuiltinFunctionSignatures::new(self);
        let sig = sigs.host_signature(BuiltinFunctionIndex::raise());
        self.call_builtin(builder, vmctx, &[vmctx], BuiltinFunctionIndex::raise(), sig);
        // NOTE(review): `raise` presumably never returns; this trap marks
        // everything past the call as unreachable.
        builder.ins().trap(TRAP_INTERNAL_ASSERT);

        builder.switch_to_block(continuation_block);
    }

    /// Loads the `builtin` function's pointer out of the builtin-function
    /// array reachable from `vmctx` and calls it through
    /// `call_indirect_host`.
    fn call_builtin(
        &self,
        builder: &mut FunctionBuilder<'_>,
        vmctx: ir::Value,
        args: &[ir::Value],
        builtin: BuiltinFunctionIndex,
        sig: ir::Signature,
    ) -> ir::Inst {
        let isa = &*self.isa;
        let ptr_size = isa.pointer_bytes();
        let pointer_type = isa.pointer_type();

        // Both loads are trusted and readonly — NOTE(review): presumably
        // the builtin array is immutable after instantiation; confirm.
        let mem_flags = ir::MemFlags::trusted().with_readonly();
        let array_addr = builder.ins().load(
            pointer_type,
            mem_flags,
            vmctx,
            i32::from(ptr_size.vmcontext_builtin_functions()),
        );
        // Index into the array of pointer-sized function pointers.
        let body_offset = i32::try_from(builtin.index() * pointer_type.bytes()).unwrap();
        let func_addr = builder
            .ins()
            .load(pointer_type, mem_flags, array_addr, body_offset);

        let sig = builder.func.import_signature(sig);
        self.call_indirect_host(builder, builtin, sig, func_addr, args)
    }

    /// Returns the target ISA code is generated for.
    pub fn isa(&self) -> &dyn TargetIsa {
        &*self.isa
    }

    /// Returns the tunables this compiler was configured with.
    pub fn tunables(&self) -> &Tunables {
        &self.tunables
    }
}
944
/// Handle pairing a `Compiler` with a checked-out `CompilerContext` for
/// compiling a single function; see `Compiler::function_compiler`.
struct FunctionCompiler<'a> {
    compiler: &'a Compiler,
    cx: CompilerContext,
}
949
impl FunctionCompiler<'_> {
    /// Installs `func` into the codegen context and returns a builder
    /// positioned in a freshly created entry block whose parameters
    /// mirror the function's signature.
    fn builder(&mut self, func: ir::Function) -> (FunctionBuilder<'_>, ir::Block) {
        self.cx.codegen_context.func = func;
        let mut builder = FunctionBuilder::new(
            &mut self.cx.codegen_context.func,
            self.cx.func_translator.context(),
        );

        let block0 = builder.create_block();
        builder.append_block_params_for_function_params(block0);
        builder.switch_to_block(block0);
        // The entry block has no predecessors, so it can be sealed now.
        builder.seal_block(block0);
        (builder, block0)
    }

    /// Finishes compilation for functions expected to produce no stack
    /// maps (e.g. trampolines), discarding the `WasmFunctionInfo`.
    fn finish(self, clif_filename: &str) -> Result<CompiledFunction, CompileError> {
        let (info, func) = self.finish_with_info(None, clif_filename)?;
        assert!(info.stack_maps.is_empty());
        Ok(func)
    }

    /// Runs Cranelift compilation on the current context and packages the
    /// result into a `CompiledFunction` plus its `WasmFunctionInfo`.
    ///
    /// `body_and_tunables` is `Some` for real wasm functions (enabling
    /// address maps, alignment, and native debuginfo) and `None` for
    /// trampolines. On success the pooled context is returned to the
    /// compiler's pool.
    fn finish_with_info(
        mut self,
        body_and_tunables: Option<(&FunctionBody<'_>, &Tunables)>,
        clif_filename: &str,
    ) -> Result<(WasmFunctionInfo, CompiledFunction), CompileError> {
        let context = &mut self.cx.codegen_context;
        let isa = &*self.compiler.isa;

        let compilation_result =
            compile_maybe_cached(context, isa, self.cx.incremental_cache_ctx.as_mut());

        // Dump the CLIF (if requested) before propagating any error so
        // failing functions can still be inspected.
        if let Some(path) = &self.compiler.clif_dir {
            use std::io::Write;

            let mut path = path.join(clif_filename);
            path.set_extension("clif");

            let mut output = std::fs::File::create(path).unwrap();
            write!(output, "{}", context.func.display()).unwrap();
        }

        let mut compiled_code = compilation_result?;

        // Real wasm functions get the ISA's preferred alignment;
        // trampolines don't need any particular alignment.
        let preferred_alignment = if body_and_tunables.is_some() {
            self.compiler.isa.function_alignment().preferred
        } else {
            1
        };

        let alignment = compiled_code.buffer.alignment.max(preferred_alignment);
        let mut compiled_function = CompiledFunction::new(
            compiled_code.buffer.clone(),
            context.func.params.user_named_funcs().clone(),
            alignment,
        );

        // Record where this function's body lives in the original wasm
        // binary so machine addresses can map back to source offsets.
        if let Some((body, tunables)) = body_and_tunables {
            let data = body.get_binary_reader();
            let offset = data.original_position();
            let len = data.bytes_remaining();
            compiled_function.set_address_map(
                offset.try_into().unwrap(),
                len.try_into().unwrap(),
                tunables.generate_address_map,
            );
        }

        if isa.flags().unwind_info() {
            let unwind = compiled_code
                .create_unwind_info(isa)
                .map_err(|error| CompileError::Codegen(pretty_error(&context.func, error)))?;

            if let Some(unwind_info) = unwind {
                compiled_function.set_unwind_info(unwind_info);
            }
        }

        // Native debuginfo additionally needs value-label ranges and, if
        // the primary unwind info isn't already SystemV, a separate CFA
        // unwind table in SystemV format.
        if body_and_tunables
            .map(|(_, t)| t.generate_native_debuginfo)
            .unwrap_or(false)
        {
            compiled_function.set_value_labels_ranges(compiled_code.value_labels_ranges.clone());

            if !matches!(
                compiled_function.metadata().unwind_info,
                Some(UnwindInfo::SystemV(_))
            ) {
                let cfa_unwind = compiled_code
                    .create_unwind_info_of_kind(isa, UnwindInfoKind::SystemV)
                    .map_err(|error| CompileError::Codegen(pretty_error(&context.func, error)))?;

                if let Some(UnwindInfo::SystemV(cfa_unwind_info)) = cfa_unwind {
                    compiled_function.set_cfa_unwind_info(cfa_unwind_info);
                }
            }
        }

        let stack_maps =
            clif_to_env_stack_maps(compiled_code.buffer.take_user_stack_maps().into_iter());
        compiled_function
            .set_sized_stack_slots(std::mem::take(&mut context.func.sized_stack_slots));
        // Return the pooled context for reuse by later compilations.
        self.compiler.contexts.lock().unwrap().push(self.cx);

        Ok((
            WasmFunctionInfo {
                start_srcloc: compiled_function.metadata().address_map.start_srcloc,
                stack_maps: stack_maps.into(),
            },
            compiled_function,
        ))
    }
}
1072
1073fn clif_to_env_stack_maps(
1076 clif_stack_maps: impl ExactSizeIterator<Item = (CodeOffset, u32, ir::UserStackMap)>,
1077) -> Vec<StackMapInformation> {
1078 let mut stack_maps = Vec::with_capacity(clif_stack_maps.len());
1079 for (code_offset, mapped_bytes, stack_map) in clif_stack_maps {
1080 let mut bitset = CompoundBitSet::new();
1081 for (ty, offset) in stack_map.entries() {
1082 assert_eq!(ty, ir::types::I32);
1083 bitset.insert(usize::try_from(offset).unwrap());
1084 }
1085 if bitset.is_empty() {
1086 continue;
1087 }
1088 let stack_map = wasmtime_environ::StackMap::new(mapped_bytes, bitset);
1089 stack_maps.push(StackMapInformation {
1090 code_offset,
1091 stack_map,
1092 });
1093 }
1094 stack_maps.sort_unstable_by_key(|info| info.code_offset);
1095 stack_maps
1096}
1097
1098fn declare_and_call(
1099 builder: &mut FunctionBuilder,
1100 signature: ir::Signature,
1101 func_index: u32,
1102 args: &[ir::Value],
1103) -> ir::Inst {
1104 let name = ir::ExternalName::User(builder.func.declare_imported_user_function(
1105 ir::UserExternalName {
1106 namespace: crate::NS_WASM_FUNC,
1107 index: func_index,
1108 },
1109 ));
1110 let signature = builder.func.import_signature(signature);
1111 let callee = builder.func.dfg.ext_funcs.push(ir::ExtFuncData {
1112 name,
1113 signature,
1114 colocated: true,
1115 });
1116 builder.ins().call(callee, &args)
1117}
1118
/// In debug builds, emits a runtime check that `capacity >= length`
/// (unsigned), trapping with `TRAP_INTERNAL_ASSERT` otherwise; emits
/// nothing when `debug_assertions` is disabled.
fn debug_assert_enough_capacity_for_length(
    builder: &mut FunctionBuilder,
    length: usize,
    capacity: ir::Value,
) {
    if cfg!(debug_assertions) {
        let enough_capacity = builder.ins().icmp_imm(
            ir::condcodes::IntCC::UnsignedGreaterThanOrEqual,
            capacity,
            ir::immediates::Imm64::new(length.try_into().unwrap()),
        );
        builder.ins().trapz(enough_capacity, TRAP_INTERNAL_ASSERT);
    }
}
1133
/// In debug builds, emits a runtime check that the 32-bit value at the
/// start of `vmctx` equals `expected_vmctx_magic` (read with the target's
/// endianness), trapping with `TRAP_INTERNAL_ASSERT` otherwise; emits
/// nothing when `debug_assertions` is disabled.
fn debug_assert_vmctx_kind(
    isa: &dyn TargetIsa,
    builder: &mut FunctionBuilder,
    vmctx: ir::Value,
    expected_vmctx_magic: u32,
) {
    if cfg!(debug_assertions) {
        let magic = builder.ins().load(
            ir::types::I32,
            MemFlags::trusted().with_endianness(isa.endianness()),
            vmctx,
            0,
        );
        let is_expected_vmctx = builder.ins().icmp_imm(
            ir::condcodes::IntCC::Equal,
            magic,
            i64::from(expected_vmctx_magic),
        );
        builder.ins().trapz(is_expected_vmctx, TRAP_INTERNAL_ASSERT);
    }
}
1155
/// Stores the current frame pointer into the store context's
/// "last wasm entry FP" slot, locating the store context through a
/// pointer at `vm_store_context_offset` inside `vmctx`.
fn save_last_wasm_entry_fp(
    builder: &mut FunctionBuilder,
    pointer_type: ir::Type,
    ptr_size: &impl PtrSize,
    vm_store_context_offset: u32,
    vmctx: Value,
) {
    // Load the pointer to the store context out of `vmctx`.
    let vm_store_context = builder.ins().load(
        pointer_type,
        MemFlags::trusted(),
        vmctx,
        i32::try_from(vm_store_context_offset).unwrap(),
    );

    // Publish this frame's FP as the host-to-wasm entry frame pointer.
    let fp = builder.ins().get_frame_pointer(pointer_type);
    builder.ins().store(
        MemFlags::trusted(),
        fp,
        vm_store_context,
        ptr_size.vmstore_context_last_wasm_entry_fp(),
    );
}
1180
/// Records the wasm caller's frame pointer and return address into the
/// store context (`limits`) so the runtime can start a stack walk from
/// the wasm exit point.
fn save_last_wasm_exit_fp_and_pc(
    builder: &mut FunctionBuilder,
    pointer_type: ir::Type,
    ptr: &impl PtrSize,
    limits: Value,
) {
    // The wasm caller's FP is loaded from offset 0 of this trampoline's
    // frame — NOTE(review): presumably the saved-FP word of a standard
    // frame-pointer-chained layout; confirm frame pointers are always
    // maintained here.
    let trampoline_fp = builder.ins().get_frame_pointer(pointer_type);
    let wasm_fp = builder.ins().load(
        pointer_type,
        MemFlags::trusted(),
        trampoline_fp,
        0,
    );
    builder.ins().store(
        MemFlags::trusted(),
        wasm_fp,
        limits,
        ptr.vmstore_context_last_wasm_exit_fp(),
    );
    // This trampoline's return address is the wasm caller's PC.
    let wasm_pc = builder.ins().get_return_address(pointer_type);
    builder.ins().store(
        MemFlags::trusted(),
        wasm_pc,
        limits,
        ptr.vmstore_context_last_wasm_exit_pc(),
    );
}