// cranelift_frontend/frontend.rs
1//! A frontend for building Cranelift IR from other languages.
2use crate::ssa::{SSABuilder, SideEffects};
3use crate::variable::Variable;
4use alloc::vec::Vec;
5use core::fmt::{self, Debug};
6use cranelift_codegen::cursor::{Cursor, CursorPosition, FuncCursor};
7use cranelift_codegen::entity::{EntityRef, EntitySet, PrimaryMap, SecondaryMap};
8use cranelift_codegen::ir;
9use cranelift_codegen::ir::condcodes::IntCC;
10use cranelift_codegen::ir::{
11    AbiParam, Block, DataFlowGraph, DynamicStackSlot, DynamicStackSlotData, ExtFuncData,
12    ExternalName, FuncRef, Function, GlobalValue, GlobalValueData, Inst, InstBuilder,
13    InstBuilderBase, InstructionData, JumpTable, JumpTableData, LibCall, MemFlags, RelSourceLoc,
14    SigRef, Signature, StackSlot, StackSlotData, Type, Value, ValueLabel, ValueLabelAssignments,
15    ValueLabelStart, types,
16};
17use cranelift_codegen::isa::TargetFrontendConfig;
18use cranelift_codegen::packed_option::PackedOption;
19use cranelift_codegen::traversals::Dfs;
20use smallvec::SmallVec;
21
22mod safepoints;
23
/// Structure used for translating a series of functions into Cranelift IR.
///
/// In order to reduce memory reallocations when compiling multiple functions,
/// [`FunctionBuilderContext`] holds various data structures which are cleared between
/// functions, rather than dropped, preserving the underlying allocations.
#[derive(Default)]
pub struct FunctionBuilderContext {
    // Incremental SSA construction state: variable defs/uses and block sealing.
    ssa: SSABuilder,
    // Fill status of each block (empty / partial / filled).
    status: SecondaryMap<Block, BlockStatus>,
    // Declared type of each user variable, indexed by `Variable`.
    variables: PrimaryMap<Variable, Type>,
    // Variables whose associated SSA values must all appear in stack maps.
    stack_map_vars: EntitySet<Variable>,
    // Individual SSA values that must appear in stack maps.
    stack_map_values: EntitySet<Value>,
    // State for the pass that spills stack-map values around safepoints.
    safepoints: safepoints::SafepointSpiller,
}
38
/// Temporary object used to build a single Cranelift IR [`Function`].
pub struct FunctionBuilder<'a> {
    /// The function currently being built.
    /// This field is public so the function can be re-borrowed.
    pub func: &'a mut Function,

    /// Source location to assign to all new instructions.
    srcloc: ir::SourceLoc,

    // Reusable per-function state (SSA builder, block status, variable types).
    func_ctx: &'a mut FunctionBuilderContext,
    // Block currently receiving instructions; `None` until `switch_to_block`.
    position: PackedOption<Block>,
}
51
/// How far instruction insertion into a single block has progressed.
#[derive(Clone, Default, Eq, PartialEq)]
enum BlockStatus {
    /// No instructions have been added.
    #[default]
    Empty,
    /// Some instructions have been added, but no terminator.
    Partial,
    /// A terminator has been added; no further instructions may be added.
    Filled,
}
62
63impl FunctionBuilderContext {
64    /// Creates a [`FunctionBuilderContext`] structure. The structure is automatically cleared after
65    /// each [`FunctionBuilder`] completes translating a function.
66    pub fn new() -> Self {
67        Self::default()
68    }
69
70    fn clear(&mut self) {
71        let FunctionBuilderContext {
72            ssa,
73            status,
74            variables,
75            stack_map_vars,
76            stack_map_values,
77            safepoints,
78        } = self;
79        ssa.clear();
80        status.clear();
81        variables.clear();
82        stack_map_values.clear();
83        stack_map_vars.clear();
84        safepoints.clear();
85    }
86
87    fn is_empty(&self) -> bool {
88        self.ssa.is_empty() && self.status.is_empty() && self.variables.is_empty()
89    }
90}
91
/// Implementation of the [`InstBuilder`] that has
/// one convenience method per Cranelift IR instruction.
pub struct FuncInstBuilder<'short, 'long: 'short> {
    // Builder whose current position receives the instructions.
    builder: &'short mut FunctionBuilder<'long>,
    // Block to which the built instructions are appended.
    block: Block,
}
98
99impl<'short, 'long> FuncInstBuilder<'short, 'long> {
100    fn new(builder: &'short mut FunctionBuilder<'long>, block: Block) -> Self {
101        Self { builder, block }
102    }
103}
104
impl<'short, 'long> InstBuilderBase<'short> for FuncInstBuilder<'short, 'long> {
    fn data_flow_graph(&self) -> &DataFlowGraph {
        &self.builder.func.dfg
    }

    fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
        &mut self.builder.func.dfg
    }

    // This implementation is richer than `InsertBuilder` because we use the data of the
    // instruction being inserted to add related info to the DFG and the SSA building system,
    // and perform debug sanity checks.
    fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'short mut DataFlowGraph) {
        // We only insert the Block in the layout when an instruction is added to it
        self.builder.ensure_inserted_block();

        // Create the instruction, its result values, and place it at the end
        // of the current block, tagging it with the builder's source location.
        let inst = self.builder.func.dfg.make_inst(data);
        self.builder.func.dfg.make_inst_results(inst, ctrl_typevar);
        self.builder.func.layout.append_inst(inst, self.block);
        if !self.builder.srcloc.is_default() {
            self.builder.func.set_srcloc(inst, self.builder.srcloc);
        }

        // For branching instructions, record every CFG edge they introduce
        // with the SSA builder so successor block parameters can be computed.
        match &self.builder.func.dfg.insts[inst] {
            ir::InstructionData::Jump {
                destination: dest, ..
            } => {
                // If the user has supplied jump arguments we must adapt the arguments of
                // the destination block
                let block = dest.block(&self.builder.func.dfg.value_lists);
                self.builder.declare_successor(block, inst);
            }

            ir::InstructionData::Brif {
                blocks: [branch_then, branch_else],
                ..
            } => {
                let block_then = branch_then.block(&self.builder.func.dfg.value_lists);
                let block_else = branch_else.block(&self.builder.func.dfg.value_lists);

                self.builder.declare_successor(block_then, inst);
                // Declare each distinct successor only once.
                if block_then != block_else {
                    self.builder.declare_successor(block_else, inst);
                }
            }

            ir::InstructionData::BranchTable { table, .. } => {
                let pool = &self.builder.func.dfg.value_lists;

                // Unlike most other jumps/branches and like try_call,
                // jump tables are capable of having the same successor appear
                // multiple times, so we must deduplicate.
                let mut unique = EntitySet::<Block>::new();
                for dest_block in self
                    .builder
                    .func
                    .stencil
                    .dfg
                    .jump_tables
                    .get(*table)
                    .expect("you are referencing an undeclared jump table")
                    .all_branches()
                {
                    let block = dest_block.block(pool);
                    // `insert` returns false if the block was already present.
                    if !unique.insert(block) {
                        continue;
                    }

                    // Call `declare_block_predecessor` instead of `declare_successor` for
                    // avoiding the borrow checker.
                    self.builder
                        .func_ctx
                        .ssa
                        .declare_block_predecessor(block, inst);
                }
            }

            ir::InstructionData::TryCall { exception, .. }
            | ir::InstructionData::TryCallIndirect { exception, .. } => {
                let pool = &self.builder.func.dfg.value_lists;

                // Unlike most other jumps/branches and like br_table,
                // exception tables are capable of having the same successor
                // appear multiple times, so we must deduplicate.
                let mut unique = EntitySet::<Block>::new();
                for dest_block in self
                    .builder
                    .func
                    .stencil
                    .dfg
                    .exception_tables
                    .get(*exception)
                    .expect("you are referencing an undeclared exception table")
                    .all_branches()
                {
                    let block = dest_block.block(pool);
                    if !unique.insert(block) {
                        continue;
                    }

                    // Call `declare_block_predecessor` instead of `declare_successor` for
                    // avoiding the borrow checker.
                    self.builder
                        .func_ctx
                        .ssa
                        .declare_block_predecessor(block, inst);
                }
            }

            // Any other instruction must not be a branch, or its successors
            // would be missing from the SSA builder's view of the CFG.
            inst => assert!(!inst.opcode().is_branch()),
        }

        // A terminator ends the block: mark it filled so nothing more can be
        // appended to it.
        if data.opcode().is_terminator() {
            self.builder.fill_current_block()
        }
        (inst, &mut self.builder.func.dfg)
    }
}
223
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
/// An error encountered when calling [`FunctionBuilder::try_use_var`].
pub enum UseVariableError {
    /// The variable was used before [`FunctionBuilder::declare_var`] was
    /// called for it.
    UsedBeforeDeclared(Variable),
}
229
230impl fmt::Display for UseVariableError {
231    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
232        match self {
233            UseVariableError::UsedBeforeDeclared(variable) => {
234                write!(
235                    f,
236                    "variable {} was used before it was defined",
237                    variable.index()
238                )?;
239            }
240        }
241        Ok(())
242    }
243}
244
// Marker impl so the error can be used as a `dyn std::error::Error`.
impl std::error::Error for UseVariableError {}
246
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
/// An error encountered when defining the initial value of a variable.
///
/// Returned by [`FunctionBuilder::try_def_var`].
pub enum DefVariableError {
    /// The variable was instantiated with a value of the wrong type.
    ///
    /// note: to obtain the type of the value, you can call
    /// [`cranelift_codegen::ir::dfg::DataFlowGraph::value_type`] (using the
    /// `FunctionBuilder.func.dfg` field)
    TypeMismatch(Variable, Value),
    /// The value was defined (in a call to [`FunctionBuilder::def_var`]) before
    /// it was declared (in a call to [`FunctionBuilder::declare_var`]).
    DefinedBeforeDeclared(Variable),
}
260
261impl fmt::Display for DefVariableError {
262    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
263        match self {
264            DefVariableError::TypeMismatch(variable, value) => {
265                write!(
266                    f,
267                    "the types of variable {} and value {} are not the same.
268                    The `Value` supplied to `def_var` must be of the same type as
269                    the variable was declared to be of in `declare_var`.",
270                    variable.index(),
271                    value.as_u32()
272                )?;
273            }
274            DefVariableError::DefinedBeforeDeclared(variable) => {
275                write!(
276                    f,
277                    "the value of variable {} was declared before it was defined",
278                    variable.index()
279                )?;
280            }
281        }
282        Ok(())
283    }
284}
285
286/// This module allows you to create a function in Cranelift IR in a straightforward way, hiding
287/// all the complexity of its internal representation.
288///
/// The module is parametrized by one type which is the representation of variables in your
/// origin language. It offers a way to conveniently append instructions to your program flow.
/// You are responsible for splitting your instruction flow into extended blocks (declared with
/// [`create_block`](Self::create_block)) whose properties are:
293///
294/// - branch and jump instructions can only point at the top of extended blocks;
295/// - the last instruction of each block is a terminator instruction which has no natural successor,
296///   and those instructions can only appear at the end of extended blocks.
297///
/// The parameters of Cranelift IR instructions are Cranelift IR values, which can only be created
/// as results of other Cranelift IR instructions. To be able to create variables redefined multiple
/// times in your program, use the [`def_var`](Self::def_var) and [`use_var`](Self::use_var) commands,
/// which will maintain the correspondence between your variables and Cranelift IR SSA values.
302///
303/// The first block for which you call [`switch_to_block`](Self::switch_to_block) will be assumed to
304/// be the beginning of the function.
305///
306/// At creation, a [`FunctionBuilder`] instance borrows an already allocated `Function` which it
307/// modifies with the information stored in the mutable borrowed
308/// [`FunctionBuilderContext`]. The function passed in argument should be newly created with
309/// [`Function::with_name_signature()`], whereas the [`FunctionBuilderContext`] can be kept as is
310/// between two function translations.
311///
312/// # Errors
313///
/// The functions below will panic in debug mode whenever you try to modify the Cranelift IR
/// function in a way that violates the coherence of the code. For instance: switching to a new
316/// [`Block`] when you haven't filled the current one with a terminator instruction, inserting a
317/// return instruction with arguments that don't match the function's signature.
318impl<'a> FunctionBuilder<'a> {
319    /// Creates a new [`FunctionBuilder`] structure that will operate on a [`Function`] using a
320    /// [`FunctionBuilderContext`].
321    pub fn new(func: &'a mut Function, func_ctx: &'a mut FunctionBuilderContext) -> Self {
322        debug_assert!(func_ctx.is_empty());
323        Self {
324            func,
325            srcloc: Default::default(),
326            func_ctx,
327            position: Default::default(),
328        }
329    }
330
    /// Get the block that this builder is currently at.
    ///
    /// Returns `None` before the first call to
    /// [`switch_to_block`](Self::switch_to_block).
    pub fn current_block(&self) -> Option<Block> {
        self.position.expand()
    }
335
    /// Set the source location that should be assigned to all new instructions.
    ///
    /// A default (zero) location is treated as "no location" and is not
    /// recorded on the instructions that get inserted.
    pub fn set_srcloc(&mut self, srcloc: ir::SourceLoc) {
        self.srcloc = srcloc;
    }
340
    /// Get the current source location that this builder is using.
    ///
    /// See [`set_srcloc`](Self::set_srcloc).
    pub fn srcloc(&self) -> ir::SourceLoc {
        self.srcloc
    }
345
346    /// Creates a new [`Block`] and returns its reference.
347    pub fn create_block(&mut self) -> Block {
348        let block = self.func.dfg.make_block();
349        self.func_ctx.ssa.declare_block(block);
350        block
351    }
352
    /// Mark a block as "cold".
    ///
    /// This will try to move it out of the ordinary path of execution
    /// when lowered to machine code.
    ///
    /// Delegates to the function layout's cold-block flag.
    pub fn set_cold_block(&mut self, block: Block) {
        self.func.layout.set_cold(block);
    }
360
    /// Insert `block` in the layout *after* the existing block `after`.
    ///
    /// Delegates to the function layout.
    pub fn insert_block_after(&mut self, block: Block, after: Block) {
        self.func.layout.insert_block_after(block, after);
    }
365
366    /// After the call to this function, new instructions will be inserted into the designated
367    /// block, in the order they are declared. You must declare the types of the [`Block`] arguments
368    /// you will use here.
369    ///
370    /// When inserting the terminator instruction (which doesn't have a fallthrough to its immediate
371    /// successor), the block will be declared filled and it will not be possible to append
372    /// instructions to it.
373    pub fn switch_to_block(&mut self, block: Block) {
374        log::trace!("switch to {block:?}");
375
376        // First we check that the previous block has been filled.
377        debug_assert!(
378            self.position.is_none()
379                || self.is_unreachable()
380                || self.is_pristine(self.position.unwrap())
381                || self.is_filled(self.position.unwrap()),
382            "you have to fill your block before switching"
383        );
384        // We cannot switch to a filled block
385        debug_assert!(
386            !self.is_filled(block),
387            "you cannot switch to a block which is already filled"
388        );
389
390        // Then we change the cursor position.
391        self.position = PackedOption::from(block);
392    }
393
    /// Declares that all the predecessors of this block are known.
    ///
    /// Function to call with `block` as soon as the last branch instruction to `block` has been
    /// created. Forgetting to call this method on every block will cause inconsistencies in the
    /// produced functions.
    pub fn seal_block(&mut self, block: Block) {
        // Sealing may create block parameters and new instructions; process
        // those side effects before continuing.
        let side_effects = self.func_ctx.ssa.seal_block(block, self.func);
        self.handle_ssa_side_effects(side_effects);
    }
403
    /// Effectively calls [seal_block](Self::seal_block) on all unsealed blocks in the function.
    ///
    /// It's more efficient to seal [`Block`]s as soon as possible, during
    /// translation, but for frontends where this is impractical to do, this
    /// function can be used at the end of translating all blocks to ensure
    /// that everything is sealed.
    pub fn seal_all_blocks(&mut self) {
        // Same side-effect handling as `seal_block`, applied to every block.
        let side_effects = self.func_ctx.ssa.seal_all_blocks(self.func);
        self.handle_ssa_side_effects(side_effects);
    }
414
    /// Declares the type of a variable.
    ///
    /// This allows the variable to be defined and used later (by calling
    /// [`FunctionBuilder::def_var`] and [`FunctionBuilder::use_var`]
    /// respectively).
    pub fn declare_var(&mut self, ty: Type) -> Variable {
        // Allocates the next `Variable` index; the recorded type is consulted
        // by `use_var`/`def_var` and the stack-map machinery.
        self.func_ctx.variables.push(ty)
    }
423
424    /// Declare that all uses of the given variable must be included in stack
425    /// map metadata.
426    ///
427    /// All values that are uses of this variable will be spilled to the stack
428    /// before each safepoint and reloaded afterwards. Stack maps allow the
429    /// garbage collector to identify the on-stack GC roots. Between spilling
430    /// the stack and it being reloading again, the stack can be updated to
431    /// facilitate moving GCs.
432    ///
433    /// This does not affect any pre-existing uses of the variable.
434    ///
435    /// # Panics
436    ///
437    /// Panics if the variable's type is larger than 16 bytes or if this
438    /// variable has not been declared yet.
439    pub fn declare_var_needs_stack_map(&mut self, var: Variable) {
440        log::trace!("declare_var_needs_stack_map({var:?})");
441        let ty = self.func_ctx.variables[var];
442        assert!(ty != types::INVALID);
443        assert!(ty.bytes() <= 16);
444        self.func_ctx.stack_map_vars.insert(var);
445    }
446
    /// Returns the Cranelift IR necessary to use a previously defined user
    /// variable, returning an error if this is not possible.
    pub fn try_use_var(&mut self, var: Variable) -> Result<Value, UseVariableError> {
        // Assert that we're about to add instructions to this block using the definition of the
        // given variable. ssa.use_var is the only part of this crate which can add block parameters
        // behind the caller's back. If we disallow calling append_block_param as soon as use_var is
        // called, then we enforce a strict separation between user parameters and SSA parameters.
        self.ensure_inserted_block();

        let (val, side_effects) = {
            // A missing entry means the variable was never handed out by
            // `declare_var`.
            let ty = *self
                .func_ctx
                .variables
                .get(var)
                .ok_or(UseVariableError::UsedBeforeDeclared(var))?;
            debug_assert_ne!(
                ty,
                types::INVALID,
                "variable {var:?} is used but its type has not been declared"
            );
            // Scoped block so the borrow of `func_ctx.variables` ends before
            // we handle side effects below.
            self.func_ctx
                .ssa
                .use_var(self.func, var, ty, self.position.unwrap())
        };
        // SSA construction may have added blocks/parameters; process them.
        self.handle_ssa_side_effects(side_effects);

        Ok(val)
    }
475
476    /// Returns the Cranelift IR value corresponding to the utilization at the current program
477    /// position of a previously defined user variable.
478    pub fn use_var(&mut self, var: Variable) -> Value {
479        self.try_use_var(var).unwrap_or_else(|_| {
480            panic!("variable {var:?} is used but its type has not been declared")
481        })
482    }
483
484    /// Registers a new definition of a user variable. This function will return
485    /// an error if the value supplied does not match the type the variable was
486    /// declared to have.
487    pub fn try_def_var(&mut self, var: Variable, val: Value) -> Result<(), DefVariableError> {
488        log::trace!("try_def_var: {var:?} = {val:?}");
489
490        let var_ty = *self
491            .func_ctx
492            .variables
493            .get(var)
494            .ok_or(DefVariableError::DefinedBeforeDeclared(var))?;
495        if var_ty != self.func.dfg.value_type(val) {
496            return Err(DefVariableError::TypeMismatch(var, val));
497        }
498
499        self.func_ctx.ssa.def_var(var, val, self.position.unwrap());
500        Ok(())
501    }
502
503    /// Register a new definition of a user variable. The type of the value must be
504    /// the same as the type registered for the variable.
505    pub fn def_var(&mut self, var: Variable, val: Value) {
506        self.try_def_var(var, val)
507            .unwrap_or_else(|error| match error {
508                DefVariableError::TypeMismatch(var, val) => {
509                    panic!("declared type of variable {var:?} doesn't match type of value {val}");
510                }
511                DefVariableError::DefinedBeforeDeclared(var) => {
512                    panic!("variable {var:?} is used but its type has not been declared");
513                }
514            })
515    }
516
517    /// Set label for [`Value`]
518    ///
519    /// This will not do anything unless
520    /// [`func.dfg.collect_debug_info`](DataFlowGraph::collect_debug_info) is called first.
521    pub fn set_val_label(&mut self, val: Value, label: ValueLabel) {
522        if let Some(values_labels) = self.func.stencil.dfg.values_labels.as_mut() {
523            use alloc::collections::btree_map::Entry;
524
525            let start = ValueLabelStart {
526                from: RelSourceLoc::from_base_offset(self.func.params.base_srcloc(), self.srcloc),
527                label,
528            };
529
530            match values_labels.entry(val) {
531                Entry::Occupied(mut e) => match e.get_mut() {
532                    ValueLabelAssignments::Starts(starts) => starts.push(start),
533                    _ => panic!("Unexpected ValueLabelAssignments at this stage"),
534                },
535                Entry::Vacant(e) => {
536                    e.insert(ValueLabelAssignments::Starts(vec![start]));
537                }
538            }
539        }
540    }
541
542    /// Declare that the given value is a GC reference that requires inclusion
543    /// in a stack map when it is live across GC safepoints.
544    ///
545    /// All values that are uses of this variable will be spilled to the stack
546    /// before each safepoint and reloaded afterwards. Stack maps allow the
547    /// garbage collector to identify the on-stack GC roots. Between spilling
548    /// the stack and it being reloading again, the stack can be updated to
549    /// facilitate moving GCs.
550    ///
551    /// # Panics
552    ///
553    /// Panics if `val` is larger than 16 bytes.
554    pub fn declare_value_needs_stack_map(&mut self, val: Value) {
555        log::trace!("declare_value_needs_stack_map({val:?})");
556
557        // We rely on these properties in `insert_safepoint_spills`.
558        let size = self.func.dfg.value_type(val).bytes();
559        assert!(size <= 16);
560        assert!(size.is_power_of_two());
561
562        self.func_ctx.stack_map_values.insert(val);
563    }
564
    /// Creates a jump table in the function, to be used by [`br_table`](InstBuilder::br_table) instructions.
    ///
    /// Delegates to [`Function::create_jump_table`].
    pub fn create_jump_table(&mut self, data: JumpTableData) -> JumpTable {
        self.func.create_jump_table(data)
    }
569
    /// Creates a sized stack slot in the function, to be used by [`stack_load`](InstBuilder::stack_load),
    /// [`stack_store`](InstBuilder::stack_store) and [`stack_addr`](InstBuilder::stack_addr) instructions.
    ///
    /// Delegates to [`Function::create_sized_stack_slot`].
    pub fn create_sized_stack_slot(&mut self, data: StackSlotData) -> StackSlot {
        self.func.create_sized_stack_slot(data)
    }
575
    /// Creates a dynamic stack slot in the function, to be used by
    /// [`dynamic_stack_load`](InstBuilder::dynamic_stack_load),
    /// [`dynamic_stack_store`](InstBuilder::dynamic_stack_store) and
    /// [`dynamic_stack_addr`](InstBuilder::dynamic_stack_addr) instructions.
    ///
    /// Delegates to [`Function::create_dynamic_stack_slot`].
    pub fn create_dynamic_stack_slot(&mut self, data: DynamicStackSlotData) -> DynamicStackSlot {
        self.func.create_dynamic_stack_slot(data)
    }
583
    /// Adds a signature which can later be used to declare an external function import.
    ///
    /// Delegates to [`Function::import_signature`].
    pub fn import_signature(&mut self, signature: Signature) -> SigRef {
        self.func.import_signature(signature)
    }
588
    /// Declare an external function import.
    ///
    /// Delegates to [`Function::import_function`].
    pub fn import_function(&mut self, data: ExtFuncData) -> FuncRef {
        self.func.import_function(data)
    }
593
    /// Declares a global value accessible to the function.
    ///
    /// Delegates to [`Function::create_global_value`].
    pub fn create_global_value(&mut self, data: GlobalValueData) -> GlobalValue {
        self.func.create_global_value(data)
    }
598
    /// Returns an object with the [`InstBuilder`]
    /// trait that allows to conveniently append an instruction to the current [`Block`] being built.
    ///
    /// # Panics
    ///
    /// Panics if [`switch_to_block`](Self::switch_to_block) was never called.
    pub fn ins<'short>(&'short mut self) -> FuncInstBuilder<'short, 'a> {
        let block = self
            .position
            .expect("Please call switch_to_block before inserting instructions");
        FuncInstBuilder::new(self, block)
    }
607
608    /// Make sure that the current block is inserted in the layout.
609    pub fn ensure_inserted_block(&mut self) {
610        let block = self.position.unwrap();
611        if self.is_pristine(block) {
612            if !self.func.layout.is_block_inserted(block) {
613                self.func.layout.append_block(block);
614            }
615            self.func_ctx.status[block] = BlockStatus::Partial;
616        } else {
617            debug_assert!(
618                !self.is_filled(block),
619                "you cannot add an instruction to a block already filled"
620            );
621        }
622    }
623
624    /// Returns a [`FuncCursor`] pointed at the current position ready for inserting instructions.
625    ///
626    /// This can be used to insert SSA code that doesn't need to access locals and that doesn't
627    /// need to know about [`FunctionBuilder`] at all.
628    pub fn cursor(&mut self) -> FuncCursor<'_> {
629        self.ensure_inserted_block();
630        FuncCursor::new(self.func)
631            .with_srcloc(self.srcloc)
632            .at_bottom(self.position.unwrap())
633    }
634
635    /// Append parameters to the given [`Block`] corresponding to the function
636    /// parameters. This can be used to set up the block parameters for the
637    /// entry block.
638    pub fn append_block_params_for_function_params(&mut self, block: Block) {
639        debug_assert!(
640            !self.func_ctx.ssa.has_any_predecessors(block),
641            "block parameters for function parameters should only be added to the entry block"
642        );
643
644        // These parameters count as "user" parameters here because they aren't
645        // inserted by the SSABuilder.
646        debug_assert!(
647            self.is_pristine(block),
648            "You can't add block parameters after adding any instruction"
649        );
650
651        for argtyp in &self.func.stencil.signature.params {
652            self.func
653                .stencil
654                .dfg
655                .append_block_param(block, argtyp.value_type);
656        }
657    }
658
659    /// Append parameters to the given [`Block`] corresponding to the function
660    /// return values. This can be used to set up the block parameters for a
661    /// function exit block.
662    pub fn append_block_params_for_function_returns(&mut self, block: Block) {
663        // These parameters count as "user" parameters here because they aren't
664        // inserted by the SSABuilder.
665        debug_assert!(
666            self.is_pristine(block),
667            "You can't add block parameters after adding any instruction"
668        );
669
670        for argtyp in &self.func.stencil.signature.returns {
671            self.func
672                .stencil
673                .dfg
674                .append_block_param(block, argtyp.value_type);
675        }
676    }
677
    /// Declare that translation of the current function is complete.
    ///
    /// This resets the state of the [`FunctionBuilderContext`] in preparation to
    /// be used for another function.
    pub fn finalize(mut self) {
        // Check that all the `Block`s are filled and sealed.
        #[cfg(debug_assertions)]
        {
            for block in self.func_ctx.status.keys() {
                // Pristine blocks were never used, so they are exempt.
                if !self.is_pristine(block) {
                    assert!(
                        self.func_ctx.ssa.is_sealed(block),
                        "FunctionBuilder finalized, but block {block} is not sealed",
                    );
                    assert!(
                        self.is_filled(block),
                        "FunctionBuilder finalized, but block {block} is not filled",
                    );
                }
            }
        }

        // In debug mode, check that all blocks are valid basic blocks.
        #[cfg(debug_assertions)]
        {
            // Iterate manually to provide more helpful error messages.
            for block in self.func_ctx.status.keys() {
                if let Err((inst, msg)) = self.func.is_block_basic(block) {
                    let inst_str = self.func.dfg.display_inst(inst);
                    panic!("{block} failed basic block invariants on {inst_str}: {msg}");
                }
            }
        }

        // Propagate the needs-stack-map bit from variables to each of their
        // associated values.
        for var in self.func_ctx.stack_map_vars.iter() {
            for val in self.func_ctx.ssa.values_for_var(var) {
                log::trace!("propagating needs-stack-map from {var:?} to {val:?}");
                debug_assert_eq!(self.func.dfg.value_type(val), self.func_ctx.variables[var]);
                self.func_ctx.stack_map_values.insert(val);
            }
        }

        // If we have any values that need inclusion in stack maps, then we need
        // to run our pass to spill those values to the stack at safepoints and
        // generate stack maps.
        if !self.func_ctx.stack_map_values.is_empty() {
            self.func_ctx
                .safepoints
                .run(&mut self.func, &self.func_ctx.stack_map_values);
        }

        // Clear the state (but preserve the allocated buffers) in preparation
        // for translating another function.
        self.func_ctx.clear();
    }
735}
736
/// All the functions documented in the previous block are write-only and help you build valid
/// Cranelift IR functions via multiple debug asserts. However, you might need to improve the
/// performance of your translation or perform more complex transformations to your Cranelift IR
/// function. The functions below help you inspect the function you're creating and modify it
/// in ways that can be unsafe if used incorrectly.
742impl<'a> FunctionBuilder<'a> {
743    /// Retrieves all the parameters for a [`Block`] currently inferred from the jump instructions
744    /// inserted that target it and the SSA construction.
745    pub fn block_params(&self, block: Block) -> &[Value] {
746        self.func.dfg.block_params(block)
747    }
748
749    /// Retrieves the signature with reference `sigref` previously added with
750    /// [`import_signature`](Self::import_signature).
751    pub fn signature(&self, sigref: SigRef) -> Option<&Signature> {
752        self.func.dfg.signatures.get(sigref)
753    }
754
755    /// Creates a parameter for a specific [`Block`] by appending it to the list of already existing
756    /// parameters.
757    ///
758    /// **Note:** this function has to be called at the creation of the `Block` before adding
759    /// instructions to it, otherwise this could interfere with SSA construction.
760    pub fn append_block_param(&mut self, block: Block, ty: Type) -> Value {
761        debug_assert!(
762            self.is_pristine(block),
763            "You can't add block parameters after adding any instruction"
764        );
765        self.func.dfg.append_block_param(block, ty)
766    }
767
768    /// Returns the result values of an instruction.
769    pub fn inst_results(&self, inst: Inst) -> &[Value] {
770        self.func.dfg.inst_results(inst)
771    }
772
773    /// Changes the destination of a jump instruction after creation.
774    ///
775    /// **Note:** You are responsible for maintaining the coherence with the arguments of
776    /// other jump instructions.
777    pub fn change_jump_destination(&mut self, inst: Inst, old_block: Block, new_block: Block) {
778        let dfg = &mut self.func.dfg;
779        for block in
780            dfg.insts[inst].branch_destination_mut(&mut dfg.jump_tables, &mut dfg.exception_tables)
781        {
782            if block.block(&dfg.value_lists) == old_block {
783                self.func_ctx.ssa.remove_block_predecessor(old_block, inst);
784                block.set_block(new_block, &mut dfg.value_lists);
785                self.func_ctx.ssa.declare_block_predecessor(new_block, inst);
786            }
787        }
788    }
789
790    /// Returns `true` if and only if the current [`Block`] is sealed and has no predecessors declared.
791    ///
792    /// The entry block of a function is never unreachable.
793    pub fn is_unreachable(&self) -> bool {
794        let is_entry = match self.func.layout.entry_block() {
795            None => false,
796            Some(entry) => self.position.unwrap() == entry,
797        };
798        !is_entry
799            && self.func_ctx.ssa.is_sealed(self.position.unwrap())
800            && !self
801                .func_ctx
802                .ssa
803                .has_any_predecessors(self.position.unwrap())
804    }
805
806    /// Returns `true` if and only if no instructions have been added since the last call to
807    /// [`switch_to_block`](Self::switch_to_block).
808    fn is_pristine(&self, block: Block) -> bool {
809        self.func_ctx.status[block] == BlockStatus::Empty
810    }
811
812    /// Returns `true` if and only if a terminator instruction has been inserted since the
813    /// last call to [`switch_to_block`](Self::switch_to_block).
814    fn is_filled(&self, block: Block) -> bool {
815        self.func_ctx.status[block] == BlockStatus::Filled
816    }
817}
818
/// Helper functions
impl<'a> FunctionBuilder<'a> {
    /// Calls libc.memcpy
    ///
    /// Copies the `size` bytes from `src` to `dest`, assumes that `src + size`
    /// won't overlap onto `dest`. If `dest` and `src` overlap, the behavior is
    /// undefined. Applications in which `dest` and `src` might overlap should
    /// use `call_memmove` instead.
    pub fn call_memcpy(
        &mut self,
        config: TargetFrontendConfig,
        dest: Value,
        src: Value,
        size: Value,
    ) {
        let pointer_type = config.pointer_type();
        // C signature: `void *memcpy(void *dest, const void *src, size_t n)`
        // — three pointer-sized parameters, pointer-sized return (`dest`).
        let signature = {
            let mut s = Signature::new(config.default_call_conv);
            s.params.push(AbiParam::new(pointer_type));
            s.params.push(AbiParam::new(pointer_type));
            s.params.push(AbiParam::new(pointer_type));
            s.returns.push(AbiParam::new(pointer_type));
            self.import_signature(s)
        };

        let libc_memcpy = self.import_function(ExtFuncData {
            name: ExternalName::LibCall(LibCall::Memcpy),
            signature,
            colocated: false,
            patchable: false,
        });

        // The returned `dest` pointer is ignored.
        self.ins().call(libc_memcpy, &[dest, src, size]);
    }

    /// Optimised memcpy or memmove for small copies.
    ///
    /// # Codegen safety
    ///
    /// The following properties must hold to prevent UB:
    ///
    /// * `src_align` and `dest_align` are an upper-bound on the alignment of `src` respectively `dest`.
    /// * If `non_overlapping` is true, then this must be correct.
    pub fn emit_small_memory_copy(
        &mut self,
        config: TargetFrontendConfig,
        dest: Value,
        src: Value,
        size: u64,
        dest_align: u8,
        src_align: u8,
        non_overlapping: bool,
        mut flags: MemFlags,
    ) {
        // Maximum number of inline load/store pairs before deferring to a
        // libcall. Currently the result of guess work, not actual profiling.
        const THRESHOLD: u64 = 4;

        if size == 0 {
            return;
        }

        // Width of each inline access: the largest power of two dividing
        // `size`, so the copy decomposes into same-sized accesses.
        let access_size = greatest_divisible_power_of_two(size);
        // NOTE(review): `access_size` is a power of two by construction for
        // any nonzero `size`, so this assert cannot fire; its message also
        // refers to `size` rather than `access_size`.
        assert!(
            access_size.is_power_of_two(),
            "`size` is not a power of two"
        );
        // NOTE(review): message is confusingly worded — this compares the
        // access width (bytes) against the smaller of the two alignments.
        assert!(
            access_size >= u64::from(::core::cmp::min(src_align, dest_align)),
            "`size` is smaller than `dest` and `src`'s alignment value."
        );

        // Clamp the access width to 64 bits, the widest plain integer
        // load/store emitted here.
        let (access_size, int_type) = if access_size <= 8 {
            (access_size, Type::int((access_size * 8) as u16).unwrap())
        } else {
            (8, types::I64)
        };

        let load_and_store_amount = size / access_size;

        // Too many inline accesses: call out to libc instead.
        if load_and_store_amount > THRESHOLD {
            let size_value = self.ins().iconst(config.pointer_type(), size as i64);
            if non_overlapping {
                self.call_memcpy(config, dest, src, size_value);
            } else {
                self.call_memmove(config, dest, src, size_value);
            }
            return;
        }

        // Only mark the accesses aligned when both pointers are known to be
        // at least as aligned as the access width.
        if u64::from(src_align) >= access_size && u64::from(dest_align) >= access_size {
            flags.set_aligned();
        }

        // Load all of the memory first. This is necessary in case `dest` overlaps.
        // It can also improve performance a bit.
        let registers: smallvec::SmallVec<[_; THRESHOLD as usize]> = (0..load_and_store_amount)
            .map(|i| {
                let offset = (access_size * i) as i32;
                (self.ins().load(int_type, flags, src, offset), offset)
            })
            .collect();

        for (value, offset) in registers {
            self.ins().store(flags, value, dest, offset);
        }
    }

    /// Calls libc.memset
    ///
    /// Writes `size` bytes of i8 value `ch` to memory starting at `buffer`.
    pub fn call_memset(
        &mut self,
        config: TargetFrontendConfig,
        buffer: Value,
        ch: Value,
        size: Value,
    ) {
        let pointer_type = config.pointer_type();
        // C signature: `void *memset(void *s, int c, size_t n)` — note the
        // `int` (I32) fill-value parameter, hence the `uextend` below.
        let signature = {
            let mut s = Signature::new(config.default_call_conv);
            s.params.push(AbiParam::new(pointer_type));
            s.params.push(AbiParam::new(types::I32));
            s.params.push(AbiParam::new(pointer_type));
            s.returns.push(AbiParam::new(pointer_type));
            self.import_signature(s)
        };

        let libc_memset = self.import_function(ExtFuncData {
            name: ExternalName::LibCall(LibCall::Memset),
            signature,
            colocated: false,
            patchable: false,
        });

        // Widen the i8 fill byte to memset's C `int` parameter type.
        let ch = self.ins().uextend(types::I32, ch);
        self.ins().call(libc_memset, &[buffer, ch, size]);
    }

    /// Calls libc.memset
    ///
    /// Writes `size` bytes of value `ch` to memory starting at `buffer`.
    pub fn emit_small_memset(
        &mut self,
        config: TargetFrontendConfig,
        buffer: Value,
        ch: u8,
        size: u64,
        buffer_align: u8,
        mut flags: MemFlags,
    ) {
        // Maximum number of inline stores before deferring to the libcall.
        // Currently the result of guess work, not actual profiling.
        const THRESHOLD: u64 = 4;

        if size == 0 {
            return;
        }

        // Width of each inline store: largest power of two dividing `size`.
        let access_size = greatest_divisible_power_of_two(size);
        // NOTE(review): always true by construction for nonzero `size`; the
        // message also refers to `size` rather than `access_size`.
        assert!(
            access_size.is_power_of_two(),
            "`size` is not a power of two"
        );
        assert!(
            access_size >= u64::from(buffer_align),
            "`size` is smaller than `dest` and `src`'s alignment value."
        );

        // Clamp the store width to 64 bits.
        let (access_size, int_type) = if access_size <= 8 {
            (access_size, Type::int((access_size * 8) as u16).unwrap())
        } else {
            (8, types::I64)
        };

        let load_and_store_amount = size / access_size;

        if load_and_store_amount > THRESHOLD {
            // Too many inline stores: call out to libc memset instead.
            let ch = self.ins().iconst(types::I8, i64::from(ch));
            let size = self.ins().iconst(config.pointer_type(), size as i64);
            self.call_memset(config, buffer, ch, size);
        } else {
            if u64::from(buffer_align) >= access_size {
                flags.set_aligned();
            }

            // Broadcast the fill byte across every byte lane of the store
            // width (e.g. 0xAB -> 0xABAB for an i16 store). For ch == 0xFF
            // and I64 this is exactly u64::MAX, so the multiply can't wrap.
            let ch = u64::from(ch);
            let raw_value = if int_type == types::I64 {
                ch * 0x0101010101010101_u64
            } else if int_type == types::I32 {
                ch * 0x01010101_u64
            } else if int_type == types::I16 {
                (ch << 8) | ch
            } else {
                assert_eq!(int_type, types::I8);
                ch
            };

            let value = self.ins().iconst(int_type, raw_value as i64);
            for i in 0..load_and_store_amount {
                let offset = (access_size * i) as i32;
                self.ins().store(flags, value, buffer, offset);
            }
        }
    }

    /// Calls libc.memmove
    ///
    /// Copies `size` bytes from memory starting at `source` to memory starting
    /// at `dest`. `source` is always read before writing to `dest`.
    pub fn call_memmove(
        &mut self,
        config: TargetFrontendConfig,
        dest: Value,
        source: Value,
        size: Value,
    ) {
        let pointer_type = config.pointer_type();
        // Same shape as memcpy: (dest, src, n) -> dest, all pointer-sized.
        let signature = {
            let mut s = Signature::new(config.default_call_conv);
            s.params.push(AbiParam::new(pointer_type));
            s.params.push(AbiParam::new(pointer_type));
            s.params.push(AbiParam::new(pointer_type));
            s.returns.push(AbiParam::new(pointer_type));
            self.import_signature(s)
        };

        let libc_memmove = self.import_function(ExtFuncData {
            name: ExternalName::LibCall(LibCall::Memmove),
            signature,
            colocated: false,
            patchable: false,
        });

        self.ins().call(libc_memmove, &[dest, source, size]);
    }

    /// Calls libc.memcmp
    ///
    /// Compares `size` bytes from memory starting at `left` to memory starting
    /// at `right`. Returns `0` if all `n` bytes are equal.  If the first difference
    /// is at offset `i`, returns a positive integer if `ugt(left[i], right[i])`
    /// and a negative integer if `ult(left[i], right[i])`.
    ///
    /// Returns a C `int`, which is currently always [`types::I32`].
    pub fn call_memcmp(
        &mut self,
        config: TargetFrontendConfig,
        left: Value,
        right: Value,
        size: Value,
    ) -> Value {
        let pointer_type = config.pointer_type();
        // C signature: `int memcmp(const void *s1, const void *s2, size_t n)`.
        let signature = {
            let mut s = Signature::new(config.default_call_conv);
            s.params.reserve(3);
            s.params.push(AbiParam::new(pointer_type));
            s.params.push(AbiParam::new(pointer_type));
            s.params.push(AbiParam::new(pointer_type));
            s.returns.push(AbiParam::new(types::I32));
            self.import_signature(s)
        };

        let libc_memcmp = self.import_function(ExtFuncData {
            name: ExternalName::LibCall(LibCall::Memcmp),
            signature,
            colocated: false,
            patchable: false,
        });

        let call = self.ins().call(libc_memcmp, &[left, right, size]);
        self.func.dfg.first_result(call)
    }

    /// Optimised [`Self::call_memcmp`] for small copies.
    ///
    /// This implements the byte slice comparison `int_cc(left[..size], right[..size])`.
    ///
    /// `left_align` and `right_align` are the statically-known alignments of the
    /// `left` and `right` pointers respectively.  These are used to know whether
    /// to mark `load`s as aligned.  It's always fine to pass `1` for these, but
    /// passing something higher than the true alignment may trap or otherwise
    /// misbehave as described in [`MemFlags::aligned`].
    ///
    /// Note that `memcmp` is a *big-endian* and *unsigned* comparison.
    /// As such, this panics when called with `IntCC::Signed*`.
    pub fn emit_small_memory_compare(
        &mut self,
        config: TargetFrontendConfig,
        int_cc: IntCC,
        left: Value,
        right: Value,
        size: u64,
        left_align: std::num::NonZeroU8,
        right_align: std::num::NonZeroU8,
        flags: MemFlags,
    ) -> Value {
        use IntCC::*;
        // For each supported condition, pick: the condition to apply to
        // memcmp's result against zero, and the i8 result of comparing two
        // empty (size == 0, hence equal) slices.
        let (zero_cc, empty_imm) = match int_cc {
            // memcmp returns a *signed* C int, so the unsigned byte-slice
            // conditions map to signed comparisons of that result with 0.
            Equal => (Equal, 1),
            NotEqual => (NotEqual, 0),

            UnsignedLessThan => (SignedLessThan, 0),
            UnsignedGreaterThanOrEqual => (SignedGreaterThanOrEqual, 1),
            UnsignedGreaterThan => (SignedGreaterThan, 0),
            UnsignedLessThanOrEqual => (SignedLessThanOrEqual, 1),

            SignedLessThan
            | SignedGreaterThanOrEqual
            | SignedGreaterThan
            | SignedLessThanOrEqual => {
                panic!("Signed comparison {int_cc} not supported by memcmp")
            }
        };

        if size == 0 {
            return self.ins().iconst(types::I8, empty_imm);
        }

        // Future work could consider expanding this to handle more-complex scenarios.
        if let Some(small_type) = size.try_into().ok().and_then(Type::int_with_byte_size) {
            if let Equal | NotEqual = zero_cc {
                // (In)equality of a single 1/2/4/8-byte load does not depend
                // on byte order, so compare the loads directly.
                let mut left_flags = flags;
                if size == left_align.get() as u64 {
                    left_flags.set_aligned();
                }
                let mut right_flags = flags;
                if size == right_align.get() as u64 {
                    right_flags.set_aligned();
                }
                let left_val = self.ins().load(small_type, left_flags, left, 0);
                let right_val = self.ins().load(small_type, right_flags, right, 0);
                return self.ins().icmp(int_cc, left_val, right_val);
            } else if small_type == types::I8 {
                // Once the big-endian loads from wasmtime#2492 are implemented in
                // the backends, we could easily handle comparisons for more sizes here.
                // But for now, just handle single bytes where we don't need to worry.

                // A single byte load is always aligned.
                let mut aligned_flags = flags;
                aligned_flags.set_aligned();
                let left_val = self.ins().load(small_type, aligned_flags, left, 0);
                let right_val = self.ins().load(small_type, aligned_flags, right, 0);
                return self.ins().icmp(int_cc, left_val, right_val);
            }
        }

        // General case: call memcmp and compare its result against zero.
        let pointer_type = config.pointer_type();
        let size = self.ins().iconst(pointer_type, size as i64);
        let cmp = self.call_memcmp(config, left, right, size);
        self.ins().icmp_imm(zero_cc, cmp, 0)
    }
}
1170
/// Returns the greatest power of two that divides `size`, i.e. the value of
/// `size`'s lowest set bit.
///
/// Returns `0` when `size == 0`; callers in this file early-return on a zero
/// size before calling this.
fn greatest_divisible_power_of_two(size: u64) -> u64 {
    // In two's complement, `x & -x` isolates the lowest set bit. Using
    // `wrapping_neg` keeps the computation entirely in `u64` instead of
    // round-tripping through `i64` casts.
    size & size.wrapping_neg()
}
1174
1175// Helper functions
1176impl<'a> FunctionBuilder<'a> {
1177    /// A Block is 'filled' when a terminator instruction is present.
1178    fn fill_current_block(&mut self) {
1179        self.func_ctx.status[self.position.unwrap()] = BlockStatus::Filled;
1180    }
1181
1182    fn declare_successor(&mut self, dest_block: Block, jump_inst: Inst) {
1183        self.func_ctx
1184            .ssa
1185            .declare_block_predecessor(dest_block, jump_inst);
1186    }
1187
1188    fn handle_ssa_side_effects(&mut self, side_effects: SideEffects) {
1189        let SideEffects {
1190            instructions_added_to_blocks,
1191        } = side_effects;
1192
1193        for modified_block in instructions_added_to_blocks {
1194            if self.is_pristine(modified_block) {
1195                self.func_ctx.status[modified_block] = BlockStatus::Partial;
1196            }
1197        }
1198    }
1199}
1200
1201#[cfg(test)]
1202mod tests {
1203    use super::greatest_divisible_power_of_two;
1204    use crate::Variable;
1205    use crate::frontend::{
1206        DefVariableError, FunctionBuilder, FunctionBuilderContext, UseVariableError,
1207    };
1208    use alloc::string::ToString;
1209    use cranelift_codegen::ir::condcodes::IntCC;
1210    use cranelift_codegen::ir::{
1211        AbiParam, BlockCall, ExceptionTableData, ExtFuncData, ExternalName, Function, InstBuilder,
1212        MemFlags, Signature, UserExternalName, UserFuncName, Value, types::*,
1213    };
1214    use cranelift_codegen::isa::{CallConv, TargetFrontendConfig, TargetIsa};
1215    use cranelift_codegen::settings;
1216    use cranelift_codegen::verifier::verify_function;
1217    use target_lexicon::PointerWidth;
1218
    /// Builds a small function with a loop (block1 -> block3 -> block1) and a
    /// conditional exit through block2, exercising `use_var`/`def_var` across
    /// blocks, then runs the IR verifier on the result.
    ///
    /// `lazy_seal == false` seals each block individually during construction;
    /// `lazy_seal == true` defers to a single `seal_all_blocks` at the end.
    fn sample_function(lazy_seal: bool) {
        let mut sig = Signature::new(CallConv::SystemV);
        sig.returns.push(AbiParam::new(I32));
        sig.params.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let block1 = builder.create_block();
            let block2 = builder.create_block();
            let block3 = builder.create_block();
            let x = builder.declare_var(I32);
            let y = builder.declare_var(I32);
            let z = builder.declare_var(I32);

            builder.append_block_params_for_function_params(block0);

            // block0: x = arg0; y = 2; z = x + y; jump block1
            builder.switch_to_block(block0);
            if !lazy_seal {
                builder.seal_block(block0);
            }
            {
                let tmp = builder.block_params(block0)[0]; // the first function parameter
                builder.def_var(x, tmp);
            }
            {
                let tmp = builder.ins().iconst(I32, 2);
                builder.def_var(y, tmp);
            }
            {
                let arg1 = builder.use_var(x);
                let arg2 = builder.use_var(y);
                let tmp = builder.ins().iadd(arg1, arg2);
                builder.def_var(z, tmp);
            }
            builder.ins().jump(block1, &[]);

            // block1: z = y + z; branch on y to block3 or block2
            builder.switch_to_block(block1);
            {
                let arg1 = builder.use_var(y);
                let arg2 = builder.use_var(z);
                let tmp = builder.ins().iadd(arg1, arg2);
                builder.def_var(z, tmp);
            }
            {
                let arg = builder.use_var(y);
                builder.ins().brif(arg, block3, &[], block2, &[]);
            }

            // block2: z = z - x; return y
            builder.switch_to_block(block2);
            if !lazy_seal {
                builder.seal_block(block2);
            }
            {
                let arg1 = builder.use_var(z);
                let arg2 = builder.use_var(x);
                let tmp = builder.ins().isub(arg1, arg2);
                builder.def_var(z, tmp);
            }
            {
                let arg = builder.use_var(y);
                builder.ins().return_(&[arg]);
            }

            // block3: y = y - x; jump back to block1 (the loop back-edge)
            builder.switch_to_block(block3);
            if !lazy_seal {
                builder.seal_block(block3);
            }

            {
                let arg1 = builder.use_var(y);
                let arg2 = builder.use_var(x);
                let tmp = builder.ins().isub(arg1, arg2);
                builder.def_var(y, tmp);
            }
            builder.ins().jump(block1, &[]);
            // block1 can only be sealed once its back-edge from block3 exists.
            if !lazy_seal {
                builder.seal_block(block1);
            }

            if lazy_seal {
                builder.seal_all_blocks();
            }

            builder.finalize();
        }

        let flags = settings::Flags::new(settings::builder());
        // println!("{}", func.display(None));
        if let Err(errors) = verify_function(&func, &flags) {
            panic!("{}\n{}", func.display(), errors)
        }
    }
1315
    /// Eager-sealing variant: each block is sealed individually as soon as all
    /// of its predecessors are known.
    #[test]
    fn sample() {
        sample_function(false)
    }
1320
    /// Lazy-sealing variant: no per-block sealing; a single `seal_all_blocks`
    /// call at the end.
    #[test]
    fn sample_with_lazy_seal() {
        sample_function(true)
    }
1325
1326    #[track_caller]
1327    fn check(func: &Function, expected_ir: &str) {
1328        let expected_ir = expected_ir.trim();
1329        let actual_ir = func.display().to_string();
1330        let actual_ir = actual_ir.trim();
1331        assert!(
1332            expected_ir == actual_ir,
1333            "Expected:\n{expected_ir}\nGot:\n{actual_ir}"
1334        );
1335    }
1336
    /// Helper function to construct a fixed frontend configuration:
    /// SystemV call convention, 64-bit pointers, 4 KiB (2^12) page alignment.
    fn systemv_frontend_config() -> TargetFrontendConfig {
        TargetFrontendConfig {
            default_call_conv: CallConv::SystemV,
            pointer_width: PointerWidth::U64,
            page_size_align_log2: 12,
        }
    }
1345
    /// `call_memcpy` should import the `Memcpy` libcall with a
    /// (ptr, ptr, ptr) -> ptr signature and emit a single call to it; the
    /// expected IR below is compared textually (after trimming).
    #[test]
    fn memcpy() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let x = builder.declare_var(frontend_config.pointer_type());
            let y = builder.declare_var(frontend_config.pointer_type());
            let _z = builder.declare_var(I32);

            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            // The variables are never defined, so `use_var` materializes
            // zero constants for them (visible as the iconst 0s below).
            let src = builder.use_var(x);
            let dest = builder.use_var(y);
            let size = builder.use_var(y);
            builder.call_memcpy(frontend_config, dest, src, size);
            builder.ins().return_(&[size]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
    sig0 = (i64, i64, i64) -> i64 system_v
    fn0 = %Memcpy sig0

block0:
    v4 = iconst.i64 0
    v1 -> v4
    v3 = iconst.i64 0
    v0 -> v3
    v2 = call fn0(v1, v0, v1)  ; v1 = 0, v0 = 0, v1 = 0
    return v1  ; v1 = 0
}
",
        );
    }
1392
    /// An 8-byte copy with 8-byte-aligned pointers stays under the inline
    /// threshold: `emit_small_memory_copy` should emit one aligned i64
    /// load/store pair instead of a libcall.
    #[test]
    fn small_memcpy() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let x = builder.declare_var(frontend_config.pointer_type());
            let y = builder.declare_var(frontend_config.pointer_type());

            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let src = builder.use_var(x);
            let dest = builder.use_var(y);
            let size = 8;
            builder.emit_small_memory_copy(
                frontend_config,
                dest,
                src,
                size,
                8,
                8,
                true,
                MemFlags::new(),
            );
            builder.ins().return_(&[dest]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
block0:
    v4 = iconst.i64 0
    v1 -> v4
    v3 = iconst.i64 0
    v0 -> v3
    v2 = load.i64 aligned v0  ; v0 = 0
    store aligned v2, v1  ; v1 = 0
    return v1  ; v1 = 0
}
",
        );
    }
1445
    /// An 8192-byte copy exceeds the inline threshold, so
    /// `emit_small_memory_copy` (with `non_overlapping == true`) should fall
    /// back to calling the `Memcpy` libcall.
    #[test]
    fn not_so_small_memcpy() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let x = builder.declare_var(frontend_config.pointer_type());
            let y = builder.declare_var(frontend_config.pointer_type());
            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let src = builder.use_var(x);
            let dest = builder.use_var(y);
            let size = 8192;
            builder.emit_small_memory_copy(
                frontend_config,
                dest,
                src,
                size,
                8,
                8,
                true,
                MemFlags::new(),
            );
            builder.ins().return_(&[dest]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
    sig0 = (i64, i64, i64) -> i64 system_v
    fn0 = %Memcpy sig0

block0:
    v5 = iconst.i64 0
    v1 -> v5
    v4 = iconst.i64 0
    v0 -> v4
    v2 = iconst.i64 8192
    v3 = call fn0(v1, v0, v2)  ; v1 = 0, v0 = 0, v2 = 8192
    return v1  ; v1 = 0
}
",
        );
    }
1500
    /// An 8-byte memset with an 8-byte-aligned buffer stays under the inline
    /// threshold: `emit_small_memset` should emit a single aligned i64 store
    /// of the fill byte broadcast across all 8 byte lanes.
    #[test]
    fn small_memset() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let y = builder.declare_var(frontend_config.pointer_type());
            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let dest = builder.use_var(y);
            let size = 8;
            builder.emit_small_memset(frontend_config, dest, 1, size, 8, MemFlags::new());
            builder.ins().return_(&[dest]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
block0:
    v2 = iconst.i64 0
    v0 -> v2
    v1 = iconst.i64 0x0101_0101_0101_0101
    store aligned v1, v0  ; v1 = 0x0101_0101_0101_0101, v0 = 0
    return v0  ; v0 = 0
}
",
        );
    }
1539
    /// An 8192-byte memset exceeds the inline threshold, so
    /// `emit_small_memset` should fall back to calling the `Memset` libcall,
    /// widening the i8 fill byte to the C `int` parameter via `uextend`.
    #[test]
    fn not_so_small_memset() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let y = builder.declare_var(frontend_config.pointer_type());
            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let dest = builder.use_var(y);
            let size = 8192;
            builder.emit_small_memset(frontend_config, dest, 1, size, 8, MemFlags::new());
            builder.ins().return_(&[dest]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
    sig0 = (i64, i32, i64) -> i64 system_v
    fn0 = %Memset sig0

block0:
    v5 = iconst.i64 0
    v0 -> v5
    v1 = iconst.i8 1
    v2 = iconst.i64 8192
    v3 = uextend.i32 v1  ; v1 = 1
    v4 = call fn0(v0, v3, v2)  ; v0 = 0, v2 = 8192
    return v0  ; v0 = 0
}
",
        );
    }
1583
1584    #[test]
1585    fn memcmp() {
1586        use core::str::FromStr;
1587        use cranelift_codegen::isa;
1588
1589        let shared_builder = settings::builder();
1590        let shared_flags = settings::Flags::new(shared_builder);
1591
1592        let triple =
1593            ::target_lexicon::Triple::from_str("x86_64").expect("Couldn't create x86_64 triple");
1594
1595        let target = isa::lookup(triple)
1596            .ok()
1597            .map(|b| b.finish(shared_flags))
1598            .expect("This test requires x86_64 support.")
1599            .expect("Should be able to create backend with default flags");
1600
1601        let mut sig = Signature::new(target.default_call_conv());
1602        sig.returns.push(AbiParam::new(I32));
1603
1604        let mut fn_ctx = FunctionBuilderContext::new();
1605        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
1606        {
1607            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);
1608
1609            let block0 = builder.create_block();
1610            let x = builder.declare_var(target.pointer_type());
1611            let y = builder.declare_var(target.pointer_type());
1612            let z = builder.declare_var(target.pointer_type());
1613            builder.append_block_params_for_function_params(block0);
1614            builder.switch_to_block(block0);
1615
1616            let left = builder.use_var(x);
1617            let right = builder.use_var(y);
1618            let size = builder.use_var(z);
1619            let cmp = builder.call_memcmp(target.frontend_config(), left, right, size);
1620            builder.ins().return_(&[cmp]);
1621
1622            builder.seal_all_blocks();
1623            builder.finalize();
1624        }
1625
1626        check(
1627            &func,
1628            "function %sample() -> i32 system_v {
1629    sig0 = (i64, i64, i64) -> i32 system_v
1630    fn0 = %Memcmp sig0
1631
1632block0:
1633    v6 = iconst.i64 0
1634    v2 -> v6
1635    v5 = iconst.i64 0
1636    v1 -> v5
1637    v4 = iconst.i64 0
1638    v0 -> v4
1639    v3 = call fn0(v0, v1, v2)  ; v0 = 0, v1 = 0, v2 = 0
1640    return v3
1641}
1642",
1643        );
1644    }
1645
1646    #[test]
1647    fn small_memcmp_zero_size() {
1648        let align_eight = std::num::NonZeroU8::new(8).unwrap();
1649        small_memcmp_helper(
1650            "
1651block0:
1652    v4 = iconst.i64 0
1653    v1 -> v4
1654    v3 = iconst.i64 0
1655    v0 -> v3
1656    v2 = iconst.i8 1
1657    return v2  ; v2 = 1",
1658            |builder, target, x, y| {
1659                builder.emit_small_memory_compare(
1660                    target.frontend_config(),
1661                    IntCC::UnsignedGreaterThanOrEqual,
1662                    x,
1663                    y,
1664                    0,
1665                    align_eight,
1666                    align_eight,
1667                    MemFlags::new(),
1668                )
1669            },
1670        );
1671    }
1672
1673    #[test]
1674    fn small_memcmp_byte_ugt() {
1675        let align_one = std::num::NonZeroU8::new(1).unwrap();
1676        small_memcmp_helper(
1677            "
1678block0:
1679    v6 = iconst.i64 0
1680    v1 -> v6
1681    v5 = iconst.i64 0
1682    v0 -> v5
1683    v2 = load.i8 aligned v0  ; v0 = 0
1684    v3 = load.i8 aligned v1  ; v1 = 0
1685    v4 = icmp ugt v2, v3
1686    return v4",
1687            |builder, target, x, y| {
1688                builder.emit_small_memory_compare(
1689                    target.frontend_config(),
1690                    IntCC::UnsignedGreaterThan,
1691                    x,
1692                    y,
1693                    1,
1694                    align_one,
1695                    align_one,
1696                    MemFlags::new(),
1697                )
1698            },
1699        );
1700    }
1701
1702    #[test]
1703    fn small_memcmp_aligned_eq() {
1704        let align_four = std::num::NonZeroU8::new(4).unwrap();
1705        small_memcmp_helper(
1706            "
1707block0:
1708    v6 = iconst.i64 0
1709    v1 -> v6
1710    v5 = iconst.i64 0
1711    v0 -> v5
1712    v2 = load.i32 aligned v0  ; v0 = 0
1713    v3 = load.i32 aligned v1  ; v1 = 0
1714    v4 = icmp eq v2, v3
1715    return v4",
1716            |builder, target, x, y| {
1717                builder.emit_small_memory_compare(
1718                    target.frontend_config(),
1719                    IntCC::Equal,
1720                    x,
1721                    y,
1722                    4,
1723                    align_four,
1724                    align_four,
1725                    MemFlags::new(),
1726                )
1727            },
1728        );
1729    }
1730
1731    #[test]
1732    fn small_memcmp_ipv6_ne() {
1733        let align_two = std::num::NonZeroU8::new(2).unwrap();
1734        small_memcmp_helper(
1735            "
1736block0:
1737    v6 = iconst.i64 0
1738    v1 -> v6
1739    v5 = iconst.i64 0
1740    v0 -> v5
1741    v2 = load.i128 v0  ; v0 = 0
1742    v3 = load.i128 v1  ; v1 = 0
1743    v4 = icmp ne v2, v3
1744    return v4",
1745            |builder, target, x, y| {
1746                builder.emit_small_memory_compare(
1747                    target.frontend_config(),
1748                    IntCC::NotEqual,
1749                    x,
1750                    y,
1751                    16,
1752                    align_two,
1753                    align_two,
1754                    MemFlags::new(),
1755                )
1756            },
1757        );
1758    }
1759
1760    #[test]
1761    fn small_memcmp_odd_size_uge() {
1762        let one = std::num::NonZeroU8::new(1).unwrap();
1763        small_memcmp_helper(
1764            "
1765    sig0 = (i64, i64, i64) -> i32 system_v
1766    fn0 = %Memcmp sig0
1767
1768block0:
1769    v6 = iconst.i64 0
1770    v1 -> v6
1771    v5 = iconst.i64 0
1772    v0 -> v5
1773    v2 = iconst.i64 3
1774    v3 = call fn0(v0, v1, v2)  ; v0 = 0, v1 = 0, v2 = 3
1775    v4 = icmp_imm sge v3, 0
1776    return v4",
1777            |builder, target, x, y| {
1778                builder.emit_small_memory_compare(
1779                    target.frontend_config(),
1780                    IntCC::UnsignedGreaterThanOrEqual,
1781                    x,
1782                    y,
1783                    3,
1784                    one,
1785                    one,
1786                    MemFlags::new(),
1787                )
1788            },
1789        );
1790    }
1791
1792    fn small_memcmp_helper(
1793        expected: &str,
1794        f: impl FnOnce(&mut FunctionBuilder, &dyn TargetIsa, Value, Value) -> Value,
1795    ) {
1796        use core::str::FromStr;
1797        use cranelift_codegen::isa;
1798
1799        let shared_builder = settings::builder();
1800        let shared_flags = settings::Flags::new(shared_builder);
1801
1802        let triple =
1803            ::target_lexicon::Triple::from_str("x86_64").expect("Couldn't create x86_64 triple");
1804
1805        let target = isa::lookup(triple)
1806            .ok()
1807            .map(|b| b.finish(shared_flags))
1808            .expect("This test requires x86_64 support.")
1809            .expect("Should be able to create backend with default flags");
1810
1811        let mut sig = Signature::new(target.default_call_conv());
1812        sig.returns.push(AbiParam::new(I8));
1813
1814        let mut fn_ctx = FunctionBuilderContext::new();
1815        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
1816        {
1817            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);
1818
1819            let block0 = builder.create_block();
1820            let x = builder.declare_var(target.pointer_type());
1821            let y = builder.declare_var(target.pointer_type());
1822            builder.append_block_params_for_function_params(block0);
1823            builder.switch_to_block(block0);
1824
1825            let left = builder.use_var(x);
1826            let right = builder.use_var(y);
1827            let ret = f(&mut builder, &*target, left, right);
1828            builder.ins().return_(&[ret]);
1829
1830            builder.seal_all_blocks();
1831            builder.finalize();
1832        }
1833
1834        check(
1835            &func,
1836            &format!("function %sample() -> i8 system_v {{{expected}\n}}\n"),
1837        );
1838    }
1839
1840    #[test]
1841    fn undef_vector_vars() {
1842        let mut sig = Signature::new(CallConv::SystemV);
1843        sig.returns.push(AbiParam::new(I8X16));
1844        sig.returns.push(AbiParam::new(I8X16));
1845        sig.returns.push(AbiParam::new(F32X4));
1846
1847        let mut fn_ctx = FunctionBuilderContext::new();
1848        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
1849        {
1850            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);
1851
1852            let block0 = builder.create_block();
1853            let a = builder.declare_var(I8X16);
1854            let b = builder.declare_var(I8X16);
1855            let c = builder.declare_var(F32X4);
1856            builder.switch_to_block(block0);
1857
1858            let a = builder.use_var(a);
1859            let b = builder.use_var(b);
1860            let c = builder.use_var(c);
1861            builder.ins().return_(&[a, b, c]);
1862
1863            builder.seal_all_blocks();
1864            builder.finalize();
1865        }
1866
1867        check(
1868            &func,
1869            "function %sample() -> i8x16, i8x16, f32x4 system_v {
1870    const0 = 0x00000000000000000000000000000000
1871
1872block0:
1873    v5 = f32const 0.0
1874    v6 = splat.f32x4 v5  ; v5 = 0.0
1875    v2 -> v6
1876    v4 = vconst.i8x16 const0
1877    v1 -> v4
1878    v3 = vconst.i8x16 const0
1879    v0 -> v3
1880    return v0, v1, v2  ; v0 = const0, v1 = const0
1881}
1882",
1883        );
1884    }
1885
1886    #[test]
1887    fn test_greatest_divisible_power_of_two() {
1888        assert_eq!(64, greatest_divisible_power_of_two(64));
1889        assert_eq!(16, greatest_divisible_power_of_two(48));
1890        assert_eq!(8, greatest_divisible_power_of_two(24));
1891        assert_eq!(1, greatest_divisible_power_of_two(25));
1892    }
1893
1894    #[test]
1895    fn try_use_var() {
1896        let sig = Signature::new(CallConv::SystemV);
1897
1898        let mut fn_ctx = FunctionBuilderContext::new();
1899        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
1900        {
1901            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);
1902
1903            let block0 = builder.create_block();
1904            builder.append_block_params_for_function_params(block0);
1905            builder.switch_to_block(block0);
1906
1907            assert_eq!(
1908                builder.try_use_var(Variable::from_u32(0)),
1909                Err(UseVariableError::UsedBeforeDeclared(Variable::from_u32(0)))
1910            );
1911
1912            let value = builder.ins().iconst(cranelift_codegen::ir::types::I32, 0);
1913
1914            assert_eq!(
1915                builder.try_def_var(Variable::from_u32(0), value),
1916                Err(DefVariableError::DefinedBeforeDeclared(Variable::from_u32(
1917                    0
1918                )))
1919            );
1920        }
1921    }
1922
1923    #[test]
1924    fn test_builder_with_iconst_and_negative_constant() {
1925        let sig = Signature::new(CallConv::SystemV);
1926        let mut fn_ctx = FunctionBuilderContext::new();
1927        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
1928
1929        let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);
1930
1931        let block0 = builder.create_block();
1932        builder.switch_to_block(block0);
1933        builder.ins().iconst(I32, -1);
1934        builder.ins().return_(&[]);
1935
1936        builder.seal_all_blocks();
1937        builder.finalize();
1938
1939        let flags = cranelift_codegen::settings::Flags::new(cranelift_codegen::settings::builder());
1940        let ctx = cranelift_codegen::Context::for_function(func);
1941        ctx.verify(&flags).expect("should be valid");
1942
1943        check(
1944            &ctx.func,
1945            "function %sample() system_v {
1946block0:
1947    v0 = iconst.i32 -1
1948    return
1949}",
1950        );
1951    }
1952
1953    #[test]
1954    fn try_call() {
1955        let mut sig = Signature::new(CallConv::SystemV);
1956        sig.params.push(AbiParam::new(I8));
1957        sig.returns.push(AbiParam::new(I32));
1958        let mut fn_ctx = FunctionBuilderContext::new();
1959        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
1960
1961        let sig0 = func.import_signature(Signature::new(CallConv::SystemV));
1962        let name = func.declare_imported_user_function(UserExternalName::new(0, 0));
1963        let fn0 = func.import_function(ExtFuncData {
1964            name: ExternalName::User(name),
1965            signature: sig0,
1966            colocated: false,
1967            patchable: false,
1968        });
1969
1970        let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);
1971
1972        let block0 = builder.create_block();
1973        let block1 = builder.create_block();
1974        let block2 = builder.create_block();
1975        let block3 = builder.create_block();
1976
1977        let my_var = builder.declare_var(I32);
1978
1979        builder.switch_to_block(block0);
1980        let branch_val = builder.append_block_param(block0, I8);
1981        builder.ins().brif(branch_val, block1, &[], block2, &[]);
1982
1983        builder.switch_to_block(block1);
1984        let one = builder.ins().iconst(I32, 1);
1985        builder.def_var(my_var, one);
1986
1987        let normal_return = BlockCall::new(block3, [], &mut builder.func.dfg.value_lists);
1988        let exception_table = builder
1989            .func
1990            .dfg
1991            .exception_tables
1992            .push(ExceptionTableData::new(sig0, normal_return, []));
1993        builder.ins().try_call(fn0, &[], exception_table);
1994
1995        builder.switch_to_block(block2);
1996        let two = builder.ins().iconst(I32, 2);
1997        builder.def_var(my_var, two);
1998
1999        let normal_return = BlockCall::new(block3, [], &mut builder.func.dfg.value_lists);
2000        let exception_table = builder
2001            .func
2002            .dfg
2003            .exception_tables
2004            .push(ExceptionTableData::new(sig0, normal_return, []));
2005        builder.ins().try_call(fn0, &[], exception_table);
2006
2007        builder.switch_to_block(block3);
2008        let ret_val = builder.use_var(my_var);
2009        builder.ins().return_(&[ret_val]);
2010
2011        builder.seal_all_blocks();
2012        builder.finalize();
2013
2014        let flags = cranelift_codegen::settings::Flags::new(cranelift_codegen::settings::builder());
2015        let ctx = cranelift_codegen::Context::for_function(func);
2016        ctx.verify(&flags).expect("should be valid");
2017
2018        check(
2019            &ctx.func,
2020            "function %sample(i8) -> i32 system_v {
2021    sig0 = () system_v
2022    fn0 = u0:0 sig0
2023
2024block0(v0: i8):
2025    brif v0, block1, block2
2026
2027block1:
2028    v1 = iconst.i32 1
2029    try_call fn0(), sig0, block3(v1), []  ; v1 = 1
2030
2031block2:
2032    v2 = iconst.i32 2
2033    try_call fn0(), sig0, block3(v2), []  ; v2 = 2
2034
2035block3(v3: i32):
2036    return v3
2037}",
2038        );
2039    }
2040}