winch_codegen/codegen/control.rs
//! Data structures for control flow emission.
//!
//! Winch currently doesn't apply any sort of optimizations to control flow, but
//! as a future optimization, for starters, we could perform a look ahead to the
//! next instruction when reaching any of the comparison instructions. If the
//! next instruction is a control instruction, we could avoid emitting
//! a [`crate::masm::MacroAssembler::cmp_with_set`] and instead emit
//! a conditional jump inline when emitting the control flow instruction.
9use super::{CodeGenContext, CodeGenError, Emission, OperandSize, Reg, TypedReg};
10use crate::{
11    CallingConvention, Result,
12    abi::{ABI, ABIOperand, ABIResults, ABISig, RetArea},
13    bail, ensure, format_err,
14    masm::{IntCmpKind, MacroAssembler, MemMoveDirection, RegImm, SPOffset},
15    reg::writable,
16    stack::Val,
17};
18use cranelift_codegen::MachLabel;
19use wasmtime_environ::{WasmFuncType, WasmValType, collections::TryClone as _};
20
/// Categorization of the type of the block.
#[derive(Debug)]
pub(crate) enum BlockType {
    /// Doesn't produce or consume any values.
    Void,
    /// Produces a single value of the given type.
    Single(WasmValType),
    /// Consumes multiple values and produces multiple values.
    Func(WasmFuncType),
    /// An already resolved ABI signature; per the comments below, this is
    /// only expected for the implicit function-body block.
    ABISig(ABISig),
}
33
34impl Clone for BlockType {
35    fn clone(&self) -> Self {
36        match self {
37            Self::Void => Self::Void,
38            Self::Single(x) => Self::Single(*x),
39            Self::ABISig(x) => Self::ABISig(x.clone()),
40            Self::Func(f) => Self::Func(f.clone_panic_on_oom()),
41        }
42    }
43}
44
/// Holds all the information about the signature of the block.
#[derive(Debug, Clone)]
pub(crate) struct BlockSig {
    /// The type of the block.
    pub ty: BlockType,
    /// ABI representation of the results of the block.
    /// Lazily initialized by [`BlockSig::results`].
    results: Option<ABIResults>,
    /// ABI representation of the params of the block interpreted as results.
    /// Lazily initialized by [`BlockSig::params`].
    params: Option<ABIResults>,
}
55
56impl BlockSig {
57    /// Create a new [BlockSig].
58    pub fn new(ty: BlockType) -> Self {
59        Self {
60            ty,
61            results: None,
62            params: None,
63        }
64    }
65
66    /// Create a new [BlockSig] from an [ABISig].
67    pub fn from_sig(sig: ABISig) -> Self {
68        Self {
69            ty: BlockType::sig(sig),
70            results: None,
71            params: None,
72        }
73    }
74
75    /// Return the ABI representation of the results of the block.
76    /// This method will lazily initialize the results if not present.
77    pub fn results<M>(&mut self) -> Result<&mut ABIResults>
78    where
79        M: MacroAssembler,
80    {
81        if self.ty.is_sig() {
82            return match &mut self.ty {
83                BlockType::ABISig(sig) => Ok(&mut sig.results),
84                _ => unreachable!(),
85            };
86        }
87
88        if self.results.is_some() {
89            return Ok(self.results.as_mut().unwrap());
90        }
91
92        let results = match &self.ty {
93            BlockType::Void => <M::ABI as ABI>::abi_results(&[], &CallingConvention::Default),
94            BlockType::Single(ty) => {
95                <M::ABI as ABI>::abi_results(&[*ty], &CallingConvention::Default)
96            }
97            BlockType::Func(f) => {
98                <M::ABI as ABI>::abi_results(f.results(), &CallingConvention::Default)
99            }
100            BlockType::ABISig(_) => unreachable!(),
101        };
102
103        self.results = Some(results?);
104        Ok(self.results.as_mut().unwrap())
105    }
106
107    /// Construct an ABI result representation of the params of the block.
108    /// This is needed for loops and for handling cases in which params flow as
109    /// the block's results, i.e. in the presence of an empty then or else.
110    pub fn params<M>(&mut self) -> Result<&mut ABIResults>
111    where
112        M: MacroAssembler,
113    {
114        if self.params.is_some() {
115            return Ok(self.params.as_mut().unwrap());
116        }
117
118        let params_as_results = match &self.ty {
119            BlockType::Void | BlockType::Single(_) => {
120                <M::ABI as ABI>::abi_results(&[], &CallingConvention::Default)
121            }
122            BlockType::Func(f) => {
123                <M::ABI as ABI>::abi_results(f.params(), &CallingConvention::Default)
124            }
125            // Once we have created a block type from a known signature, we
126            // can't modify its meaning. This should only be used for the
127            // function body block, in which case there's no need for treating
128            // params as results.
129            BlockType::ABISig(_) => unreachable!(),
130        };
131
132        self.params = Some(params_as_results?);
133        Ok(self.params.as_mut().unwrap())
134    }
135
136    /// Returns the signature param count.
137    pub fn param_count(&self) -> usize {
138        match &self.ty {
139            BlockType::Void | BlockType::Single(_) => 0,
140            BlockType::Func(f) => f.params().len(),
141            BlockType::ABISig(sig) => sig.params_without_retptr().len(),
142        }
143    }
144
145    /// Returns the signature return count.
146    pub fn return_count(&self) -> usize {
147        match &self.ty {
148            BlockType::Void => 0,
149            BlockType::Single(_) => 1,
150            BlockType::Func(f) => f.results().len(),
151            BlockType::ABISig(sig) => sig.results().len(),
152        }
153    }
154}
155
156impl BlockType {
157    /// Create a [BlockType::Void].
158    pub fn void() -> Self {
159        Self::Void
160    }
161
162    /// Create a [BlockType::Single] from the given [WasmType].
163    pub fn single(ty: WasmValType) -> Self {
164        Self::Single(ty)
165    }
166
167    /// Create a [BlockType::Func] from the given [WasmFuncType].
168    pub fn func(ty: WasmFuncType) -> Self {
169        Self::Func(ty)
170    }
171
172    /// Create a [BlockType::ABISig].
173    pub fn sig(sig: ABISig) -> Self {
174        Self::ABISig(sig)
175    }
176
177    /// Returns true if the type of the block is [BlockType::ABISig].
178    pub fn is_sig(&self) -> bool {
179        match self {
180            Self::ABISig(_) => true,
181            _ => false,
182        }
183    }
184}
185
/// The expected value and machine stack state when entering and exiting the block.
#[derive(Debug, Default, Copy, Clone)]
pub(crate) struct StackState {
    /// The base stack pointer offset.
    /// This offset is set when entering the block, after saving any live
    /// registers and locals.
    /// It is calculated by subtracting the size, in bytes, of any block
    /// params from the current stack pointer offset.
    pub base_offset: SPOffset,
    /// The target stack pointer offset.
    /// This offset is calculated by adding the size of the stack results
    /// to the base stack pointer offset.
    pub target_offset: SPOffset,
    /// The base length of the value stack when entering the block,
    /// which is the current length of the value stack minus any block
    /// parameters.
    pub base_len: usize,
    /// The target length of the value stack when exiting the block.
    /// Calculated by adding the number of results to the base value stack
    /// length.
    pub target_len: usize,
}
207
/// Holds all the metadata to support the emission
/// of control flow instructions.
#[derive(Debug)]
pub(crate) enum ControlStackFrame {
    /// An `if` control frame.
    If {
        /// The if continuation label, i.e. the else branch or, in its
        /// absence, the end of the block.
        cont: MachLabel,
        /// The exit label of the block.
        exit: MachLabel,
        /// The signature of the block.
        sig: BlockSig,
        /// The stack state of the block.
        stack_state: StackState,
        /// Local reachability state when entering the block.
        reachable: bool,
    },
    /// An `else` control frame, created by converting an `If` frame when
    /// its else branch is bound.
    Else {
        /// The exit label of the block.
        exit: MachLabel,
        /// The signature of the block.
        sig: BlockSig,
        /// The stack state of the block.
        stack_state: StackState,
        /// Local reachability state when entering the block.
        reachable: bool,
    },
    /// A `block` control frame.
    Block {
        /// The block exit label.
        exit: MachLabel,
        /// The signature of the block.
        sig: BlockSig,
        /// The stack state of the block.
        stack_state: StackState,
        /// Exit state of the block.
        ///
        /// This flag is used to determine if a block is a branch
        /// target. By default, this is false, and it's updated when
        /// emitting a `br` or `br_if`.
        is_branch_target: bool,
    },
    /// A `loop` control frame.
    Loop {
        /// The start of the Loop.
        head: MachLabel,
        /// The stack state of the block.
        stack_state: StackState,
        /// The signature of the block.
        sig: BlockSig,
    },
}
257
258impl ControlStackFrame {
259    /// Returns [`ControlStackFrame`] for an if.
260    pub fn r#if<M: MacroAssembler>(
261        sig: BlockSig,
262        masm: &mut M,
263        context: &mut CodeGenContext<Emission>,
264    ) -> Result<Self> {
265        let mut control = Self::If {
266            cont: masm.get_label()?,
267            exit: masm.get_label()?,
268            sig,
269            reachable: context.reachable,
270            stack_state: Default::default(),
271        };
272
273        control.emit(masm, context)?;
274        Ok(control)
275    }
276
277    /// Returns [`ControlStackFrame`] for a block.
278    pub fn block<M: MacroAssembler>(
279        sig: BlockSig,
280        masm: &mut M,
281        context: &mut CodeGenContext<Emission>,
282    ) -> Result<Self> {
283        let mut control = Self::Block {
284            sig,
285            is_branch_target: false,
286            exit: masm.get_label()?,
287            stack_state: Default::default(),
288        };
289
290        control.emit(masm, context)?;
291        Ok(control)
292    }
293
294    /// Returns [`ControlStackFrame`] for a loop.
295    pub fn r#loop<M: MacroAssembler>(
296        sig: BlockSig,
297        masm: &mut M,
298        context: &mut CodeGenContext<Emission>,
299    ) -> Result<Self> {
300        let mut control = Self::Loop {
301            stack_state: Default::default(),
302            sig,
303            head: masm.get_label()?,
304        };
305
306        control.emit(masm, context)?;
307        Ok(control)
308    }
309
    /// Shared initialization for newly created control frames.
    ///
    /// Calculates the frame's [StackState] and, if the block has results on
    /// the machine stack, eagerly resolves the result area base. For ifs and
    /// loops, block params are additionally materialized as results so that
    /// control flow joins observe them at a known location.
    fn init<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        self.calculate_stack_state(context, masm)?;
        // If the block has stack results, immediately resolve the return area
        // base.
        if self.results::<M>()?.on_stack() {
            let results_base = self.stack_state().target_offset;
            self.results::<M>()?.set_ret_area(RetArea::sp(results_base));
        }

        if self.is_if() || self.is_loop() {
            // Preemptively handle block params as results so that the params
            // are correctly placed in memory. This is especially
            // important for control flow joins with empty blocks:
            //
            //(module
            //  (func (export "params") (param i32) (result i32)
            //       (i32.const 2)
            //       (if (param i32) (result i32) (local.get 0)
            //       (then))
            //     (i32.const 3)
            //     (i32.add)
            //   )
            //)
            let base_offset = self.stack_state().base_offset;
            // Stack params live directly above the block's base offset.
            if self.params::<M>()?.on_stack() {
                let offset = base_offset.as_u32() + self.params::<M>()?.size();
                self.params::<M>()?
                    .set_ret_area(RetArea::sp(SPOffset::from_u32(offset)));
            }
            Self::top_abi_results_impl(
                self.params::<M>()?,
                context,
                masm,
                |params: &ABIResults, _, _| Ok(params.ret_area().copied()),
            )?;
        }
        Ok(())
    }
352
    /// Calculates the [StackState] of the block.
    ///
    /// Spills any live registers and locals, then records the base/target
    /// stack pointer offsets and value stack lengths for the frame.
    fn calculate_stack_state<M: MacroAssembler>(
        &mut self,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<()> {
        use ControlStackFrame::*;
        let sig = self.sig();
        // If the block type contains a full [ABISig], do not take into account
        // the params, since these are the params of the function that is
        // currently being compiled and the value stack doesn't currently
        // contain any values anyway.
        let param_count = if sig.ty.is_sig() {
            0
        } else {
            sig.param_count()
        };
        let return_count = sig.return_count();
        ensure!(
            context.stack.len() >= param_count,
            CodeGenError::missing_values_in_stack()
        );
        let results_size = self.results::<M>()?.size();

        // Save any live registers and locals.
        context.spill(masm)?;

        // The base is the current state minus the block params, which are
        // consumed by the block.
        let base_len = context.stack.len() - param_count;
        let stack_consumed = context.stack.sizeof(param_count);
        let current_sp = masm.sp_offset()?;
        let base_offset = SPOffset::from_u32(current_sp.as_u32() - stack_consumed);

        match self {
            If { stack_state, .. } | Block { stack_state, .. } | Loop { stack_state, .. } => {
                stack_state.base_offset = base_offset;
                stack_state.base_len = base_len;
                stack_state.target_offset = SPOffset::from_u32(base_offset.as_u32() + results_size);
                stack_state.target_len = base_len + return_count;
            }
            // NOTE(review): `Else` frames are created in `bind_else` with the
            // stack state copied from their originating `If`, so no update
            // appears to be needed here — confirm.
            _ => {}
        }
        Ok(())
    }
396
    /// This function ensures that the state of the -- machine and value --
    /// stack is the right one when reaching a control frame branch in which
    /// reachability is restored or when reaching the end of a function in an
    /// unreachable state. This function is intended to be called when handling
    /// an unreachable else or end.
    ///
    /// This function will truncate the value stack to the base length of
    /// the control frame and will also set the stack pointer offset to reflect
    /// the offset expected by the target branch.
    ///
    /// NB: This method is assumed to be called *before* pushing any block
    /// results to the value stack, so that any excess values are cleaned up.
    pub fn ensure_stack_state<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        let state = self.stack_state();
        // This assumes that at jump sites, the machine stack pointer will be
        // adjusted to match the expectations of the target branch (e.g.
        // `target_offset`); after performing the jump, the MacroAssembler
        // implementation will soft-reset the stack pointer offset to its
        // original offset, ensuring that other parts of the program have
        // access to the right offset; this is especially important in
        // conditional branches.
        // When restoring reachability we ensure that the MacroAssembler offset
        // is set to match the expectations of the target branch, similar to how
        // the machine stack pointer was adjusted at jump sites.
        masm.reset_stack_pointer(state.target_offset)?;
        // We use the base length, because this function is assumed to be called
        // *before* pushing any results to the value stack. This way, any excess
        // values will be discarded.
        context.truncate_stack_to(state.base_len)
    }
431
432    /// Return the type information of the block.
433    pub fn sig(&self) -> &BlockSig {
434        use ControlStackFrame::*;
435        match self {
436            If { sig, .. } | Else { sig, .. } | Loop { sig, .. } | Block { sig, .. } => sig,
437        }
438    }
439
    /// Emits the entry code for the control frame.
    ///
    /// For ifs: pops the condition and emits the conditional branch to the
    /// continuation. For loops: binds the head label. Blocks only initialize
    /// their state. Any other frame kind is a codegen error, since `emit` is
    /// only invoked from the `if`/`block`/`loop` constructors.
    fn emit<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        use ControlStackFrame::*;

        // Do not perform any emissions if we are in an unreachable state.
        if !context.reachable {
            return Ok(());
        }

        match *self {
            If { cont, .. } => {
                // Pop the condition value.
                // Because in the case of Self::If, Self::init, will top the
                // branch params, we exclude any result registers from being
                // used as the branch test.
                let top = context.without::<Result<TypedReg>, _, _>(
                    self.params::<M>()?.regs(),
                    masm,
                    |cx, masm| cx.pop_to_reg(masm, None),
                )??;
                self.init(masm, context)?;
                // Branch to the continuation when the condition value is
                // zero, i.e. when the `if` condition is false.
                masm.branch(
                    IntCmpKind::Eq,
                    top.reg,
                    top.reg.into(),
                    cont,
                    OperandSize::S32,
                )?;
                context.free_reg(top);
                Ok(())
            }
            Block { .. } => self.init(masm, context),
            Loop { head, .. } => {
                self.init(masm, context)?;
                masm.bind(head)?;
                Ok(())
            }
            _ => Err(format_err!(CodeGenError::if_control_frame_expected())),
        }
    }
483
    /// Handles the else branch if the current control stack frame is
    /// [`ControlStackFrame::If`].
    ///
    /// Pops the results of the then branch into place, jumps to the block's
    /// exit, and converts the frame into an [`ControlStackFrame::Else`].
    pub fn emit_else<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        ensure!(self.is_if(), CodeGenError::if_control_frame_expected());
        let state = self.stack_state();

        // The value stack must hold exactly the values expected at the end of
        // the block.
        ensure!(
            state.target_len == context.stack.len(),
            CodeGenError::control_frame_state_mismatch()
        );
        self.pop_abi_results(context, masm, |results, _, _| {
            Ok(results.ret_area().copied())
        })?;
        // `unwrap` is safe here: `If` frames always have an exit label, and
        // `is_if` was ensured above.
        masm.jmp(*self.exit_label().unwrap())?;
        self.bind_else(masm, context)?;
        Ok(())
    }
505
    /// Binds the else branch label and converts `self` to
    /// [`ControlStackFrame::Else`].
    pub fn bind_else<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        use ControlStackFrame::*;
        match self {
            If {
                cont,
                sig,
                stack_state,
                exit,
                ..
            } => {
                // Bind the else branch.
                masm.bind(*cont)?;

                // Push the abi results to the value stack, so that they are
                // used as params for the else branch. At the beginning of the
                // if block, any params are preemptively resolved as results;
                // when reaching the else all params are already materialized as
                // stack results. As part of ensuring the right state when
                // entering the else branch, the following snippet also soft
                // resets the stack pointer so that it matches the expectations
                // of the else branch: the stack pointer is expected to be at
                // the base stack pointer, plus the params stack size in bytes.
                let params_size = sig.params::<M>()?.size();
                context.push_abi_results::<M, _>(sig.params::<M>()?, masm, |params, _, _| {
                    params.ret_area().copied()
                })?;
                masm.reset_stack_pointer(SPOffset::from_u32(
                    stack_state.base_offset.as_u32() + params_size,
                ))?;

                // Update the stack control frame with an else control frame,
                // carrying over the exit label, stack state and signature of
                // the `if`.
                *self = ControlStackFrame::Else {
                    exit: *exit,
                    stack_state: *stack_state,
                    reachable: context.reachable,
                    sig: sig.clone(),
                };
            }
            _ => bail!(CodeGenError::if_control_frame_expected()),
        }
        Ok(())
    }
554
    /// Handles the end of a control stack frame.
    ///
    /// For ifs, elses and blocks, the block results are popped into place and
    /// the exit label is bound. Loops have no exit label (their label is the
    /// head), so only the value stack state is verified.
    pub fn emit_end<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        use ControlStackFrame::*;
        match self {
            If { stack_state, .. } | Else { stack_state, .. } | Block { stack_state, .. } => {
                ensure!(
                    stack_state.target_len == context.stack.len(),
                    CodeGenError::control_frame_state_mismatch()
                );
                // Before binding the exit label, we handle the block results.
                self.pop_abi_results(context, masm, |results, _, _| {
                    Ok(results.ret_area().copied())
                })?;
                self.bind_end(masm, context)?;
            }
            Loop { stack_state, .. } => {
                ensure!(
                    stack_state.target_len == context.stack.len(),
                    CodeGenError::control_frame_state_mismatch()
                );
            }
        };

        Ok(())
    }
584
    /// Binds the exit label of the current control stack frame and pushes the
    /// ABI results to the value stack.
    pub fn bind_end<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        // Results are pushed before the exit label is bound.
        self.push_abi_results(context, masm)?;
        self.bind_exit_label(masm)
    }
595
596    /// Binds the exit label of the control stack frame.
597    pub fn bind_exit_label<M: MacroAssembler>(&self, masm: &mut M) -> Result<()> {
598        use ControlStackFrame::*;
599        match self {
600            // We use an explicit label to track the exit of an if block. In case there's no
601            // else, we bind the if's continuation block to make sure that any jumps from the if
602            // condition are reachable and we bind the explicit exit label as well to ensure that any
603            // branching instructions are able to correctly reach the block's end.
604            If { cont, .. } => masm.bind(*cont)?,
605            _ => {}
606        }
607        if let Some(label) = self.exit_label() {
608            masm.bind(*label)?;
609        }
610        Ok(())
611    }
612
613    /// Returns the continuation label of the current control stack frame.
614    pub fn label(&self) -> &MachLabel {
615        use ControlStackFrame::*;
616
617        match self {
618            If { exit, .. } | Else { exit, .. } | Block { exit, .. } => exit,
619            Loop { head, .. } => head,
620        }
621    }
622
623    /// Returns the exit label of the current control stack frame. Note that
624    /// this is similar to [`ControlStackFrame::label`], with the only difference that it
625    /// returns `None` for `Loop` since its label doesn't represent an exit.
626    pub fn exit_label(&self) -> Option<&MachLabel> {
627        use ControlStackFrame::*;
628
629        match self {
630            If { exit, .. } | Else { exit, .. } | Block { exit, .. } => Some(exit),
631            Loop { .. } => None,
632        }
633    }
634
635    /// Set the current control stack frame as a branch target.
636    pub fn set_as_target(&mut self) {
637        match self {
638            ControlStackFrame::Block {
639                is_branch_target, ..
640            } => {
641                *is_branch_target = true;
642            }
643            _ => {}
644        }
645    }
646
647    /// Returns [`crate::abi::ABIResults`] of the control stack frame
648    /// block.
649    pub fn results<M>(&mut self) -> Result<&mut ABIResults>
650    where
651        M: MacroAssembler,
652    {
653        use ControlStackFrame::*;
654
655        match self {
656            If { sig, .. } | Else { sig, .. } | Block { sig, .. } => sig.results::<M>(),
657            Loop { sig, .. } => sig.params::<M>(),
658        }
659    }
660
661    /// Returns the block params interpreted as [crate::abi::ABIResults].
662    pub fn params<M>(&mut self) -> Result<&mut ABIResults>
663    where
664        M: MacroAssembler,
665    {
666        use ControlStackFrame::*;
667        match self {
668            If { sig, .. } | Else { sig, .. } | Block { sig, .. } | Loop { sig, .. } => {
669                sig.params::<M>()
670            }
671        }
672    }
673
    /// Orchestrates how block results are handled.
    /// Results are handled in reverse order, starting from register results
    /// continuing to memory values. This guarantees that the stack ordering
    /// invariant is maintained. See [ABIResults] for more details.
    ///
    /// This function will iterate through each result and invoke the provided
    /// callback if there are results on the stack.
    ///
    /// Calculating the return area involves ensuring that there's enough stack
    /// space to store the block's results. To make the process of handling
    /// multiple results easier, this function will save all live registers and
    /// locals right after handling any register results. This will ensure that
    /// the top `n` values in the value stack are correctly placed in the memory
    /// locations corresponding to multiple stack results. Once the iteration
    /// over all the results is done, the stack result area of the block will be
    /// updated.
    pub fn pop_abi_results<M, F>(
        &mut self,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
        calculate_ret_area: F,
    ) -> Result<()>
    where
        M: MacroAssembler,
        F: FnMut(&ABIResults, &mut CodeGenContext<Emission>, &mut M) -> Result<Option<RetArea>>,
    {
        // Thin wrapper over the shared implementation, using this frame's
        // own results.
        Self::pop_abi_results_impl(self.results::<M>()?, context, masm, calculate_ret_area)
    }
702
    /// Shared implementation for popping the ABI results.
    /// This is needed because, in some cases, params must be interpreted and
    /// used as the results of the block. When emitting code at control flow
    /// joins, the block params are interpreted as results, to ensure that they
    /// can correctly "flow" as the results of the block. This is especially
    /// important in the presence of empty then, else and loop blocks. This
    /// interpretation is an internal detail of the control module, and having
    /// a shared implementation allows the caller to decide how the
    /// results should be interpreted.
    pub fn pop_abi_results_impl<M, F>(
        results: &mut ABIResults,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
        mut calculate_ret_area: F,
    ) -> Result<()>
    where
        M: MacroAssembler,
        F: FnMut(&ABIResults, &mut CodeGenContext<Emission>, &mut M) -> Result<Option<RetArea>>,
    {
        // Operands are visited in reverse: register results correspond to the
        // newest entries of the value stack.
        let mut iter = results.operands().iter().rev().peekable();

        // Pop each register result directly into its designated register.
        while let Some(ABIOperand::Reg { reg, .. }) = iter.peek() {
            let TypedReg { reg, .. } = context.pop_to_reg(masm, Some(*reg))?;
            context.free_reg(reg);
            iter.next().unwrap();
        }

        let ret_area = calculate_ret_area(results, context, masm)?;

        // Load the return pointer, if any, before adjusting the stack for
        // SP-addressed results.
        let retptr = Self::maybe_load_retptr(ret_area.as_ref(), &results, context, masm)?;
        if let Some(area) = ret_area {
            if area.is_sp() {
                // Make sure there's enough stack space for the SP-addressed
                // result area.
                Self::ensure_ret_area(&area, context, masm)?;
            }
        }

        // Store the remaining (memory) results through the return pointer.
        if let Some(retptr) = retptr {
            while let Some(ABIOperand::Stack { offset, .. }) = iter.peek() {
                let addr = masm.address_at_reg(retptr, *offset)?;
                context.pop_to_addr(masm, addr)?;
                iter.next().unwrap();
            }
            context.free_reg(retptr);
        }

        // Finally, place SP-addressed stack results into their expected
        // memory locations.
        if let Some(area) = ret_area {
            if area.is_sp() {
                Self::adjust_stack_results(area, results, context, masm)?;
            }
        }

        Ok(())
    }
756
    /// Convenience wrapper around [CodeGenContext::push_abi_results] using the
    /// results of the current frame.
    fn push_abi_results<M>(
        &mut self,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<()>
    where
        M: MacroAssembler,
    {
        // Reuse the frame's previously resolved return area, if any.
        context.push_abi_results(self.results::<M>()?, masm, |results, _, _| {
            results.ret_area().copied()
        })
    }
771
    /// Preemptively handles the ABI results of the current frame.
    /// This function is meant to be used when emitting control flow with joins,
    /// in which it's not possible to know at compile time which branch will be
    /// taken.
    ///
    /// The results are popped into their ABI locations and immediately pushed
    /// back to the value stack, so that all branches observe them at the same
    /// place.
    pub fn top_abi_results<M, F>(
        &mut self,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
        calculate_ret_area: F,
    ) -> Result<()>
    where
        M: MacroAssembler,
        F: FnMut(&ABIResults, &mut CodeGenContext<Emission>, &mut M) -> Result<Option<RetArea>>,
    {
        Self::top_abi_results_impl::<M, _>(self.results::<M>()?, context, masm, calculate_ret_area)
    }
788
    /// Internal implementation of [Self::top_abi_results].
    /// See [Self::pop_abi_results_impl] on why an internal implementation is
    /// needed.
    fn top_abi_results_impl<M, F>(
        results: &mut ABIResults,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
        mut calculate_ret_area: F,
    ) -> Result<()>
    where
        M: MacroAssembler,
        F: FnMut(&ABIResults, &mut CodeGenContext<Emission>, &mut M) -> Result<Option<RetArea>>,
    {
        // Capture the area computed during the pop so that the subsequent
        // push observes the exact same location.
        let mut area = None;
        Self::pop_abi_results_impl::<M, _>(results, context, masm, |r, context, masm| {
            area = calculate_ret_area(r, context, masm)?;
            Ok(area)
        })?;
        // Use the previously calculated area to ensure that the ret area is
        // kept in sync between both operations.
        context.push_abi_results::<M, _>(results, masm, |_, _, _| area)
    }
811
    // If the results on the stack are handled via the stack pointer, ensure
    // that the stack results are correctly located. In general, since values in
    // the value stack are spilled when exiting the block, the top `n` entries
    // in the value stack, representing the `n` stack results of the block, are
    // almost correctly located. However, since constants are not
    // spilled, their presence complicates block exits. For this reason, the
    // last step for finalizing multiple block results involves:
    // * Scanning the value stack from oldest to newest memory values and
    //   calculating the source and destination of each value; if the source
    //   is closer to the stack pointer (greater) than the destination,
    //   perform a memory move of the bytes to its destination, else stop,
    //   because the memory values are in place.
    // * Scanning the value stack from newest to oldest and calculating the
    //   source and destination of each value; if the source is closer to the
    //   frame pointer (less) than the destination, perform a memory move of
    //   the bytes to its destination, else stop, because the memory values
    //   are in place.
    // * Lastly, iterating over the top `n` elements of the value stack,
    //   spilling any constant values and placing them in their respective
    //   memory location.
    //
    // The implementation in Winch is inspired by how this is handled in
    // SpiderMonkey's WebAssembly Baseline Compiler:
    // https://wingolog.org/archives/2020/04/03/multi-value-webassembly-in-firefox-from-1-to-n
    fn adjust_stack_results<M>(
        ret_area: RetArea,
        results: &ABIResults,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<()>
    where
        M: MacroAssembler,
    {
        // This routine only supports SP-relative return areas.
        ensure!(ret_area.is_sp(), CodeGenError::sp_addressing_expected());
        let results_offset = ret_area.unwrap_sp();

        // Start iterating from memory values that are closer to the
        // frame pointer (oldest entries first).
        for (i, operand) in results.operands().iter().enumerate() {
            // Register results don't live in the stack result area; once one
            // is seen, all remaining operands are registers as well.
            if operand.is_reg() {
                break;
            }

            // The top `stack_operands_len()` entries of the value stack are
            // the block's stack results (oldest first); map operand `i` to
            // its corresponding value-stack entry.
            let value_index = (context.stack.len() - results.stack_operands_len()) + i;
            let val = context.stack.inner()[value_index];

            match (val, operand) {
                (Val::Memory(mem), ABIOperand::Stack { offset, size, .. }) => {
                    let dst = results_offset.as_u32() - *offset;
                    let src = mem.slot.offset;

                    // Values are moved from lower (SP) to higher (FP)
                    // addresses.
                    if src.as_u32() <= dst {
                        break;
                    }

                    masm.memmove(
                        src,
                        SPOffset::from_u32(dst),
                        *size,
                        MemMoveDirection::LowToHigh,
                    )?;
                }
                // Constants are handled in the final loop below; registers
                // were excluded by the `is_reg` check above.
                _ => {}
            }
        }

        // Start iterating from memory values that are closer to the
        // stack pointer (newest entries first).
        for (i, operand) in results
            .operands()
            .iter()
            .rev()
            // Skip any register results.
            .skip(results.regs().len())
            .enumerate()
        {
            // Walk the value stack from the top (newest) downwards, mirroring
            // the reversed operand iteration.
            let value_index = context.stack.len() - i - 1;
            let val = context.stack.inner()[value_index];
            match (val, operand) {
                (Val::Memory(mem), ABIOperand::Stack { offset, size, .. }) => {
                    let dst = results_offset.as_u32() - *offset;
                    let src = mem.slot.offset;

                    // Values are moved from higher (FP) to lower (SP)
                    // addresses.
                    if src.as_u32() >= dst {
                        break;
                    }

                    masm.memmove(
                        src,
                        SPOffset::from_u32(dst),
                        *size,
                        MemMoveDirection::HighToLow,
                    )?;
                }
                // Constants are handled in the final loop below.
                _ => {}
            }
        }

        // Finally store any constants in the value stack in their respective
        // locations.
        for operand in results
            .operands()
            .iter()
            .take(results.stack_operands_len())
            .rev()
        {
            // Iterate the stack operands newest-to-oldest so that each
            // iteration's `peek` observes the value-stack entry matching the
            // current operand; that entry is popped at the end of the
            // iteration.
            match (operand, context.stack.peek().unwrap()) {
                (ABIOperand::Stack { ty, offset, .. }, Val::I32(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::i32(*v), addr, (*ty).try_into()?)?;
                }
                (ABIOperand::Stack { ty, offset, .. }, Val::I64(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::i64(*v), addr, (*ty).try_into()?)?;
                }
                (ABIOperand::Stack { ty, offset, .. }, Val::F32(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::f32(v.bits()), addr, (*ty).try_into()?)?;
                }
                (ABIOperand::Stack { ty, offset, .. }, Val::F64(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::f64(v.bits()), addr, (*ty).try_into()?)?;
                }
                (ABIOperand::Stack { ty, offset, .. }, Val::V128(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::v128(*v), addr, (*ty).try_into()?)?;
                }
                // Non-constant entries must already be memory values, placed
                // by the two relocation loops above.
                (_, v) => debug_assert!(v.is_mem()),
            }

            let _ = context.stack.pop().unwrap();
        }

        // Adjust any excess stack space: the stack space after handling the
        // block's results should be the exact amount needed by the return area.
        ensure!(
            masm.sp_offset()?.as_u32() >= results_offset.as_u32(),
            CodeGenError::invalid_sp_offset()
        );
        masm.free_stack(masm.sp_offset()?.as_u32() - results_offset.as_u32())?;
        Ok(())
    }
965
966    /// Ensures that there is enough space for return values on the stack.
967    /// This function is called at the end of all blocks and when branching from
968    /// within blocks.
969    fn ensure_ret_area<M>(
970        ret_area: &RetArea,
971        context: &mut CodeGenContext<Emission>,
972        masm: &mut M,
973    ) -> Result<()>
974    where
975        M: MacroAssembler,
976    {
977        ensure!(ret_area.is_sp(), CodeGenError::sp_addressing_expected());
978        // Save any live registers and locals when exiting the block to ensure
979        // that the respective values are correctly located in memory.
980        // See [Self::adjust_stack_results] for more details.
981        context.spill(masm)?;
982        if ret_area.unwrap_sp() > masm.sp_offset()? {
983            masm.reserve_stack(ret_area.unwrap_sp().as_u32() - masm.sp_offset()?.as_u32())?
984        }
985
986        Ok(())
987    }
988
989    /// Loads the return pointer, if it exists, into the next available register.
990    fn maybe_load_retptr<M>(
991        ret_area: Option<&RetArea>,
992        results: &ABIResults,
993        context: &mut CodeGenContext<Emission>,
994        masm: &mut M,
995    ) -> Result<Option<Reg>>
996    where
997        M: MacroAssembler,
998    {
999        if let Some(area) = ret_area {
1000            match area {
1001                RetArea::Slot(slot) => {
1002                    let base = context.without::<Result<Reg>, M, _>(
1003                        results.regs(),
1004                        masm,
1005                        |cx, masm| cx.any_gpr(masm),
1006                    )??;
1007                    let local_addr = masm.local_address(&slot)?;
1008                    masm.load_ptr(local_addr, writable!(base))?;
1009                    Ok(Some(base))
1010                }
1011                _ => Ok(None),
1012            }
1013        } else {
1014            Ok(None)
1015        }
1016    }
1017
1018    /// This function is used at the end of unreachable code handling
1019    /// to determine if the reachability status should be updated.
1020    pub fn is_next_sequence_reachable(&self) -> bool {
1021        use ControlStackFrame::*;
1022
1023        match self {
1024            // For if/else, the reachability of the next sequence is determined
1025            // by the reachability state at the start of the block. An else
1026            // block will be reachable if the if block is also reachable at
1027            // entry.
1028            If { reachable, .. } | Else { reachable, .. } => *reachable,
1029            // For blocks, the reachability of the next sequence is determined
1030            // if they're a branch target.
1031            Block {
1032                is_branch_target, ..
1033            } => *is_branch_target,
1034            // Loops are not used for reachability analysis,
1035            // given that they don't have exit branches.
1036            Loop { .. } => false,
1037        }
1038    }
1039
1040    /// Returns a reference to the [StackState] of the block.
1041    pub fn stack_state(&self) -> &StackState {
1042        use ControlStackFrame::*;
1043        match self {
1044            If { stack_state, .. }
1045            | Else { stack_state, .. }
1046            | Block { stack_state, .. }
1047            | Loop { stack_state, .. } => stack_state,
1048        }
1049    }
1050
1051    /// Returns true if the current frame is [ControlStackFrame::If].
1052    pub fn is_if(&self) -> bool {
1053        match self {
1054            Self::If { .. } => true,
1055            _ => false,
1056        }
1057    }
1058
1059    /// Returns true if the current frame is [ControlStackFrame::Loop].
1060    pub fn is_loop(&self) -> bool {
1061        match self {
1062            Self::Loop { .. } => true,
1063            _ => false,
1064        }
1065    }
1066
1067    /// Returns true if the current stack pointer is unbalanced
1068    /// relative to the the expected control frame stack pointer
1069    /// offset. The stack pointer is considered unbalanced relative
1070    /// to the control frame if the stack pointer is greater than the
1071    /// the target stack pointer offset expected by the control frame.
1072    pub fn unbalanced<M: MacroAssembler>(&self, masm: &mut M) -> Result<bool> {
1073        Ok(masm.sp_offset()? > self.stack_state().target_offset)
1074    }
1075}