winch_codegen/codegen/control.rs
//! Data structures for control flow emission.
//!
//! Winch currently doesn't apply any optimizations to control flow. As a
//! starting point for future optimization, we could look ahead to the next
//! instruction when reaching any of the comparison instructions. If the
//! next instruction is a control instruction, we could avoid emitting
//! a [`crate::masm::MacroAssembler::cmp_with_set`] and instead emit
//! a conditional jump inline when emitting the control flow instruction.
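//!
//! As a rough sketch of that fusion opportunity, consider the following
//! (hypothetical) sequence:
//!
//! ```wat
//! ;; Today: `i32.lt_s` materializes a boolean via `cmp_with_set`, and
//! ;; `br_if` then tests that boolean against zero to branch. With
//! ;; lookahead, the comparison and the conditional branch could be
//! ;; emitted as a single compare-and-jump.
//! (i32.lt_s (local.get 0) (local.get 1))
//! (br_if 0)
//! ```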
use super::{CodeGenContext, CodeGenError, Emission, OperandSize, Reg, TypedReg};
use crate::{
    abi::{ABIOperand, ABIResults, ABISig, RetArea, ABI},
    masm::{IntCmpKind, MacroAssembler, MemMoveDirection, RegImm, SPOffset},
    reg::writable,
    stack::Val,
    CallingConvention,
};
use anyhow::{anyhow, bail, ensure, Result};
use cranelift_codegen::MachLabel;
use wasmtime_environ::{WasmFuncType, WasmValType};

/// Categorization of the type of the block.
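///
/// As an illustration, wasm block types map roughly as follows (a sketch,
/// not exhaustive):
///
/// ```text
/// (block ...)                          => BlockType::Void
/// (block (result i32) ...)             => BlockType::Single(WasmValType::I32)
/// (block (param i32) (result i64) ...) => BlockType::Func(..)
/// ```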
#[derive(Debug, Clone)]
pub(crate) enum BlockType {
    /// Doesn't produce or consume any values.
    Void,
    /// Produces a single value.
    Single(WasmValType),
    /// Consumes multiple values and produces multiple values.
    Func(WasmFuncType),
    /// An already resolved ABI signature.
    ABISig(ABISig),
}

/// Holds all the information about the signature of the block.
#[derive(Debug, Clone)]
pub(crate) struct BlockSig {
    /// The type of the block.
    pub ty: BlockType,
    /// ABI representation of the results of the block.
    results: Option<ABIResults>,
    /// ABI representation of the params of the block interpreted as results.
    params: Option<ABIResults>,
}

impl BlockSig {
    /// Create a new [BlockSig].
    pub fn new(ty: BlockType) -> Self {
        Self {
            ty,
            results: None,
            params: None,
        }
    }

    /// Create a new [BlockSig] from an [ABISig].
    pub fn from_sig(sig: ABISig) -> Self {
        Self {
            ty: BlockType::sig(sig),
            results: None,
            params: None,
        }
    }

    /// Return the ABI representation of the results of the block.
    /// This method will lazily initialize the results if not present.
    pub fn results<M>(&mut self) -> Result<&mut ABIResults>
    where
        M: MacroAssembler,
    {
        if self.ty.is_sig() {
            return match &mut self.ty {
                BlockType::ABISig(sig) => Ok(&mut sig.results),
                _ => unreachable!(),
            };
        }

        if self.results.is_some() {
            return Ok(self.results.as_mut().unwrap());
        }

        let results = match &self.ty {
            BlockType::Void => <M::ABI as ABI>::abi_results(&[], &CallingConvention::Default),
            BlockType::Single(ty) => {
                <M::ABI as ABI>::abi_results(&[*ty], &CallingConvention::Default)
            }
            BlockType::Func(f) => {
                <M::ABI as ABI>::abi_results(f.returns(), &CallingConvention::Default)
            }
            BlockType::ABISig(_) => unreachable!(),
        };

        self.results = Some(results?);
        Ok(self.results.as_mut().unwrap())
    }

    /// Construct an ABI result representation of the params of the block.
    /// This is needed for loops and for handling cases in which params flow as
    /// the block's results, i.e. in the presence of an empty then or else.
    pub fn params<M>(&mut self) -> Result<&mut ABIResults>
    where
        M: MacroAssembler,
    {
        if self.params.is_some() {
            return Ok(self.params.as_mut().unwrap());
        }

        let params_as_results = match &self.ty {
            BlockType::Void | BlockType::Single(_) => {
                <M::ABI as ABI>::abi_results(&[], &CallingConvention::Default)
            }
            BlockType::Func(f) => {
                <M::ABI as ABI>::abi_results(f.params(), &CallingConvention::Default)
            }
            // Once we have created a block type from a known signature, we
            // can't modify its meaning. This should only be used for the
            // function body block, in which case there's no need for treating
            // params as results.
            BlockType::ABISig(_) => unreachable!(),
        };

        self.params = Some(params_as_results?);
        Ok(self.params.as_mut().unwrap())
    }

    /// Returns the signature param count.
    pub fn param_count(&self) -> usize {
        match &self.ty {
            BlockType::Void | BlockType::Single(_) => 0,
            BlockType::Func(f) => f.params().len(),
            BlockType::ABISig(sig) => sig.params_without_retptr().len(),
        }
    }

    /// Returns the signature return count.
    pub fn return_count(&self) -> usize {
        match &self.ty {
            BlockType::Void => 0,
            BlockType::Single(_) => 1,
            BlockType::Func(f) => f.returns().len(),
            BlockType::ABISig(sig) => sig.results().len(),
        }
    }
}

impl BlockType {
    /// Create a [BlockType::Void].
    pub fn void() -> Self {
        Self::Void
    }

    /// Create a [BlockType::Single] from the given [WasmValType].
    pub fn single(ty: WasmValType) -> Self {
        Self::Single(ty)
    }

    /// Create a [BlockType::Func] from the given [WasmFuncType].
    pub fn func(ty: WasmFuncType) -> Self {
        Self::Func(ty)
    }

    /// Create a [BlockType::ABISig].
    pub fn sig(sig: ABISig) -> Self {
        Self::ABISig(sig)
    }

    /// Returns true if the type of the block is [BlockType::ABISig].
    pub fn is_sig(&self) -> bool {
        match self {
            Self::ABISig(_) => true,
            _ => false,
        }
    }
}

/// The expected value and machine stack state when entering and exiting the block.
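///
/// As a rough illustration with hypothetical numbers: if, when entering a
/// block with a single stack param occupying 8 bytes and 16 bytes of stack
/// results, the stack pointer offset is 24 and the value stack holds 3
/// values, then `base_offset` is 24 - 8 = 16, `base_len` is 3 - 1 = 2,
/// `target_offset` is 16 + 16 = 32, and `target_len` is 2 plus the block's
/// result count.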
#[derive(Debug, Default, Copy, Clone)]
pub(crate) struct StackState {
    /// The base stack pointer offset.
    /// This offset is set when entering the block, after saving any live
    /// registers and locals.
    /// It is calculated by subtracting the size, in bytes, of any block
    /// params from the current stack pointer offset.
    pub base_offset: SPOffset,
    /// The target stack pointer offset.
    /// This offset is calculated by adding the size of the stack results
    /// to the base stack pointer offset.
    pub target_offset: SPOffset,
    /// The base length of the value stack when entering the block, i.e. the
    /// current length of the value stack minus any block parameters.
    pub base_len: usize,
    /// The target length of the value stack when exiting the block,
    /// calculated by adding the number of results to the base value stack
    /// length.
    pub target_len: usize,
}

/// Holds all the metadata to support the emission
/// of control flow instructions.
#[derive(Debug)]
pub(crate) enum ControlStackFrame {
    If {
        /// The if continuation label.
        cont: MachLabel,
        /// The exit label of the block.
        exit: MachLabel,
        /// The signature of the block.
        sig: BlockSig,
        /// The stack state of the block.
        stack_state: StackState,
        /// Local reachability state when entering the block.
        reachable: bool,
    },
    Else {
        /// The exit label of the block.
        exit: MachLabel,
        /// The signature of the block.
        sig: BlockSig,
        /// The stack state of the block.
        stack_state: StackState,
        /// Local reachability state when entering the block.
        reachable: bool,
    },
    Block {
        /// The block exit label.
        exit: MachLabel,
        /// The signature of the block.
        sig: BlockSig,
        /// The stack state of the block.
        stack_state: StackState,
        /// Exit state of the block.
        ///
        /// This flag is used to determine if a block is a branch
        /// target. By default, this is false, and it's updated when
        /// emitting a `br` or `br_if`.
        is_branch_target: bool,
    },
    Loop {
        /// The start of the Loop.
        head: MachLabel,
        /// The stack state of the block.
        stack_state: StackState,
        /// The signature of the block.
        sig: BlockSig,
    },
}

impl ControlStackFrame {
    /// Returns [`ControlStackFrame`] for an if.
    pub fn r#if<M: MacroAssembler>(
        sig: BlockSig,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<Self> {
        let mut control = Self::If {
            cont: masm.get_label()?,
            exit: masm.get_label()?,
            sig,
            reachable: context.reachable,
            stack_state: Default::default(),
        };

        control.emit(masm, context)?;
        Ok(control)
    }

    /// Returns [`ControlStackFrame`] for a block.
    pub fn block<M: MacroAssembler>(
        sig: BlockSig,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<Self> {
        let mut control = Self::Block {
            sig,
            is_branch_target: false,
            exit: masm.get_label()?,
            stack_state: Default::default(),
        };

        control.emit(masm, context)?;
        Ok(control)
    }

    /// Returns [`ControlStackFrame`] for a loop.
    pub fn r#loop<M: MacroAssembler>(
        sig: BlockSig,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<Self> {
        let mut control = Self::Loop {
            stack_state: Default::default(),
            sig,
            head: masm.get_label()?,
        };

        control.emit(masm, context)?;
        Ok(control)
    }

    fn init<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        self.calculate_stack_state(context, masm)?;
        // If the block has stack results, immediately resolve the return area
        // base.
        if self.results::<M>()?.on_stack() {
            let results_base = self.stack_state().target_offset;
            self.results::<M>()?.set_ret_area(RetArea::sp(results_base));
        }

        if self.is_if() || self.is_loop() {
            // Preemptively handle block params as results so that the params
            // are correctly placed in memory. This is especially
            // important for control flow joins with empty blocks:
            //
            // (module
            //   (func (export "params") (param i32) (result i32)
            //     (i32.const 2)
            //     (if (param i32) (result i32) (local.get 0)
            //       (then))
            //     (i32.const 3)
            //     (i32.add)
            //   )
            // )
            let base_offset = self.stack_state().base_offset;
            if self.params::<M>()?.on_stack() {
                let offset = base_offset.as_u32() + self.params::<M>()?.size();
                self.params::<M>()?
                    .set_ret_area(RetArea::sp(SPOffset::from_u32(offset)));
            }
            Self::top_abi_results_impl(
                self.params::<M>()?,
                context,
                masm,
                |params: &ABIResults, _, _| Ok(params.ret_area().copied()),
            )?;
        }
        Ok(())
    }

    /// Calculates the [StackState] of the block.
    fn calculate_stack_state<M: MacroAssembler>(
        &mut self,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<()> {
        use ControlStackFrame::*;
        let sig = self.sig();
        // If the block type contains a full [ABISig], do not take into account
        // the params, since these are the params of the function that is
        // currently being compiled and the value stack doesn't currently
        // contain any values anyway.
        let param_count = if sig.ty.is_sig() {
            0
        } else {
            sig.param_count()
        };
        let return_count = sig.return_count();
        ensure!(
            context.stack.len() >= param_count,
            CodeGenError::missing_values_in_stack()
        );
        let results_size = self.results::<M>()?.size();

        // Save any live registers and locals.
        context.spill(masm)?;

        let base_len = context.stack.len() - param_count;
        let stack_consumed = context.stack.sizeof(param_count);
        let current_sp = masm.sp_offset()?;
        let base_offset = SPOffset::from_u32(current_sp.as_u32() - stack_consumed);
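        // As a hypothetical example: with two block params occupying 8 bytes
        // of stack and a current SP offset of 24, `stack_consumed` is 8 and
        // `base_offset` is 16.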

        match self {
            If { stack_state, .. } | Block { stack_state, .. } | Loop { stack_state, .. } => {
                stack_state.base_offset = base_offset;
                stack_state.base_len = base_len;
                stack_state.target_offset = SPOffset::from_u32(base_offset.as_u32() + results_size);
                stack_state.target_len = base_len + return_count;
            }
            _ => {}
        }
        Ok(())
    }

    /// This function ensures that the state of the machine and value stacks
    /// is the right one when reaching a control frame branch in which
    /// reachability is restored or when reaching the end of a function in an
    /// unreachable state. This function is intended to be called when handling
    /// an unreachable else or end.
    ///
    /// This function will truncate the value stack to the base length of
    /// the control frame and will also set the stack pointer offset to reflect
    /// the offset expected by the target branch.
    ///
    // NB: This method is assumed to be called *before* pushing any block
    // results to the value stack, so that any excess values are cleaned up.
    pub fn ensure_stack_state<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        let state = self.stack_state();
        // This assumes that at jump sites, the machine stack pointer will be
        // adjusted to match the expectations of the target branch (e.g.
        // `target_offset`); after performing the jump, the MacroAssembler
        // implementation will soft-reset the stack pointer offset to its
        // original offset, ensuring that other parts of the program have
        // access to the right offset; this is especially important in
        // conditional branches.
        // When restoring reachability we ensure that the MacroAssembler offset
        // is set to match the expectations of the target branch, similar to how
        // the machine stack pointer was adjusted at jump sites.
        masm.reset_stack_pointer(state.target_offset)?;
        // We use the base length, because this function is assumed to be called
        // *before* pushing any results to the value stack. This way, any excess
        // values will be discarded.
        context.truncate_stack_to(state.base_len)
    }

    /// Return the type information of the block.
    pub fn sig(&self) -> &BlockSig {
        use ControlStackFrame::*;
        match self {
            If { sig, .. } | Else { sig, .. } | Loop { sig, .. } | Block { sig, .. } => sig,
        }
    }

    fn emit<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        use ControlStackFrame::*;

        // Do not perform any emissions if we are in an unreachable state.
        if !context.reachable {
            return Ok(());
        }

        match *self {
            If { cont, .. } => {
                // Pop the condition value.
                // Because in the case of `Self::If`, `Self::init` will top the
                // branch params, we exclude any result registers from being
                // used as the branch test.
                let top = context.without::<Result<TypedReg>, _, _>(
                    self.params::<M>()?.regs(),
                    masm,
                    |cx, masm| cx.pop_to_reg(masm, None),
                )??;
                self.init(masm, context)?;
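                // Branch to the continuation label if the condition value is
                // zero: comparing a register against itself with
                // `IntCmpKind::Eq` is used here as a test-for-zero.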
                masm.branch(
                    IntCmpKind::Eq,
                    top.reg.into(),
                    top.reg.into(),
                    cont,
                    OperandSize::S32,
                )?;
                context.free_reg(top);
                Ok(())
            }
            Block { .. } => self.init(masm, context),
            Loop { head, .. } => {
                self.init(masm, context)?;
                masm.bind(head)?;
                Ok(())
            }
            _ => Err(anyhow!(CodeGenError::if_control_frame_expected())),
        }
    }

    /// Handles the else branch if the current control stack frame is
    /// [`ControlStackFrame::If`].
    pub fn emit_else<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        ensure!(self.is_if(), CodeGenError::if_control_frame_expected());
        let state = self.stack_state();

        ensure!(
            state.target_len == context.stack.len(),
            CodeGenError::control_frame_state_mismatch()
        );
        self.pop_abi_results(context, masm, |results, _, _| {
            Ok(results.ret_area().copied())
        })?;
        masm.jmp(*self.exit_label().unwrap())?;
        self.bind_else(masm, context)?;
        Ok(())
    }

    /// Binds the else branch label and converts `self` to
    /// [`ControlStackFrame::Else`].
    pub fn bind_else<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        use ControlStackFrame::*;
        match self {
            If {
                cont,
                sig,
                stack_state,
                exit,
                ..
            } => {
                // Bind the else branch.
                masm.bind(*cont)?;

                // Push the abi results to the value stack, so that they are
                // used as params for the else branch. At the beginning of the
                // if block, any params are preemptively resolved as results;
                // when reaching the else all params are already materialized as
                // stack results. As part of ensuring the right state when
                // entering the else branch, the following snippet also soft
                // resets the stack pointer so that it matches the expectations
                // of the else branch: the stack pointer is expected to be at
                // the base stack pointer, plus the params stack size in bytes.
                let params_size = sig.params::<M>()?.size();
                context.push_abi_results::<M, _>(sig.params::<M>()?, masm, |params, _, _| {
                    params.ret_area().copied()
                })?;
                masm.reset_stack_pointer(SPOffset::from_u32(
                    stack_state.base_offset.as_u32() + params_size,
                ))?;

                // Update the stack control frame with an else control frame.
                *self = ControlStackFrame::Else {
                    exit: *exit,
                    stack_state: *stack_state,
                    reachable: context.reachable,
                    sig: sig.clone(),
                };
            }
            _ => bail!(CodeGenError::if_control_frame_expected()),
        }
        Ok(())
    }

    /// Handles the end of a control stack frame.
    pub fn emit_end<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        use ControlStackFrame::*;
        match self {
            If { stack_state, .. } | Else { stack_state, .. } | Block { stack_state, .. } => {
                ensure!(
                    stack_state.target_len == context.stack.len(),
                    CodeGenError::control_frame_state_mismatch()
                );
                // Before binding the exit label, we handle the block results.
                self.pop_abi_results(context, masm, |results, _, _| {
                    Ok(results.ret_area().copied())
                })?;
                self.bind_end(masm, context)?;
            }
            Loop { stack_state, .. } => {
                ensure!(
                    stack_state.target_len == context.stack.len(),
                    CodeGenError::control_frame_state_mismatch()
                );
            }
        };

        Ok(())
    }

    /// Binds the exit label of the current control stack frame and pushes the
    /// ABI results to the value stack.
    pub fn bind_end<M: MacroAssembler>(
        &mut self,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        self.push_abi_results(context, masm)?;
        self.bind_exit_label(masm)
    }

    /// Binds the exit label of the control stack frame.
    pub fn bind_exit_label<M: MacroAssembler>(&self, masm: &mut M) -> Result<()> {
        use ControlStackFrame::*;
        match self {
            // We use an explicit label to track the exit of an if block. In
            // case there's no else, we bind the if's continuation block to
            // make sure that any jumps from the if condition are reachable,
            // and we bind the explicit exit label as well to ensure that any
            // branching instructions are able to correctly reach the block's
            // end.
            If { cont, .. } => masm.bind(*cont)?,
            _ => {}
        }
        if let Some(label) = self.exit_label() {
            masm.bind(*label)?;
        }
        Ok(())
    }

    /// Returns the continuation label of the current control stack frame.
    pub fn label(&self) -> &MachLabel {
        use ControlStackFrame::*;

        match self {
            If { exit, .. } | Else { exit, .. } | Block { exit, .. } => exit,
            Loop { head, .. } => head,
        }
    }

    /// Returns the exit label of the current control stack frame. Note that
    /// this is similar to [`ControlStackFrame::label`], with the only
    /// difference that it returns `None` for `Loop` since its label doesn't
    /// represent an exit.
    pub fn exit_label(&self) -> Option<&MachLabel> {
        use ControlStackFrame::*;

        match self {
            If { exit, .. } | Else { exit, .. } | Block { exit, .. } => Some(exit),
            Loop { .. } => None,
        }
    }

    /// Set the current control stack frame as a branch target.
    pub fn set_as_target(&mut self) {
        match self {
            ControlStackFrame::Block {
                is_branch_target, ..
            } => {
                *is_branch_target = true;
            }
            _ => {}
        }
    }

    /// Returns [`crate::abi::ABIResults`] of the control stack frame
    /// block.
    pub fn results<M>(&mut self) -> Result<&mut ABIResults>
    where
        M: MacroAssembler,
    {
        use ControlStackFrame::*;

        match self {
            If { sig, .. } | Else { sig, .. } | Block { sig, .. } => sig.results::<M>(),
            Loop { sig, .. } => sig.params::<M>(),
        }
    }

    /// Returns the block params interpreted as [crate::abi::ABIResults].
    pub fn params<M>(&mut self) -> Result<&mut ABIResults>
    where
        M: MacroAssembler,
    {
        use ControlStackFrame::*;
        match self {
            If { sig, .. } | Else { sig, .. } | Block { sig, .. } | Loop { sig, .. } => {
                sig.params::<M>()
            }
        }
    }

    /// Orchestrates how block results are handled.
    /// Results are handled in reverse order, starting from register results
    /// and continuing with memory values. This guarantees that the stack
    /// ordering invariant is maintained. See [ABIResults] for more details.
    ///
    /// This function will iterate through each result and invoke the provided
    /// callback if there are results on the stack.
    ///
    /// Calculating the return area involves ensuring that there's enough stack
    /// space to store the block's results. To make the process of handling
    /// multiple results easier, this function will save all live registers and
    /// locals right after handling any register results. This will ensure that
    /// the top `n` values in the value stack are correctly placed in the memory
    /// locations corresponding to multiple stack results. Once the iteration
    /// over all the results is done, the stack result area of the block will be
    /// updated.
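    ///
    /// As a rough illustration (hypothetical result layout): for a block with
    /// results `[i32, i64, f32]` where the `f32` is assigned a register and
    /// the `i32` and `i64` are assigned stack slots, the results are popped
    /// in the order `f32` (register), then `i64`, then `i32` (memory).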
    pub fn pop_abi_results<M, F>(
        &mut self,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
        calculate_ret_area: F,
    ) -> Result<()>
    where
        M: MacroAssembler,
        F: FnMut(&ABIResults, &mut CodeGenContext<Emission>, &mut M) -> Result<Option<RetArea>>,
    {
        Self::pop_abi_results_impl(self.results::<M>()?, context, masm, calculate_ret_area)
    }

    /// Shared implementation for popping the ABI results.
    /// This is needed because, in some cases, params must be interpreted and
    /// used as the results of the block. When emitting code at control flow
    /// joins, the block params are interpreted as results, to ensure that they
    /// can correctly "flow" as the results of the block. This is especially
    /// important in the presence of empty then, else and loop blocks. This
    /// interpretation is an internal detail of the control module, and having
    /// a shared implementation allows the caller to decide how the
    /// results should be interpreted.
    pub fn pop_abi_results_impl<M, F>(
        results: &mut ABIResults,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
        mut calculate_ret_area: F,
    ) -> Result<()>
    where
        M: MacroAssembler,
        F: FnMut(&ABIResults, &mut CodeGenContext<Emission>, &mut M) -> Result<Option<RetArea>>,
    {
        let mut iter = results.operands().iter().rev().peekable();

        while let Some(ABIOperand::Reg { reg, .. }) = iter.peek() {
            let TypedReg { reg, .. } = context.pop_to_reg(masm, Some(*reg))?;
            context.free_reg(reg);
            iter.next().unwrap();
        }

        let ret_area = calculate_ret_area(results, context, masm)?;

        let retptr = Self::maybe_load_retptr(ret_area.as_ref(), &results, context, masm)?;
        if let Some(area) = ret_area {
            if area.is_sp() {
                Self::ensure_ret_area(&area, context, masm)?;
            }
        }

        if let Some(retptr) = retptr {
            while let Some(ABIOperand::Stack { offset, .. }) = iter.peek() {
                let addr = masm.address_at_reg(retptr, *offset)?;
                context.pop_to_addr(masm, addr)?;
                iter.next().unwrap();
            }
            context.free_reg(retptr);
        }

        if let Some(area) = ret_area {
            if area.is_sp() {
                Self::adjust_stack_results(area, results, context, masm)?;
            }
        }

        Ok(())
    }

    /// Convenience wrapper around [CodeGenContext::push_abi_results] using the
    /// results of the current frame.
    fn push_abi_results<M>(
        &mut self,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<()>
    where
        M: MacroAssembler,
    {
        context.push_abi_results(self.results::<M>()?, masm, |results, _, _| {
            results.ret_area().copied()
        })
    }

    /// Preemptively handles the ABI results of the current frame.
    /// This function is meant to be used when emitting control flow with joins,
    /// in which it's not possible to know at compile time which branch will be
    /// taken.
    pub fn top_abi_results<M, F>(
        &mut self,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
        calculate_ret_area: F,
    ) -> Result<()>
    where
        M: MacroAssembler,
        F: FnMut(&ABIResults, &mut CodeGenContext<Emission>, &mut M) -> Result<Option<RetArea>>,
    {
        Self::top_abi_results_impl::<M, _>(self.results::<M>()?, context, masm, calculate_ret_area)
    }

    /// Internal implementation of [Self::top_abi_results].
    /// See [Self::pop_abi_results_impl] on why an internal implementation is
    /// needed.
    fn top_abi_results_impl<M, F>(
        results: &mut ABIResults,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
        mut calculate_ret_area: F,
    ) -> Result<()>
    where
        M: MacroAssembler,
        F: FnMut(&ABIResults, &mut CodeGenContext<Emission>, &mut M) -> Result<Option<RetArea>>,
    {
        let mut area = None;
        Self::pop_abi_results_impl::<M, _>(results, context, masm, |r, context, masm| {
            area = calculate_ret_area(r, context, masm)?;
            Ok(area)
        })?;
        // Use the previously calculated area to ensure that the ret area is
        // kept in sync between both operations.
        context.push_abi_results::<M, _>(results, masm, |_, _, _| area)
    }

    // If the results on the stack are handled via the stack pointer, ensure
    // that the stack results are correctly located. In general, since values in
    // the value stack are spilled when exiting the block, the top `n` entries
    // in the value stack, representing the `n` stack results of the block, are
    // almost correctly located. However, since constants are not
    // spilled, their presence complicates block exits. For this reason, the
    // last step for finalizing multiple block results involves:
    // * Scanning the value stack from oldest to newest memory values and
    //   calculating the source and destination of each value; if the source
    //   is closer to the stack pointer (greater) than the destination,
    //   perform a memory move of the bytes to its destination, else stop,
    //   because the memory values are in place.
    // * Scanning the value stack from newest to oldest and calculating the
    //   source and destination of each value; if the source is closer to the
    //   frame pointer (less) than the destination, perform a memory move of
    //   the bytes to its destination, else stop, because the memory values
    //   are in place.
    // * Lastly, iterate over the top `n` elements of the value stack,
    //   and spill any constant values, placing them in their respective
    //   memory location.
    //
    // The implementation in Winch is inspired by how this is handled in
    // SpiderMonkey's WebAssembly Baseline Compiler:
    // https://wingolog.org/archives/2020/04/03/multi-value-webassembly-in-firefox-from-1-to-n
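    //
    // As a rough sketch with hypothetical offsets: suppose a block has two
    // stack results whose return area expects them at SP offsets 8 and 16,
    // but the spilled values currently live at SP offsets 4 and 12. Both
    // sources are closer to the frame pointer (smaller offsets) than their
    // destinations, so the second pass moves them from higher to lower
    // machine addresses. If instead an `i32.const 7` sat on top of the value
    // stack, the third pass would store the immediate 7 directly into its
    // slot in the return area.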
    fn adjust_stack_results<M>(
        ret_area: RetArea,
        results: &ABIResults,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<()>
    where
        M: MacroAssembler,
    {
        ensure!(ret_area.is_sp(), CodeGenError::sp_addressing_expected());
        let results_offset = ret_area.unwrap_sp();

        // Start iterating from memory values that are closer to the
        // frame pointer (oldest entries first).
        for (i, operand) in results.operands().iter().enumerate() {
            if operand.is_reg() {
                break;
            }

            let value_index = (context.stack.len() - results.stack_operands_len()) + i;
            let val = context.stack.inner()[value_index];

            match (val, operand) {
                (Val::Memory(mem), ABIOperand::Stack { offset, size, .. }) => {
                    let dst = results_offset.as_u32() - *offset;
                    let src = mem.slot.offset;

                    // Values are moved from lower (SP) to higher (FP)
                    // addresses.
                    if src.as_u32() <= dst {
                        break;
                    }

                    masm.memmove(
                        src,
                        SPOffset::from_u32(dst),
                        *size,
                        MemMoveDirection::LowToHigh,
                    )?;
                }
                _ => {}
            }
        }

        // Start iterating from memory values that are closer to the
        // stack pointer (newest entries first).
        for (i, operand) in results
            .operands()
            .iter()
            .rev()
            // Skip any register results.
            .skip(results.regs().len())
            .enumerate()
        {
            let value_index = context.stack.len() - i - 1;
            let val = context.stack.inner()[value_index];
            match (val, operand) {
                (Val::Memory(mem), ABIOperand::Stack { offset, size, .. }) => {
                    let dst = results_offset.as_u32() - *offset;
                    let src = mem.slot.offset;

                    // Values are moved from higher (FP) to lower (SP)
                    // addresses.
                    if src.as_u32() >= dst {
                        break;
                    }

                    masm.memmove(
                        src,
                        SPOffset::from_u32(dst),
                        *size,
                        MemMoveDirection::HighToLow,
                    )?;
                }
                _ => {}
            }
        }

        // Finally, store any constants in the value stack in their respective
        // locations.
        for operand in results
            .operands()
            .iter()
            .take(results.stack_operands_len())
            .rev()
        {
            // Constants are stored starting from the newest entry, essentially
            // from top to bottom in the iteration of the operands.
            match (operand, context.stack.peek().unwrap()) {
                (ABIOperand::Stack { ty, offset, .. }, Val::I32(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::i32(*v), addr, (*ty).try_into()?)?;
                }
                (ABIOperand::Stack { ty, offset, .. }, Val::I64(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::i64(*v), addr, (*ty).try_into()?)?;
                }
                (ABIOperand::Stack { ty, offset, .. }, Val::F32(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::f32(v.bits()), addr, (*ty).try_into()?)?;
                }
                (ABIOperand::Stack { ty, offset, .. }, Val::F64(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::f64(v.bits()), addr, (*ty).try_into()?)?;
                }
                (ABIOperand::Stack { ty, offset, .. }, Val::V128(v)) => {
                    let addr = masm
                        .address_from_sp(SPOffset::from_u32(results_offset.as_u32() - *offset))?;
                    masm.store(RegImm::v128(*v), addr, (*ty).try_into()?)?;
                }
                (_, v) => debug_assert!(v.is_mem()),
            }

            let _ = context.stack.pop().unwrap();
        }

        // Adjust any excess stack space: the stack space after handling the
        // block's results should be the exact amount needed by the return area.
        ensure!(
            masm.sp_offset()?.as_u32() >= results_offset.as_u32(),
            CodeGenError::invalid_sp_offset()
        );
        masm.free_stack(masm.sp_offset()?.as_u32() - results_offset.as_u32())?;
        Ok(())
    }

    /// Ensures that there is enough space for return values on the stack.
    /// This function is called at the end of all blocks and when branching from
    /// within blocks.
    fn ensure_ret_area<M>(
        ret_area: &RetArea,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<()>
    where
        M: MacroAssembler,
    {
        ensure!(ret_area.is_sp(), CodeGenError::sp_addressing_expected());
        // Save any live registers and locals when exiting the block to ensure
        // that the respective values are correctly located in memory.
        // See [Self::adjust_stack_results] for more details.
        context.spill(masm)?;
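        // For example (hypothetical numbers): if the return area ends at SP
        // offset 32 but the current SP offset is 24, reserve the 8 missing
        // bytes so that stores into the return area are within bounds.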
        if ret_area.unwrap_sp() > masm.sp_offset()? {
            masm.reserve_stack(ret_area.unwrap_sp().as_u32() - masm.sp_offset()?.as_u32())?
        }

        Ok(())
    }

    /// Loads the return pointer, if it exists, into the next available register.
    fn maybe_load_retptr<M>(
        ret_area: Option<&RetArea>,
        results: &ABIResults,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<Option<Reg>>
    where
        M: MacroAssembler,
    {
        if let Some(area) = ret_area {
            match area {
                RetArea::Slot(slot) => {
                    let base = context.without::<Result<Reg>, M, _>(
                        results.regs(),
                        masm,
                        |cx, masm| cx.any_gpr(masm),
                    )??;
                    let local_addr = masm.local_address(&slot)?;
                    masm.load_ptr(local_addr, writable!(base))?;
                    Ok(Some(base))
                }
                _ => Ok(None),
            }
        } else {
            Ok(None)
        }
    }

    /// This function is used at the end of unreachable code handling
    /// to determine if the reachability status should be updated.
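    ///
    /// As a sketch, in the following (hypothetical) snippet the code after
    /// `end` is reachable again because the block is a branch target:
    ///
    /// ```wat
    /// (block
    ///   (br 0)
    ///   (unreachable))
    /// ;; reachable again here
    /// ```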
    pub fn is_next_sequence_reachable(&self) -> bool {
        use ControlStackFrame::*;

        match self {
            // For if/else, the reachability of the next sequence is determined
            // by the reachability state at the start of the block. An else
            // block will be reachable if the if block is also reachable at
            // entry.
            If { reachable, .. } | Else { reachable, .. } => *reachable,
            // For blocks, the reachability of the next sequence is determined
            // by whether they're a branch target.
            Block {
                is_branch_target, ..
            } => *is_branch_target,
            // Loops are not used for reachability analysis,
            // given that they don't have exit branches.
            Loop { .. } => false,
        }
    }

    /// Returns a reference to the [StackState] of the block.
    pub fn stack_state(&self) -> &StackState {
        use ControlStackFrame::*;
        match self {
            If { stack_state, .. }
            | Else { stack_state, .. }
            | Block { stack_state, .. }
            | Loop { stack_state, .. } => stack_state,
        }
    }

    /// Returns true if the current frame is [ControlStackFrame::If].
    pub fn is_if(&self) -> bool {
        match self {
            Self::If { .. } => true,
            _ => false,
        }
    }

    /// Returns true if the current frame is [ControlStackFrame::Loop].
    pub fn is_loop(&self) -> bool {
        match self {
            Self::Loop { .. } => true,
            _ => false,
        }
    }
}