use crate::constant_hash::Table;
use alloc::vec::Vec;
use core::fmt::{self, Display, Formatter};
use core::ops::{Deref, DerefMut};
use core::str::FromStr;

#[cfg(feature = "enable-serde")]
use serde_derive::{Deserialize, Serialize};

use crate::bitset::ScalarBitSet;
use crate::entity;
use crate::ir::{
    self, Block, ExceptionTable, ExceptionTables, FuncRef, MemFlags, SigRef, StackSlot, Type,
    Value,
    condcodes::{FloatCC, IntCC},
    trapcode::TrapCode,
    types,
};

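/// Some instructions use an external list of argument values because there is not enough space
/// in the 16-byte `InstructionData` struct. These value lists are stored in a memory pool in
/// `dfg.value_lists`.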
pub type ValueList = entity::EntityList<Value>;

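/// Memory pool for holding value lists. See `ValueList`.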
pub type ValueListPool = entity::ListPool<Value>;

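/// A block being called in a branch, together with the arguments passed to it.
///
/// Internally this is a single `ValueList`: the first element encodes the destination `Block`
/// reinterpreted as a `Value`, and the remaining elements are `BlockArg`s encoded as `Value`s.
///
/// A minimal usage sketch (the pool normally lives in the function's DFG; the literal indices
/// below are only for illustration):
///
/// ```ignore
/// let mut pool = ValueListPool::new();
/// let call = BlockCall::new(Block::from_u32(0), [BlockArg::Value(Value::from_u32(7))], &mut pool);
/// assert_eq!(call.block(&pool), Block::from_u32(0));
/// assert_eq!(call.len(&pool), 1);
/// ```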
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct BlockCall {
    /// The underlying storage: element 0 is the destination `Block` encoded as a `Value`, and
    /// every following element is a `BlockArg` encoded as a `Value`.
    values: entity::EntityList<Value>,
}

impl BlockCall {
    // `Block` and `Value` are both thin `u32` indices, so the destination block is stored in the
    // value list by reinterpreting its index.
    fn value_to_block(val: Value) -> Block {
        Block::from_u32(val.as_u32())
    }

    fn block_to_value(block: Block) -> Value {
        Value::from_u32(block.as_u32())
    }

    /// Construct a `BlockCall` with the given block and arguments.
    pub fn new(
        block: Block,
        args: impl IntoIterator<Item = BlockArg>,
        pool: &mut ValueListPool,
    ) -> Self {
        let mut values = ValueList::default();
        values.push(Self::block_to_value(block), pool);
        values.extend(args.into_iter().map(|arg| arg.encode_as_value()), pool);
        Self { values }
    }

    /// Return the block for this `BlockCall`.
    pub fn block(&self, pool: &ValueListPool) -> Block {
        let val = self.values.first(pool).unwrap();
        Self::value_to_block(val)
    }

    /// Replace the block for this `BlockCall`.
    pub fn set_block(&mut self, block: Block, pool: &mut ValueListPool) {
        *self.values.get_mut(0, pool).unwrap() = Self::block_to_value(block);
    }

    /// Append an argument to the block args.
    pub fn append_argument(&mut self, arg: impl Into<BlockArg>, pool: &mut ValueListPool) {
        self.values.push(arg.into().encode_as_value(), pool);
    }

    /// Return the number of arguments in this block call.
    pub fn len(&self, pool: &ValueListPool) -> usize {
        self.values.len(pool) - 1
    }

    /// Return an iterator over the arguments of this block call.
    pub fn args<'a>(
        &self,
        pool: &'a ValueListPool,
    ) -> impl ExactSizeIterator<Item = BlockArg> + DoubleEndedIterator<Item = BlockArg> + use<'a>
    {
        self.values.as_slice(pool)[1..]
            .iter()
            .map(|value| BlockArg::decode_from_value(*value))
    }

    /// Rewrite the arguments of this block call in place using `f`.
    pub fn update_args<F: FnMut(BlockArg) -> BlockArg>(
        &mut self,
        pool: &mut ValueListPool,
        mut f: F,
    ) {
        for raw in self.values.as_mut_slice(pool)[1..].iter_mut() {
            let new = f(BlockArg::decode_from_value(*raw));
            *raw = new.encode_as_value();
        }
    }

    /// Remove the argument at position `ix` from this block call.
    pub fn remove(&mut self, ix: usize, pool: &mut ValueListPool) {
        self.values.remove(1 + ix, pool)
    }

    /// Remove all arguments from this block call.
    pub fn clear(&mut self, pool: &mut ValueListPool) {
        self.values.truncate(1, pool)
    }

    /// Append the given arguments to this block call.
    pub fn extend<I, T>(&mut self, elements: I, pool: &mut ValueListPool)
    where
        I: IntoIterator<Item = T>,
        T: Into<BlockArg>,
    {
        self.values.extend(
            elements
                .into_iter()
                .map(|elem| elem.into().encode_as_value()),
            pool,
        )
    }

    /// Return a value that can display this block call with the given value list pool.
    pub fn display<'a>(&self, pool: &'a ValueListPool) -> DisplayBlockCall<'a> {
        DisplayBlockCall { block: *self, pool }
    }

    /// Deep-clone the underlying list in the same pool. The returned `BlockCall` has identical
    /// contents, but changes to one will not affect the other.
    pub fn deep_clone(&self, pool: &mut ValueListPool) -> Self {
        Self {
            values: self.values.deep_clone(pool),
        }
    }
}

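/// Wrapper for the context needed to display a [`BlockCall`] value.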
pub struct DisplayBlockCall<'a> {
    block: BlockCall,
    pool: &'a ValueListPool,
}

impl<'a> Display for DisplayBlockCall<'a> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.block.block(&self.pool))?;
        if self.block.len(self.pool) > 0 {
            write!(f, "(")?;
            for (ix, arg) in self.block.args(self.pool).enumerate() {
                if ix > 0 {
                    write!(f, ", ")?;
                }
                write!(f, "{arg}")?;
            }
            write!(f, ")")?;
        }
        Ok(())
    }
}

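/// A single argument to a [`BlockCall`], packed into a `Value`-sized encoding.
///
/// Most block-call arguments are ordinary SSA values, but the block calls attached to a
/// `try_call`'s exception table can also refer to the call's own return values and exception
/// payloads.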
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum BlockArg {
    /// An ordinary SSA value.
    Value(Value),

    /// The `i`th return value of a `try_call`, printed as `ret{i}`.
    TryCallRet(u32),

    /// The `i`th exception payload value of a `try_call`, printed as `exn{i}`.
    TryCallExn(u32),
}

impl BlockArg {
    /// Encode this argument as a `Value`: a 2-bit tag in the top bits of the index, with the
    /// payload in the low 30 bits.
    fn encode_as_value(&self) -> Value {
        let (tag, payload) = match *self {
            BlockArg::Value(v) => (0, v.as_bits()),
            BlockArg::TryCallRet(i) => (1, i),
            BlockArg::TryCallExn(i) => (2, i),
        };
        assert!(payload < (1 << 30));
        let raw = (tag << 30) | payload;
        Value::from_bits(raw)
    }

    /// Decode a `BlockArg` from the encoding produced by `encode_as_value`.
    fn decode_from_value(v: Value) -> Self {
        let raw = v.as_u32();
        let tag = raw >> 30;
        let payload = raw & ((1 << 30) - 1);
        match tag {
            0 => BlockArg::Value(Value::from_bits(payload)),
            1 => BlockArg::TryCallRet(payload),
            2 => BlockArg::TryCallExn(payload),
            _ => unreachable!(),
        }
    }

    /// If this argument is an ordinary SSA value, return it.
    pub fn as_value(&self) -> Option<Value> {
        match *self {
            BlockArg::Value(v) => Some(v),
            _ => None,
        }
    }

    /// Apply `f` to the SSA value, if any; other variants are returned unchanged.
    pub fn map_value<F: FnMut(Value) -> Value>(&self, mut f: F) -> Self {
        match *self {
            BlockArg::Value(v) => BlockArg::Value(f(v)),
            other => other,
        }
    }
}

impl Display for BlockArg {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match self {
            BlockArg::Value(v) => write!(f, "{v}"),
            BlockArg::TryCallRet(i) => write!(f, "ret{i}"),
            BlockArg::TryCallExn(i) => write!(f, "exn{i}"),
        }
    }
}

impl From<Value> for BlockArg {
    fn from(value: Value) -> BlockArg {
        BlockArg::Value(value)
    }
}

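// The `Opcode` and `InstructionData` definitions, together with the opcode tables referenced
// below (`OPCODE_FORMAT`, `OPCODE_CONSTRAINTS`, `OPCODE_HASH_TABLE`, `OPERAND_CONSTRAINTS`,
// `TYPE_SETS`) and `opcode_name`, are provided by the generated `opcodes.rs`.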
include!(concat!(env!("OUT_DIR"), "/opcodes.rs"));

impl Display for Opcode {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", opcode_name(*self))
    }
}

impl Opcode {
    /// Get the instruction format for this opcode.
    pub fn format(self) -> InstructionFormat {
        OPCODE_FORMAT[self as usize - 1]
    }

    /// Get the value type constraints for this opcode.
    pub fn constraints(self) -> OpcodeConstraints {
        OPCODE_CONSTRAINTS[self as usize - 1]
    }

    /// Is this opcode a GC safepoint? All non-tail calls are considered safepoints.
    #[inline]
    pub fn is_safepoint(self) -> bool {
        self.is_call() && !self.is_return()
    }
}

impl FromStr for Opcode {
    type Err = &'static str;

    fn from_str(s: &str) -> Result<Self, &'static str> {
        use crate::constant_hash::{probe, simple_hash};

        match probe::<&str, [Option<Self>]>(&OPCODE_HASH_TABLE, s, simple_hash(s)) {
            Err(_) => Err("Unknown opcode"),
            Ok(i) => Ok(OPCODE_HASH_TABLE[i].unwrap()),
        }
    }
}

impl<'a> Table<&'a str> for [Option<Opcode>] {
    fn len(&self) -> usize {
        self.len()
    }

    fn key(&self, idx: usize) -> Option<&'a str> {
        self[idx].map(opcode_name)
    }
}

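/// A variable list of `Value` operands used for function call arguments and passing arguments to
/// basic blocks.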
#[derive(Clone, Debug)]
pub struct VariableArgs(Vec<Value>);

impl VariableArgs {
    /// Create an empty argument list.
    pub fn new() -> Self {
        Self(Vec::new())
    }

    /// Add a single value to the list.
    pub fn push(&mut self, v: Value) {
        self.0.push(v)
    }

    /// Check if the list is empty.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Convert this into a pooled `ValueList` with the `fixed` arguments prepended.
    pub fn into_value_list(self, fixed: &[Value], pool: &mut ValueListPool) -> ValueList {
        let mut vlist = ValueList::default();
        vlist.extend(fixed.iter().cloned(), pool);
        vlist.extend(self.0, pool);
        vlist
    }
}

impl Deref for VariableArgs {
    type Target = [Value];

    fn deref(&self) -> &[Value] {
        &self.0
    }
}

impl DerefMut for VariableArgs {
    fn deref_mut(&mut self) -> &mut [Value] {
        &mut self.0
    }
}

impl Display for VariableArgs {
    fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
        for (i, val) in self.0.iter().enumerate() {
            if i == 0 {
                write!(fmt, "{val}")?;
            } else {
                write!(fmt, ", {val}")?;
            }
        }
        Ok(())
    }
}

impl Default for VariableArgs {
    fn default() -> Self {
        Self::new()
    }
}

impl InstructionData {
    /// Get the destinations of this instruction, if it's a branch.
    ///
    /// Returns an empty slice for non-branch instructions; `br_table` and `try_call` destinations
    /// are looked up in the side tables passed in.
    pub fn branch_destination<'a>(
        &'a self,
        jump_tables: &'a ir::JumpTables,
        exception_tables: &'a ir::ExceptionTables,
    ) -> &'a [BlockCall] {
        match self {
            Self::Jump { destination, .. } => std::slice::from_ref(destination),
            Self::Brif { blocks, .. } => blocks.as_slice(),
            Self::BranchTable { table, .. } => jump_tables.get(*table).unwrap().all_branches(),
            Self::TryCall { exception, .. } | Self::TryCallIndirect { exception, .. } => {
                exception_tables.get(*exception).unwrap().all_branches()
            }
            _ => {
                debug_assert!(!self.opcode().is_branch());
                &[]
            }
        }
    }

    /// Get mutable access to the destinations of this instruction, if it's a branch.
    pub fn branch_destination_mut<'a>(
        &'a mut self,
        jump_tables: &'a mut ir::JumpTables,
        exception_tables: &'a mut ir::ExceptionTables,
    ) -> &'a mut [BlockCall] {
        match self {
            Self::Jump { destination, .. } => std::slice::from_mut(destination),
            Self::Brif { blocks, .. } => blocks.as_mut_slice(),
            Self::BranchTable { table, .. } => {
                jump_tables.get_mut(*table).unwrap().all_branches_mut()
            }
            Self::TryCall { exception, .. } | Self::TryCallIndirect { exception, .. } => {
                exception_tables
                    .get_mut(*exception)
                    .unwrap()
                    .all_branches_mut()
            }
            _ => {
                debug_assert!(!self.opcode().is_branch());
                &mut []
            }
        }
    }

    /// Replace all value uses in this instruction, including block-call arguments, using the
    /// mapping function `f`.
    pub fn map_values(
        &mut self,
        pool: &mut ValueListPool,
        jump_tables: &mut ir::JumpTables,
        exception_tables: &mut ir::ExceptionTables,
        mut f: impl FnMut(Value) -> Value,
    ) {
        for arg in self.arguments_mut(pool) {
            *arg = f(*arg);
        }

        for block in self.branch_destination_mut(jump_tables, exception_tables) {
            block.update_args(pool, |arg| arg.map_value(|val| f(val)));
        }
    }

    /// If this is a trapping instruction, get its trap code; otherwise `None`.
    pub fn trap_code(&self) -> Option<TrapCode> {
        match *self {
            Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }

    /// If this is an integer comparison, get its condition code.
    pub fn cond_code(&self) -> Option<IntCC> {
        match self {
            &InstructionData::IntCompare { cond, .. }
            | &InstructionData::IntCompareImm { cond, .. } => Some(cond),
            _ => None,
        }
    }

    /// If this is a floating-point comparison, get its condition code.
    pub fn fp_cond_code(&self) -> Option<FloatCC> {
        match self {
            &InstructionData::FloatCompare { cond, .. } => Some(cond),
            _ => None,
        }
    }

    /// If this is a trapping instruction, get a mutable reference to its trap code.
    pub fn trap_code_mut(&mut self) -> Option<&mut TrapCode> {
        match self {
            Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }

    /// If this is an atomic read-modify-write instruction, get its operation.
    pub fn atomic_rmw_op(&self) -> Option<ir::AtomicRmwOp> {
        match self {
            &InstructionData::AtomicRmw { op, .. } => Some(op),
            _ => None,
        }
    }

    /// If this is a load or store instruction with an immediate offset, get that offset.
    pub fn load_store_offset(&self) -> Option<i32> {
        match self {
            &InstructionData::Load { offset, .. }
            | &InstructionData::StackLoad { offset, .. }
            | &InstructionData::Store { offset, .. }
            | &InstructionData::StackStore { offset, .. } => Some(offset.into()),
            _ => None,
        }
    }

    /// If this is a memory access instruction, get its memory flags.
    pub fn memflags(&self) -> Option<MemFlags> {
        match self {
            &InstructionData::Load { flags, .. }
            | &InstructionData::LoadNoOffset { flags, .. }
            | &InstructionData::Store { flags, .. }
            | &InstructionData::StoreNoOffset { flags, .. }
            | &InstructionData::AtomicCas { flags, .. }
            | &InstructionData::AtomicRmw { flags, .. } => Some(flags),
            _ => None,
        }
    }

    /// If this is a stack load or store, get its stack slot.
    pub fn stack_slot(&self) -> Option<StackSlot> {
        match self {
            &InstructionData::StackStore { stack_slot, .. }
            | &InstructionData::StackLoad { stack_slot, .. } => Some(stack_slot),
            _ => None,
        }
    }

    /// Analyze this instruction as a function call, returning the callee and argument values if
    /// it is one.
    pub fn analyze_call<'a>(
        &'a self,
        pool: &'a ValueListPool,
        exception_tables: &ExceptionTables,
    ) -> CallInfo<'a> {
        match *self {
            Self::Call {
                func_ref, ref args, ..
            } => CallInfo::Direct(func_ref, args.as_slice(pool)),
            Self::CallIndirect {
                sig_ref, ref args, ..
            } => CallInfo::Indirect(sig_ref, &args.as_slice(pool)[1..]),
            Self::TryCall {
                func_ref,
                ref args,
                exception,
                ..
            } => {
                let exdata = &exception_tables[exception];
                CallInfo::DirectWithSig(func_ref, exdata.signature(), args.as_slice(pool))
            }
            Self::TryCallIndirect {
                exception,
                ref args,
                ..
            } => {
                let exdata = &exception_tables[exception];
                CallInfo::Indirect(exdata.signature(), &args.as_slice(pool)[1..])
            }
            Self::Ternary {
                opcode: Opcode::StackSwitch,
                ..
            } => {
                // `stack_switch` is classified as a call opcode, but it has no callee or
                // argument list to report here.
                CallInfo::NotACall
            }
            _ => {
                debug_assert!(!self.opcode().is_call());
                CallInfo::NotACall
            }
        }
    }

    /// Mask the immediate operand to the width of the controlling type, for the opcodes that
    /// require it.
    #[inline]
    pub(crate) fn mask_immediates(&mut self, ctrl_typevar: Type) {
        if ctrl_typevar.is_invalid() {
            return;
        }

        let bit_width = ctrl_typevar.bits();

        match self {
            Self::UnaryImm { opcode: _, imm } => {
                *imm = imm.mask_to_width(bit_width);
            }
            Self::BinaryImm64 {
                opcode,
                arg: _,
                imm,
            } => {
                if *opcode == Opcode::SdivImm || *opcode == Opcode::SremImm {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            Self::IntCompareImm {
                opcode,
                arg: _,
                cond,
                imm,
            } => {
                debug_assert_eq!(*opcode, Opcode::IcmpImm);
                if cond.unsigned() != *cond {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            _ => {}
        }
    }

    /// If this instruction has an exception table (`try_call` or `try_call_indirect`), return it.
    pub fn exception_table(&self) -> Option<ExceptionTable> {
        match self {
            Self::TryCall { exception, .. } | Self::TryCallIndirect { exception, .. } => {
                Some(*exception)
            }
            _ => None,
        }
    }
}

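/// Information about call instructions, as returned by `InstructionData::analyze_call`.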
pub enum CallInfo<'a> {
    /// This is not a call instruction.
    NotACall,

    /// This is a direct call to an external function declared in the preamble. See
    /// `DataFlowGraph.ext_funcs`.
    Direct(FuncRef, &'a [Value]),

    /// This is an indirect call with the specified signature. See `DataFlowGraph.signatures`.
    Indirect(SigRef, &'a [Value]),

    /// This is a direct call whose signature is also known, e.g. a `try_call` whose exception
    /// table carries the signature.
    DirectWithSig(FuncRef, SigRef, &'a [Value]),
}

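/// Value type constraints for a given opcode, as returned by `Opcode::constraints()`.
///
/// The constraints are bit-packed; the offsets index into the generated `TYPE_SETS` and
/// `OPERAND_CONSTRAINTS` tables.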
#[derive(Clone, Copy)]
pub struct OpcodeConstraints {
    /// Bit-packed flags:
    ///
    /// - Bits 0-2: number of fixed result values.
    /// - Bit 3: `use_typevar_operand`.
    /// - Bit 4: `requires_typevar_operand`.
    /// - Bits 5-7: number of fixed value arguments.
    flags: u8,

    /// Offset into `TYPE_SETS` for the controlling type variable, or a value past the end of the
    /// table for non-polymorphic opcodes.
    typeset_offset: u8,

    /// Offset into `OPERAND_CONSTRAINTS` where this opcode's result constraints are stored,
    /// followed by its value operand constraints.
    constraint_offset: u16,
}

impl OpcodeConstraints {
    /// Can the controlling type variable for this opcode be inferred from the designated value
    /// input operand?
    pub fn use_typevar_operand(self) -> bool {
        (self.flags & 0x8) != 0
    }

    /// Is it necessary to look at the designated value input operand in order to determine the
    /// controlling type variable, or can it be inferred from the result type?
    pub fn requires_typevar_operand(self) -> bool {
        (self.flags & 0x10) != 0
    }

    /// Get the number of *fixed* results produced by this opcode, not counting variable results
    /// produced by a call.
    pub fn num_fixed_results(self) -> usize {
        (self.flags & 0x7) as usize
    }

    /// Get the number of *fixed* value arguments required by this opcode, not counting variable
    /// arguments supplied in a value list.
    pub fn num_fixed_value_arguments(self) -> usize {
        ((self.flags >> 5) & 0x7) as usize
    }

    /// Get the offset into `TYPE_SETS` for the controlling type variable, or `None` if the
    /// opcode is not polymorphic.
    fn typeset_offset(self) -> Option<usize> {
        let offset = usize::from(self.typeset_offset);
        if offset < TYPE_SETS.len() {
            Some(offset)
        } else {
            None
        }
    }

    /// Get the offset into `OPERAND_CONSTRAINTS`.
    fn constraint_offset(self) -> usize {
        self.constraint_offset as usize
    }

    /// Get the value type of result number `n`, having resolved the controlling type variable to
    /// `ctrl_type`.
    pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
        debug_assert!(n < self.num_fixed_results(), "Invalid result index");
        match OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type) {
            ResolvedConstraint::Bound(t) => t,
            ResolvedConstraint::Free(ts) => panic!("Result constraints can't be free: {ts:?}"),
        }
    }

    /// Get the value type of input value number `n`, having resolved the controlling type
    /// variable to `ctrl_type`. Unlike results, an input may be free to vary within a type set.
    pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
        debug_assert!(
            n < self.num_fixed_value_arguments(),
            "Invalid value argument index"
        );
        let offset = self.constraint_offset() + self.num_fixed_results();
        OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
    }

    /// Get the typeset of allowed types for the controlling type variable, if this opcode is
    /// polymorphic.
    pub fn ctrl_typeset(self) -> Option<ValueTypeSet> {
        self.typeset_offset().map(|offset| TYPE_SETS[offset])
    }

    /// Is this instruction polymorphic?
    pub fn is_polymorphic(self) -> bool {
        self.ctrl_typeset().is_some()
    }
}

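// Compact bit set aliases used by `ValueTypeSet` below.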
type BitSet8 = ScalarBitSet<u8>;
type BitSet16 = ScalarBitSet<u16>;

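/// A value type set describes the permitted set of types for a type variable.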
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct ValueTypeSet {
    /// Allowed lane counts, as a bit set of log2 values (bit 0 = scalar).
    pub lanes: BitSet16,
    /// Allowed integer lane widths, as a bit set of log2 bit counts.
    pub ints: BitSet8,
    /// Allowed floating-point lane widths, as a bit set of log2 bit counts.
    pub floats: BitSet8,
    /// Allowed minimum lane counts for dynamic vector types, as a bit set of log2 values.
    pub dynamic_lanes: BitSet16,
}

impl ValueTypeSet {
    /// Is `scalar` part of the base type set? Ignores lane counts.
    fn is_base_type(self, scalar: Type) -> bool {
        let l2b = u8::try_from(scalar.log2_lane_bits()).unwrap();
        if scalar.is_int() {
            self.ints.contains(l2b)
        } else if scalar.is_float() {
            self.floats.contains(l2b)
        } else {
            false
        }
    }

    /// Does `typ` belong to this set?
    pub fn contains(self, typ: Type) -> bool {
        if typ.is_dynamic_vector() {
            let l2l = u8::try_from(typ.log2_min_lane_count()).unwrap();
            self.dynamic_lanes.contains(l2l) && self.is_base_type(typ.lane_type())
        } else {
            let l2l = u8::try_from(typ.log2_lane_count()).unwrap();
            self.lanes.contains(l2l) && self.is_base_type(typ.lane_type())
        }
    }

    /// Get an example member of this type set, used when generating error messages.
    pub fn example(self) -> Type {
        let t = if self.ints.max().unwrap_or(0) > 5 {
            types::I32
        } else if self.floats.max().unwrap_or(0) > 5 {
            types::F32
        } else {
            types::I8
        };
        t.by(1 << self.lanes.min().unwrap()).unwrap()
    }
}

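/// Operand constraints. This describes the value type constraints on a single `Value` operand in
/// terms of the controlling type variable.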
enum OperandConstraint {
    /// This operand has a concrete value type.
    Concrete(Type),

    /// This operand can vary freely within the given type set, identified by its index into
    /// `TYPE_SETS`.
    Free(u8),

    /// This operand is the same type as the controlling type variable.
    Same,

    /// This operand is `ctrl_type.lane_of()`.
    LaneOf,

    /// This operand is `ctrl_type.as_truthy()`.
    AsTruthy,

    /// This operand is `ctrl_type.half_width()`.
    HalfWidth,

    /// This operand is `ctrl_type.double_width()`.
    DoubleWidth,

    /// This operand is `ctrl_type.split_lanes()`.
    SplitLanes,

    /// This operand is `ctrl_type.merge_lanes()`.
    MergeLanes,

    /// This operand is `ctrl_type.dynamic_to_vector()`.
    DynamicToVector,

    /// This operand is a scalar type narrower than the controlling type.
    Narrower,

    /// This operand is a scalar type wider than the controlling type.
    Wider,
}

impl OperandConstraint {
    /// Resolve this operand constraint into a concrete value type, given the value of the
    /// controlling type variable.
    pub fn resolve(&self, ctrl_type: Type) -> ResolvedConstraint {
        use self::OperandConstraint::*;
        use self::ResolvedConstraint::Bound;
        match *self {
            Concrete(t) => Bound(t),
            Free(vts) => ResolvedConstraint::Free(TYPE_SETS[vts as usize]),
            Same => Bound(ctrl_type),
            LaneOf => Bound(ctrl_type.lane_of()),
            AsTruthy => Bound(ctrl_type.as_truthy()),
            HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
            DoubleWidth => Bound(
                ctrl_type
                    .double_width()
                    .expect("invalid type for double_width"),
            ),
            SplitLanes => {
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .split_lanes()
                            .expect("invalid type for split_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .split_lanes()
                            .expect("invalid type for split_lanes"),
                    )
                }
            }
            MergeLanes => {
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .merge_lanes()
                            .expect("invalid type for merge_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .merge_lanes()
                            .expect("invalid type for merge_lanes"),
                    )
                }
            }
            DynamicToVector => Bound(
                ctrl_type
                    .dynamic_to_vector()
                    .expect("invalid type for dynamic_to_vector"),
            ),
            Narrower => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Only scalar types are considered.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    // The upper bound of `from_range` is exclusive, so this covers [I8, ctrl_type).
                    tys.ints = BitSet8::from_range(3, ctrl_type_bits as u8);
                } else if ctrl_type.is_float() {
                    // The upper bound of `from_range` is exclusive, so this covers [F16, ctrl_type).
                    tys.floats = BitSet8::from_range(4, ctrl_type_bits as u8);
                } else {
                    panic!(
                        "The Narrower constraint only operates on floats or ints, got {ctrl_type:?}"
                    );
                }
                ResolvedConstraint::Free(tys)
            }
            Wider => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Only scalar types are considered.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    // If the control type is already the widest representable type, there are no
                    // wider types and the set stays empty.
                    if lower_bound < BitSet8::capacity() {
                        tys.ints = BitSet8::from_range(lower_bound, 8);
                    }
                } else if ctrl_type.is_float() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    if lower_bound < BitSet8::capacity() {
                        tys.floats = BitSet8::from_range(lower_bound, 8);
                    }
                } else {
                    panic!(
                        "The Wider constraint only operates on floats or ints, got {ctrl_type:?}"
                    );
                }

                ResolvedConstraint::Free(tys)
            }
        }
    }
}

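/// An operand constraint resolved against a concrete controlling type. See
/// `OperandConstraint::resolve`.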
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ResolvedConstraint {
    /// The operand is bound to a known type.
    Bound(Type),
    /// The operand type can vary freely within the given set.
    Free(ValueTypeSet),
}

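/// A mapping over all of the entities referenced by an instruction.
///
/// An implementation of this trait rewrites each kind of entity an `InstructionData` can refer
/// to; see the `instruction_data_map` test below for a small example implementation.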
pub trait InstructionMapper {
    /// Map a `Value`.
    fn map_value(&mut self, value: Value) -> Value;

    /// Map a `ValueList`.
    fn map_value_list(&mut self, value_list: ValueList) -> ValueList;

    /// Map a `GlobalValue`.
    fn map_global_value(&mut self, global_value: ir::GlobalValue) -> ir::GlobalValue;

    /// Map a `JumpTable`.
    fn map_jump_table(&mut self, jump_table: ir::JumpTable) -> ir::JumpTable;

    /// Map an `ExceptionTable`.
    fn map_exception_table(&mut self, exception_table: ExceptionTable) -> ExceptionTable;

    /// Map a `BlockCall`.
    fn map_block_call(&mut self, block_call: BlockCall) -> BlockCall;

    /// Map a `FuncRef`.
    fn map_func_ref(&mut self, func_ref: FuncRef) -> FuncRef;

    /// Map a `SigRef`.
    fn map_sig_ref(&mut self, sig_ref: SigRef) -> SigRef;

    /// Map a `StackSlot`.
    fn map_stack_slot(&mut self, stack_slot: StackSlot) -> StackSlot;

    /// Map a `DynamicStackSlot`.
    fn map_dynamic_stack_slot(
        &mut self,
        dynamic_stack_slot: ir::DynamicStackSlot,
    ) -> ir::DynamicStackSlot;

    /// Map a `Constant`.
    fn map_constant(&mut self, constant: ir::Constant) -> ir::Constant;

    /// Map an `Immediate`.
    fn map_immediate(&mut self, immediate: ir::Immediate) -> ir::Immediate;
}

impl<'a, T> InstructionMapper for &'a mut T
where
    T: InstructionMapper,
{
    fn map_value(&mut self, value: Value) -> Value {
        (**self).map_value(value)
    }

    fn map_value_list(&mut self, value_list: ValueList) -> ValueList {
        (**self).map_value_list(value_list)
    }

    fn map_global_value(&mut self, global_value: ir::GlobalValue) -> ir::GlobalValue {
        (**self).map_global_value(global_value)
    }

    fn map_jump_table(&mut self, jump_table: ir::JumpTable) -> ir::JumpTable {
        (**self).map_jump_table(jump_table)
    }

    fn map_exception_table(&mut self, exception_table: ExceptionTable) -> ExceptionTable {
        (**self).map_exception_table(exception_table)
    }

    fn map_block_call(&mut self, block_call: BlockCall) -> BlockCall {
        (**self).map_block_call(block_call)
    }

    fn map_func_ref(&mut self, func_ref: FuncRef) -> FuncRef {
        (**self).map_func_ref(func_ref)
    }

    fn map_sig_ref(&mut self, sig_ref: SigRef) -> SigRef {
        (**self).map_sig_ref(sig_ref)
    }

    fn map_stack_slot(&mut self, stack_slot: StackSlot) -> StackSlot {
        (**self).map_stack_slot(stack_slot)
    }

    fn map_dynamic_stack_slot(
        &mut self,
        dynamic_stack_slot: ir::DynamicStackSlot,
    ) -> ir::DynamicStackSlot {
        (**self).map_dynamic_stack_slot(dynamic_stack_slot)
    }

    fn map_constant(&mut self, constant: ir::Constant) -> ir::Constant {
        (**self).map_constant(constant)
    }

    fn map_immediate(&mut self, immediate: ir::Immediate) -> ir::Immediate {
        (**self).map_immediate(immediate)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use alloc::string::ToString;
    use ir::{DynamicStackSlot, GlobalValue, JumpTable};

    #[test]
    fn inst_data_is_copy() {
        fn is_copy<T: Copy>() {}
        is_copy::<InstructionData>();
    }

    #[test]
    fn inst_data_size() {
        assert_eq!(std::mem::size_of::<InstructionData>(), 16);
    }

    #[test]
    fn opcodes() {
        use core::mem;

        let x = Opcode::Iadd;
        let mut y = Opcode::Isub;

        assert!(x != y);
        y = Opcode::Iadd;
        assert_eq!(x, y);
        assert_eq!(x.format(), InstructionFormat::Binary);

        assert_eq!(format!("{:?}", Opcode::IaddImm), "IaddImm");
        assert_eq!(Opcode::IaddImm.to_string(), "iadd_imm");

        assert_eq!("iadd".parse::<Opcode>(), Ok(Opcode::Iadd));
        assert_eq!("iadd_imm".parse::<Opcode>(), Ok(Opcode::IaddImm));
        assert_eq!("iadd\0".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("\0".parse::<Opcode>(), Err("Unknown opcode"));

        assert_eq!(mem::size_of::<Opcode>(), mem::size_of::<Option<Opcode>>());
    }

    #[test]
    fn instruction_data() {
        use core::mem;
        assert_eq!(mem::size_of::<InstructionData>(), 16);
    }

    #[test]
    fn constraints() {
        let a = Opcode::Iadd.constraints();
        assert!(a.use_typevar_operand());
        assert!(!a.requires_typevar_operand());
        assert_eq!(a.num_fixed_results(), 1);
        assert_eq!(a.num_fixed_value_arguments(), 2);
        assert_eq!(a.result_type(0, types::I32), types::I32);
        assert_eq!(a.result_type(0, types::I8), types::I8);
        assert_eq!(
            a.value_argument_constraint(0, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );
        assert_eq!(
            a.value_argument_constraint(1, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );

        let b = Opcode::Bitcast.constraints();
        assert!(!b.use_typevar_operand());
        assert!(!b.requires_typevar_operand());
        assert_eq!(b.num_fixed_results(), 1);
        assert_eq!(b.num_fixed_value_arguments(), 1);
        assert_eq!(b.result_type(0, types::I32), types::I32);
        assert_eq!(b.result_type(0, types::I8), types::I8);
        match b.value_argument_constraint(0, types::I32) {
            ResolvedConstraint::Free(vts) => assert!(vts.contains(types::F32)),
            _ => panic!("Unexpected constraint from value_argument_constraint"),
        }

        let c = Opcode::Call.constraints();
        assert_eq!(c.num_fixed_results(), 0);
        assert_eq!(c.num_fixed_value_arguments(), 0);

        let i = Opcode::CallIndirect.constraints();
        assert_eq!(i.num_fixed_results(), 0);
        assert_eq!(i.num_fixed_value_arguments(), 1);

        let cmp = Opcode::Icmp.constraints();
        assert!(cmp.use_typevar_operand());
        assert!(cmp.requires_typevar_operand());
        assert_eq!(cmp.num_fixed_results(), 1);
        assert_eq!(cmp.num_fixed_value_arguments(), 2);
        assert_eq!(cmp.result_type(0, types::I64), types::I8);
    }

    #[test]
    fn value_set() {
        use crate::ir::types::*;

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(4, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 4),
        };
        assert!(!vts.contains(I8));
        assert!(vts.contains(I32));
        assert!(vts.contains(I64));
        assert!(vts.contains(I32X4));
        assert!(vts.contains(I32X4XN));
        assert!(!vts.contains(F16));
        assert!(!vts.contains(F32));
        assert!(!vts.contains(F128));
        assert_eq!(vts.example().to_string(), "i32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(1, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32x2");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(2, 8),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "i32x4");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 9),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert!(vts.contains(I32));
        assert!(vts.contains(I32X4));
    }

    #[test]
    fn instruction_data_map() {
        struct TestMapper;

        impl InstructionMapper for TestMapper {
            fn map_value(&mut self, value: Value) -> Value {
                Value::from_u32(value.as_u32() + 1)
            }

            fn map_value_list(&mut self, _value_list: ValueList) -> ValueList {
                ValueList::new()
            }

            fn map_global_value(&mut self, global_value: ir::GlobalValue) -> ir::GlobalValue {
                GlobalValue::from_u32(global_value.as_u32() + 1)
            }

            fn map_jump_table(&mut self, jump_table: ir::JumpTable) -> ir::JumpTable {
                JumpTable::from_u32(jump_table.as_u32() + 1)
            }

            fn map_exception_table(&mut self, exception_table: ExceptionTable) -> ExceptionTable {
                ExceptionTable::from_u32(exception_table.as_u32() + 1)
            }

            fn map_block_call(&mut self, _block_call: BlockCall) -> BlockCall {
                let block = Block::from_u32(42);
                let mut pool = ValueListPool::new();
                BlockCall::new(block, [], &mut pool)
            }

            fn map_func_ref(&mut self, func_ref: FuncRef) -> FuncRef {
                FuncRef::from_u32(func_ref.as_u32() + 1)
            }

            fn map_sig_ref(&mut self, sig_ref: SigRef) -> SigRef {
                SigRef::from_u32(sig_ref.as_u32() + 1)
            }

            fn map_stack_slot(&mut self, stack_slot: StackSlot) -> StackSlot {
                StackSlot::from_u32(stack_slot.as_u32() + 1)
            }

            fn map_dynamic_stack_slot(
                &mut self,
                dynamic_stack_slot: ir::DynamicStackSlot,
            ) -> ir::DynamicStackSlot {
                DynamicStackSlot::from_u32(dynamic_stack_slot.as_u32() + 1)
            }

            fn map_constant(&mut self, constant: ir::Constant) -> ir::Constant {
                ir::Constant::from_u32(constant.as_u32() + 1)
            }

            fn map_immediate(&mut self, immediate: ir::Immediate) -> ir::Immediate {
                ir::Immediate::from_u32(immediate.as_u32() + 1)
            }
        }

        let mut pool = ValueListPool::new();
        let map = |inst: InstructionData| inst.map(TestMapper);

        assert_eq!(
            map(InstructionData::Binary {
                opcode: Opcode::Iadd,
                args: [Value::from_u32(10), Value::from_u32(20)]
            }),
            InstructionData::Binary {
                opcode: Opcode::Iadd,
                args: [Value::from_u32(11), Value::from_u32(21)]
            }
        );

        let mut args = ValueList::new();
        args.push(Value::from_u32(42), &mut pool);
        let func_ref = FuncRef::from_u32(99);
        let inst = map(InstructionData::Call {
            opcode: Opcode::Call,
            args,
            func_ref,
        });
        let InstructionData::Call {
            opcode: Opcode::Call,
            args,
            func_ref,
        } = inst
        else {
            panic!()
        };
        assert!(args.is_empty());
        assert_eq!(func_ref, FuncRef::from_u32(100));

        assert_eq!(
            map(InstructionData::UnaryGlobalValue {
                opcode: Opcode::GlobalValue,
                global_value: GlobalValue::from_u32(4),
            }),
            InstructionData::UnaryGlobalValue {
                opcode: Opcode::GlobalValue,
                global_value: GlobalValue::from_u32(5),
            }
        );

        assert_eq!(
            map(InstructionData::BranchTable {
                opcode: Opcode::BrTable,
                arg: Value::from_u32(0),
                table: JumpTable::from_u32(1),
            }),
            InstructionData::BranchTable {
                opcode: Opcode::BrTable,
                arg: Value::from_u32(1),
                table: JumpTable::from_u32(2),
            }
        );

        assert_eq!(
            map(InstructionData::TryCall {
                opcode: Opcode::TryCall,
                args,
                func_ref: FuncRef::from_u32(0),
                exception: ExceptionTable::from_u32(1),
            }),
            InstructionData::TryCall {
                opcode: Opcode::TryCall,
                args,
                func_ref: FuncRef::from_u32(1),
                exception: ExceptionTable::from_u32(2),
            }
        );

        assert_eq!(
            map(InstructionData::Jump {
                opcode: Opcode::Jump,
                destination: BlockCall::new(Block::from_u32(99), [], &mut pool),
            }),
            map(InstructionData::Jump {
                opcode: Opcode::Jump,
                destination: BlockCall::new(Block::from_u32(42), [], &mut pool),
            })
        );

        assert_eq!(
            map(InstructionData::CallIndirect {
                opcode: Opcode::CallIndirect,
                args,
                sig_ref: SigRef::from_u32(11)
            }),
            InstructionData::CallIndirect {
                opcode: Opcode::CallIndirect,
                args: ValueList::new(),
                sig_ref: SigRef::from_u32(12)
            }
        );

        assert_eq!(
            map(InstructionData::StackLoad {
                opcode: Opcode::StackLoad,
                stack_slot: StackSlot::from_u32(0),
                offset: 0.into()
            }),
            InstructionData::StackLoad {
                opcode: Opcode::StackLoad,
                stack_slot: StackSlot::from_u32(1),
                offset: 0.into()
            },
        );

        assert_eq!(
            map(InstructionData::DynamicStackLoad {
                opcode: Opcode::DynamicStackLoad,
                dynamic_stack_slot: DynamicStackSlot::from_u32(0),
            }),
            InstructionData::DynamicStackLoad {
                opcode: Opcode::DynamicStackLoad,
                dynamic_stack_slot: DynamicStackSlot::from_u32(1),
            },
        );

        assert_eq!(
            map(InstructionData::UnaryConst {
                opcode: ir::Opcode::Vconst,
                constant_handle: ir::Constant::from_u32(2)
            }),
            InstructionData::UnaryConst {
                opcode: ir::Opcode::Vconst,
                constant_handle: ir::Constant::from_u32(3)
            },
        );

        assert_eq!(
            map(InstructionData::Shuffle {
                opcode: ir::Opcode::Shuffle,
                args: [Value::from_u32(0), Value::from_u32(1)],
                imm: ir::Immediate::from_u32(41),
            }),
            InstructionData::Shuffle {
                opcode: ir::Opcode::Shuffle,
                args: [Value::from_u32(1), Value::from_u32(2)],
                imm: ir::Immediate::from_u32(42),
            },
        );
    }
}