1use crate::constant_hash::Table;
10use alloc::vec::Vec;
11use core::fmt::{self, Display, Formatter};
12use core::ops::{Deref, DerefMut};
13use core::str::FromStr;
14
15#[cfg(feature = "enable-serde")]
16use serde_derive::{Deserialize, Serialize};
17
18use crate::bitset::ScalarBitSet;
19use crate::entity;
20use crate::ir::{
21 self,
22 condcodes::{FloatCC, IntCC},
23 trapcode::TrapCode,
24 types, Block, ExceptionTable, ExceptionTables, FuncRef, MemFlags, SigRef, StackSlot, Type,
25 Value,
26};
27
/// A list of SSA values, stored out-of-line in a `ValueListPool`.
pub type ValueList = entity::EntityList<Value>;

/// Memory pool that holds the contents of `ValueList`s.
pub type ValueListPool = entity::ListPool<Value>;

/// A branch target: a destination block together with its arguments.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct BlockCall {
    // Compact storage: element 0 is the target `Block` reinterpreted as a
    // `Value` (see `block_to_value`); the remaining elements are `BlockArg`s
    // encoded as `Value`s (see `BlockArg::encode_as_value`).
    values: entity::EntityList<Value>,
}
60
impl BlockCall {
    // The target block is stored in slot 0 of `values`, reusing the `Value`
    // index representation; these two helpers convert between the index types.
    fn value_to_block(val: Value) -> Block {
        Block::from_u32(val.as_u32())
    }

    fn block_to_value(block: Block) -> Value {
        Value::from_u32(block.as_u32())
    }

    /// Create a new `BlockCall` to `block` with the given arguments.
    pub fn new(
        block: Block,
        args: impl Iterator<Item = BlockArg>,
        pool: &mut ValueListPool,
    ) -> Self {
        let mut values = ValueList::default();
        // Invariant: slot 0 always holds the encoded target block; the
        // arguments follow, each encoded as a `Value`.
        values.push(Self::block_to_value(block), pool);
        values.extend(args.map(|arg| arg.encode_as_value()), pool);
        Self { values }
    }

    /// Return the block this `BlockCall` transfers control to.
    pub fn block(&self, pool: &ValueListPool) -> Block {
        // Slot 0 is guaranteed to exist by construction (`new`).
        let val = self.values.first(pool).unwrap();
        Self::value_to_block(val)
    }

    /// Replace the target block, leaving the arguments untouched.
    pub fn set_block(&mut self, block: Block, pool: &mut ValueListPool) {
        *self.values.get_mut(0, pool).unwrap() = Self::block_to_value(block);
    }

    /// Append an argument to the end of the argument list.
    pub fn append_argument(&mut self, arg: impl Into<BlockArg>, pool: &mut ValueListPool) {
        self.values.push(arg.into().encode_as_value(), pool);
    }

    /// Return the number of arguments (the stored target block is excluded).
    pub fn len(&self, pool: &ValueListPool) -> usize {
        self.values.len(pool) - 1
    }

    /// Return an iterator over the decoded arguments of this block call.
    pub fn args<'a>(
        &self,
        pool: &'a ValueListPool,
    ) -> impl ExactSizeIterator<Item = BlockArg> + DoubleEndedIterator<Item = BlockArg> + use<'a>
    {
        // Skip slot 0 (the target block) and decode the rest.
        self.values.as_slice(pool)[1..]
            .iter()
            .map(|value| BlockArg::decode_from_value(*value))
    }

    /// Rewrite every argument in place with the result of `f`.
    pub fn update_args<F: FnMut(BlockArg) -> BlockArg>(
        &mut self,
        pool: &mut ValueListPool,
        mut f: F,
    ) {
        for raw in self.values.as_mut_slice(pool)[1..].iter_mut() {
            let new = f(BlockArg::decode_from_value(*raw));
            *raw = new.encode_as_value();
        }
    }

    /// Remove the argument at index `ix` (0-based over the arguments only).
    pub fn remove(&mut self, ix: usize, pool: &mut ValueListPool) {
        // Offset by one to skip past the stored target block.
        self.values.remove(1 + ix, pool)
    }

    /// Remove all arguments, keeping only the target block.
    pub fn clear(&mut self, pool: &mut ValueListPool) {
        self.values.truncate(1, pool)
    }

    /// Append multiple arguments to the end of the argument list.
    pub fn extend<I, T>(&mut self, elements: I, pool: &mut ValueListPool)
    where
        I: IntoIterator<Item = T>,
        T: Into<BlockArg>,
    {
        self.values.extend(
            elements
                .into_iter()
                .map(|elem| elem.into().encode_as_value()),
            pool,
        )
    }

    /// Return a wrapper implementing `Display` for this block call.
    pub fn display<'a>(&self, pool: &'a ValueListPool) -> DisplayBlockCall<'a> {
        DisplayBlockCall { block: *self, pool }
    }

    /// Clone the underlying value list into fresh pool storage so the clone
    /// can be mutated independently of `self`.
    pub fn deep_clone(&self, pool: &mut ValueListPool) -> Self {
        Self {
            values: self.values.deep_clone(pool),
        }
    }
}
168
/// Wrapper bundling the context needed to print a `BlockCall`.
pub struct DisplayBlockCall<'a> {
    // The block call being displayed.
    block: BlockCall,
    // Pool backing the call's value list.
    pool: &'a ValueListPool,
}
174
175impl<'a> Display for DisplayBlockCall<'a> {
176 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
177 write!(f, "{}", self.block.block(&self.pool))?;
178 if self.block.len(self.pool) > 0 {
179 write!(f, "(")?;
180 for (ix, arg) in self.block.args(self.pool).enumerate() {
181 if ix > 0 {
182 write!(f, ", ")?;
183 }
184 write!(f, "{arg}")?;
185 }
186 write!(f, ")")?;
187 }
188 Ok(())
189 }
190}
191
/// An argument in a `BlockCall`'s argument list.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum BlockArg {
    /// An ordinary SSA value.
    Value(Value),

    /// Index of a `try_call` result, printed as `ret{N}`.
    /// NOTE(review): exact semantics inferred from the name and display form —
    /// confirm against the `try_call` handling elsewhere in the crate.
    TryCallRet(u32),

    /// Index of a `try_call` exception payload, printed as `exn{N}`.
    /// NOTE(review): same caveat as `TryCallRet`.
    TryCallExn(u32),
}
217
impl BlockArg {
    /// Encode this argument as a `Value` for compact storage inside a
    /// `BlockCall`'s value list: the top two bits carry a variant tag and the
    /// low 30 bits carry the payload (value bits or index).
    fn encode_as_value(&self) -> Value {
        let (tag, payload) = match *self {
            BlockArg::Value(v) => (0, v.as_bits()),
            BlockArg::TryCallRet(i) => (1, i),
            BlockArg::TryCallExn(i) => (2, i),
        };
        // The payload must fit in 30 bits so the tag cannot clobber it.
        assert!(payload < (1 << 30));
        let raw = (tag << 30) | payload;
        Value::from_bits(raw)
    }

    /// Inverse of `encode_as_value`: split the raw bits back into tag and
    /// payload and rebuild the corresponding variant.
    fn decode_from_value(v: Value) -> Self {
        let raw = v.as_u32();
        let tag = raw >> 30;
        let payload = raw & ((1 << 30) - 1);
        match tag {
            0 => BlockArg::Value(Value::from_bits(payload)),
            1 => BlockArg::TryCallRet(payload),
            2 => BlockArg::TryCallExn(payload),
            // Tag 3 is never produced by `encode_as_value`.
            _ => unreachable!(),
        }
    }

    /// Return the underlying SSA value if this argument is a plain value.
    pub fn as_value(&self) -> Option<Value> {
        match *self {
            BlockArg::Value(v) => Some(v),
            _ => None,
        }
    }

    /// Apply `f` to the wrapped SSA value, if any; other variants pass
    /// through unchanged.
    pub fn map_value<F: FnMut(Value) -> Value>(&self, mut f: F) -> Self {
        match *self {
            BlockArg::Value(v) => BlockArg::Value(f(v)),
            other => other,
        }
    }
}
263
264impl Display for BlockArg {
265 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
266 match self {
267 BlockArg::Value(v) => write!(f, "{v}"),
268 BlockArg::TryCallRet(i) => write!(f, "ret{i}"),
269 BlockArg::TryCallExn(i) => write!(f, "exn{i}"),
270 }
271 }
272}
273
274impl From<Value> for BlockArg {
275 fn from(value: Value) -> BlockArg {
276 BlockArg::Value(value)
277 }
278}
279
280include!(concat!(env!("OUT_DIR"), "/opcodes.rs"));
296
297impl Display for Opcode {
298 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
299 write!(f, "{}", opcode_name(*self))
300 }
301}
302
impl Opcode {
    /// Get the instruction format for this opcode.
    pub fn format(self) -> InstructionFormat {
        // Opcode discriminants start at 1 (giving `Option<Opcode>` a niche),
        // so subtract 1 to index the generated table.
        OPCODE_FORMAT[self as usize - 1]
    }

    /// Get the constraint descriptor for this opcode.
    pub fn constraints(self) -> OpcodeConstraints {
        // Same 1-based indexing as `format`.
        OPCODE_CONSTRAINTS[self as usize - 1]
    }

    /// Is this opcode a safepoint: a call that is not also a return
    /// (call-and-return opcodes are excluded)?
    #[inline]
    pub fn is_safepoint(self) -> bool {
        self.is_call() && !self.is_return()
    }
}
323
324impl FromStr for Opcode {
329 type Err = &'static str;
330
331 fn from_str(s: &str) -> Result<Self, &'static str> {
333 use crate::constant_hash::{probe, simple_hash};
334
335 match probe::<&str, [Option<Self>]>(&OPCODE_HASH_TABLE, s, simple_hash(s)) {
336 Err(_) => Err("Unknown opcode"),
337 Ok(i) => Ok(OPCODE_HASH_TABLE[i].unwrap()),
340 }
341 }
342}
343
// Adapt the generated opcode hash table to the generic probing interface.
impl<'a> Table<&'a str> for [Option<Opcode>] {
    fn len(&self) -> usize {
        // Calls the inherent `<[T]>::len`, which takes precedence over this
        // trait method — not a recursive call.
        self.len()
    }

    fn key(&self, idx: usize) -> Option<&'a str> {
        // Empty hash-table slots are `None` and therefore have no key.
        self[idx].map(opcode_name)
    }
}
353
/// A growable list of `Value` operands, used to assemble an instruction's
/// variable arguments before they are moved into a pooled `ValueList`
/// (see `into_value_list`).
#[derive(Clone, Debug)]
pub struct VariableArgs(Vec<Value>);
358
359impl VariableArgs {
360 pub fn new() -> Self {
362 Self(Vec::new())
363 }
364
365 pub fn push(&mut self, v: Value) {
367 self.0.push(v)
368 }
369
370 pub fn is_empty(&self) -> bool {
372 self.0.is_empty()
373 }
374
375 pub fn into_value_list(self, fixed: &[Value], pool: &mut ValueListPool) -> ValueList {
377 let mut vlist = ValueList::default();
378 vlist.extend(fixed.iter().cloned(), pool);
379 vlist.extend(self.0, pool);
380 vlist
381 }
382}
383
// Allow a `VariableArgs` to be used anywhere a `&[Value]` is expected.
impl Deref for VariableArgs {
    type Target = [Value];

    fn deref(&self) -> &[Value] {
        &self.0
    }
}
392
// Mutable counterpart of the `Deref` impl above.
impl DerefMut for VariableArgs {
    fn deref_mut(&mut self) -> &mut [Value] {
        &mut self.0
    }
}
398
399impl Display for VariableArgs {
400 fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
401 for (i, val) in self.0.iter().enumerate() {
402 if i == 0 {
403 write!(fmt, "{val}")?;
404 } else {
405 write!(fmt, ", {val}")?;
406 }
407 }
408 Ok(())
409 }
410}
411
impl Default for VariableArgs {
    // The default is simply an empty argument list.
    fn default() -> Self {
        Self::new()
    }
}
417
impl InstructionData {
    /// Get this instruction's branch destinations as a slice of `BlockCall`s.
    ///
    /// Table-based targets are stored in side tables, so the function-wide
    /// `JumpTables` / `ExceptionTables` must be supplied. Non-branch
    /// instructions yield an empty slice.
    pub fn branch_destination<'a>(
        &'a self,
        jump_tables: &'a ir::JumpTables,
        exception_tables: &'a ir::ExceptionTables,
    ) -> &'a [BlockCall] {
        match self {
            Self::Jump { destination, .. } => std::slice::from_ref(destination),
            Self::Brif { blocks, .. } => blocks.as_slice(),
            Self::BranchTable { table, .. } => jump_tables.get(*table).unwrap().all_branches(),
            Self::TryCall { exception, .. } | Self::TryCallIndirect { exception, .. } => {
                exception_tables.get(*exception).unwrap().all_branches()
            }
            _ => {
                // Anything else must not be a branch opcode.
                debug_assert!(!self.opcode().is_branch());
                &[]
            }
        }
    }

    /// Like `branch_destination`, but yields mutable `BlockCall`s.
    pub fn branch_destination_mut<'a>(
        &'a mut self,
        jump_tables: &'a mut ir::JumpTables,
        exception_tables: &'a mut ir::ExceptionTables,
    ) -> &'a mut [BlockCall] {
        match self {
            Self::Jump { destination, .. } => std::slice::from_mut(destination),
            Self::Brif { blocks, .. } => blocks.as_mut_slice(),
            Self::BranchTable { table, .. } => {
                jump_tables.get_mut(*table).unwrap().all_branches_mut()
            }
            Self::TryCall { exception, .. } | Self::TryCallIndirect { exception, .. } => {
                exception_tables
                    .get_mut(*exception)
                    .unwrap()
                    .all_branches_mut()
            }
            _ => {
                debug_assert!(!self.opcode().is_branch());
                &mut []
            }
        }
    }

    /// Rewrite every `Value` mentioned by this instruction — both its direct
    /// arguments and the arguments of its branch destinations — with `f`.
    pub fn map_values(
        &mut self,
        pool: &mut ValueListPool,
        jump_tables: &mut ir::JumpTables,
        exception_tables: &mut ir::ExceptionTables,
        mut f: impl FnMut(Value) -> Value,
    ) {
        for arg in self.arguments_mut(pool) {
            *arg = f(*arg);
        }

        for block in self.branch_destination_mut(jump_tables, exception_tables) {
            // Only the `BlockArg::Value` variants carry SSA values to rewrite.
            block.update_args(pool, |arg| arg.map_value(|val| f(val)));
        }
    }

    /// Return the trap code carried by trap instructions, if any.
    pub fn trap_code(&self) -> Option<TrapCode> {
        match *self {
            Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }

    /// Return the integer condition code, for integer comparisons.
    pub fn cond_code(&self) -> Option<IntCC> {
        match self {
            &InstructionData::IntCompare { cond, .. }
            | &InstructionData::IntCompareImm { cond, .. } => Some(cond),
            _ => None,
        }
    }

    /// Return the float condition code, for float comparisons.
    pub fn fp_cond_code(&self) -> Option<FloatCC> {
        match self {
            &InstructionData::FloatCompare { cond, .. } => Some(cond),
            _ => None,
        }
    }

    /// Mutable access to the trap code, if any (see `trap_code`).
    pub fn trap_code_mut(&mut self) -> Option<&mut TrapCode> {
        match self {
            Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }

    /// Return the read-modify-write operation, for `AtomicRmw` instructions.
    pub fn atomic_rmw_op(&self) -> Option<ir::AtomicRmwOp> {
        match self {
            &InstructionData::AtomicRmw { op, .. } => Some(op),
            _ => None,
        }
    }

    /// Return the address offset carried by load/store-style instructions.
    pub fn load_store_offset(&self) -> Option<i32> {
        match self {
            &InstructionData::Load { offset, .. }
            | &InstructionData::StackLoad { offset, .. }
            | &InstructionData::Store { offset, .. }
            | &InstructionData::StackStore { offset, .. } => Some(offset.into()),
            _ => None,
        }
    }

    /// Return the memory flags of memory-accessing instructions.
    pub fn memflags(&self) -> Option<MemFlags> {
        match self {
            &InstructionData::Load { flags, .. }
            | &InstructionData::LoadNoOffset { flags, .. }
            | &InstructionData::Store { flags, .. }
            | &InstructionData::StoreNoOffset { flags, .. }
            | &InstructionData::AtomicCas { flags, .. }
            | &InstructionData::AtomicRmw { flags, .. } => Some(flags),
            _ => None,
        }
    }

    /// Return the stack slot referenced by stack load/store instructions.
    pub fn stack_slot(&self) -> Option<StackSlot> {
        match self {
            &InstructionData::StackStore { stack_slot, .. }
            | &InstructionData::StackLoad { stack_slot, .. } => Some(stack_slot),
            _ => None,
        }
    }

    /// Classify this instruction's call properties (see `CallInfo`).
    ///
    /// For the indirect forms, the callee value is the first element of the
    /// argument list and is excluded from the returned argument slice.
    pub fn analyze_call<'a>(
        &'a self,
        pool: &'a ValueListPool,
        exception_tables: &ExceptionTables,
    ) -> CallInfo<'a> {
        match *self {
            Self::Call {
                func_ref, ref args, ..
            } => CallInfo::Direct(func_ref, args.as_slice(pool)),
            Self::CallIndirect {
                sig_ref, ref args, ..
            } => CallInfo::Indirect(sig_ref, &args.as_slice(pool)[1..]),
            Self::TryCall {
                func_ref,
                ref args,
                exception,
                ..
            } => {
                // The signature for a `try_call` lives in its exception table.
                let exdata = &exception_tables[exception];
                CallInfo::DirectWithSig(func_ref, exdata.signature(), args.as_slice(pool))
            }
            Self::TryCallIndirect {
                exception,
                ref args,
                ..
            } => {
                let exdata = &exception_tables[exception];
                CallInfo::Indirect(exdata.signature(), &args.as_slice(pool)[1..])
            }
            Self::Ternary {
                opcode: Opcode::StackSwitch,
                ..
            } => {
                // `stack_switch` is deliberately not treated as a call here.
                // NOTE(review): split out from the catch-all below — confirm
                // how `Opcode::is_call` classifies it.
                CallInfo::NotACall
            }
            _ => {
                debug_assert!(!self.opcode().is_call());
                CallInfo::NotACall
            }
        }
    }

    /// Mask selected instruction immediates to the bit width of the
    /// controlling type. No-op when the control type is invalid.
    #[inline]
    pub(crate) fn mask_immediates(&mut self, ctrl_typevar: Type) {
        if ctrl_typevar.is_invalid() {
            return;
        }

        let bit_width = ctrl_typevar.bits();

        match self {
            Self::UnaryImm { opcode: _, imm } => {
                *imm = imm.mask_to_width(bit_width);
            }
            Self::BinaryImm64 {
                opcode,
                arg: _,
                imm,
            } => {
                // Of the binary-immediate ops, only signed division and
                // remainder immediates are masked here.
                if *opcode == Opcode::SdivImm || *opcode == Opcode::SremImm {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            Self::IntCompareImm {
                opcode,
                arg: _,
                cond,
                imm,
            } => {
                debug_assert_eq!(*opcode, Opcode::IcmpImm);
                // Mask only for signed conditions: those whose unsigned
                // counterpart is a different condition code.
                if cond.unsigned() != *cond {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            _ => {}
        }
    }

    /// Return the exception table used by `try_call` / `try_call_indirect`.
    pub fn exception_table(&self) -> Option<ExceptionTable> {
        match self {
            Self::TryCall { exception, .. } | Self::TryCallIndirect { exception, .. } => {
                Some(*exception)
            }
            _ => None,
        }
    }
}
661
/// Classification of an instruction's call properties, as computed by
/// `InstructionData::analyze_call`.
pub enum CallInfo<'a> {
    /// The instruction is not a call.
    NotACall,

    /// Direct call to the given function, with its argument values.
    Direct(FuncRef, &'a [Value]),

    /// Indirect call with the given signature; the callee value itself is
    /// excluded from the argument slice.
    Indirect(SigRef, &'a [Value]),

    /// Direct call (`try_call`) carrying both the function reference and the
    /// signature taken from its exception table.
    DirectWithSig(FuncRef, SigRef, &'a [Value]),
}
679
/// Value-type constraints for a single opcode, packed into a compact,
/// table-friendly form.
#[derive(Clone, Copy)]
pub struct OpcodeConstraints {
    // Packed flags; layout derived from the accessors in the impl below:
    //   bits 0..=2: number of fixed results,
    //   bit  3:     use_typevar_operand,
    //   bit  4:     requires_typevar_operand,
    //   bits 5..=7: number of fixed value arguments.
    flags: u8,

    // Index into `TYPE_SETS` for the controlling type variable; an
    // out-of-range value means the opcode is not polymorphic.
    typeset_offset: u8,

    // Start index into `OPERAND_CONSTRAINTS`: result constraints first,
    // then the fixed value-argument constraints.
    constraint_offset: u16,
}
715
impl OpcodeConstraints {
    /// Can the controlling type variable be inferred from a designated value
    /// operand (flag bit 3)?
    pub fn use_typevar_operand(self) -> bool {
        (self.flags & 0x8) != 0
    }

    /// Must the controlling type variable come from the typevar operand
    /// rather than a result (flag bit 4)?
    pub fn requires_typevar_operand(self) -> bool {
        (self.flags & 0x10) != 0
    }

    /// Number of fixed result values produced (flag bits 0..=2).
    pub fn num_fixed_results(self) -> usize {
        (self.flags & 0x7) as usize
    }

    /// Number of fixed value arguments consumed (flag bits 5..=7).
    pub fn num_fixed_value_arguments(self) -> usize {
        ((self.flags >> 5) & 0x7) as usize
    }

    /// Decode `typeset_offset`: an index past the end of `TYPE_SETS` is the
    /// sentinel for "no controlling typeset" (non-polymorphic opcode).
    fn typeset_offset(self) -> Option<usize> {
        let offset = usize::from(self.typeset_offset);
        if offset < TYPE_SETS.len() {
            Some(offset)
        } else {
            None
        }
    }

    /// Start of this opcode's entries in `OPERAND_CONSTRAINTS`.
    fn constraint_offset(self) -> usize {
        self.constraint_offset as usize
    }

    /// Resolve the type of result `n`, given the controlling type.
    /// Result constraints must resolve to a concrete (bound) type.
    pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
        debug_assert!(n < self.num_fixed_results(), "Invalid result index");
        match OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type) {
            ResolvedConstraint::Bound(t) => t,
            ResolvedConstraint::Free(ts) => panic!("Result constraints can't be free: {ts:?}"),
        }
    }

    /// Resolve the constraint on value argument `n`, given the controlling
    /// type. Argument constraints follow the result constraints in the table.
    pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
        debug_assert!(
            n < self.num_fixed_value_arguments(),
            "Invalid value argument index"
        );
        let offset = self.constraint_offset() + self.num_fixed_results();
        OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
    }

    /// The typeset of the controlling type variable, if the opcode is
    /// polymorphic.
    pub fn ctrl_typeset(self) -> Option<ValueTypeSet> {
        self.typeset_offset().map(|offset| TYPE_SETS[offset])
    }

    /// Is this opcode polymorphic (does it have a controlling typeset)?
    pub fn is_polymorphic(self) -> bool {
        self.ctrl_typeset().is_some()
    }
}
802
// Shorthand for the bit-set widths used by `ValueTypeSet`.
type BitSet8 = ScalarBitSet<u8>;
type BitSet16 = ScalarBitSet<u16>;
805
/// A set of permissible value types for a polymorphic operand, encoded as
/// bit sets over log2-sized quantities.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct ValueTypeSet {
    /// Allowed lane counts, indexed by log2(lane count); bit 0 = scalars.
    pub lanes: BitSet16,
    /// Allowed integer widths, indexed by log2(bit width).
    pub ints: BitSet8,
    /// Allowed float widths, indexed by log2(bit width).
    pub floats: BitSet8,
    /// Allowed minimum lane counts for dynamic vectors, indexed by
    /// log2(min lane count).
    pub dynamic_lanes: BitSet16,
}
818
impl ValueTypeSet {
    /// Is `scalar` in the base (lane) type set, ignoring lane counts?
    fn is_base_type(self, scalar: Type) -> bool {
        // The width bit sets are indexed by log2 of the scalar's bit width.
        let l2b = u8::try_from(scalar.log2_lane_bits()).unwrap();
        if scalar.is_int() {
            self.ints.contains(l2b)
        } else if scalar.is_float() {
            self.floats.contains(l2b)
        } else {
            false
        }
    }

    /// Does `typ` belong to this type set?
    pub fn contains(self, typ: Type) -> bool {
        if typ.is_dynamic_vector() {
            // Dynamic vectors are checked against their minimum lane count.
            let l2l = u8::try_from(typ.log2_min_lane_count()).unwrap();
            self.dynamic_lanes.contains(l2l) && self.is_base_type(typ.lane_type())
        } else {
            let l2l = u8::try_from(typ.log2_lane_count()).unwrap();
            self.lanes.contains(l2l) && self.is_base_type(typ.lane_type())
        }
    }

    /// Pick a representative type from the set. Heuristic: prefer i32, then
    /// f32, falling back to i8, at the smallest allowed lane count.
    pub fn example(self) -> Type {
        // `> 5` means the width set reaches past 32-bit scalars
        // (log2(32) = 5).
        let t = if self.ints.max().unwrap_or(0) > 5 {
            types::I32
        } else if self.floats.max().unwrap_or(0) > 5 {
            types::F32
        } else {
            types::I8
        };
        t.by(1 << self.lanes.min().unwrap()).unwrap()
    }
}
859
/// Operand constraints, as stored in `OPERAND_CONSTRAINTS` and resolved
/// against a controlling type by `resolve`.
enum OperandConstraint {
    /// The operand has this exact concrete type.
    Concrete(Type),

    /// The operand may take any type in the `TYPE_SETS` entry at this index.
    Free(u8),

    /// Same type as the controlling type variable.
    Same,

    /// The lane (scalar) type of the controlling type.
    LaneOf,

    /// The result of `Type::as_truthy` on the controlling type.
    AsTruthy,

    /// Half the bit width of the controlling type.
    HalfWidth,

    /// Double the bit width of the controlling type.
    DoubleWidth,

    /// The controlling type with its lanes split (`Type::split_lanes`).
    SplitLanes,

    /// The controlling type with its lanes merged (`Type::merge_lanes`).
    MergeLanes,

    /// The fixed-vector equivalent of a dynamic-vector controlling type.
    DynamicToVector,

    /// Any scalar type strictly narrower than the controlling type.
    Narrower,

    /// Any scalar type strictly wider than the controlling type.
    Wider,
}
899
impl OperandConstraint {
    /// Resolve this constraint against the controlling type `ctrl_type`,
    /// yielding either a single bound type or a free type set.
    pub fn resolve(&self, ctrl_type: Type) -> ResolvedConstraint {
        use self::OperandConstraint::*;
        use self::ResolvedConstraint::Bound;
        match *self {
            Concrete(t) => Bound(t),
            Free(vts) => ResolvedConstraint::Free(TYPE_SETS[vts as usize]),
            Same => Bound(ctrl_type),
            LaneOf => Bound(ctrl_type.lane_of()),
            AsTruthy => Bound(ctrl_type.as_truthy()),
            HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
            DoubleWidth => Bound(
                ctrl_type
                    .double_width()
                    .expect("invalid type for double_width"),
            ),
            SplitLanes => {
                if ctrl_type.is_dynamic_vector() {
                    // Round-trip through the fixed-vector form, since
                    // `split_lanes` is defined on fixed vectors.
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .split_lanes()
                            .expect("invalid type for split_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .split_lanes()
                            .expect("invalid type for split_lanes"),
                    )
                }
            }
            MergeLanes => {
                if ctrl_type.is_dynamic_vector() {
                    // Same fixed-vector round trip as `SplitLanes` above.
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .merge_lanes()
                            .expect("invalid type for merge_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .merge_lanes()
                            .expect("invalid type for merge_lanes"),
                    )
                }
            }
            DynamicToVector => Bound(
                ctrl_type
                    .dynamic_to_vector()
                    .expect("invalid type for dynamic_to_vector"),
            ),
            Narrower => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Allow only scalars: just log2(lane count) == 0.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    // Integers from 8 bits (2^3) up to, but excluding, the
                    // controlling type's width.
                    tys.ints = BitSet8::from_range(3, ctrl_type_bits as u8);
                } else if ctrl_type.is_float() {
                    // Floats from 16 bits (2^4) up to, but excluding, the
                    // controlling type's width.
                    tys.floats = BitSet8::from_range(4, ctrl_type_bits as u8);
                } else {
                    panic!("The Narrower constraint only operates on floats or ints, got {ctrl_type:?}");
                }
                ResolvedConstraint::Free(tys)
            }
            Wider => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Allow only scalars, as for `Narrower`.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    // Guard against an empty/invalid range when the control
                    // type is already the widest representable width.
                    if lower_bound < BitSet8::capacity() {
                        tys.ints = BitSet8::from_range(lower_bound, 8);
                    }
                } else if ctrl_type.is_float() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    if lower_bound < BitSet8::capacity() {
                        tys.floats = BitSet8::from_range(lower_bound, 8);
                    }
                } else {
                    panic!(
                        "The Wider constraint only operates on floats or ints, got {ctrl_type:?}"
                    );
                }

                ResolvedConstraint::Free(tys)
            }
        }
    }
}
1018
/// The result of resolving an `OperandConstraint` against a controlling type.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ResolvedConstraint {
    /// The operand must have exactly this type.
    Bound(Type),
    /// The operand may have any type in this set.
    Free(ValueTypeSet),
}
1027
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::string::ToString;

    // `InstructionData` must remain `Copy` so instructions can be freely
    // duplicated.
    #[test]
    fn inst_data_is_copy() {
        fn is_copy<T: Copy>() {}
        is_copy::<InstructionData>();
    }

    // Guard against accidental growth of the instruction representation.
    #[test]
    fn inst_data_size() {
        assert_eq!(std::mem::size_of::<InstructionData>(), 16);
    }

    #[test]
    fn opcodes() {
        use core::mem;

        let x = Opcode::Iadd;
        let mut y = Opcode::Isub;

        assert!(x != y);
        y = Opcode::Iadd;
        assert_eq!(x, y);
        assert_eq!(x.format(), InstructionFormat::Binary);

        // Debug uses the CamelCase variant name; Display uses snake_case.
        assert_eq!(format!("{:?}", Opcode::IaddImm), "IaddImm");
        assert_eq!(Opcode::IaddImm.to_string(), "iadd_imm");

        // Parsing round-trips through the precomputed hash table; embedded
        // NULs and the empty string must be rejected.
        assert_eq!("iadd".parse::<Opcode>(), Ok(Opcode::Iadd));
        assert_eq!("iadd_imm".parse::<Opcode>(), Ok(Opcode::IaddImm));
        assert_eq!("iadd\0".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("\0".parse::<Opcode>(), Err("Unknown opcode"));

        // `Opcode` has a niche (discriminants start at 1), so wrapping it in
        // `Option` must not grow it.
        assert_eq!(mem::size_of::<Opcode>(), mem::size_of::<Option<Opcode>>());
    }

    #[test]
    fn instruction_data() {
        use core::mem;
        // Same 16-byte budget as `inst_data_size` above.
        assert_eq!(mem::size_of::<InstructionData>(), 16);
    }

    #[test]
    fn constraints() {
        let a = Opcode::Iadd.constraints();
        assert!(a.use_typevar_operand());
        assert!(!a.requires_typevar_operand());
        assert_eq!(a.num_fixed_results(), 1);
        assert_eq!(a.num_fixed_value_arguments(), 2);
        assert_eq!(a.result_type(0, types::I32), types::I32);
        assert_eq!(a.result_type(0, types::I8), types::I8);
        assert_eq!(
            a.value_argument_constraint(0, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );
        assert_eq!(
            a.value_argument_constraint(1, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );

        let b = Opcode::Bitcast.constraints();
        assert!(!b.use_typevar_operand());
        assert!(!b.requires_typevar_operand());
        assert_eq!(b.num_fixed_results(), 1);
        assert_eq!(b.num_fixed_value_arguments(), 1);
        assert_eq!(b.result_type(0, types::I32), types::I32);
        assert_eq!(b.result_type(0, types::I8), types::I8);
        // Bitcast's input is free: it can come from a different type class.
        match b.value_argument_constraint(0, types::I32) {
            ResolvedConstraint::Free(vts) => assert!(vts.contains(types::F32)),
            _ => panic!("Unexpected constraint from value_argument_constraint"),
        }

        let c = Opcode::Call.constraints();
        assert_eq!(c.num_fixed_results(), 0);
        assert_eq!(c.num_fixed_value_arguments(), 0);

        // The indirect call's single fixed argument is the callee pointer.
        let i = Opcode::CallIndirect.constraints();
        assert_eq!(i.num_fixed_results(), 0);
        assert_eq!(i.num_fixed_value_arguments(), 1);

        let cmp = Opcode::Icmp.constraints();
        assert!(cmp.use_typevar_operand());
        assert!(cmp.requires_typevar_operand());
        assert_eq!(cmp.num_fixed_results(), 1);
        assert_eq!(cmp.num_fixed_value_arguments(), 2);
        assert_eq!(cmp.result_type(0, types::I64), types::I8);
    }

    #[test]
    fn value_set() {
        use crate::ir::types::*;

        // Ranges are [lo, hi): ints 4..7 means 16- through 64-bit integers.
        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(4, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 4),
        };
        assert!(!vts.contains(I8));
        assert!(vts.contains(I32));
        assert!(vts.contains(I64));
        assert!(vts.contains(I32X4));
        assert!(vts.contains(I32X4XN));
        assert!(!vts.contains(F16));
        assert!(!vts.contains(F32));
        assert!(!vts.contains(F128));
        assert_eq!(vts.example().to_string(), "i32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32");

        // Minimum lane count of 2 (log2 = 1) yields a vector example.
        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(1, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32x2");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(2, 8),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "i32x4");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 9),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert!(vts.contains(I32));
        assert!(vts.contains(I32X4));
    }
}
1185}