use crate::config::Config;
use crate::cranelift_arbitrary::CraneliftArbitrary;
use crate::target_isa_extras::TargetIsaExtras;
use anyhow::Result;
use arbitrary::{Arbitrary, Unstructured};
use cranelift::codegen::data_value::DataValue;
use cranelift::codegen::ir::immediates::Offset32;
use cranelift::codegen::ir::instructions::{InstructionFormat, ResolvedConstraint};
use cranelift::codegen::ir::stackslot::StackSize;

use cranelift::codegen::ir::{
    types::*, AliasRegion, AtomicRmwOp, Block, ConstantData, Endianness, ExternalName, FuncRef,
    Function, LibCall, Opcode, SigRef, Signature, StackSlot, UserExternalName, UserFuncName, Value,
};
use cranelift::codegen::isa::CallConv;
use cranelift::frontend::{FunctionBuilder, FunctionBuilderContext, Switch, Variable};
use cranelift::prelude::isa::OwnedTargetIsa;
use cranelift::prelude::{
    EntityRef, ExtFuncData, FloatCC, InstBuilder, IntCC, JumpTableData, MemFlags, StackSlotData,
    StackSlotKind,
};
use std::collections::HashMap;
use std::ops::RangeInclusive;
use std::str::FromStr;
use std::sync::LazyLock;
use target_lexicon::{Architecture, Triple};

type BlockSignature = Vec<Type>;
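/// Inserts an instruction with no immediates: each argument is read from a
/// randomly chosen variable of the right type, and each result is written back
/// into a randomly chosen variable of its result type.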
fn insert_opcode(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    opcode: Opcode,
    args: &[Type],
    rets: &[Type],
) -> Result<()> {
    let mut vals = Vec::with_capacity(args.len());
    for &arg in args {
        let var = fgen.get_variable_of_type(arg)?;
        let val = builder.use_var(var);
        vals.push(val);
    }

    // Some opcodes derive their controlling type variable from the first
    // operand rather than from the first result.
    let ctrl_type = if opcode.constraints().requires_typevar_operand() {
        args.first()
    } else {
        rets.first()
    }
    .copied()
    .unwrap_or(INVALID);

    // Build the instruction through the raw per-format constructors, so one
    // helper covers every fixed-arity format.
    let (inst, dfg) = match opcode.format() {
        InstructionFormat::NullAry => builder.ins().NullAry(opcode, ctrl_type),
        InstructionFormat::Unary => builder.ins().Unary(opcode, ctrl_type, vals[0]),
        InstructionFormat::Binary => builder.ins().Binary(opcode, ctrl_type, vals[0], vals[1]),
        InstructionFormat::Ternary => builder
            .ins()
            .Ternary(opcode, ctrl_type, vals[0], vals[1], vals[2]),
        _ => unimplemented!(),
    };
    let results = dfg.inst_results(inst).to_vec();

    for (val, &ty) in results.into_iter().zip(rets) {
        let var = fgen.get_variable_of_type(ty)?;
        builder.def_var(var, val);
    }
    Ok(())
}
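/// Emits a direct or indirect (possibly tail) call to `func_ref`, generating
/// argument values matching `sig` and binding the returned values to variables.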
fn insert_call_to_function(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    call_opcode: Opcode,
    sig: &Signature,
    sig_ref: SigRef,
    func_ref: FuncRef,
) -> Result<()> {
    let actuals = fgen.generate_values_for_signature(
        builder,
        sig.params.iter().map(|abi_param| abi_param.value_type),
    )?;

    let addr_ty = fgen.isa.pointer_type();
    let call = match call_opcode {
        Opcode::Call => builder.ins().call(func_ref, &actuals),
        Opcode::ReturnCall => builder.ins().return_call(func_ref, &actuals),
        Opcode::CallIndirect => {
            let addr = builder.ins().func_addr(addr_ty, func_ref);
            builder.ins().call_indirect(sig_ref, addr, &actuals)
        }
        Opcode::ReturnCallIndirect => {
            let addr = builder.ins().func_addr(addr_ty, func_ref);
            builder.ins().return_call_indirect(sig_ref, addr, &actuals)
        }
        _ => unreachable!(),
    };

    // Store the returned values into variables of matching types.
    let ret_values = builder.inst_results(call).to_vec();
    let ret_types = sig.returns.iter().map(|p| p.value_type);
    for (ty, val) in ret_types.zip(ret_values) {
        let var = fgen.get_variable_of_type(ty)?;
        builder.def_var(var, val);
    }

    Ok(())
}
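/// Inserts a non-tail call to a randomly chosen, previously imported function.
/// Tail calls are handled separately, as block terminators.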
fn insert_call(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    opcode: Opcode,
    _args: &[Type],
    _rets: &[Type],
) -> Result<()> {
    assert!(matches!(opcode, Opcode::Call | Opcode::CallIndirect));
    let (sig, sig_ref, func_ref) = fgen.u.choose(&fgen.resources.func_refs)?.clone();

    insert_call_to_function(fgen, builder, opcode, &sig, sig_ref, func_ref)
}
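/// Loads a value of the requested type from a random in-bounds offset of a
/// sufficiently large stack slot.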
fn insert_stack_load(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    _opcode: Opcode,
    _args: &[Type],
    rets: &[Type],
) -> Result<()> {
    let typevar = rets[0];
    let type_size = typevar.bytes();
    let (slot, slot_size, _align, category) = fgen.stack_slot_with_size(type_size)?;

    // `stack_load` cannot carry `MemFlags`, so it has no way to express an
    // alias-analysis region; only use slots without one.
    if category != AACategory::Other {
        return Err(arbitrary::Error::IncorrectFormat.into());
    }

    let offset = fgen.u.int_in_range(0..=(slot_size - type_size))? as i32;

    let val = builder.ins().stack_load(typevar, slot, offset);
    let var = fgen.get_variable_of_type(typevar)?;
    builder.def_var(var, val);

    Ok(())
}
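/// Stores a randomly chosen variable of the argument type at a random
/// in-bounds offset of a sufficiently large stack slot.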
fn insert_stack_store(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    _opcode: Opcode,
    args: &[Type],
    _rets: &[Type],
) -> Result<()> {
    let typevar = args[0];
    let type_size = typevar.bytes();

    let (slot, slot_size, _align, category) = fgen.stack_slot_with_size(type_size)?;

    // As with `stack_load`, there is no way to attach an alias-analysis
    // region, so only use slots without one.
    if category != AACategory::Other {
        return Err(arbitrary::Error::IncorrectFormat.into());
    }

    let offset = fgen.u.int_in_range(0..=(slot_size - type_size))? as i32;

    let arg0 = fgen.get_variable_of_type(typevar)?;
    let arg0 = builder.use_var(arg0);

    builder.ins().stack_store(arg0, slot, offset);
    Ok(())
}
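/// Inserts an `icmp` or `fcmp` with a random condition code, skipping
/// condition codes the current backend has not implemented.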
fn insert_cmp(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    opcode: Opcode,
    args: &[Type],
    rets: &[Type],
) -> Result<()> {
    let lhs = fgen.get_variable_of_type(args[0])?;
    let lhs = builder.use_var(lhs);

    let rhs = fgen.get_variable_of_type(args[1])?;
    let rhs = builder.use_var(rhs);

    let res = if opcode == Opcode::Fcmp {
        let cc = *fgen.u.choose(FloatCC::all())?;

        // Some float condition codes are not implemented by every backend, so
        // filter them out here rather than hitting a codegen panic.
        let unimplemented_cc = match (fgen.isa.triple().architecture, cc) {
            (Architecture::Aarch64(_), FloatCC::OrderedNotEqual) => true,
            (Architecture::Aarch64(_), FloatCC::UnorderedOrEqual) => true,
            (Architecture::Aarch64(_), FloatCC::UnorderedOrLessThan) => true,
            (Architecture::Aarch64(_), FloatCC::UnorderedOrLessThanOrEqual) => true,
            (Architecture::Aarch64(_), FloatCC::UnorderedOrGreaterThan) => true,
            (Architecture::Aarch64(_), FloatCC::UnorderedOrGreaterThanOrEqual) => true,

            (Architecture::X86_64, FloatCC::UnorderedOrEqual | FloatCC::OrderedNotEqual) => {
                // On x86_64 these two are only a problem for vector compares.
                args[0].is_vector()
            }
            _ => false,
        };
        if unimplemented_cc {
            return Err(arbitrary::Error::IncorrectFormat.into());
        }

        builder.ins().fcmp(cc, lhs, rhs)
    } else {
        let cc = *fgen.u.choose(IntCC::all())?;
        builder.ins().icmp(cc, lhs, rhs)
    };

    let var = fgen.get_variable_of_type(rets[0])?;
    builder.def_var(var, res);

    Ok(())
}
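/// Materializes a random constant of the result type and binds it to a
/// variable.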
fn insert_const(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    _opcode: Opcode,
    _args: &[Type],
    rets: &[Type],
) -> Result<()> {
    let typevar = rets[0];
    let var = fgen.get_variable_of_type(typevar)?;
    let val = fgen.generate_const(builder, typevar)?;
    builder.def_var(var, val);
    Ok(())
}
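/// Bitcasts between two types of equal bit-width; lane-crossing casts get
/// explicit little-endian `MemFlags`, as such casts require.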
fn insert_bitcast(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    args: &[Type],
    rets: &[Type],
) -> Result<()> {
    let from_var = fgen.get_variable_of_type(args[0])?;
    let from_val = builder.use_var(from_var);

    let to_var = fgen.get_variable_of_type(rets[0])?;

    let mut memflags = MemFlags::new();

    // When the lane counts differ, the cast is "lane-crossing" and must carry
    // an explicit endianness on its `MemFlags`.
    if args[0].lane_count() != rets[0].lane_count() {
        memflags.set_endianness(Endianness::Little);
    }

    let res = builder.ins().bitcast(rets[0], memflags, from_val);
    builder.def_var(to_var, res);
    Ok(())
}
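/// Inserts a plain or atomic load/store through a generated stack address,
/// with `MemFlags` and an offset appropriate to the instruction format.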
fn insert_load_store(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    opcode: Opcode,
    args: &[Type],
    rets: &[Type],
) -> Result<()> {
    if opcode == Opcode::Bitcast {
        return insert_bitcast(fgen, builder, args, rets);
    }

    let ctrl_type = *rets.first().or(args.first()).unwrap();
    let type_size = ctrl_type.bytes();

    let is_atomic = [Opcode::AtomicLoad, Opcode::AtomicStore].contains(&opcode);
    let (address, flags, offset) =
        fgen.generate_address_and_memflags(builder, type_size, is_atomic)?;

    // This variable receives the loaded value, or supplies the stored value.
    let var = fgen.get_variable_of_type(ctrl_type)?;

    match opcode.format() {
        InstructionFormat::LoadNoOffset => {
            let (inst, dfg) = builder
                .ins()
                .LoadNoOffset(opcode, ctrl_type, flags, address);

            let new_val = dfg.first_result(inst);
            builder.def_var(var, new_val);
        }
        InstructionFormat::StoreNoOffset => {
            let val = builder.use_var(var);

            builder
                .ins()
                .StoreNoOffset(opcode, ctrl_type, flags, val, address);
        }
        InstructionFormat::Store => {
            let val = builder.use_var(var);

            builder
                .ins()
                .Store(opcode, ctrl_type, flags, offset, val, address);
        }
        InstructionFormat::Load => {
            let (inst, dfg) = builder
                .ins()
                .Load(opcode, ctrl_type, flags, offset, address);

            let new_val = dfg.first_result(inst);
            builder.def_var(var, new_val);
        }
        _ => unimplemented!(),
    }

    Ok(())
}
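/// Inserts an `atomic_rmw` with a random `AtomicRmwOp` on an aligned,
/// in-bounds address.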
fn insert_atomic_rmw(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    _: Opcode,
    _: &[Type],
    rets: &[Type],
) -> Result<()> {
    let ctrl_type = *rets.first().unwrap();
    let type_size = ctrl_type.bytes();

    let rmw_op = *fgen.u.choose(AtomicRmwOp::all())?;

    let (address, flags, offset) = fgen.generate_address_and_memflags(builder, type_size, true)?;

    // `atomic_rmw` takes no explicit offset, so fold it into the address.
    let address = builder.ins().iadd_imm(address, i64::from(offset));

    let source_var = fgen.get_variable_of_type(ctrl_type)?;
    let target_var = fgen.get_variable_of_type(ctrl_type)?;

    let source_val = builder.use_var(source_var);
    let new_val = builder
        .ins()
        .atomic_rmw(ctrl_type, flags, rmw_op, address, source_val);

    builder.def_var(target_var, new_val);
    Ok(())
}
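/// Inserts an `atomic_cas`, drawing the expected and replacement values from
/// random variables and storing the loaded result in another.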
fn insert_atomic_cas(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    _: Opcode,
    _: &[Type],
    rets: &[Type],
) -> Result<()> {
    let ctrl_type = *rets.first().unwrap();
    let type_size = ctrl_type.bytes();

    let (address, flags, offset) = fgen.generate_address_and_memflags(builder, type_size, true)?;

    // `atomic_cas` takes no explicit offset either, so fold it in as well.
    let address = builder.ins().iadd_imm(address, i64::from(offset));

    let expected_var = fgen.get_variable_of_type(ctrl_type)?;
    let store_var = fgen.get_variable_of_type(ctrl_type)?;
    let loaded_var = fgen.get_variable_of_type(ctrl_type)?;

    let expected_val = builder.use_var(expected_var);
    let store_val = builder.use_var(store_var);
    let new_val = builder
        .ins()
        .atomic_cas(flags, address, expected_val, store_val);

    builder.def_var(loaded_var, new_val);
    Ok(())
}
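/// Inserts a `shuffle` of two vectors with a random 16-lane mask; each mask
/// lane in 0..32 selects a byte from the concatenation of both inputs.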
fn insert_shuffle(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    opcode: Opcode,
    _: &[Type],
    rets: &[Type],
) -> Result<()> {
    let ctrl_type = *rets.first().unwrap();

    let lhs = builder.use_var(fgen.get_variable_of_type(ctrl_type)?);
    let rhs = builder.use_var(fgen.get_variable_of_type(ctrl_type)?);

    let mask = {
        let mut lanes = [0u8; 16];
        for lane in lanes.iter_mut() {
            *lane = fgen.u.int_in_range(0..=31)?;
        }
        let lanes = ConstantData::from(lanes.as_ref());
        builder.func.dfg.immediates.push(lanes)
    };

    // `shuffle` is currently the only opcode with this format; assert that so
    // we notice if another one ever shows up.
    assert_eq!(opcode, Opcode::Shuffle);
    let res = builder.ins().shuffle(lhs, rhs, mask);

    let target_var = fgen.get_variable_of_type(ctrl_type)?;
    builder.def_var(target_var, res);

    Ok(())
}
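/// Inserts an `insertlane` or `extractlane` on a random lane of a vector
/// variable.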
fn insert_ins_ext_lane(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    opcode: Opcode,
    args: &[Type],
    rets: &[Type],
) -> Result<()> {
    let vector_type = *args.first().unwrap();
    let ret_type = *rets.first().unwrap();

    let lhs = builder.use_var(fgen.get_variable_of_type(vector_type)?);
    let max_lane = (vector_type.lane_count() as u8) - 1;
    let lane = fgen.u.int_in_range(0..=max_lane)?;

    let res = match opcode {
        Opcode::Insertlane => {
            let rhs = builder.use_var(fgen.get_variable_of_type(args[1])?);
            builder.ins().insertlane(lhs, rhs, lane)
        }
        Opcode::Extractlane => builder.ins().extractlane(lhs, lane),
        _ => todo!(),
    };

    let target_var = fgen.get_variable_of_type(ret_type)?;
    builder.def_var(target_var, res);

    Ok(())
}
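/// The signature shared by all of the insertion helpers above.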
type OpcodeInserter = fn(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    Opcode,
    &[Type],
    &[Type],
) -> Result<()>;
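/// Expands to a `match` on `($op, $args, $rets)` that `return`s `false` from
/// the enclosing function on the first matching case and evaluates to `true`
/// otherwise. The implicit trailing `..` lets a case omit `args`/`rets`.
///
/// For example, `exceptions!(op, args, rets, (Opcode::Iabs, &[I128]))` rejects
/// `iabs.i128` whatever its return types, and accepts everything else.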
macro_rules! exceptions {
    ($op:expr, $args:expr, $rets:expr, $(($($cases:pat),*)),* $(,)?) => {
        match ($op, $args, $rets) {
            $( ($($cases,)* ..) => return false, )*
            _ => true,
        }
    }
}
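/// Returns `true` if the backend for `triple` is expected to compile `op` with
/// these argument/return types: universally invalid combinations are rejected
/// first, then per-architecture exception lists cover unimplemented cases.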
fn valid_for_target(triple: &Triple, op: Opcode, args: &[Type], rets: &[Type]) -> bool {
    // Reject combinations that are invalid on every target before consulting
    // the per-architecture exception lists below.
    match op {
        Opcode::FcvtToUintSat | Opcode::FcvtToSintSat => {
            assert_eq!(args.len(), 1);
            assert_eq!(rets.len(), 1);

            let arg = args[0];
            let ret = rets[0];

            // Vector arguments must produce vector results, and scalar
            // arguments must produce scalar results.
            if arg.is_vector() != ret.is_vector() {
                return false;
            }

            if arg.is_vector() && ret.is_vector() {
                // Vector conversions must keep the lane count, and the lanes
                // must have the same bit-width.
                if arg.lane_count() != ret.lane_count() {
                    return false;
                }

                if arg.lane_of().bits() != ret.lane_of().bits() {
                    return false;
                }
            }
        }

        Opcode::Bitcast => {
            assert_eq!(args.len(), 1);
            assert_eq!(rets.len(), 1);

            let arg = args[0];
            let ret = rets[0];

            // A bitcast's input and output must have the same total bit-width.
            if arg.bits() != ret.bits() {
                return false;
            }
        }

        // The fuzzer does not set up the runtime support that `stack_switch`
        // would need, so never generate it.
        Opcode::StackSwitch => return false,

        _ => {}
    }

    match triple.architecture {
        Architecture::X86_64 => {
            exceptions!(
                op,
                args,
                rets,
                (Opcode::UmulOverflow | Opcode::SmulOverflow, &[I128, I128]),
                (Opcode::Imul, &[I8X16, I8X16]),
                (Opcode::Udiv | Opcode::Sdiv, &[I128, I128]),
                (Opcode::Urem | Opcode::Srem, &[I128, I128]),
                (
                    Opcode::Smin | Opcode::Umin | Opcode::Smax | Opcode::Umax,
                    &[I128, I128]
                ),
                (Opcode::Cls, &[I8], &[I8]),
                (Opcode::Cls, &[I16], &[I16]),
                (Opcode::Cls, &[I32], &[I32]),
                (Opcode::Cls, &[I64], &[I64]),
                (Opcode::Cls, &[I128], &[I128]),
                (Opcode::Bitselect, &[_, _, _], &[F32 | F64]),
                (
                    Opcode::FcvtToUint
                        | Opcode::FcvtToUintSat
                        | Opcode::FcvtToSint
                        | Opcode::FcvtToSintSat,
                    &[F32 | F64],
                    &[I8 | I16 | I128]
                ),
                (Opcode::FcvtToUint | Opcode::FcvtToSint, &[F32X4], &[I32X4]),
                (
                    Opcode::FcvtToUint
                        | Opcode::FcvtToUintSat
                        | Opcode::FcvtToSint
                        | Opcode::FcvtToSintSat,
                    &[F64X2],
                    &[I64X2]
                ),
                (Opcode::FcvtFromUint, &[I128], &[F32 | F64]),
                (Opcode::FcvtFromUint, &[I64X2], &[F64X2]),
                (Opcode::FcvtFromSint, &[I128], &[F32 | F64]),
                (Opcode::FcvtFromSint, &[I64X2], &[F64X2]),
                (
                    Opcode::Umulhi | Opcode::Smulhi,
                    &([I8X16, I8X16] | [I16X8, I16X8] | [I32X4, I32X4] | [I64X2, I64X2])
                ),
                (
                    Opcode::UaddSat | Opcode::SaddSat | Opcode::UsubSat | Opcode::SsubSat,
                    &([I32X4, I32X4] | [I64X2, I64X2])
                ),
                (Opcode::Fcopysign, &([F32X4, F32X4] | [F64X2, F64X2])),
                (Opcode::Popcnt, &([I8X16] | [I16X8] | [I32X4] | [I64X2])),
                (
                    Opcode::Umax | Opcode::Smax | Opcode::Umin | Opcode::Smin,
                    &[I64X2, I64X2]
                ),
                (Opcode::Bitcast, &[I128], &[_]),
                (Opcode::Bitcast, &[_], &[I128]),
                (Opcode::Uunarrow),
                (Opcode::Snarrow | Opcode::Unarrow, &[I64X2, I64X2]),
                (Opcode::SqmulRoundSat, &[I32X4, I32X4]),
                (Opcode::Icmp, &[I64X2, I64X2]),
                (Opcode::IaddPairwise),
                (Opcode::Select, &[_, I128, I128]),
                (
                    Opcode::StackStore,
                    &[I8X16 | I16X8 | I32X4 | I64X2 | F32X4 | F64X2]
                ),
                (
                    Opcode::StackLoad,
                    &[],
                    &[I8X16 | I16X8 | I32X4 | I64X2 | F32X4 | F64X2]
                ),
                (
                    Opcode::Sshr | Opcode::Ushr | Opcode::Ishl,
                    &[I8X16 | I16X8 | I32X4 | I64X2, I128]
                ),
                (
                    Opcode::Rotr | Opcode::Rotl,
                    &[I8X16 | I16X8 | I32X4 | I64X2, _]
                ),
            )
        }

        Architecture::Aarch64(_) => {
            exceptions!(
                op,
                args,
                rets,
                (Opcode::UmulOverflow | Opcode::SmulOverflow, &[I128, I128]),
                (Opcode::Udiv | Opcode::Sdiv, &[I128, I128]),
                (Opcode::Urem | Opcode::Srem, &[I128, I128]),
                (
                    Opcode::Smin | Opcode::Umin | Opcode::Smax | Opcode::Umax,
                    &[I128, I128]
                ),
                (Opcode::Bnot, &[F32 | F64]),
                (
                    Opcode::Band
                        | Opcode::Bor
                        | Opcode::Bxor
                        | Opcode::BandNot
                        | Opcode::BorNot
                        | Opcode::BxorNot,
                    &([F32, F32] | [F64, F64])
                ),
                (Opcode::Bitselect, &[I128, I128, I128]),
                (
                    Opcode::FcvtToUint
                        | Opcode::FcvtToUintSat
                        | Opcode::FcvtToSint
                        | Opcode::FcvtToSintSat,
                    &[F32 | F64],
                    &[I128]
                ),
                (
                    Opcode::FcvtFromUint | Opcode::FcvtFromSint,
                    &[I128],
                    &[F32 | F64]
                ),
                (
                    Opcode::Umulhi | Opcode::Smulhi,
                    &([I8X16, I8X16] | [I16X8, I16X8] | [I32X4, I32X4] | [I64X2, I64X2])
                ),
                (Opcode::Popcnt, &[I16X8 | I32X4 | I64X2]),
                (Opcode::Select, &[I8, I128, I128]),
                (Opcode::Bitcast, &[I128], &[_]),
                (Opcode::Bitcast, &[_], &[I128]),
                (
                    Opcode::Sshr | Opcode::Ushr | Opcode::Ishl,
                    &[I8X16 | I16X8 | I32X4 | I64X2, I128]
                ),
                (
                    Opcode::Rotr | Opcode::Rotl,
                    &[I8X16 | I16X8 | I32X4 | I64X2, _]
                ),
                (Opcode::Bitselect, &[_, _, _], &[F32 | F64]),
                (Opcode::VhighBits, &[F32X4 | F64X2]),
            )
        }

        Architecture::S390x => {
            exceptions!(
                op,
                args,
                rets,
                (Opcode::UaddOverflow | Opcode::SaddOverflow),
                (Opcode::UsubOverflow | Opcode::SsubOverflow),
                (Opcode::UmulOverflow | Opcode::SmulOverflow),
                (
                    Opcode::Udiv | Opcode::Sdiv | Opcode::Urem | Opcode::Srem,
                    &[I128, I128]
                ),
                (Opcode::Bnot, &[F32 | F64]),
                (
                    Opcode::Band
                        | Opcode::Bor
                        | Opcode::Bxor
                        | Opcode::BandNot
                        | Opcode::BorNot
                        | Opcode::BxorNot,
                    &([F32, F32] | [F64, F64])
                ),
                (
                    Opcode::FcvtToUint
                        | Opcode::FcvtToUintSat
                        | Opcode::FcvtToSint
                        | Opcode::FcvtToSintSat,
                    &[F32 | F64],
                    &[I128]
                ),
                (
                    Opcode::FcvtFromUint | Opcode::FcvtFromSint,
                    &[I128],
                    &[F32 | F64]
                ),
                (Opcode::SsubSat | Opcode::SaddSat, &[I64X2, I64X2]),
                (Opcode::Bitcast, &[I128], &[_]),
                (Opcode::Bitcast, &[_], &[I128]),
                (Opcode::Bitselect, &[_, _, _], &[F32 | F64]),
            )
        }

        Architecture::Riscv64(_) => {
            exceptions!(
                op,
                args,
                rets,
                (Opcode::UaddOverflow | Opcode::SaddOverflow),
                (Opcode::UsubOverflow | Opcode::SsubOverflow),
                (Opcode::UmulOverflow | Opcode::SmulOverflow),
                (
                    Opcode::Udiv | Opcode::Sdiv | Opcode::Urem | Opcode::Srem,
                    &[I128, I128]
                ),
                (Opcode::Iabs, &[I128]),
                (Opcode::Bitselect, &[I128, I128, I128]),
                (
                    Opcode::FcvtToUint | Opcode::FcvtToSint,
                    &[F32 | F64],
                    &[I128]
                ),
                (
                    Opcode::FcvtToUintSat | Opcode::FcvtToSintSat,
                    &[F32 | F64],
                    &[I128]
                ),
                (
                    Opcode::FcvtFromUint | Opcode::FcvtFromSint,
                    &[I128],
                    &[F32 | F64]
                ),
                (
                    Opcode::SelectSpectreGuard,
                    &[_, _, _],
                    &[F32 | F64 | I8X16 | I16X8 | I32X4 | I64X2 | F64X2 | F32X4]
                ),
                (Opcode::Bitselect, &[_, _, _], &[F32 | F64]),
                (
                    Opcode::Rotr | Opcode::Rotl,
                    &[I8X16 | I16X8 | I32X4 | I64X2, _]
                ),
            )
        }

        // No exception list for other architectures.
        _ => true,
    }
}
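/// An opcode together with one concrete choice of argument and return types.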
type OpcodeSignature = (Opcode, Vec<Type>, Vec<Type>);
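/// Every `(opcode, argument types, return types)` combination the fuzzer is
/// willing to generate, enumerated once from each opcode's type constraints.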
static OPCODE_SIGNATURES: LazyLock<Vec<OpcodeSignature>> = LazyLock::new(|| {
    let types = &[
        // Scalars.
        I8, I16, I32, I64, I128, F32, F64,
        // SIMD types.
        I8X16, I16X8, I32X4, I64X2, F32X4, F64X2,
    ];

    // `FUZZGEN_ALLOWED_OPS` restricts generation to a comma-separated list of
    // opcode names.
    let allowed_opcodes = std::env::var("FUZZGEN_ALLOWED_OPS").ok().map(|s| {
        s.split(',')
            .map(|s| s.trim())
            .filter(|s| !s.is_empty())
            .map(|s| Opcode::from_str(s).expect("Unrecognized opcode"))
            .collect::<Vec<_>>()
    });

    Opcode::all()
        .iter()
        .filter(|op| {
            match op {
                // Control-flow instructions are emitted by `insert_terminator`
                // rather than the regular instruction loop.
                Opcode::BrTable
                | Opcode::Brif
                | Opcode::Jump
                | Opcode::Return
                | Opcode::ReturnCall
                | Opcode::ReturnCallIndirect => false,

                // Constants are emitted through `generate_const` when the
                // variable pool is built.
                Opcode::Iconst => false,

                // `extract_vector` operates on dynamic vectors, which the
                // fuzzer does not generate.
                Opcode::ExtractVector => false,

                _ => true,
            }
        })
        .flat_map(|op| {
            let constraints = op.constraints();

            let ctrl_types = if let Some(ctrls) = constraints.ctrl_typeset() {
                Vec::from_iter(types.iter().copied().filter(|ty| ctrls.contains(*ty)))
            } else {
                vec![INVALID]
            };

            ctrl_types.into_iter().flat_map(move |ctrl_type| {
                let rets = Vec::from_iter(
                    (0..constraints.num_fixed_results())
                        .map(|i| constraints.result_type(i, ctrl_type)),
                );

                // For each fixed argument, collect the set of types its
                // constraint allows.
                let mut cols = vec![];

                for i in 0..constraints.num_fixed_value_arguments() {
                    match constraints.value_argument_constraint(i, ctrl_type) {
                        ResolvedConstraint::Bound(ty) => cols.push(Vec::from([ty])),
                        ResolvedConstraint::Free(tys) => cols.push(Vec::from_iter(
                            types.iter().copied().filter(|ty| tys.contains(*ty)),
                        )),
                    }
                }

                // Build the Cartesian product of the per-argument type sets,
                // working right to left, to enumerate every argument list.
                let mut argss = vec![vec![]];
                let mut cols = cols.as_slice();
                while let Some((col, rest)) = cols.split_last() {
                    cols = rest;

                    let mut next = vec![];
                    for current in argss.iter() {
                        for ty in col {
                            let mut args = vec![*ty];
                            args.extend_from_slice(current);
                            next.push(args);
                        }
                    }

                    let _ = std::mem::replace(&mut argss, next);
                }

                argss.into_iter().map(move |args| (*op, args, rets.clone()))
            })
        })
        .filter(|(op, args, rets)| {
            // Reject combinations that are unsupported everywhere: trapping
            // ops, formats that `inserter_for_format` does not handle yet, and
            // conversions with mismatched scalar/vector shapes.
            exceptions!(
                op,
                args.as_slice(),
                rets.as_slice(),
                (Opcode::Debugtrap),
                (Opcode::Trap),
                (Opcode::Trapz),
                (Opcode::Trapnz),
                (Opcode::CallIndirect, &[I32]),
                (Opcode::FuncAddr),
                (Opcode::X86Pshufb),
                (Opcode::AvgRound),
                (Opcode::Uload8x8),
                (Opcode::Sload8x8),
                (Opcode::Uload16x4),
                (Opcode::Sload16x4),
                (Opcode::Uload32x2),
                (Opcode::Sload32x2),
                (Opcode::StackAddr),
                (Opcode::DynamicStackLoad),
                (Opcode::DynamicStackStore),
                (Opcode::DynamicStackAddr),
                (Opcode::GlobalValue),
                (Opcode::SymbolValue),
                (Opcode::TlsValue),
                (Opcode::GetPinnedReg),
                (Opcode::SetPinnedReg),
                (Opcode::GetFramePointer),
                (Opcode::GetStackPointer),
                (Opcode::GetReturnAddress),
                (Opcode::X86Blendv),
                (Opcode::IcmpImm),
                (Opcode::X86Pmulhrsw),
                (Opcode::IaddImm),
                (Opcode::ImulImm),
                (Opcode::UdivImm),
                (Opcode::SdivImm),
                (Opcode::UremImm),
                (Opcode::SremImm),
                (Opcode::IrsubImm),
                (Opcode::UaddOverflowCin),
                (Opcode::SaddOverflowCin),
                (Opcode::UaddOverflowTrap),
                (Opcode::UsubOverflowBin),
                (Opcode::SsubOverflowBin),
                (Opcode::BandImm),
                (Opcode::BorImm),
                (Opcode::BxorImm),
                (Opcode::RotlImm),
                (Opcode::RotrImm),
                (Opcode::IshlImm),
                (Opcode::UshrImm),
                (Opcode::SshrImm),
                (Opcode::ScalarToVector),
                (Opcode::X86Pmaddubsw),
                (Opcode::X86Cvtt2dq),
                (Opcode::Umulhi, &[I128, I128], &[I128]),
                (Opcode::Smulhi, &[I128, I128], &[I128]),
                (Opcode::Iconcat, &[I32, I32], &[I64]),
                (Opcode::Iconcat, &[I16, I16], &[I32]),
                (Opcode::Iconcat, &[I8, I8], &[I16]),
                (Opcode::Isplit, &[I64], &[I32, I32]),
                (Opcode::Isplit, &[I32], &[I16, I16]),
                (Opcode::Isplit, &[I16], &[I8, I8]),
                (Opcode::Fmin, &[F32X4, F32X4], &[F32X4]),
                (Opcode::Fmin, &[F64X2, F64X2], &[F64X2]),
                (Opcode::Fmax, &[F32X4, F32X4], &[F32X4]),
                (Opcode::Fmax, &[F64X2, F64X2], &[F64X2]),
                (Opcode::FcvtToUintSat, &[F32X4], &[I8]),
                (Opcode::FcvtToUintSat, &[F64X2], &[I8]),
                (Opcode::FcvtToUintSat, &[F32X4], &[I16]),
                (Opcode::FcvtToUintSat, &[F64X2], &[I16]),
                (Opcode::FcvtToUintSat, &[F32X4], &[I32]),
                (Opcode::FcvtToUintSat, &[F64X2], &[I32]),
                (Opcode::FcvtToUintSat, &[F32X4], &[I64]),
                (Opcode::FcvtToUintSat, &[F64X2], &[I64]),
                (Opcode::FcvtToUintSat, &[F32X4], &[I128]),
                (Opcode::FcvtToUintSat, &[F64X2], &[I128]),
                (Opcode::FcvtToUintSat, &[F32], &[I8X16]),
                (Opcode::FcvtToUintSat, &[F64], &[I8X16]),
                (Opcode::FcvtToUintSat, &[F32X4], &[I8X16]),
                (Opcode::FcvtToUintSat, &[F64X2], &[I8X16]),
                (Opcode::FcvtToUintSat, &[F32], &[I16X8]),
                (Opcode::FcvtToUintSat, &[F64], &[I16X8]),
                (Opcode::FcvtToUintSat, &[F32X4], &[I16X8]),
                (Opcode::FcvtToUintSat, &[F64X2], &[I16X8]),
                (Opcode::FcvtToUintSat, &[F32], &[I32X4]),
                (Opcode::FcvtToUintSat, &[F64], &[I32X4]),
                (Opcode::FcvtToUintSat, &[F64X2], &[I32X4]),
                (Opcode::FcvtToUintSat, &[F32], &[I64X2]),
                (Opcode::FcvtToUintSat, &[F64], &[I64X2]),
                (Opcode::FcvtToUintSat, &[F32X4], &[I64X2]),
                (Opcode::FcvtToSintSat, &[F32X4], &[I8]),
                (Opcode::FcvtToSintSat, &[F64X2], &[I8]),
                (Opcode::FcvtToSintSat, &[F32X4], &[I16]),
                (Opcode::FcvtToSintSat, &[F64X2], &[I16]),
                (Opcode::FcvtToSintSat, &[F32X4], &[I32]),
                (Opcode::FcvtToSintSat, &[F64X2], &[I32]),
                (Opcode::FcvtToSintSat, &[F32X4], &[I64]),
                (Opcode::FcvtToSintSat, &[F64X2], &[I64]),
                (Opcode::FcvtToSintSat, &[F32X4], &[I128]),
                (Opcode::FcvtToSintSat, &[F64X2], &[I128]),
                (Opcode::FcvtToSintSat, &[F32], &[I8X16]),
                (Opcode::FcvtToSintSat, &[F64], &[I8X16]),
                (Opcode::FcvtToSintSat, &[F32X4], &[I8X16]),
                (Opcode::FcvtToSintSat, &[F64X2], &[I8X16]),
                (Opcode::FcvtToSintSat, &[F32], &[I16X8]),
                (Opcode::FcvtToSintSat, &[F64], &[I16X8]),
                (Opcode::FcvtToSintSat, &[F32X4], &[I16X8]),
                (Opcode::FcvtToSintSat, &[F64X2], &[I16X8]),
                (Opcode::FcvtToSintSat, &[F32], &[I32X4]),
                (Opcode::FcvtToSintSat, &[F64], &[I32X4]),
                (Opcode::FcvtToSintSat, &[F64X2], &[I32X4]),
                (Opcode::FcvtToSintSat, &[F32], &[I64X2]),
                (Opcode::FcvtToSintSat, &[F64], &[I64X2]),
                (Opcode::FcvtToSintSat, &[F32X4], &[I64X2]),
                (Opcode::FcvtFromUint, &[I8X16], &[F32]),
                (Opcode::FcvtFromUint, &[I16X8], &[F32]),
                (Opcode::FcvtFromUint, &[I32X4], &[F32]),
                (Opcode::FcvtFromUint, &[I64X2], &[F32]),
                (Opcode::FcvtFromUint, &[I8X16], &[F64]),
                (Opcode::FcvtFromUint, &[I16X8], &[F64]),
                (Opcode::FcvtFromUint, &[I32X4], &[F64]),
                (Opcode::FcvtFromUint, &[I64X2], &[F64]),
                (Opcode::FcvtFromUint, &[I8], &[F32X4]),
                (Opcode::FcvtFromUint, &[I16], &[F32X4]),
                (Opcode::FcvtFromUint, &[I32], &[F32X4]),
                (Opcode::FcvtFromUint, &[I64], &[F32X4]),
                (Opcode::FcvtFromUint, &[I128], &[F32X4]),
                (Opcode::FcvtFromUint, &[I8X16], &[F32X4]),
                (Opcode::FcvtFromUint, &[I16X8], &[F32X4]),
                (Opcode::FcvtFromUint, &[I64X2], &[F32X4]),
                (Opcode::FcvtFromUint, &[I8], &[F64X2]),
                (Opcode::FcvtFromUint, &[I16], &[F64X2]),
                (Opcode::FcvtFromUint, &[I32], &[F64X2]),
                (Opcode::FcvtFromUint, &[I64], &[F64X2]),
                (Opcode::FcvtFromUint, &[I128], &[F64X2]),
                (Opcode::FcvtFromUint, &[I8X16], &[F64X2]),
                (Opcode::FcvtFromUint, &[I16X8], &[F64X2]),
                (Opcode::FcvtFromUint, &[I32X4], &[F64X2]),
                (Opcode::FcvtFromSint, &[I8X16], &[F32]),
                (Opcode::FcvtFromSint, &[I16X8], &[F32]),
                (Opcode::FcvtFromSint, &[I32X4], &[F32]),
                (Opcode::FcvtFromSint, &[I64X2], &[F32]),
                (Opcode::FcvtFromSint, &[I8X16], &[F64]),
                (Opcode::FcvtFromSint, &[I16X8], &[F64]),
                (Opcode::FcvtFromSint, &[I32X4], &[F64]),
                (Opcode::FcvtFromSint, &[I64X2], &[F64]),
                (Opcode::FcvtFromSint, &[I8], &[F32X4]),
                (Opcode::FcvtFromSint, &[I16], &[F32X4]),
                (Opcode::FcvtFromSint, &[I32], &[F32X4]),
                (Opcode::FcvtFromSint, &[I64], &[F32X4]),
                (Opcode::FcvtFromSint, &[I128], &[F32X4]),
                (Opcode::FcvtFromSint, &[I8X16], &[F32X4]),
                (Opcode::FcvtFromSint, &[I16X8], &[F32X4]),
                (Opcode::FcvtFromSint, &[I64X2], &[F32X4]),
                (Opcode::FcvtFromSint, &[I8], &[F64X2]),
                (Opcode::FcvtFromSint, &[I16], &[F64X2]),
                (Opcode::FcvtFromSint, &[I32], &[F64X2]),
                (Opcode::FcvtFromSint, &[I64], &[F64X2]),
                (Opcode::FcvtFromSint, &[I128], &[F64X2]),
                (Opcode::FcvtFromSint, &[I8X16], &[F64X2]),
                (Opcode::FcvtFromSint, &[I16X8], &[F64X2]),
                (Opcode::FcvtFromSint, &[I32X4], &[F64X2]),
                (Opcode::AtomicRmw, _, &[I128]),
                (Opcode::AtomicCas, _, &[I128]),
                (Opcode::AtomicLoad, _, &[I128]),
                (Opcode::AtomicStore, &[I128, _], _),
            )
        })
        .filter(|(op, ..)| {
            allowed_opcodes
                .as_ref()
                .map_or(true, |opcodes| opcodes.contains(op))
        })
        .collect()
});
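/// Maps an `InstructionFormat` to the helper that knows how to insert
/// instructions of that format. The `todo!()` formats should be unreachable,
/// since the opcodes using them are filtered out of `OPCODE_SIGNATURES`.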
fn inserter_for_format(fmt: InstructionFormat) -> OpcodeInserter {
    match fmt {
        InstructionFormat::AtomicCas => insert_atomic_cas,
        InstructionFormat::AtomicRmw => insert_atomic_rmw,
        InstructionFormat::Binary => insert_opcode,
        InstructionFormat::BinaryImm64 => todo!(),
        InstructionFormat::BinaryImm8 => insert_ins_ext_lane,
        InstructionFormat::Call => insert_call,
        InstructionFormat::CallIndirect => insert_call,
        InstructionFormat::CondTrap => todo!(),
        InstructionFormat::DynamicStackLoad => todo!(),
        InstructionFormat::DynamicStackStore => todo!(),
        InstructionFormat::FloatCompare => insert_cmp,
        InstructionFormat::FuncAddr => todo!(),
        InstructionFormat::IntAddTrap => todo!(),
        InstructionFormat::IntCompare => insert_cmp,
        InstructionFormat::IntCompareImm => todo!(),
        InstructionFormat::Load => insert_load_store,
        InstructionFormat::LoadNoOffset => insert_load_store,
        InstructionFormat::NullAry => insert_opcode,
        InstructionFormat::Shuffle => insert_shuffle,
        InstructionFormat::StackLoad => insert_stack_load,
        InstructionFormat::StackStore => insert_stack_store,
        InstructionFormat::Store => insert_load_store,
        InstructionFormat::StoreNoOffset => insert_load_store,
        InstructionFormat::Ternary => insert_opcode,
        InstructionFormat::TernaryImm8 => insert_ins_ext_lane,
        InstructionFormat::Trap => todo!(),
        InstructionFormat::Unary => insert_opcode,
        InstructionFormat::UnaryConst => insert_const,
        InstructionFormat::UnaryGlobalValue => todo!(),
        InstructionFormat::UnaryIeee16 => insert_const,
        InstructionFormat::UnaryIeee32 => insert_const,
        InstructionFormat::UnaryIeee64 => insert_const,
        InstructionFormat::UnaryImm => insert_const,

        InstructionFormat::BranchTable
        | InstructionFormat::Brif
        | InstructionFormat::Jump
        | InstructionFormat::MultiAry => {
            panic!("Control-flow instructions should be handled by 'insert_terminator': {fmt:?}")
        }
    }
}
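/// Generates a single random, valid CLIF function from the fuzzer's raw
/// `Unstructured` input, within the limits set by `Config`.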
pub struct FunctionGenerator<'r, 'data>
where
    'data: 'r,
{
    u: &'r mut Unstructured<'data>,
    config: &'r Config,
    resources: Resources,
    isa: OwnedTargetIsa,
    name: UserFuncName,
    signature: Signature,
}

#[derive(Debug, Clone)]
enum BlockTerminator {
    Return,
    Jump(Block),
    Br(Block, Block),
    BrTable(Block, Vec<Block>),
    Switch(Type, Block, HashMap<u128, Block>),
    TailCall(FuncRef),
    TailCallIndirect(FuncRef),
}

#[derive(Debug, Clone)]
enum BlockTerminatorKind {
    Return,
    Jump,
    Br,
    BrTable,
    Switch,
    TailCall,
    TailCallIndirect,
}
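/// Alias-analysis category assigned to each generated stack slot; it decides
/// which `AliasRegion`, if any, is attached to memory operations on the slot.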
#[derive(Debug, PartialEq, Clone, Copy)]
enum AACategory {
    Other,
    Heap,
    Table,
    VmCtx,
}

impl AACategory {
    pub fn all() -> &'static [Self] {
        &[
            AACategory::Other,
            AACategory::Heap,
            AACategory::Table,
            AACategory::VmCtx,
        ]
    }

    pub fn update_memflags(&self, flags: &mut MemFlags) {
        flags.set_alias_region(match self {
            AACategory::Other => None,
            AACategory::Heap => Some(AliasRegion::Heap),
            AACategory::Table => Some(AliasRegion::Table),
            AACategory::VmCtx => Some(AliasRegion::Vmctx),
        })
    }
}

pub type StackAlignment = StackSize;
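/// Pools of entities created up front (variables, blocks, stack slots,
/// function references) that instruction generation later draws from.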
#[derive(Default)]
struct Resources {
    vars: HashMap<Type, Vec<Variable>>,
    blocks: Vec<(Block, BlockSignature)>,
    blocks_without_params: Vec<Block>,
    block_terminators: Vec<BlockTerminator>,
    func_refs: Vec<(Signature, SigRef, FuncRef)>,
    stack_slots: Vec<(StackSlot, StackSize, StackAlignment, AACategory)>,
    usercalls: Vec<(UserExternalName, Signature)>,
    libcalls: Vec<LibCall>,
}

impl Resources {
    /// Splits the non-entry blocks into those numbered at or before `block`
    /// (valid backward-branch targets, including `block` itself) and those
    /// after it (forward targets). The entry block is skipped because it can
    /// never be a branch target.
    fn partition_target_blocks(
        &self,
        block: Block,
    ) -> (&[(Block, BlockSignature)], &[(Block, BlockSignature)]) {
        let target_blocks = &self.blocks[1..];
        target_blocks.split_at(block.as_u32() as usize)
    }

    /// Returns the blocks that come after `block` in layout order.
    fn forward_blocks(&self, block: Block) -> &[(Block, BlockSignature)] {
        let (_, forward_blocks) = self.partition_target_blocks(block);
        forward_blocks
    }

    /// Returns the parameterless blocks after `block`; relies on
    /// `blocks_without_params` being sorted.
    fn forward_blocks_without_params(&self, block: Block) -> &[Block] {
        let partition_point = self.blocks_without_params.partition_point(|b| *b <= block);
        &self.blocks_without_params[partition_point..]
    }

    /// Iterates over the function references that a caller with `caller_sig`
    /// may tail-call: the callee must use the `tail` calling convention and
    /// return exactly the caller's return types.
    fn tail_call_targets<'a>(
        &'a self,
        caller_sig: &'a Signature,
    ) -> impl Iterator<Item = &'a (Signature, SigRef, FuncRef)> {
        self.func_refs.iter().filter(|(sig, _, _)| {
            sig.call_conv == CallConv::Tail && sig.returns == caller_sig.returns
        })
    }
}
impl<'r, 'data> FunctionGenerator<'r, 'data>
where
    'data: 'r,
{
    pub fn new(
        u: &'r mut Unstructured<'data>,
        config: &'r Config,
        isa: OwnedTargetIsa,
        name: UserFuncName,
        signature: Signature,
        usercalls: Vec<(UserExternalName, Signature)>,
        libcalls: Vec<LibCall>,
    ) -> Self {
        Self {
            u,
            config,
            resources: Resources {
                usercalls,
                libcalls,
                ..Resources::default()
            },
            isa,
            name,
            signature,
        }
    }

    /// Draws a value from the configured inclusive range.
    fn param(&mut self, param: &RangeInclusive<usize>) -> Result<usize> {
        Ok(self.u.int_in_range(param.clone())?)
    }
    fn system_callconv(&mut self) -> CallConv {
        // Always SystemV for now; hosts with other calling conventions are
        // not modeled here.
        CallConv::SystemV
    }
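    /// Returns a random stack slot of at least `n` bytes; relies on
    /// `stack_slots` being sorted by size.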
    fn stack_slot_with_size(
        &mut self,
        n: u32,
    ) -> Result<(StackSlot, StackSize, StackAlignment, AACategory)> {
        let first = self
            .resources
            .stack_slots
            .partition_point(|&(_slot, size, _align, _category)| size < n);
        Ok(*self.u.choose(&self.resources.stack_slots[first..])?)
    }
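    /// Computes a stack address usable for an access of at least `min_size`
    /// bytes, returning the address, the extra offset still in bounds, and the
    /// slot's alias-analysis category.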
    fn generate_load_store_address(
        &mut self,
        builder: &mut FunctionBuilder,
        min_size: u32,
        aligned: bool,
    ) -> Result<(Value, u32, AACategory)> {
        let (addr, available_size, category) = {
            let (ss, slot_size, _align, category) = self.stack_slot_with_size(min_size)?;

            // Pick an offset into the slot; for aligned accesses, keep the
            // offset a multiple of the access size.
            let max_offset = slot_size - min_size;
            let offset = if aligned {
                self.u.int_in_range(0..=max_offset / min_size)? * min_size
            } else {
                self.u.int_in_range(0..=max_offset)?
            };

            let base_addr = builder.ins().stack_addr(I64, ss, offset as i32);
            let available_size = slot_size.saturating_sub(offset);
            (base_addr, available_size, category)
        };

        // Callers may add up to `max_offset` further bytes without going out
        // of bounds.
        let max_offset = available_size.saturating_sub(min_size);
        Ok((addr, max_offset, category))
    }
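    /// Generates an address, `MemFlags`, and offset for a load or store of
    /// `min_size` bytes, honoring per-architecture atomic alignment rules.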
    fn generate_address_and_memflags(
        &mut self,
        builder: &mut FunctionBuilder,
        min_size: u32,
        is_atomic: bool,
    ) -> Result<(Value, MemFlags, Offset32)> {
        // AArch64 and RISC-V do not support misaligned atomic accesses, so
        // force alignment there. Accesses wider than 8 bytes are never marked
        // aligned, since that much alignment is not guaranteed.
        let requires_aligned_atomics = matches!(
            self.isa.triple().architecture,
            Architecture::Aarch64(_) | Architecture::Riscv64(_)
        );
        let aligned = if is_atomic && requires_aligned_atomics {
            true
        } else if min_size > 8 {
            false
        } else {
            bool::arbitrary(self.u)?
        };

        let mut flags = MemFlags::new();
        // Only set `aligned`/`notrap` when the access really is aligned and
        // in bounds, which the generated stack addresses guarantee.
        if aligned && bool::arbitrary(self.u)? {
            flags.set_aligned();
        }
        if aligned && bool::arbitrary(self.u)? {
            flags.set_notrap();
        }

        let (address, max_offset, category) =
            self.generate_load_store_address(builder, min_size, aligned)?;

        // Reflect the slot's alias-analysis category in the flags.
        category.update_memflags(&mut flags);

        let offset = if aligned {
            0
        } else {
            self.u.int_in_range(0..=max_offset)? as i32
        }
        .into();

        Ok((address, flags, offset))
    }
    fn get_variable_of_type(&mut self, ty: Type) -> Result<Variable> {
        // If the pool has no variable of this type, `choose` fails on the
        // empty slice and this testcase is abandoned.
        let opts = self.resources.vars.get(&ty).map_or(&[][..], Vec::as_slice);
        let var = self.u.choose(opts)?;
        Ok(*var)
    }
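    /// Emits a constant of type `ty` with an arbitrary value, using the
    /// appropriate `iconst`/`f*const`/`vconst` instruction.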
    fn generate_const(&mut self, builder: &mut FunctionBuilder, ty: Type) -> Result<Value> {
        Ok(match self.u.datavalue(ty)? {
            DataValue::I8(i) => builder.ins().iconst(ty, i as u8 as i64),
            DataValue::I16(i) => builder.ins().iconst(ty, i as u16 as i64),
            DataValue::I32(i) => builder.ins().iconst(ty, i as u32 as i64),
            DataValue::I64(i) => builder.ins().iconst(ty, i),
            DataValue::I128(i) => {
                // There is no 128-bit `iconst`; build the value from two halves.
                let hi = builder.ins().iconst(I64, (i >> 64) as i64);
                let lo = builder.ins().iconst(I64, i as i64);
                builder.ins().iconcat(lo, hi)
            }
            DataValue::F16(f) => builder.ins().f16const(f),
            DataValue::F32(f) => builder.ins().f32const(f),
            DataValue::F64(f) => builder.ins().f64const(f),
            DataValue::F128(f) => {
                let handle = builder.func.dfg.constants.insert(f.into());
                builder.ins().f128const(handle)
            }
            DataValue::V128(bytes) => {
                let data = bytes.to_vec().into();
                let handle = builder.func.dfg.constants.insert(data);
                builder.ins().vconst(ty, handle)
            }
            _ => unimplemented!(),
        })
    }
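    /// Chooses a branch target for `source_block`, preferring forward blocks
    /// but occasionally (per `backwards_branch_ratio`) branching backward.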
    fn generate_target_block(&mut self, source_block: Block) -> Result<Block> {
        // Backward branches can create long-running loops, so only take them
        // at the configured ratio.
        let (backwards_blocks, forward_blocks) =
            self.resources.partition_target_blocks(source_block);
        let ratio = self.config.backwards_branch_ratio;
        let block_targets = if !backwards_blocks.is_empty() && self.u.ratio(ratio.0, ratio.1)? {
            backwards_blocks
        } else {
            forward_blocks
        };
        assert!(!block_targets.is_empty());

        let (block, _) = self.u.choose(block_targets)?.clone();
        Ok(block)
    }
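    /// Produces one value per parameter of `block`, so a branch to it has the
    /// arguments it expects.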
    fn generate_values_for_block(
        &mut self,
        builder: &mut FunctionBuilder,
        block: Block,
    ) -> Result<Vec<Value>> {
        let (_, sig) = self.resources.blocks[block.as_u32() as usize].clone();
        self.generate_values_for_signature(builder, sig.iter().copied())
    }

    fn generate_values_for_signature<I: Iterator<Item = Type>>(
        &mut self,
        builder: &mut FunctionBuilder,
        signature: I,
    ) -> Result<Vec<Value>> {
        signature
            .map(|ty| {
                let var = self.get_variable_of_type(ty)?;
                let val = builder.use_var(var);
                Ok(val)
            })
            .collect()
    }
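    /// Emits the terminator previously planned for `source_block`, generating
    /// any block arguments or call operands it needs.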
    fn insert_terminator(
        &mut self,
        builder: &mut FunctionBuilder,
        source_block: Block,
    ) -> Result<()> {
        let terminator = self.resources.block_terminators[source_block.as_u32() as usize].clone();

        match terminator {
            BlockTerminator::Return => {
                let types: Vec<Type> = {
                    let rets = &builder.func.signature.returns;
                    rets.iter().map(|p| p.value_type).collect()
                };
                let vals = self.generate_values_for_signature(builder, types.into_iter())?;

                builder.ins().return_(&vals[..]);
            }
            BlockTerminator::Jump(target) => {
                let args = self.generate_values_for_block(builder, target)?;
                builder.ins().jump(target, &args[..]);
            }
            BlockTerminator::Br(left, right) => {
                let left_args = self.generate_values_for_block(builder, left)?;
                let right_args = self.generate_values_for_block(builder, right)?;

                let condbr_types = [I8, I16, I32, I64, I128];
                let _type = *self.u.choose(&condbr_types[..])?;
                let val = builder.use_var(self.get_variable_of_type(_type)?);
                builder
                    .ins()
                    .brif(val, left, &left_args[..], right, &right_args[..]);
            }
            BlockTerminator::BrTable(default, targets) => {
                // Build the jump table entries, passing each target the block
                // arguments it expects.
                let mut jt = Vec::with_capacity(targets.len());
                for block in targets {
                    let args = self.generate_values_for_block(builder, block)?;
                    jt.push(builder.func.dfg.block_call(block, &args))
                }

                let args = self.generate_values_for_block(builder, default)?;
                let jt_data = JumpTableData::new(builder.func.dfg.block_call(default, &args), &jt);
                let jt = builder.create_jump_table(jt_data);

                // `br_table` takes an I32 index.
                let val = builder.use_var(self.get_variable_of_type(I32)?);

                builder.ins().br_table(val, jt);
            }
            BlockTerminator::Switch(_type, default, entries) => {
                let mut switch = Switch::new();
                for (&entry, &block) in entries.iter() {
                    switch.set_entry(entry, block);
                }

                let switch_val = builder.use_var(self.get_variable_of_type(_type)?);

                switch.emit(builder, switch_val, default);
            }
            BlockTerminator::TailCall(target) | BlockTerminator::TailCallIndirect(target) => {
                let (sig, sig_ref, func_ref) = self
                    .resources
                    .func_refs
                    .iter()
                    .find(|(_, _, f)| *f == target)
                    .expect("Failed to find previously selected function")
                    .clone();

                let opcode = match terminator {
                    BlockTerminator::TailCall(_) => Opcode::ReturnCall,
                    BlockTerminator::TailCallIndirect(_) => Opcode::ReturnCallIndirect,
                    _ => unreachable!(),
                };

                insert_call_to_function(self, builder, opcode, &sig, sig_ref, func_ref)?;
            }
        }

        Ok(())
    }
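    /// Fills the current block with a random number of instructions drawn from
    /// `OPCODE_SIGNATURES`.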
    fn generate_instructions(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        for _ in 0..self.param(&self.config.instructions_per_block)? {
            let (op, args, rets) = self.u.choose(&OPCODE_SIGNATURES)?;

            // Filter per choice rather than pre-filtering the signature list,
            // so the indices `choose` consumes stay stable across targets and
            // existing fuzz inputs keep their meaning.
            if !valid_for_target(&self.isa.triple(), *op, &args, &rets) {
                return Err(arbitrary::Error::IncorrectFormat.into());
            }

            let inserter = inserter_for_format(op.format());
            inserter(self, builder, *op, &args, &rets)?;
        }

        Ok(())
    }
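    /// Imports every user call and libcall as an `ExtFuncData` and records the
    /// references so that calls and tail calls can target them.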
    fn generate_funcrefs(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let usercalls: Vec<(ExternalName, Signature)> = self
            .resources
            .usercalls
            .iter()
            .map(|(name, signature)| {
                let user_func_ref = builder.func.declare_imported_user_function(name.clone());
                let name = ExternalName::User(user_func_ref);
                (name, signature.clone())
            })
            .collect();

        let lib_callconv = self.system_callconv();
        let libcalls: Vec<(ExternalName, Signature)> = self
            .resources
            .libcalls
            .iter()
            .map(|libcall| {
                let pointer_type = Type::int_with_byte_size(
                    self.isa.triple().pointer_width().unwrap().bytes().into(),
                )
                .unwrap();
                let signature = libcall.signature(lib_callconv, pointer_type);
                let name = ExternalName::LibCall(*libcall);
                (name, signature)
            })
            .collect();

        for (name, signature) in usercalls.into_iter().chain(libcalls) {
            let sig_ref = builder.import_signature(signature.clone());
            let func_ref = builder.import_function(ExtFuncData {
                name,
                signature: sig_ref,
                colocated: self.u.arbitrary()?,
            });

            self.resources
                .func_refs
                .push((signature, sig_ref, func_ref));
        }

        Ok(())
    }
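    /// Creates the configured number of explicit stack slots with random
    /// sizes, alignments, and alias-analysis categories.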
    fn generate_stack_slots(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        for _ in 0..self.param(&self.config.static_stack_slots_per_function)? {
            let bytes = self.param(&self.config.static_stack_slot_size)? as u32;
            let alignment = self.param(&self.config.stack_slot_alignment_log2)? as u8;
            let alignment_bytes = 1 << alignment;

            let ss_data = StackSlotData::new(StackSlotKind::ExplicitSlot, bytes, alignment);
            let slot = builder.create_sized_stack_slot(ss_data);

            // Randomly assign an alias-analysis category to the slot.
            let category = *self.u.choose(AACategory::all())?;

            self.resources
                .stack_slots
                .push((slot, bytes, alignment_bytes, category));
        }

        // Sort by size so `stack_slot_with_size` can binary-search.
        self.resources
            .stack_slots
            .sort_unstable_by_key(|&(_slot, bytes, _align, _category)| bytes);

        Ok(())
    }
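    /// Zero-initializes every stack slot so later loads read defined values,
    /// keeping execution deterministic regardless of prior stack contents.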
    fn initialize_stack_slots(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let i8_zero = builder.ins().iconst(I8, 0);
        let i16_zero = builder.ins().iconst(I16, 0);
        let i32_zero = builder.ins().iconst(I32, 0);
        let i64_zero = builder.ins().iconst(I64, 0);
        let i128_zero = builder.ins().uextend(I128, i64_zero);

        for &(slot, init_size, _align, category) in self.resources.stack_slots.iter() {
            let mut size = init_size;

            // Fill the slot with the widest zero store that still fits.
            while size != 0 {
                let offset = (init_size - size) as i32;
                let (val, filled) = match size {
                    sz if sz / 16 > 0 => (i128_zero, 16),
                    sz if sz / 8 > 0 => (i64_zero, 8),
                    sz if sz / 4 > 0 => (i32_zero, 4),
                    sz if sz / 2 > 0 => (i16_zero, 2),
                    _ => (i8_zero, 1),
                };
                let addr = builder.ins().stack_addr(I64, slot, offset);

                // These stores are always in bounds, so they can never trap.
                let mut flags = MemFlags::new();
                flags.set_notrap();
                category.update_memflags(&mut flags);

                builder.ins().store(flags, val, addr, 0);

                size -= filled;
            }
        }
        Ok(())
    }
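    /// Creates all blocks with their signatures and plans a terminator for
    /// each. Block 0 is the entry block and carries the function signature.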
    fn generate_blocks(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let extra_block_count = self.param(&self.config.blocks_per_function)?;

        // We always need at least the entry block.
        let block_count = 1 + extra_block_count;

        self.resources.blocks = (0..block_count)
            .map(|i| {
                let is_entry = i == 0;
                let block = builder.create_block();

                // Randomly mark some non-entry blocks as cold.
                if !is_entry {
                    if bool::arbitrary(self.u)? {
                        builder.set_cold_block(block);
                    }
                }

                // The entry block must take the function parameters; other
                // blocks get a randomly generated signature.
                if is_entry {
                    builder.append_block_params_for_function_params(block);
                    Ok((
                        block,
                        self.signature.params.iter().map(|a| a.value_type).collect(),
                    ))
                } else {
                    let sig = self.generate_block_signature()?;
                    sig.iter().for_each(|ty| {
                        builder.append_block_param(block, *ty);
                    });
                    Ok((block, sig))
                }
            })
            .collect::<Result<Vec<_>>>()?;

        // `Switch` entries cannot pass block arguments, so keep a sorted list
        // of the non-entry blocks that take none.
        self.resources.blocks_without_params = self.resources.blocks[1..]
            .iter()
            .filter(|(_, sig)| sig.is_empty())
            .map(|(b, _)| *b)
            .collect();

        // Plan a terminator for every block up front. Branch-style
        // terminators always include the next block in layout order as a
        // target, which keeps every block reachable.
        let blocks = self.resources.blocks.clone();
        self.resources.block_terminators = blocks
            .iter()
            .map(|&(block, _)| {
                let next_block = Block::with_number(block.as_u32() + 1).unwrap();
                let forward_blocks = self.resources.forward_blocks(block);
                let paramless_targets = self.resources.forward_blocks_without_params(block);
                let has_paramless_targets = !paramless_targets.is_empty();
                let next_block_is_paramless = paramless_targets.contains(&next_block);

                let mut valid_terminators = vec![];

                if forward_blocks.is_empty() {
                    // The last block can only return.
                    valid_terminators.push(BlockTerminatorKind::Return);
                } else {
                    valid_terminators.extend_from_slice(&[
                        BlockTerminatorKind::Jump,
                        BlockTerminatorKind::Br,
                        BlockTerminatorKind::BrTable,
                    ]);
                }

                // `Switch` needs parameterless targets, and its default target
                // (the next block) must be parameterless too.
                if has_paramless_targets && next_block_is_paramless {
                    valid_terminators.push(BlockTerminatorKind::Switch);
                }

                // Tail calls need a matching callee, a `tail`-convention
                // caller, and a backend that supports them.
                let has_tail_callees = self
                    .resources
                    .tail_call_targets(&self.signature)
                    .next()
                    .is_some();
                let is_tail_caller = self.signature.call_conv == CallConv::Tail;

                let supports_tail_calls = match self.isa.triple().architecture {
                    Architecture::Aarch64(_) | Architecture::Riscv64(_) => true,
                    // x86_64 currently requires frame pointers for tail calls.
                    Architecture::X86_64 => self.isa.flags().preserve_frame_pointers(),
                    // Other architectures do not support tail calls yet.
                    _ => false,
                };

                if is_tail_caller && has_tail_callees && supports_tail_calls {
                    valid_terminators.extend([
                        BlockTerminatorKind::TailCall,
                        BlockTerminatorKind::TailCallIndirect,
                    ]);
                }

                let terminator = self.u.choose(&valid_terminators)?;

                Ok(match terminator {
                    BlockTerminatorKind::Return => BlockTerminator::Return,
                    BlockTerminatorKind::Jump => BlockTerminator::Jump(next_block),
                    BlockTerminatorKind::Br => {
                        BlockTerminator::Br(next_block, self.generate_target_block(block)?)
                    }
                    BlockTerminatorKind::BrTable => {
                        // The default target is the next block, so the table
                        // entries themselves may branch anywhere.
                        let default = next_block;

                        let target_count = self.param(&self.config.jump_table_entries)?;
                        let targets = Result::from_iter(
                            (0..target_count).map(|_| self.generate_target_block(block)),
                        )?;

                        BlockTerminator::BrTable(default, targets)
                    }
                    BlockTerminatorKind::Switch => {
                        let default_block = next_block;

                        let _type = *self.u.choose(&[I8, I16, I32, I64, I128][..])?;

                        let mut entries = HashMap::new();
                        for _ in 0..self.param(&self.config.switch_cases)? {
                            // Pick a case value within the type's range.
                            let (ty_min, ty_max) = _type.bounds(false);
                            let range_start = self.u.int_in_range(ty_min..=ty_max)?;

                            // Mostly generate single-value cases, occasionally
                            // a contiguous range of them.
                            let range_size = if bool::arbitrary(self.u)? {
                                1
                            } else {
                                self.param(&self.config.switch_max_range_size)?
                            } as u128;

                            for i in 0..range_size {
                                let index = range_start.wrapping_add(i) % ty_max;
                                let block = *self
                                    .u
                                    .choose(self.resources.forward_blocks_without_params(block))?;

                                entries.insert(index, block);
                            }
                        }

                        BlockTerminator::Switch(_type, default_block, entries)
                    }
                    BlockTerminatorKind::TailCall => {
                        let targets = self
                            .resources
                            .tail_call_targets(&self.signature)
                            .collect::<Vec<_>>();
                        let (_, _, funcref) = *self.u.choose(&targets[..])?;
                        BlockTerminator::TailCall(*funcref)
                    }
                    BlockTerminatorKind::TailCallIndirect => {
                        let targets = self
                            .resources
                            .tail_call_targets(&self.signature)
                            .collect::<Vec<_>>();
                        let (_, _, funcref) = *self.u.choose(&targets[..])?;
                        BlockTerminator::TailCallIndirect(*funcref)
                    }
                })
            })
            .collect::<Result<_>>()?;

        Ok(())
    }
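    /// Generates a random list of parameter types for a non-entry block.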
    fn generate_block_signature(&mut self) -> Result<BlockSignature> {
        let param_count = self.param(&self.config.block_signature_params)?;

        let mut params = Vec::with_capacity(param_count);
        for _ in 0..param_count {
            params.push(self.u._type((&*self.isa).supports_simd())?);
        }
        Ok(params)
    }
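    /// Seeds the variable pool: one variable per function parameter, plus a
    /// batch of fresh variables initialized with random constants.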
    fn build_variable_pool(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let block = builder.current_block().unwrap();

        // Start with the function parameters, then extend the pool with
        // randomly typed variables initialized to random constants.
        let mut vars: Vec<_> = builder
            .func
            .signature
            .params
            .iter()
            .map(|param| param.value_type)
            .zip(builder.block_params(block).iter().copied())
            .collect();

        for _ in 0..self.param(&self.config.vars_per_function)? {
            let ty = self.u._type((&*self.isa).supports_simd())?;
            let value = self.generate_const(builder, ty)?;
            vars.push((ty, value));
        }

        for (id, (ty, value)) in vars.into_iter().enumerate() {
            let var = Variable::new(id);
            builder.declare_var(var, ty);
            builder.def_var(var, value);

            // Randomly request a stack map entry; values wider than 16 bytes
            // are not eligible.
            if ty.bytes() <= 16 && self.u.arbitrary()? {
                builder.declare_var_needs_stack_map(var);
            }

            self.resources
                .vars
                .entry(ty)
                .or_insert_with(Vec::new)
                .push(var);
        }

        Ok(())
    }
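    /// Builds the whole function: import callees, lay out blocks and stack
    /// slots, then fill each block with instructions and its planned
    /// terminator.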
    pub fn generate(mut self) -> Result<Function> {
        let mut fn_builder_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(self.name.clone(), self.signature.clone());

        let mut builder = FunctionBuilder::new(&mut func, &mut fn_builder_ctx);

        // Function references must exist before the CFG is planned, since
        // block terminators may tail-call them.
        self.generate_funcrefs(&mut builder)?;
        self.generate_blocks(&mut builder)?;

        // Stack slots must exist before any instruction uses them.
        self.generate_stack_slots(&mut builder)?;

        for (block, block_sig) in self.resources.blocks.clone().into_iter() {
            let is_block0 = block.as_u32() == 0;
            builder.switch_to_block(block);

            if is_block0 {
                // The entry block is special: it creates the variable pool
                // and gives every stack slot a well-defined initial value.
                self.build_variable_pool(&mut builder)?;
                self.initialize_stack_slots(&mut builder)?;
            } else {
                // Bind the block parameters to variables of matching types.
                for (i, ty) in block_sig.iter().enumerate() {
                    let var = self.get_variable_of_type(*ty)?;
                    let block_param = builder.block_params(block)[i];
                    builder.def_var(var, block_param);
                }
            }

            self.generate_instructions(&mut builder)?;

            self.insert_terminator(&mut builder, block)?;
        }

        builder.seal_all_blocks();
        builder.finalize();

        Ok(func)
    }
}