//! Proof-carrying code checking for AArch64 VCode.

use crate::ir::pcc::*;
use crate::ir::types::*;
use crate::ir::MemFlags;
use crate::isa::aarch64::inst::args::{Cond, PairAMode, ShiftOp};
use crate::isa::aarch64::inst::regs::zero_reg;
use crate::isa::aarch64::inst::Inst;
use crate::isa::aarch64::inst::{ALUOp, MoveWideOp};
use crate::isa::aarch64::inst::{AMode, ExtendOp};
use crate::machinst::pcc::*;
use crate::machinst::Reg;
use crate::machinst::{InsnIndex, VCode};
use crate::trace;

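/// Flow a fact through an `ExtendOp`, producing the fact on the
/// extended value when the fact language can express it. `UXTX` is an
/// identity at 64 bits; `SXTX` is conservatively unhandled.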
fn extend_fact(ctx: &FactContext, value: &Fact, mode: ExtendOp) -> Option<Fact> {
    match mode {
        ExtendOp::UXTB => ctx.uextend(value, 8, 64),
        ExtendOp::UXTH => ctx.uextend(value, 16, 64),
        ExtendOp::UXTW => ctx.uextend(value, 32, 64),
        ExtendOp::UXTX => Some(value.clone()),
        ExtendOp::SXTB => ctx.sextend(value, 8, 64),
        ExtendOp::SXTH => ctx.sextend(value, 16, 64),
        ExtendOp::SXTW => ctx.sextend(value, 32, 64),
        ExtendOp::SXTX => None,
    }
}

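/// Flow-state carried between instructions: the facts on the operands
/// of the most recent compare, for use by a following conditional
/// select.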
#[derive(Clone, Debug, Default)]
pub struct FactFlowState {
    cmp_flags: Option<(Fact, Fact)>,
}

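/// Check facts on one instruction, threading comparison state through
/// `state` so that a compare immediately followed by a conditional
/// select can be reasoned about as a unit.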
pub(crate) fn check(
    ctx: &FactContext,
    vcode: &mut VCode<Inst>,
    inst_idx: InsnIndex,
    state: &mut FactFlowState,
) -> PccResult<()> {
    let inst = &vcode[inst_idx];
    trace!("Checking facts on inst: {:?}", inst);

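    // Flags state propagates across at most one instruction: take it
    // here so it is dropped unless the `CSel` case below consumes it.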
    let cmp_flags = state.cmp_flags.take();
    trace!(" * with cmp_flags = {cmp_flags:?}");

    match *inst {
        Inst::Args { .. } => {
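            // ABI code is trusted to pass argument values through
            // unchanged, so facts on `Args` defs hold axiomatically.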
            Ok(())
        }
        Inst::ULoad8 { rd, ref mem, flags }
        | Inst::SLoad8 { rd, ref mem, flags }
        | Inst::ULoad16 { rd, ref mem, flags }
        | Inst::SLoad16 { rd, ref mem, flags }
        | Inst::ULoad32 { rd, ref mem, flags }
        | Inst::SLoad32 { rd, ref mem, flags }
        | Inst::ULoad64 { rd, ref mem, flags } => {
            let access_ty = inst.mem_type().unwrap();
            check_load(ctx, Some(rd.to_reg()), flags, mem, vcode, access_ty)
        }
        Inst::FpuLoad16 { ref mem, flags, .. }
        | Inst::FpuLoad32 { ref mem, flags, .. }
        | Inst::FpuLoad64 { ref mem, flags, .. }
        | Inst::FpuLoad128 { ref mem, flags, .. } => {
            let access_ty = inst.mem_type().unwrap();
            check_load(ctx, None, flags, mem, vcode, access_ty)
        }
        Inst::LoadP64 { ref mem, flags, .. } => check_load_pair(ctx, flags, mem, vcode, 16),
        Inst::FpuLoadP64 { ref mem, flags, .. } => check_load_pair(ctx, flags, mem, vcode, 16),
        Inst::FpuLoadP128 { ref mem, flags, .. } => check_load_pair(ctx, flags, mem, vcode, 32),
        Inst::VecLoadReplicate {
            rn, flags, size, ..
        } => check_load_addr(ctx, flags, rn, vcode, size.lane_size().ty()),
        Inst::LoadAcquire {
            access_ty,
            rn,
            flags,
            ..
        } => check_load_addr(ctx, flags, rn, vcode, access_ty),

        Inst::Store8 { rd, ref mem, flags }
        | Inst::Store16 { rd, ref mem, flags }
        | Inst::Store32 { rd, ref mem, flags }
        | Inst::Store64 { rd, ref mem, flags } => {
            let access_ty = inst.mem_type().unwrap();
            check_store(ctx, Some(rd), flags, mem, vcode, access_ty)
        }
        Inst::FpuStore16 { ref mem, flags, .. }
        | Inst::FpuStore32 { ref mem, flags, .. }
        | Inst::FpuStore64 { ref mem, flags, .. }
        | Inst::FpuStore128 { ref mem, flags, .. } => {
            let access_ty = inst.mem_type().unwrap();
            check_store(ctx, None, flags, mem, vcode, access_ty)
        }
        Inst::StoreP64 { ref mem, flags, .. } => check_store_pair(ctx, flags, mem, vcode, 16),
        Inst::FpuStoreP64 { ref mem, flags, .. } => check_store_pair(ctx, flags, mem, vcode, 16),
        Inst::FpuStoreP128 { ref mem, flags, .. } => check_store_pair(ctx, flags, mem, vcode, 32),
        Inst::StoreRelease {
            access_ty,
            rn,
            flags,
            ..
        } => check_store_addr(ctx, flags, rn, vcode, access_ty),

        Inst::AluRRR {
            alu_op: ALUOp::Add | ALUOp::AddS,
            size,
            rd,
            rn,
            rm,
        } => check_binop(ctx, vcode, 64, rd, rn, rm, |rn, rm| {
            clamp_range(
                ctx,
                64,
                size.bits().into(),
                ctx.add(rn, rm, size.bits().into()),
            )
        }),
        Inst::AluRRImm12 {
            alu_op: ALUOp::Add | ALUOp::AddS,
            size,
            rd,
            rn,
            imm12,
        } => check_unop(ctx, vcode, 64, rd, rn, |rn| {
            let imm12: i64 = imm12.value().into();
            clamp_range(
                ctx,
                64,
                size.bits().into(),
                ctx.offset(&rn, size.bits().into(), imm12),
            )
        }),
        Inst::AluRRImm12 {
            alu_op: ALUOp::Sub,
            size,
            rd,
            rn,
            imm12,
        } => check_unop(ctx, vcode, 64, rd, rn, |rn| {
            let imm12: i64 = imm12.value().into();
            clamp_range(
                ctx,
                64,
                size.bits().into(),
                ctx.offset(&rn, size.bits().into(), -imm12),
            )
        }),
        Inst::AluRRR {
            alu_op: ALUOp::Sub,
            size,
            rd,
            rn,
            rm,
        } => check_binop(ctx, vcode, 64, rd, rn, rm, |rn, rm| {
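            // Subtracting a known constant is an offset by its
            // negation; otherwise we know only the result's width.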
            if let Some(k) = rm.as_const(64) {
                clamp_range(
                    ctx,
                    64,
                    size.bits().into(),
                    ctx.offset(rn, size.bits().into(), -(k as i64)),
                )
            } else {
                clamp_range(ctx, 64, size.bits().into(), None)
            }
        }),
        Inst::AluRRRShift {
            alu_op: ALUOp::Add | ALUOp::AddS,
            size,
            rd,
            rn,
            rm,
            shiftop,
        } if shiftop.op() == ShiftOp::LSL && has_fact(vcode, rn) && has_fact(vcode, rm) => {
            check_binop(ctx, vcode, 64, rd, rn, rm, |rn, rm| {
                let rm_shifted = fail_if_missing(ctx.shl(
                    &rm,
                    size.bits().into(),
                    shiftop.amt().value().into(),
                ))?;
                clamp_range(
                    ctx,
                    64,
                    size.bits().into(),
                    ctx.add(&rn, &rm_shifted, size.bits().into()),
                )
            })
        }
        Inst::AluRRRExtend {
            alu_op: ALUOp::Add | ALUOp::AddS,
            size,
            rd,
            rn,
            rm,
            extendop,
        } if has_fact(vcode, rn) && has_fact(vcode, rm) => {
            check_binop(ctx, vcode, 64, rd, rn, rm, |rn, rm| {
                let rm_extended = fail_if_missing(extend_fact(ctx, rm, extendop))?;
                clamp_range(
                    ctx,
                    64,
                    size.bits().into(),
                    ctx.add(&rn, &rm_extended, size.bits().into()),
                )
            })
        }
        Inst::AluRRImmShift {
            alu_op: ALUOp::Lsl,
            size,
            rd,
            rn,
            immshift,
        } if has_fact(vcode, rn) => check_unop(ctx, vcode, 64, rd, rn, |rn| {
            clamp_range(
                ctx,
                64,
                size.bits().into(),
                ctx.shl(&rn, size.bits().into(), immshift.value().into()),
            )
        }),
        Inst::Extend {
            rd,
            rn,
            signed: false,
            from_bits,
            to_bits,
        } if has_fact(vcode, rn) => check_unop(ctx, vcode, 64, rd, rn, |rn| {
            clamp_range(
                ctx,
                64,
                to_bits.into(),
                ctx.uextend(&rn, from_bits.into(), to_bits.into()),
            )
        }),

        Inst::AluRRR {
            alu_op: ALUOp::SubS,
            size,
            rd,
            rn,
            rm,
        } if rd.to_reg() == zero_reg() => {
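            // A `SubS` that discards its result into the zero
            // register is a compare: record the operand facts for a
            // following conditional select.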
            let rn = get_fact_or_default(vcode, rn, size.bits().into());
            let rm = get_fact_or_default(vcode, rm, size.bits().into());
            state.cmp_flags = Some((rn, rm));
            Ok(())
        }

        // `orr rd, xzr, #imm` is a constant materialization.
        Inst::AluRRImmLogic {
            alu_op: ALUOp::Orr,
            size,
            rd,
            rn,
            imml,
        } if rn == zero_reg() => {
            let constant = imml.value();
            check_constant(ctx, vcode, rd, size.bits().into(), constant)
        }

        // Fallback for other ALU ops: all we know is the result width.
        Inst::AluRRR { rd, size, .. }
        | Inst::AluRRImm12 { rd, size, .. }
        | Inst::AluRRRShift { rd, size, .. }
        | Inst::AluRRRExtend { rd, size, .. }
        | Inst::AluRRImmLogic { rd, size, .. }
        | Inst::AluRRImmShift { rd, size, .. } => check_output(ctx, vcode, rd, &[], |_vcode| {
            clamp_range(ctx, 64, size.bits().into(), None)
        }),

        Inst::Extend {
            rd,
            from_bits,
            to_bits,
            ..
        } => check_output(ctx, vcode, rd, &[], |_vcode| {
            clamp_range(ctx, to_bits.into(), from_bits.into(), None)
        }),

        Inst::MovWide {
            op: MoveWideOp::MovZ,
            imm,
            size: _,
            rd,
        } => {
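            // `MovZ` zeroes every bit outside the shifted 16-bit
            // immediate field, regardless of operand size, so the
            // 64-bit result is a known constant.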
            let constant = u64::from(imm.bits) << (imm.shift * 16);
            check_constant(ctx, vcode, rd, 64, constant)
        }

        Inst::MovWide {
            op: MoveWideOp::MovN,
            imm,
            size,
            rd,
        } => {
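            // `MovN` materializes the bitwise complement of the
            // shifted immediate, truncated to the operand size.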
            let constant = !(u64::from(imm.bits) << (imm.shift * 16)) & size.max_value();
            check_constant(ctx, vcode, rd, 64, constant)
        }

        Inst::MovK { rd, rn, imm, .. } => {
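            // `MovK` replaces one 16-bit field of its input: if the
            // input is a known constant, so is the result; otherwise
            // only the result's width is known.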
            let input = get_fact_or_default(vcode, rn, 64);
            if let Some(input_constant) = input.as_const(64) {
                let mask = 0xffff << (imm.shift * 16);
                let constant = u64::from(imm.bits) << (imm.shift * 16);
                let constant = (input_constant & !mask) | constant;
                check_constant(ctx, vcode, rd, 64, constant)
            } else {
                check_output(ctx, vcode, rd, &[], |_vcode| {
                    Ok(Some(Fact::max_range_for_width(64)))
                })
            }
        }

        Inst::CSel { rd, cond, rn, rm }
            if (cond == Cond::Hs || cond == Cond::Hi) && cmp_flags.is_some() =>
        {
            let (cmp_lhs, cmp_rhs) = cmp_flags.unwrap();
            trace!("CSel: cmp {cond:?} ({cmp_lhs:?}, {cmp_rhs:?})");

            check_output(ctx, vcode, rd, &[], |vcode| {
                let rn = get_fact_or_default(vcode, rn, 64);
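                // A compare feeding a conditional select implies an
                // inequality on each arm: selecting `rn` under `Hs`
                // (resp. `Hi`) means `cmp_lhs >= cmp_rhs` (resp.
                // `cmp_lhs > cmp_rhs`), so strengthen `rn`'s fact
                // accordingly.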
                let lhs_kind = match cond {
                    Cond::Hs => InequalityKind::Loose,
                    Cond::Hi => InequalityKind::Strict,
                    _ => unreachable!(),
                };
                let rn = ctx.apply_inequality(&rn, &cmp_lhs, &cmp_rhs, lhs_kind);
                let rm = get_fact_or_default(vcode, rm, 64);
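                // Selecting `rm` means the condition failed, implying
                // the inverse inequality with the operands swapped:
                // strict for `Hs`, loose for `Hi`.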
                let rhs_kind = match cond {
                    Cond::Hs => InequalityKind::Strict,
                    Cond::Hi => InequalityKind::Loose,
                    _ => unreachable!(),
                };
                let rm = ctx.apply_inequality(&rm, &cmp_rhs, &cmp_lhs, rhs_kind);
                let union = ctx.union(&rn, &rm);
                clamp_range(ctx, 64, 64, union)
            })
        }

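        // Any other instruction that claims to define facts is not
        // understood by this checker.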
        _ if vcode.inst_defines_facts(inst_idx) => Err(PccError::UnsupportedFact),

        _ => Ok(()),
    }
}

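/// Check a load: verify the address computation and, if the result
/// register carries a fact, that the fact provable about the loaded
/// value subsumes it.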
fn check_load(
    ctx: &FactContext,
    rd: Option<Reg>,
    flags: MemFlags,
    addr: &AMode,
    vcode: &VCode<Inst>,
    ty: Type,
) -> PccResult<()> {
    let result_fact = rd.and_then(|rd| vcode.vreg_fact(rd.into()));
    let bits = u16::try_from(ty.bits()).unwrap();
    check_addr(
        ctx,
        flags,
        addr,
        vcode,
        ty,
        LoadOrStore::Load {
            result_fact,
            from_bits: bits,
            to_bits: bits,
        },
    )
}

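/// Check a store: verify the address computation and that the stored
/// value's fact (if any) is allowed by the memory type at that
/// location.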
fn check_store(
    ctx: &FactContext,
    rd: Option<Reg>,
    flags: MemFlags,
    addr: &AMode,
    vcode: &VCode<Inst>,
    ty: Type,
) -> PccResult<()> {
    let stored_fact = rd.and_then(|rd| vcode.vreg_fact(rd.into()));
    check_addr(
        ctx,
        flags,
        addr,
        vcode,
        ty,
        LoadOrStore::Store { stored_fact },
    )
}

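/// Check one memory access: `checked` accesses must have provably
/// in-bounds addresses, so we synthesize an address fact for each
/// addressing mode from the facts on its operands. As a sketch of the
/// intended reasoning: for a checked access through
/// `RegScaled { rn, rm }` where `rn` points into a memory type and
/// `rm` lies in a known range, the access is accepted only if
/// `rn + rm * size` provably stays within the memory type's bounds.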
fn check_addr<'a>(
    ctx: &FactContext,
    flags: MemFlags,
    addr: &AMode,
    vcode: &VCode<Inst>,
    ty: Type,
    op: LoadOrStore<'a>,
) -> PccResult<()> {
    if !flags.checked() {
        return Ok(());
    }

    trace!("check_addr: {:?}", addr);

    let check = |addr: &Fact, ty: Type| -> PccResult<()> {
        match op {
            LoadOrStore::Load {
                result_fact,
                from_bits,
                to_bits,
            } => {
                let loaded_fact =
                    clamp_range(ctx, to_bits, from_bits, ctx.load(addr, ty)?.cloned())?;
                trace!(
                    "checking a load: loaded_fact = {loaded_fact:?} result_fact = {result_fact:?}"
                );
                if ctx.subsumes_fact_optionals(loaded_fact.as_ref(), result_fact) {
                    Ok(())
                } else {
                    Err(PccError::UnsupportedFact)
                }
            }
            LoadOrStore::Store { stored_fact } => ctx.store(addr, ty, stored_fact),
        }
    };

    match addr {
        &AMode::RegReg { rn, rm } => {
            let rn = get_fact_or_default(vcode, rn, 64);
            let rm = get_fact_or_default(vcode, rm, 64);
            let sum = fail_if_missing(ctx.add(&rn, &rm, 64))?;
            trace!("rn = {rn:?} rm = {rm:?} sum = {sum:?}");
            check(&sum, ty)
        }
        &AMode::RegScaled { rn, rm } => {
            let rn = get_fact_or_default(vcode, rn, 64);
            let rm = get_fact_or_default(vcode, rm, 64);
            let rm_scaled = fail_if_missing(ctx.scale(&rm, 64, ty.bytes()))?;
            let sum = fail_if_missing(ctx.add(&rn, &rm_scaled, 64))?;
            check(&sum, ty)
        }
        &AMode::RegScaledExtended { rn, rm, extendop } => {
            let rn = get_fact_or_default(vcode, rn, 64);
            let rm = get_fact_or_default(vcode, rm, 64);
            let rm_extended = fail_if_missing(extend_fact(ctx, &rm, extendop))?;
            let rm_scaled = fail_if_missing(ctx.scale(&rm_extended, 64, ty.bytes()))?;
            let sum = fail_if_missing(ctx.add(&rn, &rm_scaled, 64))?;
            check(&sum, ty)
        }
        &AMode::RegExtended { rn, rm, extendop } => {
            let rn = get_fact_or_default(vcode, rn, 64);
            let rm = get_fact_or_default(vcode, rm, 64);
            let rm_extended = fail_if_missing(extend_fact(ctx, &rm, extendop))?;
            let sum = fail_if_missing(ctx.add(&rn, &rm_extended, 64))?;
            check(&sum, ty)
        }
        &AMode::Unscaled { rn, simm9 } => {
            let rn = get_fact_or_default(vcode, rn, 64);
            let sum = fail_if_missing(ctx.offset(&rn, 64, simm9.value.into()))?;
            check(&sum, ty)
        }
        &AMode::UnsignedOffset { rn, uimm12 } => {
            let rn = get_fact_or_default(vcode, rn, 64);
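            // N.B.: the `UImm12Scaled` value is the already-scaled
            // byte offset (the architecture scales the encoded
            // immediate by the access size, so e.g. a field of 4095
            // on an I64 access means a byte offset of 4095 * 8 =
            // 32760).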
            let offset: u64 = uimm12.value().into();
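            // This `unwrap()` cannot fail: the scaled offset is a
            // small positive value, far below `i64::MAX`.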
            let sum = fail_if_missing(ctx.offset(&rn, 64, i64::try_from(offset).unwrap()))?;
            check(&sum, ty)
        }
        &AMode::Label { .. } | &AMode::Const { .. } => {
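            // Labels and constant-pool references point into the
            // generated code image, which is always in-bounds.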
            Ok(())
        }
        &AMode::RegOffset { rn, off, .. } => {
            let rn = get_fact_or_default(vcode, rn, 64);
            let sum = fail_if_missing(ctx.offset(&rn, 64, off))?;
            check(&sum, ty)
        }
        &AMode::SPOffset { .. }
        | &AMode::FPOffset { .. }
        | &AMode::IncomingArg { .. }
        | &AMode::SlotOffset { .. }
        | &AMode::SPPostIndexed { .. }
        | &AMode::SPPreIndexed { .. } => {
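            // Stack- and frame-relative modes are emitted only by
            // trusted ABI code, never for lowered CLIF memory
            // accesses, so they are not checked.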
            Ok(())
        }
    }
}

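/// Check a paired load. Unimplemented: conservatively reject, rather
/// than accept an access we cannot verify.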
fn check_load_pair(
    _ctx: &FactContext,
    _flags: MemFlags,
    _addr: &PairAMode,
    _vcode: &VCode<Inst>,
    _size: u8,
) -> PccResult<()> {
    Err(PccError::UnimplementedInst)
}

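/// Check a paired store. Unimplemented: conservatively reject, rather
/// than accept an access we cannot verify.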
fn check_store_pair(
    _ctx: &FactContext,
    _flags: MemFlags,
    _addr: &PairAMode,
    _vcode: &VCode<Inst>,
    _size: u8,
) -> PccResult<()> {
    Err(PccError::UnimplementedInst)
}

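/// Check a load whose address is a bare register (load-acquire,
/// vector replicate loads): the register's fact alone must prove the
/// access in-bounds.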
fn check_load_addr(
    ctx: &FactContext,
    flags: MemFlags,
    reg: Reg,
    vcode: &VCode<Inst>,
    ty: Type,
) -> PccResult<()> {
    if !flags.checked() {
        return Ok(());
    }
    let fact = get_fact_or_default(vcode, reg, 64);
    let _output_fact = ctx.load(&fact, ty)?;
    Ok(())
}

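/// Check a store whose address is a bare register (store-release):
/// the register's fact alone must prove the access in-bounds.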
fn check_store_addr(
    ctx: &FactContext,
    flags: MemFlags,
    reg: Reg,
    vcode: &VCode<Inst>,
    ty: Type,
) -> PccResult<()> {
    if !flags.checked() {
        return Ok(());
    }
    let fact = get_fact_or_default(vcode, reg, 64);
    let _output_fact = ctx.store(&fact, ty, None)?;
    Ok(())
}