
Commit 621519a

Make points-to analysis handle all intrinsics explicitly (#3452)
Initially, points-to analysis tried to determine the body of an intrinsic (if one was available) to avoid enumerating them all. However, this logic was faulty: the analysis attempted to query the body of intrinsics that do not have one and ICEd. This change makes the analysis handle every intrinsic explicitly instead. I also added a couple of missing intrinsics, which had the side benefit of removing some duplicate assertion failures.

Resolves #3447

By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 and MIT licenses.
1 parent a5d4406 commit 621519a
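
For context, here is a minimal sketch of the kind of harness that reaches one of the affected intrinsics. It is illustrative only: the harness name and the flag shown are assumptions, not taken from this PR or from issue #3447. On recent toolchains, `std::mem::swap` bottoms out in the `typed_swap` intrinsic, which has no MIR body; querying a body for it is what made the old analysis ICE, and it is now covered by the new `Intrinsic::TypedSwap` arm.

    // Hypothetical harness (names and setup are made up for illustration).
    // Run under Kani with memory-initialization checks enabled,
    // e.g. `kani swap.rs -Z uninit-checks` (flag assumed, check the Kani docs).
    #[kani::proof]
    fn check_swap_reaches_typed_swap() {
        let a: u8 = kani::any();
        let b: u8 = kani::any();
        let mut x = a;
        let mut y = b;
        // Swapping through mutable references is what lowers to `typed_swap`.
        std::mem::swap(&mut x, &mut y);
        assert!(x == b && y == a);
    }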

2 files changed: +104, -121 lines


kani-compiler/src/kani_middle/points_to/points_to_analysis.rs

Lines changed: 104 additions & 113 deletions
@@ -203,119 +203,108 @@ impl<'a, 'tcx> Analysis<'tcx> for PointsToAnalysis<'a, 'tcx> {
             };
             match instance.def {
                 // Intrinsics could introduce aliasing edges we care about, so need to handle them.
-                InstanceKind::Intrinsic(def_id) => {
-                    // Check if the intrinsic has a body we can analyze.
-                    if self.tcx.is_mir_available(def_id) {
-                        self.apply_regular_call_effect(state, instance, args, destination);
-                    } else {
-                        // Check all of the other intrinsics.
-                        match Intrinsic::from_instance(&rustc_internal::stable(instance)) {
-                            intrinsic if is_identity_aliasing_intrinsic(intrinsic.clone()) => {
-                                // Treat the intrinsic as an aggregate, taking a union of all of the
-                                // arguments' aliases.
-                                let destination_set =
-                                    state.resolve_place(*destination, self.instance);
-                                let operands_set = args
-                                    .into_iter()
-                                    .flat_map(|operand| {
-                                        self.successors_for_operand(state, operand.node.clone())
-                                    })
-                                    .collect();
-                                state.extend(&destination_set, &operands_set);
-                            }
-                            // All `atomic_cxchg` intrinsics take `dst, old, src` as arguments.
-                            // This is equivalent to `destination = *dst; *dst = src`.
-                            Intrinsic::AtomicCxchg(_) | Intrinsic::AtomicCxchgWeak(_) => {
-                                let src_set =
-                                    self.successors_for_operand(state, args[2].node.clone());
-                                let dst_set =
-                                    self.successors_for_deref(state, args[0].node.clone());
-                                let destination_set =
-                                    state.resolve_place(*destination, self.instance);
-                                state.extend(&destination_set, &state.successors(&dst_set));
-                                state.extend(&dst_set, &src_set);
-                            }
-                            // All `atomic_load` intrinsics take `src` as an argument.
-                            // This is equivalent to `destination = *src`.
-                            Intrinsic::AtomicLoad(_) => {
-                                let src_set =
-                                    self.successors_for_deref(state, args[0].node.clone());
-                                let destination_set =
-                                    state.resolve_place(*destination, self.instance);
-                                state.extend(&destination_set, &state.successors(&src_set));
-                            }
-                            // All `atomic_store` intrinsics take `dst, val` as arguments.
-                            // This is equivalent to `*dst = val`.
-                            Intrinsic::AtomicStore(_) => {
-                                let dst_set =
-                                    self.successors_for_deref(state, args[0].node.clone());
-                                let val_set =
-                                    self.successors_for_operand(state, args[1].node.clone());
-                                state.extend(&dst_set, &val_set);
-                            }
-                            // All other `atomic` intrinsics take `dst, src` as arguments.
-                            // This is equivalent to `destination = *dst; *dst = src`.
-                            Intrinsic::AtomicAnd(_)
-                            | Intrinsic::AtomicMax(_)
-                            | Intrinsic::AtomicMin(_)
-                            | Intrinsic::AtomicNand(_)
-                            | Intrinsic::AtomicOr(_)
-                            | Intrinsic::AtomicUmax(_)
-                            | Intrinsic::AtomicUmin(_)
-                            | Intrinsic::AtomicXadd(_)
-                            | Intrinsic::AtomicXchg(_)
-                            | Intrinsic::AtomicXor(_)
-                            | Intrinsic::AtomicXsub(_) => {
-                                let src_set =
-                                    self.successors_for_operand(state, args[1].node.clone());
-                                let dst_set =
-                                    self.successors_for_deref(state, args[0].node.clone());
-                                let destination_set =
-                                    state.resolve_place(*destination, self.instance);
-                                state.extend(&destination_set, &state.successors(&dst_set));
-                                state.extend(&dst_set, &src_set);
-                            }
-                            // Similar to `copy_nonoverlapping`, argument order is `src`, `dst`, `count`.
-                            Intrinsic::Copy => {
-                                self.apply_copy_effect(
-                                    state,
-                                    args[0].node.clone(),
-                                    args[1].node.clone(),
-                                );
-                            }
-                            // Similar to `copy_nonoverlapping`, argument order is `dst`, `src`, `count`.
-                            Intrinsic::VolatileCopyMemory
-                            | Intrinsic::VolatileCopyNonOverlappingMemory => {
-                                self.apply_copy_effect(
-                                    state,
-                                    args[1].node.clone(),
-                                    args[0].node.clone(),
-                                );
-                            }
-                            // Semantically equivalent to dest = *a
-                            Intrinsic::VolatileLoad | Intrinsic::UnalignedVolatileLoad => {
-                                // Destination of the return value.
-                                let lvalue_set = state.resolve_place(*destination, self.instance);
-                                let rvalue_set =
-                                    self.successors_for_deref(state, args[0].node.clone());
-                                state.extend(&lvalue_set, &state.successors(&rvalue_set));
-                            }
-                            // Semantically equivalent *a = b.
-                            Intrinsic::VolatileStore => {
-                                let lvalue_set =
-                                    self.successors_for_deref(state, args[0].node.clone());
-                                let rvalue_set =
-                                    self.successors_for_operand(state, args[1].node.clone());
-                                state.extend(&lvalue_set, &rvalue_set);
-                            }
-                            Intrinsic::Unimplemented { .. } => {
-                                // This will be taken care of at the codegen level.
-                            }
-                            intrinsic => {
-                                unimplemented!(
-                                    "Kani does not support reasoning about aliasing in presence of intrinsic `{intrinsic:?}`. For more information about the state of uninitialized memory checks implementation, see: https://github.com/model-checking/kani/issues/3300."
-                                );
-                            }
+                InstanceKind::Intrinsic(_) => {
+                    match Intrinsic::from_instance(&rustc_internal::stable(instance)) {
+                        intrinsic if is_identity_aliasing_intrinsic(intrinsic.clone()) => {
+                            // Treat the intrinsic as an aggregate, taking a union of all of the
+                            // arguments' aliases.
+                            let destination_set = state.resolve_place(*destination, self.instance);
+                            let operands_set = args
+                                .into_iter()
+                                .flat_map(|operand| {
+                                    self.successors_for_operand(state, operand.node.clone())
+                                })
+                                .collect();
+                            state.extend(&destination_set, &operands_set);
+                        }
+                        // All `atomic_cxchg` intrinsics take `dst, old, src` as arguments.
+                        // This is equivalent to `destination = *dst; *dst = src`.
+                        Intrinsic::AtomicCxchg(_) | Intrinsic::AtomicCxchgWeak(_) => {
+                            let src_set = self.successors_for_operand(state, args[2].node.clone());
+                            let dst_set = self.successors_for_deref(state, args[0].node.clone());
+                            let destination_set = state.resolve_place(*destination, self.instance);
+                            state.extend(&destination_set, &state.successors(&dst_set));
+                            state.extend(&dst_set, &src_set);
+                        }
+                        // All `atomic_load` intrinsics take `src` as an argument.
+                        // This is equivalent to `destination = *src`.
+                        Intrinsic::AtomicLoad(_) => {
+                            let src_set = self.successors_for_deref(state, args[0].node.clone());
+                            let destination_set = state.resolve_place(*destination, self.instance);
+                            state.extend(&destination_set, &state.successors(&src_set));
+                        }
+                        // All `atomic_store` intrinsics take `dst, val` as arguments.
+                        // This is equivalent to `*dst = val`.
+                        Intrinsic::AtomicStore(_) => {
+                            let dst_set = self.successors_for_deref(state, args[0].node.clone());
+                            let val_set = self.successors_for_operand(state, args[1].node.clone());
+                            state.extend(&dst_set, &val_set);
+                        }
+                        // All other `atomic` intrinsics take `dst, src` as arguments.
+                        // This is equivalent to `destination = *dst; *dst = src`.
+                        Intrinsic::AtomicAnd(_)
+                        | Intrinsic::AtomicMax(_)
+                        | Intrinsic::AtomicMin(_)
+                        | Intrinsic::AtomicNand(_)
+                        | Intrinsic::AtomicOr(_)
+                        | Intrinsic::AtomicUmax(_)
+                        | Intrinsic::AtomicUmin(_)
+                        | Intrinsic::AtomicXadd(_)
+                        | Intrinsic::AtomicXchg(_)
+                        | Intrinsic::AtomicXor(_)
+                        | Intrinsic::AtomicXsub(_) => {
+                            let src_set = self.successors_for_operand(state, args[1].node.clone());
+                            let dst_set = self.successors_for_deref(state, args[0].node.clone());
+                            let destination_set = state.resolve_place(*destination, self.instance);
+                            state.extend(&destination_set, &state.successors(&dst_set));
+                            state.extend(&dst_set, &src_set);
+                        }
+                        // Similar to `copy_nonoverlapping`, argument order is `src`, `dst`, `count`.
+                        Intrinsic::Copy => {
+                            self.apply_copy_effect(
+                                state,
+                                args[0].node.clone(),
+                                args[1].node.clone(),
+                            );
+                        }
+                        Intrinsic::TypedSwap => {
+                            // Extend from x_set to y_set and vice-versa so that both x and y alias
+                            // to a union of places each of them alias to.
+                            let x_set = self.successors_for_deref(state, args[0].node.clone());
+                            let y_set = self.successors_for_deref(state, args[1].node.clone());
+                            state.extend(&x_set, &state.successors(&y_set));
+                            state.extend(&y_set, &state.successors(&x_set));
+                        }
+                        // Similar to `copy_nonoverlapping`, argument order is `dst`, `src`, `count`.
+                        Intrinsic::VolatileCopyMemory
+                        | Intrinsic::VolatileCopyNonOverlappingMemory => {
+                            self.apply_copy_effect(
+                                state,
+                                args[1].node.clone(),
+                                args[0].node.clone(),
+                            );
+                        }
+                        // Semantically equivalent to dest = *a
+                        Intrinsic::VolatileLoad | Intrinsic::UnalignedVolatileLoad => {
+                            // Destination of the return value.
+                            let lvalue_set = state.resolve_place(*destination, self.instance);
+                            let rvalue_set = self.successors_for_deref(state, args[0].node.clone());
+                            state.extend(&lvalue_set, &state.successors(&rvalue_set));
+                        }
+                        // Semantically equivalent *a = b.
+                        Intrinsic::VolatileStore => {
+                            let lvalue_set = self.successors_for_deref(state, args[0].node.clone());
+                            let rvalue_set =
+                                self.successors_for_operand(state, args[1].node.clone());
+                            state.extend(&lvalue_set, &rvalue_set);
+                        }
+                        Intrinsic::Unimplemented { .. } => {
+                            // This will be taken care of at the codegen level.
+                        }
+                        intrinsic => {
+                            unimplemented!(
+                                "Kani does not support reasoning about aliasing in presence of intrinsic `{intrinsic:?}`. For more information about the state of uninitialized memory checks implementation, see: https://github.com/model-checking/kani/issues/3300."
+                            );
+                        }
                     }
                 }
             }
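
As a side note on the semantics the atomic arms above encode: the comment `destination = *dst; *dst = src` describes a plain pointer exchange, which can be observed directly with `AtomicPtr::swap` (it ultimately calls an `atomic_xchg`-family intrinsic). The standalone sketch below only illustrates that pointer flow and is not part of this change.

    use std::sync::atomic::{AtomicPtr, Ordering};

    fn main() {
        let mut first = 1u32;
        let mut second = 2u32;
        let dst = AtomicPtr::new(&mut first as *mut u32);
        // Exchange: the return value takes over dst's old target (`first`),
        // while dst now points at `second`, i.e. `destination = *dst; *dst = src`.
        let old = dst.swap(&mut second as *mut u32, Ordering::SeqCst);
        assert_eq!(unsafe { *old }, 1);
        assert_eq!(unsafe { *dst.load(Ordering::SeqCst) }, 2);
    }
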
@@ -681,6 +670,7 @@ fn is_identity_aliasing_intrinsic(intrinsic: Intrinsic) -> bool {
             | Intrinsic::PtrOffsetFrom
             | Intrinsic::PtrOffsetFromUnsigned
             | Intrinsic::RawEq
+            | Intrinsic::RetagBoxToRaw
             | Intrinsic::RintF32
             | Intrinsic::RintF64
             | Intrinsic::RotateLeft
@@ -695,6 +685,7 @@ fn is_identity_aliasing_intrinsic(intrinsic: Intrinsic) -> bool {
             | Intrinsic::SqrtF32
             | Intrinsic::SqrtF64
             | Intrinsic::SubWithOverflow
+            | Intrinsic::Transmute
             | Intrinsic::TruncF32
             | Intrinsic::TruncF64
             | Intrinsic::TypeId
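
The two one-line additions above grow `is_identity_aliasing_intrinsic`, the predicate the first match arm uses to treat an intrinsic's result as aliasing only what its operands alias. For `transmute` that classification is natural: the returned value can point at most to whatever the operand already pointed to. A small standalone illustration follows (not Kani code; the function name is made up).

    // Transmuting a reference into a raw pointer creates no new aliasing:
    // `p` can only point where `r` already pointed, namely at `x`.
    fn transmute_keeps_aliases() {
        let x: u32 = 42;
        let r: &u32 = &x;
        let p: *const u32 = unsafe { std::mem::transmute::<&u32, *const u32>(r) };
        assert_eq!(unsafe { *p }, 42);
    }

    fn main() {
        transmute_keeps_aliases();
    }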

tests/expected/uninit/intrinsics/expected

Lines changed: 0 additions & 8 deletions
@@ -2,18 +2,10 @@ std::ptr::read::<std::mem::MaybeUninit<u8>>.assertion.1\
  - Status: FAILURE\
  - Description: "Kani currently doesn't support checking memory initialization for pointers to `std::mem::MaybeUninit<u8>."
 
-std::ptr::read::<std::mem::MaybeUninit<u8>>.assertion.2\
- - Status: FAILURE\
- - Description: "Kani currently doesn't support checking memory initialization for pointers to `std::mem::MaybeUninit<u8>."
-
 std::ptr::write::<std::mem::MaybeUninit<u8>>.assertion.1\
  - Status: FAILURE\
  - Description: "Kani currently doesn't support checking memory initialization for pointers to `std::mem::MaybeUninit<u8>."
 
-std::ptr::write::<std::mem::MaybeUninit<u8>>.assertion.2\
- - Status: FAILURE\
- - Description: "Kani currently doesn't support checking memory initialization for pointers to `std::mem::MaybeUninit<u8>."
-
 check_typed_swap.assertion.1\
  - Status: FAILURE\
  - Description: "Undefined Behavior: Reading from an uninitialized pointer of type `*mut u8`"
