Skip to content

Commit

Permalink
Merge branch 'main' into harness_output_individual_files
Browse files Browse the repository at this point in the history
  • Loading branch information
jaisnan authored Aug 20, 2024
2 parents 698d718 + 621519a commit 56159a1
Show file tree
Hide file tree
Showing 23 changed files with 559 additions and 161 deletions.
217 changes: 104 additions & 113 deletions kani-compiler/src/kani_middle/points_to/points_to_analysis.rs
Original file line number Diff line number Diff line change
Expand Up @@ -203,119 +203,108 @@ impl<'a, 'tcx> Analysis<'tcx> for PointsToAnalysis<'a, 'tcx> {
};
match instance.def {
// Intrinsics could introduce aliasing edges we care about, so need to handle them.
InstanceKind::Intrinsic(def_id) => {
// Check if the intrinsic has a body we can analyze.
if self.tcx.is_mir_available(def_id) {
self.apply_regular_call_effect(state, instance, args, destination);
} else {
// Check all of the other intrinsics.
match Intrinsic::from_instance(&rustc_internal::stable(instance)) {
intrinsic if is_identity_aliasing_intrinsic(intrinsic.clone()) => {
// Treat the intrinsic as an aggregate, taking a union of all of the
// arguments' aliases.
let destination_set =
state.resolve_place(*destination, self.instance);
let operands_set = args
.into_iter()
.flat_map(|operand| {
self.successors_for_operand(state, operand.node.clone())
})
.collect();
state.extend(&destination_set, &operands_set);
}
// All `atomic_cxchg` intrinsics take `dst, old, src` as arguments.
// This is equivalent to `destination = *dst; *dst = src`.
Intrinsic::AtomicCxchg(_) | Intrinsic::AtomicCxchgWeak(_) => {
let src_set =
self.successors_for_operand(state, args[2].node.clone());
let dst_set =
self.successors_for_deref(state, args[0].node.clone());
let destination_set =
state.resolve_place(*destination, self.instance);
state.extend(&destination_set, &state.successors(&dst_set));
state.extend(&dst_set, &src_set);
}
// All `atomic_load` intrinsics take `src` as an argument.
// This is equivalent to `destination = *src`.
Intrinsic::AtomicLoad(_) => {
let src_set =
self.successors_for_deref(state, args[0].node.clone());
let destination_set =
state.resolve_place(*destination, self.instance);
state.extend(&destination_set, &state.successors(&src_set));
}
// All `atomic_store` intrinsics take `dst, val` as arguments.
// This is equivalent to `*dst = val`.
Intrinsic::AtomicStore(_) => {
let dst_set =
self.successors_for_deref(state, args[0].node.clone());
let val_set =
self.successors_for_operand(state, args[1].node.clone());
state.extend(&dst_set, &val_set);
}
// All other `atomic` intrinsics take `dst, src` as arguments.
// This is equivalent to `destination = *dst; *dst = src`.
Intrinsic::AtomicAnd(_)
| Intrinsic::AtomicMax(_)
| Intrinsic::AtomicMin(_)
| Intrinsic::AtomicNand(_)
| Intrinsic::AtomicOr(_)
| Intrinsic::AtomicUmax(_)
| Intrinsic::AtomicUmin(_)
| Intrinsic::AtomicXadd(_)
| Intrinsic::AtomicXchg(_)
| Intrinsic::AtomicXor(_)
| Intrinsic::AtomicXsub(_) => {
let src_set =
self.successors_for_operand(state, args[1].node.clone());
let dst_set =
self.successors_for_deref(state, args[0].node.clone());
let destination_set =
state.resolve_place(*destination, self.instance);
state.extend(&destination_set, &state.successors(&dst_set));
state.extend(&dst_set, &src_set);
}
// Similar to `copy_nonoverlapping`, argument order is `src`, `dst`, `count`.
Intrinsic::Copy => {
self.apply_copy_effect(
state,
args[0].node.clone(),
args[1].node.clone(),
);
}
// Similar to `copy_nonoverlapping`, argument order is `dst`, `src`, `count`.
Intrinsic::VolatileCopyMemory
| Intrinsic::VolatileCopyNonOverlappingMemory => {
self.apply_copy_effect(
state,
args[1].node.clone(),
args[0].node.clone(),
);
}
// Semantically equivalent to dest = *a
Intrinsic::VolatileLoad | Intrinsic::UnalignedVolatileLoad => {
// Destination of the return value.
let lvalue_set = state.resolve_place(*destination, self.instance);
let rvalue_set =
self.successors_for_deref(state, args[0].node.clone());
state.extend(&lvalue_set, &state.successors(&rvalue_set));
}
// Semantically equivalent *a = b.
Intrinsic::VolatileStore => {
let lvalue_set =
self.successors_for_deref(state, args[0].node.clone());
let rvalue_set =
self.successors_for_operand(state, args[1].node.clone());
state.extend(&lvalue_set, &rvalue_set);
}
Intrinsic::Unimplemented { .. } => {
// This will be taken care of at the codegen level.
}
intrinsic => {
unimplemented!(
"Kani does not support reasoning about aliasing in presence of intrinsic `{intrinsic:?}`. For more information about the state of uninitialized memory checks implementation, see: https://github.com/model-checking/kani/issues/3300."
);
}
InstanceKind::Intrinsic(_) => {
match Intrinsic::from_instance(&rustc_internal::stable(instance)) {
intrinsic if is_identity_aliasing_intrinsic(intrinsic.clone()) => {
// Treat the intrinsic as an aggregate, taking a union of all of the
// arguments' aliases.
let destination_set = state.resolve_place(*destination, self.instance);
let operands_set = args
.into_iter()
.flat_map(|operand| {
self.successors_for_operand(state, operand.node.clone())
})
.collect();
state.extend(&destination_set, &operands_set);
}
// All `atomic_cxchg` intrinsics take `dst, old, src` as arguments.
// This is equivalent to `destination = *dst; *dst = src`.
Intrinsic::AtomicCxchg(_) | Intrinsic::AtomicCxchgWeak(_) => {
let src_set = self.successors_for_operand(state, args[2].node.clone());
let dst_set = self.successors_for_deref(state, args[0].node.clone());
let destination_set = state.resolve_place(*destination, self.instance);
state.extend(&destination_set, &state.successors(&dst_set));
state.extend(&dst_set, &src_set);
}
// All `atomic_load` intrinsics take `src` as an argument.
// This is equivalent to `destination = *src`.
Intrinsic::AtomicLoad(_) => {
let src_set = self.successors_for_deref(state, args[0].node.clone());
let destination_set = state.resolve_place(*destination, self.instance);
state.extend(&destination_set, &state.successors(&src_set));
}
// All `atomic_store` intrinsics take `dst, val` as arguments.
// This is equivalent to `*dst = val`.
Intrinsic::AtomicStore(_) => {
let dst_set = self.successors_for_deref(state, args[0].node.clone());
let val_set = self.successors_for_operand(state, args[1].node.clone());
state.extend(&dst_set, &val_set);
}
// All other `atomic` intrinsics take `dst, src` as arguments.
// This is equivalent to `destination = *dst; *dst = src`.
Intrinsic::AtomicAnd(_)
| Intrinsic::AtomicMax(_)
| Intrinsic::AtomicMin(_)
| Intrinsic::AtomicNand(_)
| Intrinsic::AtomicOr(_)
| Intrinsic::AtomicUmax(_)
| Intrinsic::AtomicUmin(_)
| Intrinsic::AtomicXadd(_)
| Intrinsic::AtomicXchg(_)
| Intrinsic::AtomicXor(_)
| Intrinsic::AtomicXsub(_) => {
let src_set = self.successors_for_operand(state, args[1].node.clone());
let dst_set = self.successors_for_deref(state, args[0].node.clone());
let destination_set = state.resolve_place(*destination, self.instance);
state.extend(&destination_set, &state.successors(&dst_set));
state.extend(&dst_set, &src_set);
}
// Similar to `copy_nonoverlapping`, argument order is `src`, `dst`, `count`.
Intrinsic::Copy => {
self.apply_copy_effect(
state,
args[0].node.clone(),
args[1].node.clone(),
);
}
Intrinsic::TypedSwap => {
// Extend from x_set to y_set and vice-versa so that both x and y alias
// to a union of places each of them alias to.
let x_set = self.successors_for_deref(state, args[0].node.clone());
let y_set = self.successors_for_deref(state, args[1].node.clone());
state.extend(&x_set, &state.successors(&y_set));
state.extend(&y_set, &state.successors(&x_set));
}
// Similar to `copy_nonoverlapping`, argument order is `dst`, `src`, `count`.
Intrinsic::VolatileCopyMemory
| Intrinsic::VolatileCopyNonOverlappingMemory => {
self.apply_copy_effect(
state,
args[1].node.clone(),
args[0].node.clone(),
);
}
// Semantically equivalent to dest = *a
Intrinsic::VolatileLoad | Intrinsic::UnalignedVolatileLoad => {
// Destination of the return value.
let lvalue_set = state.resolve_place(*destination, self.instance);
let rvalue_set = self.successors_for_deref(state, args[0].node.clone());
state.extend(&lvalue_set, &state.successors(&rvalue_set));
}
// Semantically equivalent *a = b.
Intrinsic::VolatileStore => {
let lvalue_set = self.successors_for_deref(state, args[0].node.clone());
let rvalue_set =
self.successors_for_operand(state, args[1].node.clone());
state.extend(&lvalue_set, &rvalue_set);
}
Intrinsic::Unimplemented { .. } => {
// This will be taken care of at the codegen level.
}
intrinsic => {
unimplemented!(
"Kani does not support reasoning about aliasing in presence of intrinsic `{intrinsic:?}`. For more information about the state of uninitialized memory checks implementation, see: https://github.com/model-checking/kani/issues/3300."
);
}
}
}
Expand Down Expand Up @@ -681,6 +670,7 @@ fn is_identity_aliasing_intrinsic(intrinsic: Intrinsic) -> bool {
| Intrinsic::PtrOffsetFrom
| Intrinsic::PtrOffsetFromUnsigned
| Intrinsic::RawEq
| Intrinsic::RetagBoxToRaw
| Intrinsic::RintF32
| Intrinsic::RintF64
| Intrinsic::RotateLeft
Expand All @@ -695,6 +685,7 @@ fn is_identity_aliasing_intrinsic(intrinsic: Intrinsic) -> bool {
| Intrinsic::SqrtF32
| Intrinsic::SqrtF64
| Intrinsic::SubWithOverflow
| Intrinsic::Transmute
| Intrinsic::TruncF32
| Intrinsic::TruncF64
| Intrinsic::TypeId
Expand Down
35 changes: 35 additions & 0 deletions kani-compiler/src/kani_middle/transform/check_uninit/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,9 @@ impl<'a> UninitInstrumenter<'a> {
| MemoryInitOp::SetRef { .. } => {
self.build_set(tcx, body, source, operation, pointee_ty_info, skip_first)
}
MemoryInitOp::Copy { .. } => {
self.build_copy(tcx, body, source, operation, pointee_ty_info, skip_first)
}
MemoryInitOp::Unsupported { .. } | MemoryInitOp::TriviallyUnsafe { .. } => {
unreachable!()
}
Expand Down Expand Up @@ -397,6 +400,38 @@ impl<'a> UninitInstrumenter<'a> {
};
}

/// Copy memory initialization state from one pointer to the other.
/// Copy memory initialization state from one pointer to the other.
///
/// Rewrites a `MemoryInitOp::Copy` operation into a call to the
/// `KaniCopyInitState` library helper, which transfers the shadow-memory
/// initialization bits for `count` elements from the `from` pointer to the
/// `to` pointer. The call is inserted into `body` at the position dictated
/// by `operation` relative to `source`.
fn build_copy(
    &mut self,
    tcx: TyCtxt,
    body: &mut MutableBody,
    source: &mut SourceInstruction,
    operation: MemoryInitOp,
    pointee_info: PointeeInfo,
    skip_first: &mut HashSet<usize>,
) {
    // The helper returns unit; create a fresh immutable local to receive it.
    let ret_place = Place {
        local: body.new_local(Ty::new_tuple(&[]), source.span(body.blocks()), Mutability::Not),
        projection: vec![],
    };
    // Copying initialization state is only generated for sized pointees, so
    // any other layout here indicates an instrumentation bug.
    let PointeeLayout::Sized { layout } = pointee_info.layout() else { unreachable!() };
    let copy_init_state_instance = resolve_mem_init_fn(
        get_mem_init_fn_def(tcx, "KaniCopyInitState", &mut self.mem_init_fn_cache),
        layout.len(),
        *pointee_info.ty(),
    );
    // Record instructions that must not be re-instrumented, then capture the
    // insertion position before destructuring consumes `operation`.
    collect_skipped(&operation, body, skip_first);
    let position = operation.position();
    let MemoryInitOp::Copy { from, to, count } = operation else { unreachable!() };
    // `ret_place` is used exactly once, so move it instead of cloning.
    body.insert_call(&copy_init_state_instance, source, position, vec![from, to, count], ret_place);
}

fn inject_assert_false(
&self,
tcx: TyCtxt,
Expand Down
Loading

0 comments on commit 56159a1

Please sign in to comment.