Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Value visitors for miri #55549

Merged
merged 21 commits into from
Nov 7, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
5b5e076
generalize the traversal part of validation to a ValueVisitor
RalfJung Oct 31, 2018
7d7bd9b
reduce the amount of traversal/projection code that the visitor has t…
RalfJung Oct 31, 2018
c8e471f
also allow visiting places and mplaces
RalfJung Oct 31, 2018
fdc3a3e
fix for pre-NLL rustc
RalfJung Oct 31, 2018
33770ab
add visit() hook to the trait
RalfJung Nov 1, 2018
b0f1b1a
provide some default implementations
RalfJung Nov 1, 2018
6d24b37
let the Value handle enum projections, so the visitor does not have t…
RalfJung Nov 1, 2018
fa01e04
fix validation error on non-integer enum discriminants
RalfJung Nov 1, 2018
77c2834
Also test for undef in enum discriminant
RalfJung Nov 1, 2018
aea61e3
converting a VisitorValue to a MemPlace must not mutate anything
RalfJung Nov 1, 2018
98295e9
use more traditional walk_array/visit_array instead of the handle_arr…
RalfJung Nov 2, 2018
b096f08
finally this actually looks like a visitor
RalfJung Nov 2, 2018
c267721
all values can convert to operators
RalfJung Nov 2, 2018
996a425
the visitor can already load the value for visit_primitive
RalfJung Nov 2, 2018
91cad39
visit_aggregate with an iterator; fix some comment typos
RalfJung Nov 2, 2018
0d596f2
FIXME
RalfJung Nov 2, 2018
7565b5a
machine hooks for ptr (de)ref also need layout, and then they do not …
RalfJung Nov 2, 2018
8730410
make ValueVisitor mut-polymorphic
RalfJung Nov 2, 2018
0529dc8
provide ptr_wrapping_offset on Scalars
RalfJung Nov 4, 2018
a0074ca
walk_value: more tracing
RalfJung Nov 5, 2018
7b7c6ce
add method to obtain the ptr offset of a Scalar
RalfJung Nov 5, 2018
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/librustc/mir/interpret/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ use ty::{Ty, layout};
use ty::layout::{Size, Align, LayoutError};
use rustc_target::spec::abi::Abi;

use super::Pointer;
use super::{Pointer, Scalar};

use backtrace::Backtrace;

Expand Down Expand Up @@ -240,7 +240,7 @@ pub enum EvalErrorKind<'tcx, O> {
InvalidMemoryAccess,
InvalidFunctionPointer,
InvalidBool,
InvalidDiscriminant(u128),
InvalidDiscriminant(Scalar),
PointerOutOfBounds {
ptr: Pointer,
access: bool,
Expand Down
79 changes: 43 additions & 36 deletions src/librustc/mir/interpret/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -91,42 +91,43 @@ pub trait PointerArithmetic: layout::HasDataLayout {
}

//// Truncate the given value to the pointer size; also return whether there was an overflow
#[inline]
fn truncate_to_ptr(&self, val: u128) -> (u64, bool) {
let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
((val % max_ptr_plus_1) as u64, val >= max_ptr_plus_1)
}

// Overflow checking only works properly on the range from -u64 to +u64.
fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
// FIXME: is it possible to over/underflow here?
if i < 0 {
// trickery to ensure that i64::min_value() works fine
// this formula only works for true negative values, it panics for zero!
let n = u64::max_value() - (i as u64) + 1;
val.overflowing_sub(n)
} else {
self.overflowing_offset(val, i as u64)
}
#[inline]
fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_offset(val, i);
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}

#[inline]
fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
let (res, over1) = val.overflowing_add(i);
let (res, over2) = self.truncate_to_ptr(res as u128);
let (res, over2) = self.truncate_to_ptr(u128::from(res));
(res, over1 || over2)
}

#[inline]
fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_signed_offset(val, i as i128);
let (res, over) = self.overflowing_signed_offset(val, i128::from(i));
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}

fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_offset(val, i);
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}

fn wrapping_signed_offset(&self, val: u64, i: i64) -> u64 {
self.overflowing_signed_offset(val, i as i128).0
// Overflow checking only works properly on the range from -u64 to +u64.
#[inline]
fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
// FIXME: is it possible to over/underflow here?
if i < 0 {
// trickery to ensure that i64::min_value() works fine
// this formula only works for true negative values, it panics for zero!
let n = u64::max_value() - (i as u64) + 1;
val.overflowing_sub(n)
} else {
self.overflowing_offset(val, i as u64)
}
}
}

Expand Down Expand Up @@ -176,19 +177,27 @@ impl<'tcx, Tag> Pointer<Tag> {
Pointer { alloc_id, offset, tag }
}

pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
Pointer::new_with_tag(
#[inline]
pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().wrapping_signed_offset(self.offset.bytes(), i)),
self.tag,
)
Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
self.tag
))
}

pub fn overflowing_signed_offset(self, i: i128, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
#[inline]
pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}

#[inline(always)]
pub fn wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
self.overflowing_offset(i, cx).0
}

#[inline]
pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Expand All @@ -197,20 +206,18 @@ impl<'tcx, Tag> Pointer<Tag> {
))
}

pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
#[inline]
pub fn overflowing_signed_offset(self, i: i128, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}

pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
self.tag
))
#[inline(always)]
pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
self.overflowing_signed_offset(i128::from(i), cx).0
}

#[inline]
#[inline(always)]
pub fn erase_tag(self) -> Pointer {
Pointer { alloc_id: self.alloc_id, offset: self.offset, tag: () }
}
Expand Down
53 changes: 45 additions & 8 deletions src/librustc/mir/interpret/value.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(unknown_lints)]
use std::fmt;

use ty::layout::{HasDataLayout, Size};
use ty::subst::Substs;
Expand Down Expand Up @@ -99,6 +99,15 @@ pub enum Scalar<Tag=(), Id=AllocId> {
Ptr(Pointer<Tag, Id>),
}

impl<Tag> fmt::Display for Scalar<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Scalar::Ptr(_) => write!(f, "a pointer"),
Scalar::Bits { bits, .. } => write!(f, "{}", bits),
}
}
}

impl<'tcx> Scalar<()> {
#[inline]
pub fn with_default_tag<Tag>(self) -> Scalar<Tag>
Expand Down Expand Up @@ -134,32 +143,47 @@ impl<'tcx, Tag> Scalar<Tag> {
}

#[inline]
pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
let dl = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, dl.pointer_size.bytes());
Ok(Scalar::Bits {
bits: dl.signed_offset(bits as u64, i)? as u128,
bits: dl.offset(bits as u64, i.bytes())? as u128,
size,
})
}
Scalar::Ptr(ptr) => ptr.signed_offset(i, dl).map(Scalar::Ptr),
Scalar::Ptr(ptr) => ptr.offset(i, dl).map(Scalar::Ptr),
}
}

#[inline]
pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
let dl = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, dl.pointer_size.bytes());
Scalar::Bits {
bits: dl.overflowing_offset(bits as u64, i.bytes()).0 as u128,
size,
}
}
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_offset(i, dl)),
}
}

#[inline]
pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
let dl = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, dl.pointer_size().bytes());
Ok(Scalar::Bits {
bits: dl.offset(bits as u64, i.bytes())? as u128,
bits: dl.signed_offset(bits as u64, i)? as u128,
size,
})
}
Scalar::Ptr(ptr) => ptr.offset(i, dl).map(Scalar::Ptr),
Scalar::Ptr(ptr) => ptr.signed_offset(i, dl).map(Scalar::Ptr),
}
}

Expand All @@ -170,14 +194,27 @@ impl<'tcx, Tag> Scalar<Tag> {
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, dl.pointer_size.bytes());
Scalar::Bits {
bits: dl.wrapping_signed_offset(bits as u64, i) as u128,
bits: dl.overflowing_signed_offset(bits as u64, i128::from(i)).0 as u128,
size,
}
}
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, dl)),
}
}

/// Returns this pointer's offset from the allocation base, or from NULL (for
/// integer pointers).
#[inline]
pub fn get_ptr_offset(self, cx: &impl HasDataLayout) -> Size {
match self {
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, cx.pointer_size().bytes());
Size::from_bytes(bits as u64)
}
Scalar::Ptr(ptr) => ptr.offset,
}
}

#[inline]
pub fn is_null_ptr(self, cx: &impl HasDataLayout) -> bool {
match self {
Expand Down
4 changes: 2 additions & 2 deletions src/librustc_mir/const_eval.rs
Original file line number Diff line number Diff line change
Expand Up @@ -539,10 +539,10 @@ fn validate_const<'a, 'tcx>(
let val = (|| {
let op = ecx.const_to_op(constant)?;
let mut ref_tracking = RefTracking::new(op);
while let Some((op, mut path)) = ref_tracking.todo.pop() {
while let Some((op, path)) = ref_tracking.todo.pop() {
ecx.validate_operand(
op,
&mut path,
path,
Some(&mut ref_tracking),
/* const_mode */ true,
)?;
Expand Down
2 changes: 1 addition & 1 deletion src/librustc_mir/interpret/eval_context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -521,7 +521,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tc
// return place is always a local and then this cannot happen.
self.validate_operand(
self.place_to_op(return_place)?,
&mut vec![],
vec![],
None,
/*const_mode*/false,
)?;
Expand Down
22 changes: 9 additions & 13 deletions src/librustc_mir/interpret/machine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@ use std::hash::Hash;

use rustc::hir::{self, def_id::DefId};
use rustc::mir;
use rustc::ty::{self, Ty, layout::{Size, TyLayout}, query::TyCtxtAt};
use rustc::ty::{self, layout::{Size, TyLayout}, query::TyCtxtAt};

use super::{
Allocation, AllocId, EvalResult, Scalar,
EvalContext, PlaceTy, OpTy, Pointer, MemPlace, MemoryKind,
EvalContext, PlaceTy, MPlaceTy, OpTy, Pointer, MemoryKind,
};

/// Whether this kind of memory is allowed to leak
Expand Down Expand Up @@ -217,26 +217,22 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
#[inline]
fn tag_reference(
_ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
place: MemPlace<Self::PointerTag>,
_ty: Ty<'tcx>,
_size: Size,
place: MPlaceTy<'tcx, Self::PointerTag>,
_mutability: Option<hir::Mutability>,
) -> EvalResult<'tcx, MemPlace<Self::PointerTag>> {
Ok(place)
) -> EvalResult<'tcx, Scalar<Self::PointerTag>> {
Ok(place.ptr)
}

/// Executed when evaluating the `*` operator: Following a reference.
/// This has the change to adjust the tag. It should not change anything else!
/// This has the chance to adjust the tag. It should not change anything else!
/// `mutability` can be `None` in case a raw ptr is being dereferenced.
#[inline]
fn tag_dereference(
_ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
place: MemPlace<Self::PointerTag>,
_ty: Ty<'tcx>,
_size: Size,
place: MPlaceTy<'tcx, Self::PointerTag>,
_mutability: Option<hir::Mutability>,
) -> EvalResult<'tcx, MemPlace<Self::PointerTag>> {
Ok(place)
) -> EvalResult<'tcx, Scalar<Self::PointerTag>> {
Ok(place.ptr)
}

/// Execute a validation operation
Expand Down
3 changes: 3 additions & 0 deletions src/librustc_mir/interpret/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ mod terminator;
mod traits;
mod validity;
mod intrinsics;
mod visitor;

pub use rustc::mir::interpret::*; // have all the `interpret` symbols in one place: here

Expand All @@ -38,4 +39,6 @@ pub use self::machine::{Machine, AllocMap, MayLeak};

pub use self::operand::{ScalarMaybeUndef, Immediate, ImmTy, Operand, OpTy};

pub use self::visitor::ValueVisitor;

pub use self::validity::RefTracking;
20 changes: 17 additions & 3 deletions src/librustc_mir/interpret/operand.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
//! All high-level functions to read from memory work on operands as sources.

use std::convert::TryInto;
use std::fmt;

use rustc::{mir, ty};
use rustc::ty::layout::{self, Size, LayoutOf, TyLayout, HasDataLayout, IntegerExt};
Expand All @@ -36,6 +37,15 @@ impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
}
}

impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ScalarMaybeUndef::Undef => write!(f, "uninitialized bytes"),
ScalarMaybeUndef::Scalar(s) => write!(f, "{}", s),
}
}
}

impl<'tcx> ScalarMaybeUndef<()> {
#[inline]
pub fn with_default_tag<Tag>(self) -> ScalarMaybeUndef<Tag>
Expand Down Expand Up @@ -732,8 +742,12 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
Ok(match rval.layout.variants {
layout::Variants::Single { .. } => bug!(),
layout::Variants::Tagged { .. } => {
let bits_discr = match raw_discr.to_bits(discr_val.layout.size) {
Ok(raw_discr) => raw_discr,
Err(_) => return err!(InvalidDiscriminant(raw_discr.erase_tag())),
};
let real_discr = if discr_val.layout.ty.is_signed() {
let i = raw_discr.to_bits(discr_val.layout.size)? as i128;
let i = bits_discr as i128;
// going from layout tag type to typeck discriminant type
// requires first sign extending with the layout discriminant
let shift = 128 - discr_val.layout.size.bits();
Expand All @@ -748,15 +762,15 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
let truncatee = sexted as u128;
(truncatee << shift) >> shift
} else {
raw_discr.to_bits(discr_val.layout.size)?
bits_discr
};
// Make sure we catch invalid discriminants
let index = rval.layout.ty
.ty_adt_def()
.expect("tagged layout for non adt")
.discriminants(self.tcx.tcx)
.position(|var| var.val == real_discr)
.ok_or_else(|| EvalErrorKind::InvalidDiscriminant(real_discr))?;
.ok_or_else(|| EvalErrorKind::InvalidDiscriminant(raw_discr.erase_tag()))?;
(real_discr, index)
},
layout::Variants::NicheFilling {
Expand Down
Loading