Auto merge of rust-lang#53508 - japaric:maybe-uninit, r=RalfJung
Implement `MaybeUninit`

This PR:

- Adds `MaybeUninit` (see rust-lang#53491) to `{core,std}::mem`.
- Makes `mem::{uninitialized,zeroed}` panic when they are used to instantiate an uninhabited type.
- Does *not* deprecate `mem::{uninitialized,zeroed}` just yet. As per rust-lang#53491 (comment), we should not deprecate them until `MaybeUninit` is stabilized.
- Replaces uses of `mem::{uninitialized,zeroed}` in `core` and `alloc` with `MaybeUninit`.

There are still several instances of `mem::{uninitialized,zeroed}` in `std` that *this* PR doesn't address.
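
For context, the migration pattern call sites are expected to follow is sketched below. This is illustrative only, not code from this PR, and it needs the unstable `maybe_uninit` feature exactly as introduced here:

```rust
#![feature(maybe_uninit)]
use std::mem::MaybeUninit;

fn main() {
    // Before: `let mut buf: [u8; 1024] = unsafe { mem::uninitialized() };`
    // After: the storage is wrapped in `MaybeUninit`, and reading it back out
    // requires an explicit `unsafe` claim that it has been initialized.
    let mut buf = MaybeUninit::<[u8; 1024]>::uninitialized();
    unsafe {
        buf.as_mut_ptr().write([0u8; 1024]); // initialize without dropping the old contents
        let buf = buf.into_inner();          // caller asserts: this really is a `[u8; 1024]` now
        assert_eq!(buf[1023], 0);
    }
}
```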

r? @RalfJung
cc @eddyb you may want to look at the new panicking logic
bors committed Sep 22, 2018
2 parents 4591a24 + 1cdbad2 commit c6e3d7f
Showing 20 changed files with 350 additions and 85 deletions.
16 changes: 12 additions & 4 deletions src/etc/gdb_rust_pretty_printing.py
@@ -322,8 +322,11 @@ def to_string(self):
def children(self):
(length, data_ptr) = \
rustpp.extract_length_and_ptr_from_std_btreeset(self.__val)
val = GdbValue(data_ptr.get_wrapped_value().dereference()).get_child_at_index(3)
gdb_ptr = val.get_wrapped_value()
leaf_node = GdbValue(data_ptr.get_wrapped_value().dereference())
maybe_uninit_keys = leaf_node.get_child_at_index(3)
manually_drop_keys = maybe_uninit_keys.get_child_at_index(1)
keys = manually_drop_keys.get_child_at_index(0)
gdb_ptr = keys.get_wrapped_value()
for index in xrange(length):
yield (str(index), gdb_ptr[index])

@@ -345,9 +348,14 @@ def to_string(self):
def children(self):
(length, data_ptr) = \
rustpp.extract_length_and_ptr_from_std_btreemap(self.__val)
keys = GdbValue(data_ptr.get_wrapped_value().dereference()).get_child_at_index(3)
leaf_node = GdbValue(data_ptr.get_wrapped_value().dereference())
maybe_uninit_keys = leaf_node.get_child_at_index(3)
manually_drop_keys = maybe_uninit_keys.get_child_at_index(1)
keys = manually_drop_keys.get_child_at_index(0)
keys_ptr = keys.get_wrapped_value()
vals = GdbValue(data_ptr.get_wrapped_value().dereference()).get_child_at_index(4)
maybe_uninit_vals = leaf_node.get_child_at_index(4)
manually_drop_vals = maybe_uninit_vals.get_child_at_index(1)
vals = manually_drop_vals.get_child_at_index(0)
vals_ptr = vals.get_wrapped_value()
for index in xrange(length):
yield (str(index), keys_ptr[index])
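The extra `get_child_at_index` calls track the new shape of the leaf node: fields 3 and 4 (`keys` and `vals`) are now `MaybeUninit` unions whose `value` member wraps the array in `ManuallyDrop`, so the printer must step through two wrappers before reaching the array itself. A stand-in for the layout being traversed is below; the child indices assume GDB reports members in declaration order, which is what the printer code above relies on:

```rust
use std::mem::ManuallyDrop;

const CAPACITY: usize = 11; // stand-in for the real capacity constant in node.rs

// Same shape as the `MaybeUninit` added in this PR: child 0 is `uninit`,
// child 1 is `value`, whose only child (index 0) is the wrapped array.
union MaybeUninitSketch<T> {
    uninit: (),
    value: ManuallyDrop<T>,
}

// Same field order as `LeafNode<K, V>`: parent (0), parent_idx (1), len (2),
// keys (3), vals (4) -- hence `get_child_at_index(3)` / `(4)` above.
struct LeafNodeSketch<K, V> {
    parent: *const (),
    parent_idx: MaybeUninitSketch<u16>,
    len: u16,
    keys: MaybeUninitSketch<[K; CAPACITY]>,
    vals: MaybeUninitSketch<[V; CAPACITY]>,
}

fn main() {}
```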
40 changes: 20 additions & 20 deletions src/liballoc/collections/btree/node.rs
@@ -42,7 +42,7 @@
// This implies that even an empty internal node has at least one edge.

use core::marker::PhantomData;
use core::mem;
use core::mem::{self, MaybeUninit};
use core::ptr::{self, Unique, NonNull};
use core::slice;

@@ -73,7 +73,7 @@ struct LeafNode<K, V> {
/// This node's index into the parent node's `edges` array.
/// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
/// This is only guaranteed to be initialized when `parent` is nonnull.
parent_idx: u16,
parent_idx: MaybeUninit<u16>,

/// The number of keys and values this node stores.
///
@@ -83,8 +83,8 @@ struct LeafNode<K, V> {

/// The arrays storing the actual data of the node. Only the first `len` elements of each
/// array are initialized and valid.
keys: [K; CAPACITY],
vals: [V; CAPACITY],
keys: MaybeUninit<[K; CAPACITY]>,
vals: MaybeUninit<[V; CAPACITY]>,
}

impl<K, V> LeafNode<K, V> {
@@ -94,10 +94,10 @@ impl<K, V> LeafNode<K, V> {
LeafNode {
// As a general policy, we leave fields uninitialized if they can be, as this should
// be both slightly faster and easier to track in Valgrind.
keys: mem::uninitialized(),
vals: mem::uninitialized(),
keys: MaybeUninit::uninitialized(),
vals: MaybeUninit::uninitialized(),
parent: ptr::null(),
parent_idx: mem::uninitialized(),
parent_idx: MaybeUninit::uninitialized(),
len: 0
}
}
@@ -115,10 +115,10 @@ unsafe impl Sync for LeafNode<(), ()> {}
// ever take a pointer past the first key.
static EMPTY_ROOT_NODE: LeafNode<(), ()> = LeafNode {
parent: ptr::null(),
parent_idx: 0,
parent_idx: MaybeUninit::uninitialized(),
len: 0,
keys: [(); CAPACITY],
vals: [(); CAPACITY],
keys: MaybeUninit::uninitialized(),
vals: MaybeUninit::uninitialized(),
};

/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
@@ -430,7 +430,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
root: self.root,
_marker: PhantomData
},
idx: self.as_leaf().parent_idx as usize,
idx: unsafe { usize::from(*self.as_leaf().parent_idx.get_ref()) },
_marker: PhantomData
})
} else {
@@ -567,7 +567,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
// the node, which is allowed by LLVM.
unsafe {
slice::from_raw_parts(
self.as_leaf().keys.as_ptr(),
self.as_leaf().keys.as_ptr() as *const K,
self.len()
)
}
@@ -578,7 +578,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
debug_assert!(!self.is_shared_root());
unsafe {
slice::from_raw_parts(
self.as_leaf().vals.as_ptr(),
self.as_leaf().vals.as_ptr() as *const V,
self.len()
)
}
@@ -605,7 +605,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
} else {
unsafe {
slice::from_raw_parts_mut(
&mut self.as_leaf_mut().keys as *mut [K] as *mut K,
self.as_leaf_mut().keys.get_mut() as *mut [K] as *mut K,
self.len()
)
}
@@ -616,7 +616,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
debug_assert!(!self.is_shared_root());
unsafe {
slice::from_raw_parts_mut(
&mut self.as_leaf_mut().vals as *mut [V] as *mut V,
self.as_leaf_mut().vals.get_mut() as *mut [V] as *mut V,
self.len()
)
}
@@ -1013,7 +1013,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
let ptr = self.node.as_internal_mut() as *mut _;
let mut child = self.descend();
child.as_leaf_mut().parent = ptr;
child.as_leaf_mut().parent_idx = idx;
child.as_leaf_mut().parent_idx.set(idx);
}

/// Unsafely asserts to the compiler some static information about whether the underlying
@@ -1152,12 +1152,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV>

ptr::copy_nonoverlapping(
self.node.keys().as_ptr().add(self.idx + 1),
new_node.keys.as_mut_ptr(),
new_node.keys.as_mut_ptr() as *mut K,
new_len
);
ptr::copy_nonoverlapping(
self.node.vals().as_ptr().add(self.idx + 1),
new_node.vals.as_mut_ptr(),
new_node.vals.as_mut_ptr() as *mut V,
new_len
);

@@ -1210,12 +1210,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::

ptr::copy_nonoverlapping(
self.node.keys().as_ptr().add(self.idx + 1),
new_node.data.keys.as_mut_ptr(),
new_node.data.keys.as_mut_ptr() as *mut K,
new_len
);
ptr::copy_nonoverlapping(
self.node.vals().as_ptr().add(self.idx + 1),
new_node.data.vals.as_mut_ptr(),
new_node.data.vals.as_mut_ptr() as *mut V,
new_len
);
ptr::copy_nonoverlapping(
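All of the node.rs changes follow the same two-step pattern: storage that used to be `[K; CAPACITY]` filled by `mem::uninitialized()` becomes `MaybeUninit<[K; CAPACITY]>`, and every access to the initialized prefix goes through a pointer cast (`as_ptr() as *const K`) or `get_mut()` plus the node's `len`. A reduced sketch of that pattern follows; field and method names mirror the diff, but this is not the real node type and it needs this PR's nightly `maybe_uninit` feature:

```rust
#![feature(maybe_uninit)]
use std::mem::MaybeUninit;
use std::slice;

const CAPACITY: usize = 11; // stand-in for the real constant in node.rs

struct Leaf<K> {
    len: u16,
    keys: MaybeUninit<[K; CAPACITY]>,
}

impl<K> Leaf<K> {
    fn new() -> Self {
        // The array starts uninitialized; only the first `len` slots are ever valid.
        // (This sketch never drops pushed values; the real node code handles that elsewhere.)
        Leaf { len: 0, keys: MaybeUninit::uninitialized() }
    }

    fn push(&mut self, key: K) {
        assert!((self.len as usize) < CAPACITY);
        unsafe {
            // Write through a raw pointer so the uninitialized slot is never dropped.
            (self.keys.as_mut_ptr() as *mut K).add(self.len as usize).write(key);
        }
        self.len += 1;
    }

    fn keys(&self) -> &[K] {
        // Cast the array pointer down to an element pointer and expose only the
        // initialized prefix -- the same trick as `self.as_leaf().keys.as_ptr() as *const K`.
        unsafe { slice::from_raw_parts(self.keys.as_ptr() as *const K, self.len as usize) }
    }
}

fn main() {
    let mut leaf = Leaf::new();
    leaf.push("a");
    leaf.push("b");
    assert_eq!(leaf.keys(), &["a", "b"]);
}
```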
1 change: 1 addition & 0 deletions src/liballoc/lib.rs
@@ -119,6 +119,7 @@
#![feature(exact_chunks)]
#![feature(rustc_const_unstable)]
#![feature(const_vec_new)]
#![feature(maybe_uninit)]

// Allow testing this library

27 changes: 14 additions & 13 deletions src/libcore/fmt/float.rs
@@ -9,7 +9,7 @@
// except according to those terms.

use fmt::{Formatter, Result, LowerExp, UpperExp, Display, Debug};
use mem;
use mem::MaybeUninit;
use num::flt2dec;

// Don't inline this so callers don't use the stack space this function
@@ -20,11 +20,11 @@ fn float_to_decimal_common_exact<T>(fmt: &mut Formatter, num: &T,
where T: flt2dec::DecodableFloat
{
unsafe {
let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
let mut buf = MaybeUninit::<[u8; 1024]>::uninitialized(); // enough for f32 and f64
let mut parts = MaybeUninit::<[flt2dec::Part; 4]>::uninitialized();
let formatted = flt2dec::to_exact_fixed_str(flt2dec::strategy::grisu::format_exact,
*num, sign, precision,
false, &mut buf, &mut parts);
false, buf.get_mut(), parts.get_mut());
fmt.pad_formatted_parts(&formatted)
}
}
@@ -38,10 +38,11 @@ fn float_to_decimal_common_shortest<T>(fmt: &mut Formatter, num: &T,
{
unsafe {
// enough for f32 and f64
let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninitialized();
let mut parts = MaybeUninit::<[flt2dec::Part; 4]>::uninitialized();
let formatted = flt2dec::to_shortest_str(flt2dec::strategy::grisu::format_shortest, *num,
sign, precision, false, &mut buf, &mut parts);
sign, precision, false, buf.get_mut(),
parts.get_mut());
fmt.pad_formatted_parts(&formatted)
}
}
@@ -75,11 +76,11 @@ fn float_to_exponential_common_exact<T>(fmt: &mut Formatter, num: &T,
where T: flt2dec::DecodableFloat
{
unsafe {
let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
let mut buf = MaybeUninit::<[u8; 1024]>::uninitialized(); // enough for f32 and f64
let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninitialized();
let formatted = flt2dec::to_exact_exp_str(flt2dec::strategy::grisu::format_exact,
*num, sign, precision,
upper, &mut buf, &mut parts);
upper, buf.get_mut(), parts.get_mut());
fmt.pad_formatted_parts(&formatted)
}
}
@@ -94,11 +95,11 @@ fn float_to_exponential_common_shortest<T>(fmt: &mut Formatter,
{
unsafe {
// enough for f32 and f64
let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninitialized();
let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninitialized();
let formatted = flt2dec::to_shortest_exp_str(flt2dec::strategy::grisu::format_shortest,
*num, sign, (0, 0), upper,
&mut buf, &mut parts);
buf.get_mut(), parts.get_mut());
fmt.pad_formatted_parts(&formatted)
}
}
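The four formatting helpers all change the same way: the on-stack scratch buffers become `MaybeUninit` values, and `get_mut()` hands the flt2dec routines an ordinary `&mut` array to fill. Reduced to its essentials, the pattern looks like the sketch below; `fill` is a stand-in for the `flt2dec::to_*_str` calls, not a real API, and the `maybe_uninit` feature from this PR is assumed:

```rust
#![feature(maybe_uninit)]
use std::mem::MaybeUninit;

// Stand-in for a flt2dec routine: writes into the buffer it is given and
// reports how many bytes it produced.
fn fill(buf: &mut [u8; 16]) -> usize {
    let digits = b"3.14159";
    buf[..digits.len()].copy_from_slice(digits);
    digits.len()
}

fn main() {
    // Previously: `let mut buf: [u8; 16] = mem::uninitialized();`
    let mut buf = MaybeUninit::<[u8; 16]>::uninitialized();
    let written = unsafe {
        // `get_mut` treats the storage as initialized; the callers in float.rs
        // rely on the callee writing every byte it later reads.
        fill(buf.get_mut())
    };
    let text = unsafe { &buf.get_ref()[..written] };
    assert_eq!(text, b"3.14159");
}
```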
2 changes: 2 additions & 0 deletions src/libcore/lib.rs
@@ -246,6 +246,8 @@ macro_rules! test_v512 { ($item:item) => {}; }
#[allow(unused_macros)]
macro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }
#[path = "../stdsimd/coresimd/mod.rs"]
// replacing uses of mem::{uninitialized,zeroed} with MaybeUninit needs to be in the stdsimd repo
#[allow(deprecated)]
#[allow(missing_docs, missing_debug_implementations, dead_code, unused_imports)]
#[unstable(feature = "stdsimd", issue = "48556")]
#[cfg(not(stage0))] // allow changes to how stdsimd works in stage0
96 changes: 96 additions & 0 deletions src/libcore/mem.rs
@@ -514,6 +514,7 @@ pub fn needs_drop<T>() -> bool {
/// assert_eq!(0, x);
/// ```
#[inline]
#[rustc_deprecated(since = "2.0.0", reason = "use `mem::MaybeUninit::zeroed` instead")]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn zeroed<T>() -> T {
intrinsics::init()
@@ -608,6 +609,7 @@ pub unsafe fn zeroed<T>() -> T {
/// [copy_no]: ../intrinsics/fn.copy_nonoverlapping.html
/// [`Drop`]: ../ops/trait.Drop.html
#[inline]
#[rustc_deprecated(since = "2.0.0", reason = "use `mem::MaybeUninit::uninitialized` instead")]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn uninitialized<T>() -> T {
intrinsics::uninit()
@@ -1024,3 +1026,97 @@ impl<T: ?Sized> DerefMut for ManuallyDrop<T> {
&mut self.value
}
}

/// A newtype to construct uninitialized instances of `T`
#[allow(missing_debug_implementations)]
#[unstable(feature = "maybe_uninit", issue = "53491")]
// NOTE after stabilizing `MaybeUninit` proceed to deprecate `mem::{uninitialized,zeroed}`
pub union MaybeUninit<T> {
uninit: (),
value: ManuallyDrop<T>,
}

impl<T> MaybeUninit<T> {
/// Create a new `MaybeUninit` in an uninitialized state.
///
/// Note that dropping a `MaybeUninit` will never call `T`'s drop code.
/// It is your responsibility to make sure `T` gets dropped if it got initialized.
#[unstable(feature = "maybe_uninit", issue = "53491")]
pub const fn uninitialized() -> MaybeUninit<T> {
MaybeUninit { uninit: () }
}

/// Create a new `MaybeUninit` in an uninitialized state, with the memory being
/// filled with `0` bytes. It depends on `T` whether that already makes for
/// proper initialization. For example, `MaybeUninit<usize>::zeroed()` is initialized,
/// but `MaybeUninit<&'static i32>::zeroed()` is not because references must not
/// be null.
///
/// Note that dropping a `MaybeUninit` will never call `T`'s drop code.
/// It is your responsibility to make sure `T` gets dropped if it got initialized.
#[unstable(feature = "maybe_uninit", issue = "53491")]
pub fn zeroed() -> MaybeUninit<T> {
let mut u = MaybeUninit::<T>::uninitialized();
unsafe {
u.as_mut_ptr().write_bytes(0u8, 1);
}
u
}

/// Set the value of the `MaybeUninit`. This overwrites any previous value without dropping it.
#[unstable(feature = "maybe_uninit", issue = "53491")]
pub fn set(&mut self, val: T) {
unsafe {
self.value = ManuallyDrop::new(val);
}
}

/// Extract the value from the `MaybeUninit` container. This is a great way
/// to ensure that the data will get dropped, because the resulting `T` is
/// subject to the usual drop handling.
///
/// # Unsafety
///
/// It is up to the caller to guarantee that the `MaybeUninit` really is in an initialized
/// state, otherwise this will immediately cause undefined behavior.
#[unstable(feature = "maybe_uninit", issue = "53491")]
pub unsafe fn into_inner(self) -> T {
ManuallyDrop::into_inner(self.value)
}

/// Get a reference to the contained value.
///
/// # Unsafety
///
/// It is up to the caller to guarantee that the `MaybeUninit` really is in an initialized
/// state, otherwise this will immediately cause undefined behavior.
#[unstable(feature = "maybe_uninit", issue = "53491")]
pub unsafe fn get_ref(&self) -> &T {
&*self.value
}

/// Get a mutable reference to the contained value.
///
/// # Unsafety
///
/// It is up to the caller to guarantee that the `MaybeUninit` really is in an initialized
/// state, otherwise this will immediately cause undefined behavior.
#[unstable(feature = "maybe_uninit", issue = "53491")]
pub unsafe fn get_mut(&mut self) -> &mut T {
&mut *self.value
}

/// Get a pointer to the contained value. Reading from this pointer will be undefined
/// behavior unless the `MaybeUninit` is initialized.
#[unstable(feature = "maybe_uninit", issue = "53491")]
pub fn as_ptr(&self) -> *const T {
unsafe { &*self.value as *const T }
}

/// Get a mutable pointer to the contained value. Reading from this pointer will be undefined
/// behavior unless the `MaybeUninit` is initialized.
#[unstable(feature = "maybe_uninit", issue = "53491")]
pub fn as_mut_ptr(&mut self) -> *mut T {
unsafe { &mut *self.value as *mut T }
}
}
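
Taken together, the intended usage of the surface added above looks like this (nightly-only behind `#![feature(maybe_uninit)]`; behavior follows the doc comments in this diff):

```rust
#![feature(maybe_uninit)]
use std::mem::MaybeUninit;

fn main() {
    // `set` overwrites without dropping whatever was there; `into_inner` is the
    // caller's promise that the value really has been initialized by now.
    let mut x = MaybeUninit::<u32>::uninitialized();
    x.set(42);
    let x = unsafe { x.into_inner() };
    assert_eq!(x, 42);

    // `zeroed` already yields a valid value for some types...
    let n = unsafe { MaybeUninit::<usize>::zeroed().into_inner() };
    assert_eq!(n, 0);
    // ...but not for others: `MaybeUninit::<&'static i32>::zeroed()` is still not
    // properly initialized, because a null reference is never valid, so calling
    // `into_inner` on it would be undefined behavior.
}
```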