From 56cbf2f22aeb6448acd7eb49e9b2554c80bdbf79 Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Tue, 24 Mar 2020 11:45:38 +0100
Subject: [PATCH 01/23] Overhaul of the `AllocRef` trait to match
 allocator-wg's latest consensus

---
 src/liballoc/alloc.rs                       |  111 +-
 src/liballoc/alloc/tests.rs                 |    5 +-
 src/liballoc/boxed.rs                       |   44 +-
 src/liballoc/raw_vec.rs                     |  568 ++++------
 src/liballoc/raw_vec/tests.rs               |    9 +-
 src/liballoc/rc.rs                          |    6 +-
 src/liballoc/sync.rs                        |    6 +-
 src/liballoc/tests/heap.rs                  |   10 +-
 src/liballoc/vec.rs                         |    2 +-
 src/libcore/alloc.rs                        | 1043 -------------------
 src/libcore/alloc/global.rs                 |  198 ++++
 src/libcore/alloc/layout.rs                 |  345 ++++++
 src/libcore/alloc/mod.rs                    |  376 +++++++
 src/libstd/alloc.rs                         |  109 +-
 src/libstd/error.rs                         |    9 +-
 src/test/ui/allocator/custom.rs             |    6 +-
 src/test/ui/allocator/xcrate-use.rs         |    8 +-
 src/test/ui/realloc-16687.rs                |   84 +-
 src/test/ui/regions/regions-mock-codegen.rs |   14 +-
 19 files changed, 1408 insertions(+), 1545 deletions(-)
 delete mode 100644 src/libcore/alloc.rs
 create mode 100644 src/libcore/alloc/global.rs
 create mode 100644 src/libcore/alloc/layout.rs
 create mode 100644 src/libcore/alloc/mod.rs

diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 9f82b2c6fa66d..26524f6296221 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -2,7 +2,7 @@
 
 #![stable(feature = "alloc_module", since = "1.28.0")]
 
-use core::intrinsics::{min_align_of_val, size_of_val};
+use core::intrinsics::{self, min_align_of_val, size_of_val};
 use core::ptr::{NonNull, Unique};
 use core::usize;
 
@@ -165,11 +165,19 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for Global {
     #[inline]
-    fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
-        if layout.size() == 0 {
+    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr> {
+        let new_size = layout.size();
+        if new_size == 0 {
             Ok((layout.dangling(), 0))
         } else {
-            unsafe { NonNull::new(alloc(layout)).ok_or(AllocErr).map(|p| (p, layout.size())) }
+            unsafe {
+                let raw_ptr = match init {
+                    AllocInit::Uninitialized => alloc(layout),
+                    AllocInit::Zeroed => alloc_zeroed(layout),
+                };
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
+                Ok((ptr, new_size))
+            }
         }
     }
 
@@ -181,33 +189,77 @@ unsafe impl AllocRef for Global {
     }
 
     #[inline]
-    unsafe fn realloc(
+    unsafe fn grow(
         &mut self,
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
+        placement: ReallocPlacement,
+        init: AllocInit,
     ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        match (layout.size(), new_size) {
-            (0, 0) => Ok((layout.dangling(), 0)),
-            (0, _) => self.alloc(Layout::from_size_align_unchecked(new_size, layout.align())),
-            (_, 0) => {
-                self.dealloc(ptr, layout);
-                Ok((layout.dangling(), 0))
+        let old_size = layout.size();
+        debug_assert!(
+            new_size >= old_size,
+            "`new_size` must be greater than or equal to `layout.size()`"
+        );
+
+        if old_size == new_size {
+            return Ok((ptr, new_size));
+        }
+
+        match placement {
+            ReallocPlacement::MayMove => {
+                if old_size == 0 {
+                    self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)
+                } else {
+                    // `realloc` probably checks for `new_size > old_size` or something similar.
+                    // `new_size` must be greater than or equal to `old_size` due to the safety constraint,
+                    // and `new_size` == `old_size` was caught before
+                    intrinsics::assume(new_size > old_size);
+                    let ptr =
+                        NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?;
+                    let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+                    init.initialize_offset(ptr, new_layout, old_size);
+                    Ok((ptr, new_size))
+                }
             }
-            (_, _) => NonNull::new(realloc(ptr.as_ptr(), layout, new_size))
-                .ok_or(AllocErr)
-                .map(|p| (p, new_size)),
+            ReallocPlacement::InPlace => Err(AllocErr),
         }
     }
 
     #[inline]
-    fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
-        if layout.size() == 0 {
-            Ok((layout.dangling(), 0))
-        } else {
-            unsafe {
-                NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr).map(|p| (p, layout.size()))
+    unsafe fn shrink(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+        placement: ReallocPlacement,
+    ) -> Result<(NonNull<u8>, usize), AllocErr> {
+        let old_size = layout.size();
+        debug_assert!(
+            new_size <= old_size,
+            "`new_size` must be smaller than or equal to `layout.size()`"
+        );
+
+        if old_size == new_size {
+            return Ok((ptr, new_size));
+        }
+
+        match placement {
+            ReallocPlacement::MayMove => {
+                let ptr = if new_size == 0 {
+                    self.dealloc(ptr, layout);
+                    layout.dangling()
+                } else {
+                    // `realloc` probably checks for `new_size > old_size` or something similar.
+                    // `new_size` must be smaller than or equal to `old_size` due to the safety constraint,
+                    // and `new_size` == `old_size` was caught before
+                    intrinsics::assume(new_size < old_size);
+                    NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?
+                };
+                Ok((ptr, new_size))
             }
+            ReallocPlacement::InPlace => Err(AllocErr),
         }
     }
 }
@@ -218,14 +270,10 @@ unsafe impl AllocRef for Global {
 #[lang = "exchange_malloc"]
 #[inline]
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
-    if size == 0 {
-        align as *mut u8
-    } else {
-        let layout = Layout::from_size_align_unchecked(size, align);
-        match Global.alloc(layout) {
-            Ok((ptr, _)) => ptr.as_ptr(),
-            Err(_) => handle_alloc_error(layout),
-        }
+    let layout = Layout::from_size_align_unchecked(size, align);
+    match Global.alloc(layout, AllocInit::Uninitialized) {
+        Ok((ptr, _)) => ptr.as_ptr(),
+        Err(_) => handle_alloc_error(layout),
     }
 }
 
@@ -239,11 +287,8 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
 pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
     let size = size_of_val(ptr.as_ref());
     let align = min_align_of_val(ptr.as_ref());
-    // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
-    if size != 0 {
-        let layout = Layout::from_size_align_unchecked(size, align);
-        Global.dealloc(ptr.cast().into(), layout);
-    }
+    let layout = Layout::from_size_align_unchecked(size, align);
+    Global.dealloc(ptr.cast().into(), layout)
 }
 
 /// Abort on memory allocation error or failure.
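
The `Global` impl above is the template for every `AllocRef` caller touched in this series: allocation now takes an `AllocInit`, and the old `realloc`/`alloc_zeroed` pair dissolves into `grow`/`shrink` plus an initialization mode. Below is a minimal caller-side sketch; it is not part of the diff and assumes exactly the unstable signatures this patch introduces (nightly-only, `#![feature(allocator_api)]`):

```rust
#![feature(allocator_api)]

use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement};

fn main() {
    unsafe {
        let layout = Layout::array::<u8>(16).unwrap();
        // `AllocInit::Zeroed` takes over the role of the removed `alloc_zeroed`.
        let (ptr, _excess) = Global
            .alloc(layout, AllocInit::Zeroed)
            .unwrap_or_else(|_| handle_alloc_error(layout));

        // `grow` replaces the growing half of `realloc`; `MayMove` permits
        // relocation, and the new tail is left uninitialized here.
        let (ptr, excess) = Global
            .grow(ptr, layout, 32, ReallocPlacement::MayMove, AllocInit::Uninitialized)
            .unwrap_or_else(|_| handle_alloc_error(layout));

        // Free with a layout that matches the grown block.
        Global.dealloc(ptr, Layout::from_size_align_unchecked(excess, layout.align()));
    }
}
```

Note how `ReallocPlacement::InPlace` makes both `grow` and `shrink` fail with `AllocErr` for `Global`; `RawVec` relies on exactly that to turn its `reserve_in_place`/`double_in_place` methods into simple boolean queries later in this patch.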
diff --git a/src/liballoc/alloc/tests.rs b/src/liballoc/alloc/tests.rs
index 55944398e1677..6a2130a7192f0 100644
--- a/src/liballoc/alloc/tests.rs
+++ b/src/liballoc/alloc/tests.rs
@@ -8,8 +8,9 @@ use test::Bencher;
 fn allocate_zeroed() {
     unsafe {
         let layout = Layout::from_size_align(1024, 1).unwrap();
-        let (ptr, _) =
-            Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout));
+        let (ptr, _) = Global
+            .alloc(layout.clone(), AllocInit::Zeroed)
+            .unwrap_or_else(|_| handle_alloc_error(layout));
 
         let mut i = ptr.cast::<u8>().as_ptr();
         let end = i.add(layout.size());
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index 36641284a769b..9690e311e9653 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -146,7 +146,7 @@ use core::ptr::{self, NonNull, Unique};
 use core::slice;
 use core::task::{Context, Poll};
 
-use crate::alloc::{self, AllocRef, Global};
+use crate::alloc::{self, AllocInit, AllocRef, Global};
 use crate::raw_vec::RawVec;
 use crate::str::from_boxed_utf8_unchecked;
 use crate::vec::Vec;
@@ -196,14 +196,12 @@ impl<T> Box<T> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
         let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
-        unsafe {
-            let ptr = if layout.size() == 0 {
-                NonNull::dangling()
-            } else {
-                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
-            };
-            Box::from_raw(ptr.as_ptr())
-        }
+        let ptr = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+            .0
+            .cast();
+        unsafe { Box::from_raw(ptr.as_ptr()) }
     }
 
     /// Constructs a new `Box` with uninitialized contents, with the memory
@@ -226,11 +224,13 @@ impl<T> Box<T> {
     /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> {
-        unsafe {
-            let mut uninit = Self::new_uninit();
-            ptr::write_bytes::<T>(uninit.as_mut_ptr(), 0, 1);
-            uninit
-        }
+        let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
+        let ptr = Global
+            .alloc(layout, AllocInit::Zeroed)
+            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+            .0
+            .cast();
+        unsafe { Box::from_raw(ptr.as_ptr()) }
     }
     /// Constructs a new `Pin<Box<T>>`. If `T` does not implement `Unpin`, then
@@ -266,14 +266,12 @@ impl<T> Box<[T]> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
         let layout = alloc::Layout::array::<mem::MaybeUninit<T>>(len).unwrap();
-        unsafe {
-            let ptr = if layout.size() == 0 {
-                NonNull::dangling()
-            } else {
-                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
-            };
-            Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len))
-        }
+        let ptr = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+            .0
+            .cast();
+        unsafe { Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len)) }
     }
 }
 
@@ -778,7 +776,7 @@ impl<T: Copy> From<&[T]> for Box<[T]> {
         let buf = RawVec::with_capacity(len);
         unsafe {
             ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
-            buf.into_box()
+            buf.into_box().assume_init()
         }
     }
 }
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index b31fec7f037c9..3a108adb218ce 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -2,12 +2,17 @@
 #![doc(hidden)]
 
 use core::cmp;
-use core::mem;
+use core::mem::{self, MaybeUninit};
 use core::ops::Drop;
-use core::ptr::{self, NonNull, Unique};
+use core::ptr::Unique;
 use core::slice;
 
-use crate::alloc::{handle_alloc_error, AllocErr, AllocRef, Global, Layout};
+use crate::alloc::{
+    handle_alloc_error, AllocErr,
+    AllocInit::{self, *},
+    AllocRef, Global, Layout,
+    ReallocPlacement::{self, *},
+};
 use crate::boxed::Box;
 use crate::collections::TryReserveError::{self, *};
 
@@ -19,28 +24,22 @@ mod tests;
 /// involved. This type is excellent for building your own data structures like Vec and VecDeque.
 /// In particular:
 ///
-/// * Produces `Unique::empty()` on zero-sized types.
-/// * Produces `Unique::empty()` on zero-length allocations.
 /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics).
 /// * Guards against 32-bit systems allocating more than isize::MAX bytes.
 /// * Guards against overflowing your length.
-/// * Aborts on OOM or calls `handle_alloc_error` as applicable.
-/// * Avoids freeing `Unique::empty()`.
+/// * Calls `handle_alloc_error` for fallible allocations.
 /// * Contains a `ptr::Unique` and thus endows the user with all related benefits.
+/// * Uses the excess returned from the allocator to expose the largest available capacity.
 ///
-/// This type does not in anyway inspect the memory that it manages. When dropped it *will*
+/// This type does not in any way inspect the memory that it manages. When dropped it *will*
 /// free its memory, but it *won't* try to drop its contents. It is up to the user of `RawVec`
 /// to handle the actual things *stored* inside of a `RawVec`.
 ///
-/// Note that a `RawVec` always forces its capacity to be `usize::MAX` for zero-sized types.
-/// This enables you to use capacity-growing logic catch the overflows in your length
-/// that might occur with zero-sized types.
-///
-/// The above means that you need to be careful when round-tripping this type with a
+/// Note that the excess of a zero-sized type is always infinite, so `capacity()` always returns
+/// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
 /// `Box<[T]>`, since `capacity()` won't yield the length. However, `with_capacity`,
-/// `shrink_to_fit`, and `from_box` will actually set `RawVec`'s private capacity
-/// field. This allows zero-sized types to not be special-cased by consumers of
-/// this type.
+/// `shrink_to_fit`, and `from_box` will actually set `RawVec`'s private capacity field. This allows
+/// zero-sized types to not be special-cased by consumers of this type.
 #[allow(missing_debug_implementations)]
 pub struct RawVec<T, A: AllocRef = Global> {
     ptr: Unique<T>,
     cap: usize,
     a: A,
 }
@@ -52,49 +51,30 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// Like `new`, but parameterized over the choice of allocator for
     /// the returned `RawVec`.
     pub const fn new_in(a: A) -> Self {
-        let cap = if mem::size_of::<T>() == 0 { core::usize::MAX } else { 0 };
-
-        // `Unique::empty()` doubles as "unallocated" and "zero-sized allocation".
-        RawVec { ptr: Unique::empty(), cap, a }
+        // `cap: 0` means "unallocated". Zero-sized allocations are handled by `AllocRef`.
+        Self { ptr: Unique::empty(), cap: 0, a }
     }
 
     /// Like `with_capacity`, but parameterized over the choice of
     /// allocator for the returned `RawVec`.
     #[inline]
     pub fn with_capacity_in(capacity: usize, a: A) -> Self {
-        RawVec::allocate_in(capacity, false, a)
+        Self::allocate_in(capacity, Uninitialized, a)
     }
 
     /// Like `with_capacity_zeroed`, but parameterized over the choice
     /// of allocator for the returned `RawVec`.
     #[inline]
     pub fn with_capacity_zeroed_in(capacity: usize, a: A) -> Self {
-        RawVec::allocate_in(capacity, true, a)
+        Self::allocate_in(capacity, Zeroed, a)
     }
 
-    fn allocate_in(mut capacity: usize, zeroed: bool, mut a: A) -> Self {
-        let elem_size = mem::size_of::<T>();
-
-        let alloc_size = capacity.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow());
-        alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());
+    fn allocate_in(capacity: usize, init: AllocInit, mut a: A) -> Self {
+        let layout = Layout::array::<T>(capacity).unwrap_or_else(|_| capacity_overflow());
+        alloc_guard(layout.size()).unwrap_or_else(|_| capacity_overflow());
 
-        // Handles ZSTs and `capacity == 0` alike.
-        let ptr = if alloc_size == 0 {
-            NonNull::<T>::dangling()
-        } else {
-            let align = mem::align_of::<T>();
-            let layout = Layout::from_size_align(alloc_size, align).unwrap();
-            let result = if zeroed { a.alloc_zeroed(layout) } else { a.alloc(layout) };
-            match result {
-                Ok((ptr, size)) => {
-                    capacity = size / elem_size;
-                    ptr.cast()
-                }
-                Err(_) => handle_alloc_error(layout),
-            }
-        };
-
-        RawVec { ptr: ptr.into(), cap: capacity, a }
+        let (ptr, excess) = a.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout));
+        Self { ptr: ptr.cast().into(), cap: Self::capacity_from_bytes(excess), a }
     }
 }
 
@@ -138,13 +118,13 @@ impl<T> RawVec<T, Global> {
     /// Aborts on OOM.
     #[inline]
     pub fn with_capacity(capacity: usize) -> Self {
-        RawVec::allocate_in(capacity, false, Global)
+        Self::with_capacity_in(capacity, Global)
     }
 
     /// Like `with_capacity`, but guarantees the buffer is zeroed.
     #[inline]
     pub fn with_capacity_zeroed(capacity: usize) -> Self {
-        RawVec::allocate_in(capacity, true, Global)
+        Self::with_capacity_zeroed_in(capacity, Global)
     }
 }
 
@@ -156,8 +136,9 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`.
     /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
     /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
+    #[inline]
     pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
-        RawVec { ptr: Unique::new_unchecked(ptr), cap: capacity, a }
+        Self { ptr: Unique::new_unchecked(ptr), cap: capacity, a }
     }
 }
 
@@ -169,8 +150,9 @@ impl<T> RawVec<T, Global> {
     /// The `ptr` must be allocated (on the system heap), and with the given `capacity`.
     /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
    /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
+ #[inline] pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self { - RawVec { ptr: Unique::new_unchecked(ptr), cap: capacity, a: Global } + Self::from_raw_parts_in(ptr, capacity, Global) } /// Converts a `Box<[T]>` into a `RawVec`. @@ -196,7 +178,7 @@ impl RawVec { /// This will always be `usize::MAX` if `T` is zero-sized. #[inline(always)] pub fn capacity(&self) -> usize { - if mem::size_of::() == 0 { !0 } else { self.cap } + if mem::size_of::() == 0 { usize::MAX } else { self.cap } } /// Returns a shared reference to the allocator backing this `RawVec`. @@ -274,50 +256,10 @@ impl RawVec { #[inline(never)] #[cold] pub fn double(&mut self) { - unsafe { - let elem_size = mem::size_of::(); - - // Since we set the capacity to `usize::MAX` when `elem_size` is - // 0, getting to here necessarily means the `RawVec` is overfull. - assert!(elem_size != 0, "capacity overflow"); - - let (ptr, new_cap) = match self.current_layout() { - Some(cur) => { - // Since we guarantee that we never allocate more than - // `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as - // a precondition, so this can't overflow. Additionally the - // alignment will never be too large as to "not be - // satisfiable", so `Layout::from_size_align` will always - // return `Some`. - // - // TL;DR, we bypass runtime checks due to dynamic assertions - // in this module, allowing us to use - // `from_size_align_unchecked`. - let new_cap = 2 * self.cap; - let new_size = new_cap * elem_size; - alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); - let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size); - match ptr_res { - Ok((ptr, new_size)) => (ptr, new_size / elem_size), - Err(_) => handle_alloc_error(Layout::from_size_align_unchecked( - new_size, - cur.align(), - )), - } - } - None => { - // Skip to 4 because tiny `Vec`'s are dumb; but not if that - // would cause overflow. - let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 }; - let layout = Layout::array::(new_cap).unwrap(); - match self.a.alloc(layout) { - Ok((ptr, new_size)) => (ptr, new_size / elem_size), - Err(_) => handle_alloc_error(layout), - } - } - }; - self.ptr = ptr.cast().into(); - self.cap = new_cap; + match self.grow(Double, MayMove, Uninitialized) { + Err(CapacityOverflow) => capacity_overflow(), + Err(AllocError { layout, .. }) => handle_alloc_error(layout), + Ok(()) => { /* yay */ } } } @@ -336,99 +278,7 @@ impl RawVec { #[inline(never)] #[cold] pub fn double_in_place(&mut self) -> bool { - unsafe { - let elem_size = mem::size_of::(); - let old_layout = match self.current_layout() { - Some(layout) => layout, - None => return false, // nothing to double - }; - - // Since we set the capacity to `usize::MAX` when `elem_size` is - // 0, getting to here necessarily means the `RawVec` is overfull. - assert!(elem_size != 0, "capacity overflow"); - - // Since we guarantee that we never allocate more than `isize::MAX` - // bytes, `elem_size * self.cap <= isize::MAX` as a precondition, so - // this can't overflow. - // - // Similarly to with `double` above, we can go straight to - // `Layout::from_size_align_unchecked` as we know this won't - // overflow and the alignment is sufficiently small. - let new_cap = 2 * self.cap; - let new_size = new_cap * elem_size; - alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); - match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) { - Ok(_) => { - // We can't directly divide `size`. 
- self.cap = new_cap; - true - } - Err(_) => false, - } - } - } - - /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. - pub fn try_reserve_exact( - &mut self, - used_capacity: usize, - needed_extra_capacity: usize, - ) -> Result<(), TryReserveError> { - self.reserve_internal(used_capacity, needed_extra_capacity, Fallible, Exact) - } - - /// Ensures that the buffer contains at least enough space to hold - /// `used_capacity + needed_extra_capacity` elements. If it doesn't already, - /// will reallocate the minimum possible amount of memory necessary. - /// Generally this will be exactly the amount of memory necessary, - /// but in principle the allocator is free to give back more than - /// we asked for. - /// - /// If `used_capacity` exceeds `self.capacity()`, this may fail to actually allocate - /// the requested space. This is not really unsafe, but the unsafe - /// code *you* write that relies on the behavior of this function may break. - /// - /// # Panics - /// - /// * Panics if the requested capacity exceeds `usize::MAX` bytes. - /// * Panics on 32-bit platforms if the requested capacity exceeds - /// `isize::MAX` bytes. - /// - /// # Aborts - /// - /// Aborts on OOM. - pub fn reserve_exact(&mut self, used_capacity: usize, needed_extra_capacity: usize) { - match self.reserve_internal(used_capacity, needed_extra_capacity, Infallible, Exact) { - Err(CapacityOverflow) => capacity_overflow(), - Err(AllocError { .. }) => unreachable!(), - Ok(()) => { /* yay */ } - } - } - - /// Calculates the buffer's new size given that it'll hold `used_capacity + - /// needed_extra_capacity` elements. This logic is used in amortized reserve methods. - /// Returns `(new_capacity, new_alloc_size)`. - fn amortized_new_size( - &self, - used_capacity: usize, - needed_extra_capacity: usize, - ) -> Result { - // Nothing we can really do about these checks, sadly. - let required_cap = - used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?; - // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`. - let double_cap = self.cap * 2; - // `double_cap` guarantees exponential growth. - Ok(cmp::max(double_cap, required_cap)) - } - - /// The same as `reserve`, but returns on errors instead of panicking or aborting. - pub fn try_reserve( - &mut self, - used_capacity: usize, - needed_extra_capacity: usize, - ) -> Result<(), TryReserveError> { - self.reserve_internal(used_capacity, needed_extra_capacity, Fallible, Amortized) + self.grow(Double, InPlace, Uninitialized).is_ok() } /// Ensures that the buffer contains at least enough space to hold @@ -484,12 +334,26 @@ impl RawVec { /// # } /// ``` pub fn reserve(&mut self, used_capacity: usize, needed_extra_capacity: usize) { - match self.reserve_internal(used_capacity, needed_extra_capacity, Infallible, Amortized) { + match self.try_reserve(used_capacity, needed_extra_capacity) { Err(CapacityOverflow) => capacity_overflow(), - Err(AllocError { .. }) => unreachable!(), + Err(AllocError { layout, .. }) => handle_alloc_error(layout), Ok(()) => { /* yay */ } } } + + /// The same as `reserve`, but returns on errors instead of panicking or aborting. 
+ pub fn try_reserve( + &mut self, + used_capacity: usize, + needed_extra_capacity: usize, + ) -> Result<(), TryReserveError> { + if self.needs_to_grow(used_capacity, needed_extra_capacity) { + self.grow(Amortized { used_capacity, needed_extra_capacity }, MayMove, Uninitialized) + } else { + Ok(()) + } + } + /// Attempts to ensure that the buffer contains at least enough space to hold /// `used_capacity + needed_extra_capacity` elements. If it doesn't already have /// enough capacity, will reallocate in place enough space plus comfortable slack @@ -508,45 +372,54 @@ impl RawVec { /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. pub fn reserve_in_place(&mut self, used_capacity: usize, needed_extra_capacity: usize) -> bool { - unsafe { - // NOTE: we don't early branch on ZSTs here because we want this - // to actually catch "asking for more than usize::MAX" in that case. - // If we make it past the first branch then we are guaranteed to - // panic. - - // Don't actually need any more capacity. If the current `cap` is 0, we can't - // reallocate in place. - // Wrapping in case they give a bad `used_capacity` - let old_layout = match self.current_layout() { - Some(layout) => layout, - None => return false, - }; - if self.capacity().wrapping_sub(used_capacity) >= needed_extra_capacity { - return false; - } + // This is more readable than putting this in one line: + // `!self.needs_to_grow(...) || self.grow(...).is_ok()` + if self.needs_to_grow(used_capacity, needed_extra_capacity) { + self.grow(Amortized { used_capacity, needed_extra_capacity }, InPlace, Uninitialized) + .is_ok() + } else { + true + } + } - let new_cap = self - .amortized_new_size(used_capacity, needed_extra_capacity) - .unwrap_or_else(|_| capacity_overflow()); - - // Here, `cap < used_capacity + needed_extra_capacity <= new_cap` - // (regardless of whether `self.cap - used_capacity` wrapped). - // Therefore, we can safely call `grow_in_place`. - - let new_layout = Layout::new::().repeat(new_cap).unwrap().0; - // FIXME: may crash and burn on over-reserve - alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow()); - match self.a.grow_in_place( - NonNull::from(self.ptr).cast(), - old_layout, - new_layout.size(), - ) { - Ok(_) => { - self.cap = new_cap; - true - } - Err(_) => false, - } + /// Ensures that the buffer contains at least enough space to hold + /// `used_capacity + needed_extra_capacity` elements. If it doesn't already, + /// will reallocate the minimum possible amount of memory necessary. + /// Generally this will be exactly the amount of memory necessary, + /// but in principle the allocator is free to give back more than + /// we asked for. + /// + /// If `used_capacity` exceeds `self.capacity()`, this may fail to actually allocate + /// the requested space. This is not really unsafe, but the unsafe + /// code *you* write that relies on the behavior of this function may break. + /// + /// # Panics + /// + /// * Panics if the requested capacity exceeds `usize::MAX` bytes. + /// * Panics on 32-bit platforms if the requested capacity exceeds + /// `isize::MAX` bytes. + /// + /// # Aborts + /// + /// Aborts on OOM. + pub fn reserve_exact(&mut self, used_capacity: usize, needed_extra_capacity: usize) { + match self.try_reserve_exact(used_capacity, needed_extra_capacity) { + Err(CapacityOverflow) => capacity_overflow(), + Err(AllocError { layout, .. 
}) => handle_alloc_error(layout), + Ok(()) => { /* yay */ } + } + } + + /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. + pub fn try_reserve_exact( + &mut self, + used_capacity: usize, + needed_extra_capacity: usize, + ) -> Result<(), TryReserveError> { + if self.needs_to_grow(used_capacity, needed_extra_capacity) { + self.grow(Exact { used_capacity, needed_extra_capacity }, MayMove, Uninitialized) + } else { + Ok(()) } } @@ -561,126 +434,134 @@ impl RawVec { /// /// Aborts on OOM. pub fn shrink_to_fit(&mut self, amount: usize) { - let elem_size = mem::size_of::(); - - // Set the `cap` because they might be about to promote to a `Box<[T]>` - if elem_size == 0 { - self.cap = amount; - return; - } - - // This check is my waterloo; it's the only thing `Vec` wouldn't have to do. - assert!(self.cap >= amount, "Tried to shrink to a larger capacity"); - - if amount == 0 { - // We want to create a new zero-length vector within the - // same allocator. We use `ptr::write` to avoid an - // erroneous attempt to drop the contents, and we use - // `ptr::read` to sidestep condition against destructuring - // types that implement Drop. - - unsafe { - let a = ptr::read(&self.a as *const A); - self.dealloc_buffer(); - ptr::write(self, RawVec::new_in(a)); - } - } else if self.cap != amount { - unsafe { - // We know here that our `amount` is greater than zero. This - // implies, via the assert above, that capacity is also greater - // than zero, which means that we've got a current layout that - // "fits" - // - // We also know that `self.cap` is greater than `amount`, and - // consequently we don't need runtime checks for creating either - // layout. - let old_size = elem_size * self.cap; - let new_size = elem_size * amount; - let align = mem::align_of::(); - let old_layout = Layout::from_size_align_unchecked(old_size, align); - match self.a.realloc(NonNull::from(self.ptr).cast(), old_layout, new_size) { - Ok((ptr, _)) => self.ptr = ptr.cast().into(), - Err(_) => { - handle_alloc_error(Layout::from_size_align_unchecked(new_size, align)) - } - } - } - self.cap = amount; + match self.shrink(amount, MayMove) { + Err(CapacityOverflow) => capacity_overflow(), + Err(AllocError { layout, .. }) => handle_alloc_error(layout), + Ok(()) => { /* yay */ } } } } -enum Fallibility { - Fallible, - Infallible, +#[derive(Copy, Clone)] +enum Strategy { + Double, + Amortized { used_capacity: usize, needed_extra_capacity: usize }, + Exact { used_capacity: usize, needed_extra_capacity: usize }, } +use Strategy::*; -use Fallibility::*; - -enum ReserveStrategy { - Exact, - Amortized, -} +impl RawVec { + /// Returns if the buffer needs to grow to fulfill the needed extra capacity. + /// Mainly used to make inlining reserve-calls possible without inlining `grow`. + fn needs_to_grow(&self, used_capacity: usize, needed_extra_capacity: usize) -> bool { + needed_extra_capacity > self.capacity().wrapping_sub(used_capacity) + } -use ReserveStrategy::*; + fn capacity_from_bytes(excess: usize) -> usize { + match mem::size_of::() { + 0 => usize::MAX, + elem_size => excess / elem_size, + } + } -impl RawVec { - fn reserve_internal( + /// Single method to handle all possibilities of growing the buffer. 
+ fn grow( &mut self, - used_capacity: usize, - needed_extra_capacity: usize, - fallibility: Fallibility, - strategy: ReserveStrategy, + strategy: Strategy, + placement: ReallocPlacement, + init: AllocInit, ) -> Result<(), TryReserveError> { let elem_size = mem::size_of::(); + let new_layout = match strategy { + Double => unsafe { + if elem_size == 0 { + // Since we return a capacity of `usize::MAX` when `elem_size` is + // 0, getting to here necessarily means the `RawVec` is overfull. + return Err(CapacityOverflow); + } + // Since we guarantee that we never allocate more than `isize::MAX` bytes, + // `elem_size * self.cap <= isize::MAX` as a precondition, so this can't overflow. + // Additionally the alignment will never be too large as to "not be satisfiable", + // so `Layout::from_size_align` will always return `Some`. + // + // TL;DR, we bypass runtime checks due to dynamic assertions in this module, + // allowing us to use `from_size_align_unchecked`. + let cap = if self.cap == 0 { + // Skip to 4 because tiny `Vec`'s are dumb; but not if that would cause overflow. + if elem_size > usize::MAX / 8 { 1 } else { 4 } + } else { + self.cap * 2 + }; + Layout::from_size_align_unchecked(cap * elem_size, mem::align_of::()) + }, + Amortized { used_capacity, needed_extra_capacity } => { + // Nothing we can really do about these checks, sadly. + let required_cap = + used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?; + // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`. + let double_cap = self.cap * 2; + // `double_cap` guarantees exponential growth. + let cap = cmp::max(double_cap, required_cap); + Layout::array::(cap).map_err(|_| CapacityOverflow)? + } + Exact { used_capacity, needed_extra_capacity } => { + let cap = + used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?; + Layout::array::(cap).map_err(|_| CapacityOverflow)? + } + }; - unsafe { - // NOTE: we don't early branch on ZSTs here because we want this - // to actually catch "asking for more than usize::MAX" in that case. - // If we make it past the first branch then we are guaranteed to - // panic. - - // Don't actually need any more capacity. - // Wrapping in case they gave a bad `used_capacity`. - if self.capacity().wrapping_sub(used_capacity) >= needed_extra_capacity { - return Ok(()); + let allocation = if let Some(old_layout) = self.current_layout() { + debug_assert!(old_layout.align() == new_layout.align()); + unsafe { + self.a.grow(self.ptr.cast().into(), old_layout, new_layout.size(), placement, init) } + } else { + match placement { + MayMove => self.a.alloc(new_layout, init), + InPlace => Err(AllocErr), + } + }; - // Nothing we can really do about these checks, sadly. - let new_cap = match strategy { - Exact => { - used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)? 
- } - Amortized => self.amortized_new_size(used_capacity, needed_extra_capacity)?, - }; - let new_layout = Layout::array::(new_cap).map_err(|_| CapacityOverflow)?; + allocation + .map(|(ptr, excess)| { + self.ptr = ptr.cast().into(); + self.cap = Self::capacity_from_bytes(excess); + }) + .map_err(|_| TryReserveError::AllocError { layout: new_layout, non_exhaustive: () }) + } - alloc_guard(new_layout.size())?; + fn shrink( + &mut self, + amount: usize, + placement: ReallocPlacement, + ) -> Result<(), TryReserveError> { + assert!(amount <= self.cap, "Tried to shrink to a larger capacity"); - let res = match self.current_layout() { - Some(layout) => { - debug_assert!(new_layout.align() == layout.align()); - self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size()) - } - None => self.a.alloc(new_layout), - }; - - let (ptr, new_cap) = match (res, fallibility) { - (Err(AllocErr), Infallible) => handle_alloc_error(new_layout), - (Err(AllocErr), Fallible) => { - return Err(TryReserveError::AllocError { - layout: new_layout, - non_exhaustive: (), - }); - } - (Ok((ptr, new_size)), _) => (ptr, new_size / elem_size), - }; + let elem_size = mem::size_of::(); + let old_layout = + if let Some(layout) = self.current_layout() { layout } else { return Ok(()) }; + let old_ptr = self.ptr.cast().into(); + let new_size = amount * elem_size; - self.ptr = ptr.cast().into(); - self.cap = new_cap; + let allocation = unsafe { + if amount == 0 && placement == MayMove { + self.dealloc_buffer(); + Ok((old_layout.dangling(), 0)) + } else { + self.a.shrink(old_ptr, old_layout, new_size, placement) + } + }; - Ok(()) - } + allocation + .map(|(ptr, excess)| { + self.ptr = ptr.cast().into(); + self.cap = Self::capacity_from_bytes(excess); + }) + .map_err(|_| TryReserveError::AllocError { + layout: unsafe { Layout::from_size_align_unchecked(new_size, old_layout.align()) }, + non_exhaustive: (), + }) } } @@ -689,29 +570,24 @@ impl RawVec { /// /// Note that this will correctly reconstitute any `cap` changes /// that may have been performed. (See description of type for details.) - /// - /// # Undefined Behavior - /// - /// All elements of `RawVec` must be initialized. Notice that - /// the rules around uninitialized boxed values are not finalized yet, - /// but until they are, it is advisable to avoid them. - pub unsafe fn into_box(self) -> Box<[T]> { - // NOTE: not calling `capacity()` here; actually using the real `cap` field! - let slice = slice::from_raw_parts_mut(self.ptr(), self.cap); - let output: Box<[T]> = Box::from_raw(slice); - mem::forget(self); - output + pub fn into_box(self) -> Box<[MaybeUninit]> { + unsafe { + // NOTE: not calling `capacity()` here; actually using the real `cap` field! + let slice = slice::from_raw_parts_mut(self.ptr() as *mut MaybeUninit, self.cap); + let output = Box::from_raw(slice); + mem::forget(self); + output + } } } impl RawVec { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. pub unsafe fn dealloc_buffer(&mut self) { - let elem_size = mem::size_of::(); - if elem_size != 0 { - if let Some(layout) = self.current_layout() { - self.a.dealloc(NonNull::from(self.ptr).cast(), layout); - } + if let Some(layout) = self.current_layout() { + self.a.dealloc(self.ptr.cast().into(), layout); + self.ptr = Unique::empty(); + self.cap = 0; } } } @@ -719,9 +595,7 @@ impl RawVec { unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. 
     fn drop(&mut self) {
-        unsafe {
-            self.dealloc_buffer();
-        }
+        unsafe { self.dealloc_buffer() }
     }
 }
 
diff --git a/src/liballoc/raw_vec/tests.rs b/src/liballoc/raw_vec/tests.rs
index 21a8a76d0a75b..a2d6cc63c92f4 100644
--- a/src/liballoc/raw_vec/tests.rs
+++ b/src/liballoc/raw_vec/tests.rs
@@ -1,4 +1,5 @@
 use super::*;
+use core::ptr::NonNull;
 
 #[test]
 fn allocator_param() {
@@ -20,12 +21,16 @@ fn allocator_param() {
         fuel: usize,
     }
     unsafe impl AllocRef for BoundedAlloc {
-        fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
+        fn alloc(
+            &mut self,
+            layout: Layout,
+            init: AllocInit,
+        ) -> Result<(NonNull<u8>, usize), AllocErr> {
             let size = layout.size();
             if size > self.fuel {
                 return Err(AllocErr);
             }
-            match Global.alloc(layout) {
+            match Global.alloc(layout, init) {
                 ok @ Ok(_) => {
                     self.fuel -= size;
                     ok
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index e7f7608e676a2..495e196df40a2 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -252,7 +252,7 @@ use core::ptr::{self, NonNull};
 use core::slice::{self, from_raw_parts_mut};
 use core::usize;
 
-use crate::alloc::{box_free, handle_alloc_error, AllocRef, Global, Layout};
+use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
 use crate::string::String;
 use crate::vec::Vec;
 
@@ -936,7 +936,9 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
         // Allocate for the layout.
-        let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let (mem, _) = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the RcBox
         let inner = mem_to_rcbox(mem.as_ptr());
diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs
index e8985e202567b..048c89d12809e 100644
--- a/src/liballoc/sync.rs
+++ b/src/liballoc/sync.rs
@@ -25,7 +25,7 @@ use core::sync::atomic;
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::{isize, usize};
 
-use crate::alloc::{box_free, handle_alloc_error, AllocRef, Global, Layout};
+use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
 use crate::boxed::Box;
 use crate::rc::is_dangling;
 use crate::string::String;
@@ -814,7 +814,9 @@ impl<T: ?Sized> Arc<T> {
         // reference (see #54908).
         let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
-        let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let (mem, _) = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the ArcInner
         let inner = mem_to_arcinner(mem.as_ptr());
diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs
index d159126f426c5..690ae84a5df68 100644
--- a/src/liballoc/tests/heap.rs
+++ b/src/liballoc/tests/heap.rs
@@ -1,4 +1,4 @@
-use std::alloc::{AllocRef, Global, Layout, System};
+use std::alloc::{AllocInit, AllocRef, Global, Layout, System};
 
 /// Issue #45955 and #62251.
#[test] @@ -20,7 +20,13 @@ fn check_overalign_requests(mut allocator: T) { unsafe { let pointers: Vec<_> = (0..iterations) .map(|_| { - allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap().0 + allocator + .alloc( + Layout::from_size_align(size, align).unwrap(), + AllocInit::Uninitialized, + ) + .unwrap() + .0 }) .collect(); for &ptr in &pointers { diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index 4769091183a37..528a4f732934e 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -679,7 +679,7 @@ impl Vec { self.shrink_to_fit(); let buf = ptr::read(&self.buf); mem::forget(self); - buf.into_box() + buf.into_box().assume_init() } } diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs deleted file mode 100644 index be20a1cde3694..0000000000000 --- a/src/libcore/alloc.rs +++ /dev/null @@ -1,1043 +0,0 @@ -//! Memory allocation APIs - -// ignore-tidy-undocumented-unsafe - -#![stable(feature = "alloc_module", since = "1.28.0")] - -use crate::cmp; -use crate::fmt; -use crate::mem; -use crate::num::NonZeroUsize; -use crate::ptr::{self, NonNull}; -use crate::usize; - -const fn size_align() -> (usize, usize) { - (mem::size_of::(), mem::align_of::()) -} - -/// Layout of a block of memory. -/// -/// An instance of `Layout` describes a particular layout of memory. -/// You build a `Layout` up as an input to give to an allocator. -/// -/// All layouts have an associated non-negative size and a -/// power-of-two alignment. -/// -/// (Note however that layouts are *not* required to have positive -/// size, even though many allocators require that all memory -/// requests have positive size. A caller to the `AllocRef::alloc` -/// method must either ensure that conditions like this are met, or -/// use specific allocators with looser requirements.) -#[stable(feature = "alloc_layout", since = "1.28.0")] -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -#[lang = "alloc_layout"] -pub struct Layout { - // size of the requested block of memory, measured in bytes. - size_: usize, - - // alignment of the requested block of memory, measured in bytes. - // we ensure that this is always a power-of-two, because API's - // like `posix_memalign` require it and it is a reasonable - // constraint to impose on Layout constructors. - // - // (However, we do not analogously require `align >= sizeof(void*)`, - // even though that is *also* a requirement of `posix_memalign`.) - align_: NonZeroUsize, -} - -impl Layout { - /// Constructs a `Layout` from a given `size` and `align`, - /// or returns `LayoutErr` if any of the following conditions - /// are not met: - /// - /// * `align` must not be zero, - /// - /// * `align` must be a power of two, - /// - /// * `size`, when rounded up to the nearest multiple of `align`, - /// must not overflow (i.e., the rounded value must be less than - /// `usize::MAX`). - #[stable(feature = "alloc_layout", since = "1.28.0")] - #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")] - #[inline] - pub const fn from_size_align(size: usize, align: usize) -> Result { - if !align.is_power_of_two() { - return Err(LayoutErr { private: () }); - } - - // (power-of-two implies align != 0.) - - // Rounded up size is: - // size_rounded_up = (size + align - 1) & !(align - 1); - // - // We know from above that align != 0. If adding (align - 1) - // does not overflow, then rounding up will be fine. - // - // Conversely, &-masking with !(align - 1) will subtract off - // only low-order-bits. 
Thus if overflow occurs with the sum,
-        // the &-mask cannot subtract enough to undo that overflow.
-        //
-        // Above implies that checking for summation overflow is both
-        // necessary and sufficient.
-        if size > usize::MAX - (align - 1) {
-            return Err(LayoutErr { private: () });
-        }
-
-        unsafe { Ok(Layout::from_size_align_unchecked(size, align)) }
-    }
-
-    /// Creates a layout, bypassing all checks.
-    ///
-    /// # Safety
-    ///
-    /// This function is unsafe as it does not verify the preconditions from
-    /// [`Layout::from_size_align`](#method.from_size_align).
-    #[stable(feature = "alloc_layout", since = "1.28.0")]
-    #[rustc_const_stable(feature = "alloc_layout", since = "1.28.0")]
-    #[inline]
-    pub const unsafe fn from_size_align_unchecked(size: usize, align: usize) -> Self {
-        Layout { size_: size, align_: NonZeroUsize::new_unchecked(align) }
-    }
-
-    /// The minimum size in bytes for a memory block of this layout.
-    #[stable(feature = "alloc_layout", since = "1.28.0")]
-    #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
-    #[inline]
-    pub const fn size(&self) -> usize {
-        self.size_
-    }
-
-    /// The minimum byte alignment for a memory block of this layout.
-    #[stable(feature = "alloc_layout", since = "1.28.0")]
-    #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
-    #[inline]
-    pub const fn align(&self) -> usize {
-        self.align_.get()
-    }
-
-    /// Constructs a `Layout` suitable for holding a value of type `T`.
-    #[stable(feature = "alloc_layout", since = "1.28.0")]
-    #[rustc_const_stable(feature = "alloc_layout_const_new", since = "1.42.0")]
-    #[inline]
-    pub const fn new<T>() -> Self {
-        let (size, align) = size_align::<T>();
-        // Note that the align is guaranteed by rustc to be a power of two and
-        // the size+align combo is guaranteed to fit in our address space. As a
-        // result use the unchecked constructor here to avoid inserting code
-        // that panics if it isn't optimized well enough.
-        unsafe { Layout::from_size_align_unchecked(size, align) }
-    }
-
-    /// Produces layout describing a record that could be used to
-    /// allocate backing structure for `T` (which could be a trait
-    /// or other unsized type like a slice).
-    #[stable(feature = "alloc_layout", since = "1.28.0")]
-    #[inline]
-    pub fn for_value<T: ?Sized>(t: &T) -> Self {
-        let (size, align) = (mem::size_of_val(t), mem::align_of_val(t));
-        // See rationale in `new` for why this is using an unsafe variant below
-        debug_assert!(Layout::from_size_align(size, align).is_ok());
-        unsafe { Layout::from_size_align_unchecked(size, align) }
-    }
-
-    /// Produces layout describing a record that could be used to
-    /// allocate backing structure for `T` (which could be a trait
-    /// or other unsized type like a slice).
-    ///
-    /// # Safety
-    ///
-    /// This function is only safe to call if the following conditions hold:
-    ///
-    /// - If `T` is `Sized`, this function is always safe to call.
-    /// - If the unsized tail of `T` is:
-    ///     - a [slice], then the length of the slice tail must be an initialized
-    ///       integer, and the size of the *entire value*
-    ///       (dynamic tail length + statically sized prefix) must fit in `isize`.
-    ///     - a [trait object], then the vtable part of the pointer must point
-    ///       to a valid vtable acquired by an unsizing coercion, and the size
-    ///       of the *entire value* (dynamic tail length + statically sized prefix)
-    ///       must fit in `isize`.
- /// - an (unstable) [extern type], then this function is always safe to - /// call, but may panic or otherwise return the wrong value, as the - /// extern type's layout is not known. This is the same behavior as - /// [`Layout::for_value`] on a reference to an extern type tail. - /// - otherwise, it is conservatively not allowed to call this function. - /// - /// [slice]: ../../std/primitive.slice.html - /// [trait object]: ../../book/ch17-02-trait-objects.html - /// [extern type]: ../../unstable-book/language-features/extern-types.html - #[inline] - #[cfg(not(bootstrap))] - #[unstable(feature = "layout_for_ptr", issue = "69835")] - pub unsafe fn for_value_raw(t: *const T) -> Self { - let (size, align) = (mem::size_of_val_raw(t), mem::align_of_val_raw(t)); - // See rationale in `new` for why this is using an unsafe variant below - debug_assert!(Layout::from_size_align(size, align).is_ok()); - Layout::from_size_align_unchecked(size, align) - } - - /// Creates a `NonNull` that is dangling, but well-aligned for this Layout. - /// - /// Note that the pointer value may potentially represent a valid pointer, - /// which means this must not be used as a "not yet initialized" - /// sentinel value. Types that lazily allocate must track initialization by - /// some other means. - #[unstable(feature = "alloc_layout_extra", issue = "55724")] - pub const fn dangling(&self) -> NonNull { - // align is non-zero and a power of two - unsafe { NonNull::new_unchecked(self.align() as *mut u8) } - } - - /// Creates a layout describing the record that can hold a value - /// of the same layout as `self`, but that also is aligned to - /// alignment `align` (measured in bytes). - /// - /// If `self` already meets the prescribed alignment, then returns - /// `self`. - /// - /// Note that this method does not add any padding to the overall - /// size, regardless of whether the returned layout has a different - /// alignment. In other words, if `K` has size 16, `K.align_to(32)` - /// will *still* have size 16. - /// - /// Returns an error if the combination of `self.size()` and the given - /// `align` violates the conditions listed in - /// [`Layout::from_size_align`](#method.from_size_align). - #[unstable(feature = "alloc_layout_extra", issue = "55724")] - #[inline] - pub fn align_to(&self, align: usize) -> Result { - Layout::from_size_align(self.size(), cmp::max(self.align(), align)) - } - - /// Returns the amount of padding we must insert after `self` - /// to ensure that the following address will satisfy `align` - /// (measured in bytes). - /// - /// e.g., if `self.size()` is 9, then `self.padding_needed_for(4)` - /// returns 3, because that is the minimum number of bytes of - /// padding required to get a 4-aligned address (assuming that the - /// corresponding memory block starts at a 4-aligned address). - /// - /// The return value of this function has no meaning if `align` is - /// not a power-of-two. - /// - /// Note that the utility of the returned value requires `align` - /// to be less than or equal to the alignment of the starting - /// address for the whole allocated block of memory. One way to - /// satisfy this constraint is to ensure `align <= self.align()`. 
- #[unstable(feature = "alloc_layout_extra", issue = "55724")] - #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")] - #[inline] - pub const fn padding_needed_for(&self, align: usize) -> usize { - let len = self.size(); - - // Rounded up value is: - // len_rounded_up = (len + align - 1) & !(align - 1); - // and then we return the padding difference: `len_rounded_up - len`. - // - // We use modular arithmetic throughout: - // - // 1. align is guaranteed to be > 0, so align - 1 is always - // valid. - // - // 2. `len + align - 1` can overflow by at most `align - 1`, - // so the &-mask with `!(align - 1)` will ensure that in the - // case of overflow, `len_rounded_up` will itself be 0. - // Thus the returned padding, when added to `len`, yields 0, - // which trivially satisfies the alignment `align`. - // - // (Of course, attempts to allocate blocks of memory whose - // size and padding overflow in the above manner should cause - // the allocator to yield an error anyway.) - - let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1); - len_rounded_up.wrapping_sub(len) - } - - /// Creates a layout by rounding the size of this layout up to a multiple - /// of the layout's alignment. - /// - /// This is equivalent to adding the result of `padding_needed_for` - /// to the layout's current size. - #[unstable(feature = "alloc_layout_extra", issue = "55724")] - #[inline] - pub fn pad_to_align(&self) -> Layout { - let pad = self.padding_needed_for(self.align()); - // This cannot overflow. Quoting from the invariant of Layout: - // > `size`, when rounded up to the nearest multiple of `align`, - // > must not overflow (i.e., the rounded value must be less than - // > `usize::MAX`) - let new_size = self.size() + pad; - - Layout::from_size_align(new_size, self.align()).unwrap() - } - - /// Creates a layout describing the record for `n` instances of - /// `self`, with a suitable amount of padding between each to - /// ensure that each instance is given its requested size and - /// alignment. On success, returns `(k, offs)` where `k` is the - /// layout of the array and `offs` is the distance between the start - /// of each element in the array. - /// - /// On arithmetic overflow, returns `LayoutErr`. - #[unstable(feature = "alloc_layout_extra", issue = "55724")] - #[inline] - pub fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutErr> { - // This cannot overflow. Quoting from the invariant of Layout: - // > `size`, when rounded up to the nearest multiple of `align`, - // > must not overflow (i.e., the rounded value must be less than - // > `usize::MAX`) - let padded_size = self.size() + self.padding_needed_for(self.align()); - let alloc_size = padded_size.checked_mul(n).ok_or(LayoutErr { private: () })?; - - unsafe { - // self.align is already known to be valid and alloc_size has been - // padded already. - Ok((Layout::from_size_align_unchecked(alloc_size, self.align()), padded_size)) - } - } - - /// Creates a layout describing the record for `self` followed by - /// `next`, including any necessary padding to ensure that `next` - /// will be properly aligned. Note that the resulting layout will - /// satisfy the alignment properties of both `self` and `next`. - /// - /// The resulting layout will be the same as that of a C struct containing - /// two fields with the layouts of `self` and `next`, in that order. 
- /// - /// Returns `Some((k, offset))`, where `k` is layout of the concatenated - /// record and `offset` is the relative location, in bytes, of the - /// start of the `next` embedded within the concatenated record - /// (assuming that the record itself starts at offset 0). - /// - /// On arithmetic overflow, returns `LayoutErr`. - #[unstable(feature = "alloc_layout_extra", issue = "55724")] - #[inline] - pub fn extend(&self, next: Self) -> Result<(Self, usize), LayoutErr> { - let new_align = cmp::max(self.align(), next.align()); - let pad = self.padding_needed_for(next.align()); - - let offset = self.size().checked_add(pad).ok_or(LayoutErr { private: () })?; - let new_size = offset.checked_add(next.size()).ok_or(LayoutErr { private: () })?; - - let layout = Layout::from_size_align(new_size, new_align)?; - Ok((layout, offset)) - } - - /// Creates a layout describing the record for `n` instances of - /// `self`, with no padding between each instance. - /// - /// Note that, unlike `repeat`, `repeat_packed` does not guarantee - /// that the repeated instances of `self` will be properly - /// aligned, even if a given instance of `self` is properly - /// aligned. In other words, if the layout returned by - /// `repeat_packed` is used to allocate an array, it is not - /// guaranteed that all elements in the array will be properly - /// aligned. - /// - /// On arithmetic overflow, returns `LayoutErr`. - #[unstable(feature = "alloc_layout_extra", issue = "55724")] - #[inline] - pub fn repeat_packed(&self, n: usize) -> Result { - let size = self.size().checked_mul(n).ok_or(LayoutErr { private: () })?; - Layout::from_size_align(size, self.align()) - } - - /// Creates a layout describing the record for `self` followed by - /// `next` with no additional padding between the two. Since no - /// padding is inserted, the alignment of `next` is irrelevant, - /// and is not incorporated *at all* into the resulting layout. - /// - /// On arithmetic overflow, returns `LayoutErr`. - #[unstable(feature = "alloc_layout_extra", issue = "55724")] - #[inline] - pub fn extend_packed(&self, next: Self) -> Result { - let new_size = self.size().checked_add(next.size()).ok_or(LayoutErr { private: () })?; - Layout::from_size_align(new_size, self.align()) - } - - /// Creates a layout describing the record for a `[T; n]`. - /// - /// On arithmetic overflow, returns `LayoutErr`. - #[unstable(feature = "alloc_layout_extra", issue = "55724")] - #[inline] - pub fn array(n: usize) -> Result { - Layout::new::().repeat(n).map(|(k, offs)| { - debug_assert!(offs == mem::size_of::()); - k - }) - } -} - -/// The parameters given to `Layout::from_size_align` -/// or some other `Layout` constructor -/// do not satisfy its documented constraints. -#[stable(feature = "alloc_layout", since = "1.28.0")] -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct LayoutErr { - private: (), -} - -// (we need this for downstream impl of trait Error) -#[stable(feature = "alloc_layout", since = "1.28.0")] -impl fmt::Display for LayoutErr { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("invalid parameters to Layout::from_size_align") - } -} - -/// The `AllocErr` error indicates an allocation failure -/// that may be due to resource exhaustion or to -/// something wrong when combining the given input arguments with this -/// allocator. 
-#[unstable(feature = "allocator_api", issue = "32838")] -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct AllocErr; - -// (we need this for downstream impl of trait Error) -#[unstable(feature = "allocator_api", issue = "32838")] -impl fmt::Display for AllocErr { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("memory allocation failed") - } -} - -/// The `CannotReallocInPlace` error is used when [`grow_in_place`] or -/// [`shrink_in_place`] were unable to reuse the given memory block for -/// a requested layout. -/// -/// [`grow_in_place`]: ./trait.AllocRef.html#method.grow_in_place -/// [`shrink_in_place`]: ./trait.AllocRef.html#method.shrink_in_place -#[unstable(feature = "allocator_api", issue = "32838")] -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct CannotReallocInPlace; - -#[unstable(feature = "allocator_api", issue = "32838")] -impl CannotReallocInPlace { - pub fn description(&self) -> &str { - "cannot reallocate allocator's memory in place" - } -} - -// (we need this for downstream impl of trait Error) -#[unstable(feature = "allocator_api", issue = "32838")] -impl fmt::Display for CannotReallocInPlace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.description()) - } -} - -/// A memory allocator that can be registered as the standard library’s default -/// through the `#[global_allocator]` attribute. -/// -/// Some of the methods require that a memory block be *currently -/// allocated* via an allocator. This means that: -/// -/// * the starting address for that memory block was previously -/// returned by a previous call to an allocation method -/// such as `alloc`, and -/// -/// * the memory block has not been subsequently deallocated, where -/// blocks are deallocated either by being passed to a deallocation -/// method such as `dealloc` or by being -/// passed to a reallocation method that returns a non-null pointer. -/// -/// -/// # Example -/// -/// ```no_run -/// use std::alloc::{GlobalAlloc, Layout, alloc}; -/// use std::ptr::null_mut; -/// -/// struct MyAllocator; -/// -/// unsafe impl GlobalAlloc for MyAllocator { -/// unsafe fn alloc(&self, _layout: Layout) -> *mut u8 { null_mut() } -/// unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {} -/// } -/// -/// #[global_allocator] -/// static A: MyAllocator = MyAllocator; -/// -/// fn main() { -/// unsafe { -/// assert!(alloc(Layout::new::()).is_null()) -/// } -/// } -/// ``` -/// -/// # Safety -/// -/// The `GlobalAlloc` trait is an `unsafe` trait for a number of reasons, and -/// implementors must ensure that they adhere to these contracts: -/// -/// * It's undefined behavior if global allocators unwind. This restriction may -/// be lifted in the future, but currently a panic from any of these -/// functions may lead to memory unsafety. -/// -/// * `Layout` queries and calculations in general must be correct. Callers of -/// this trait are allowed to rely on the contracts defined on each method, -/// and implementors must ensure such contracts remain true. -#[stable(feature = "global_alloc", since = "1.28.0")] -pub unsafe trait GlobalAlloc { - /// Allocate memory as described by the given `layout`. - /// - /// Returns a pointer to newly-allocated memory, - /// or null to indicate allocation failure. - /// - /// # Safety - /// - /// This function is unsafe because undefined behavior can result - /// if the caller does not ensure that `layout` has non-zero size. 
- /// - /// (Extension subtraits might provide more specific bounds on - /// behavior, e.g., guarantee a sentinel address or a null pointer - /// in response to a zero-size allocation request.) - /// - /// The allocated block of memory may or may not be initialized. - /// - /// # Errors - /// - /// Returning a null pointer indicates that either memory is exhausted - /// or `layout` does not meet this allocator's size or alignment constraints. - /// - /// Implementations are encouraged to return null on memory - /// exhaustion rather than aborting, but this is not - /// a strict requirement. (Specifically: it is *legal* to - /// implement this trait atop an underlying native allocation - /// library that aborts on memory exhaustion.) - /// - /// Clients wishing to abort computation in response to an - /// allocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - #[stable(feature = "global_alloc", since = "1.28.0")] - unsafe fn alloc(&self, layout: Layout) -> *mut u8; - - /// Deallocate the block of memory at the given `ptr` pointer with the given `layout`. - /// - /// # Safety - /// - /// This function is unsafe because undefined behavior can result - /// if the caller does not ensure all of the following: - /// - /// * `ptr` must denote a block of memory currently allocated via - /// this allocator, - /// - /// * `layout` must be the same layout that was used - /// to allocate that block of memory, - #[stable(feature = "global_alloc", since = "1.28.0")] - unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout); - - /// Behaves like `alloc`, but also ensures that the contents - /// are set to zero before being returned. - /// - /// # Safety - /// - /// This function is unsafe for the same reasons that `alloc` is. - /// However the allocated block of memory is guaranteed to be initialized. - /// - /// # Errors - /// - /// Returning a null pointer indicates that either memory is exhausted - /// or `layout` does not meet allocator's size or alignment constraints, - /// just as in `alloc`. - /// - /// Clients wishing to abort computation in response to an - /// allocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - #[stable(feature = "global_alloc", since = "1.28.0")] - unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { - let size = layout.size(); - let ptr = self.alloc(layout); - if !ptr.is_null() { - ptr::write_bytes(ptr, 0, size); - } - ptr - } - - /// Shrink or grow a block of memory to the given `new_size`. - /// The block is described by the given `ptr` pointer and `layout`. - /// - /// If this returns a non-null pointer, then ownership of the memory block - /// referenced by `ptr` has been transferred to this allocator. - /// The memory may or may not have been deallocated, - /// and should be considered unusable (unless of course it was - /// transferred back to the caller again via the return value of - /// this method). The new memory block is allocated with `layout`, but - /// with the `size` updated to `new_size`. - /// - /// If this method returns null, then ownership of the memory - /// block has not been transferred to this allocator, and the - /// contents of the memory block are unaltered. 
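The `GlobalAlloc` contract documented in the lines deleted above moves verbatim into the new `src/libcore/alloc/global.rs` later in this patch. As a usage sketch of that contract, here is a minimal global allocator that forwards to `System` and tracks live heap bytes; `Counting` and `LIVE_BYTES` are illustrative names, not part of the patch:

```rust
use std::alloc::{GlobalAlloc, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering};

/// Forwards to `System` and tracks the number of live heap bytes.
struct Counting;

static LIVE_BYTES: AtomicUsize = AtomicUsize::new(0);

unsafe impl GlobalAlloc for Counting {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let ptr = System.alloc(layout);
        if !ptr.is_null() {
            LIVE_BYTES.fetch_add(layout.size(), Ordering::Relaxed);
        }
        ptr
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout);
        LIVE_BYTES.fetch_sub(layout.size(), Ordering::Relaxed);
    }
}

#[global_allocator]
static A: Counting = Counting;

fn main() {
    let v = vec![0u8; 1024];
    assert!(LIVE_BYTES.load(Ordering::Relaxed) >= 1024);
    drop(v);
}
```

Note that the default `realloc` and `alloc_zeroed` route through `alloc`/`dealloc`, so the byte accounting above stays consistent without overriding them.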
- /// - /// # Safety - /// - /// This function is unsafe because undefined behavior can result - /// if the caller does not ensure all of the following: - /// - /// * `ptr` must be currently allocated via this allocator, - /// - /// * `layout` must be the same layout that was used - /// to allocate that block of memory, - /// - /// * `new_size` must be greater than zero. - /// - /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, - /// must not overflow (i.e., the rounded value must be less than `usize::MAX`). - /// - /// (Extension subtraits might provide more specific bounds on - /// behavior, e.g., guarantee a sentinel address or a null pointer - /// in response to a zero-size allocation request.) - /// - /// # Errors - /// - /// Returns null if the new layout does not meet the size - /// and alignment constraints of the allocator, or if reallocation - /// otherwise fails. - /// - /// Implementations are encouraged to return null on memory - /// exhaustion rather than panicking or aborting, but this is not - /// a strict requirement. (Specifically: it is *legal* to - /// implement this trait atop an underlying native allocation - /// library that aborts on memory exhaustion.) - /// - /// Clients wishing to abort computation in response to a - /// reallocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - #[stable(feature = "global_alloc", since = "1.28.0")] - unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - let new_ptr = self.alloc(new_layout); - if !new_ptr.is_null() { - ptr::copy_nonoverlapping(ptr, new_ptr, cmp::min(layout.size(), new_size)); - self.dealloc(ptr, layout); - } - new_ptr - } -} - -/// An implementation of `AllocRef` can allocate, reallocate, and -/// deallocate arbitrary blocks of data described via `Layout`. -/// -/// `AllocRef` is designed to be implemented on ZSTs, references, or -/// smart pointers because having an allocator like `MyAlloc([u8; N])` -/// cannot be moved, without updating the pointers to the allocated -/// memory. -/// -/// Some of the methods require that a memory block be *currently -/// allocated* via an allocator. This means that: -/// -/// * the starting address for that memory block was previously -/// returned by a previous call to an allocation method (`alloc`, -/// `alloc_zeroed`) or reallocation method (`realloc`), and -/// -/// * the memory block has not been subsequently deallocated, where -/// blocks are deallocated either by being passed to a deallocation -/// method (`dealloc`) or by being passed to a reallocation method -/// (see above) that returns `Ok`. -/// -/// Unlike [`GlobalAlloc`], zero-sized allocations are allowed in -/// `AllocRef`. If an underlying allocator does not support this (like -/// jemalloc) or return a null pointer (such as `libc::malloc`), this case -/// must be caught. In this case [`Layout::dangling()`] can be used to -/// create a dangling, but aligned `NonNull`. -/// -/// Some of the methods require that a layout *fit* a memory block. -/// What it means for a layout to "fit" a memory block means (or -/// equivalently, for a memory block to "fit" a layout) is that the -/// following two conditions must hold: -/// -/// 1. The block's starting address must be aligned to `layout.align()`. -/// -/// 2. 
The block's size must fall in the range `[use_min, use_max]`, where: -/// -/// * `use_min` is `layout.size()`, and -/// -/// * `use_max` is the capacity that was returned. -/// -/// Note that: -/// -/// * the size of the layout most recently used to allocate the block -/// is guaranteed to be in the range `[use_min, use_max]`, and -/// -/// * a lower-bound on `use_max` can be safely approximated by a call to -/// `usable_size`. -/// -/// * if a layout `k` fits a memory block (denoted by `ptr`) -/// currently allocated via an allocator `a`, then it is legal to -/// use that layout to deallocate it, i.e., `a.dealloc(ptr, k);`. -/// -/// * if an allocator does not support overallocating, it is fine to -/// simply return `layout.size()` as the allocated size. -/// -/// [`GlobalAlloc`]: self::GlobalAlloc -/// [`Layout::dangling()`]: self::Layout::dangling -/// -/// # Safety -/// -/// The `AllocRef` trait is an `unsafe` trait for a number of reasons, and -/// implementors must ensure that they adhere to these contracts: -/// -/// * Pointers returned from allocation functions must point to valid memory and -/// retain their validity until at least one instance of `AllocRef` is dropped -/// itself. -/// -/// * Cloning or moving the allocator must not invalidate pointers returned -/// from this allocator. Cloning must return a reference to the same allocator. -/// -/// * `Layout` queries and calculations in general must be correct. Callers of -/// this trait are allowed to rely on the contracts defined on each method, -/// and implementors must ensure such contracts remain true. -/// -/// Note that this list may get tweaked over time as clarifications are made in -/// the future. -#[unstable(feature = "allocator_api", issue = "32838")] -pub unsafe trait AllocRef { - /// On success, returns a pointer meeting the size and alignment - /// guarantees of `layout` and the actual size of the allocated block, - /// which must be greater than or equal to `layout.size()`. - /// - /// If this method returns an `Ok(addr)`, then the `addr` returned - /// will be non-null address pointing to a block of storage - /// suitable for holding an instance of `layout`. - /// - /// The returned block of storage may or may not have its contents - /// initialized. (Extension subtraits might restrict this - /// behavior, e.g., to ensure initialization to particular sets of - /// bit patterns.) - /// - /// # Errors - /// - /// Returning `Err` indicates that either memory is exhausted or - /// `layout` does not meet allocator's size or alignment - /// constraints. - /// - /// Implementations are encouraged to return `Err` on memory - /// exhaustion rather than panicking or aborting, but this is not - /// a strict requirement. (Specifically: it is *legal* to - /// implement this trait atop an underlying native allocation - /// library that aborts on memory exhaustion.) - /// - /// Clients wishing to abort computation in response to an - /// allocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - fn alloc(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr>; - - /// Deallocate the memory referenced by `ptr`. 
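A usage sketch of the pre-patch `alloc`/`dealloc` contract being deleted here, including the "fit" rule that lets a caller deallocate with the actual returned size; this compiles only on a nightly from just before this patch:

```rust
#![feature(allocator_api)]
use std::alloc::{AllocRef, Global, Layout};

fn main() {
    let layout = Layout::from_size_align(24, 8).unwrap();
    unsafe {
        let (ptr, usable) = Global.alloc(layout).expect("allocation failed");
        assert!(usable >= 24);
        // Any layout with align 8 and a size in `24..=usable` fits this
        // block, so deallocating with the actual size is legal.
        Global.dealloc(ptr, Layout::from_size_align(usable, 8).unwrap());
    }
}
```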
- /// - /// # Safety - /// - /// This function is unsafe because undefined behavior can result - /// if the caller does not ensure all of the following: - /// - /// * `ptr` must denote a block of memory currently allocated via - /// this allocator, - /// - /// * `layout` must *fit* that block of memory, - /// - /// * In addition to fitting the block of memory `layout`, the - /// alignment of the `layout` must match the alignment used - /// to allocate that block of memory. - unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout); - - /// Behaves like `alloc`, but also ensures that the contents - /// are set to zero before being returned. - /// - /// # Errors - /// - /// Returning `Err` indicates that either memory is exhausted or - /// `layout` does not meet allocator's size or alignment - /// constraints, just as in `alloc`. - /// - /// Clients wishing to abort computation in response to an - /// allocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr> { - let size = layout.size(); - let result = self.alloc(layout); - if let Ok((p, _)) = result { - unsafe { ptr::write_bytes(p.as_ptr(), 0, size) } - } - result - } - - // == METHODS FOR MEMORY REUSE == - // realloc, realloc_zeroed, grow_in_place, grow_in_place_zeroed, shrink_in_place - - /// Returns a pointer suitable for holding data described by - /// a new layout with `layout`’s alignment and a size given - /// by `new_size` and the actual size of the allocated block. - /// The latter is greater than or equal to `layout.size()`. - /// To accomplish this, the allocator may extend or shrink - /// the allocation referenced by `ptr` to fit the new layout. - /// - /// If this returns `Ok`, then ownership of the memory block - /// referenced by `ptr` has been transferred to this - /// allocator. The memory may or may not have been freed, and - /// should be considered unusable (unless of course it was - /// transferred back to the caller again via the return value of - /// this method). - /// - /// If this method returns `Err`, then ownership of the memory - /// block has not been transferred to this allocator, and the - /// contents of the memory block are unaltered. - /// - /// # Safety - /// - /// This function is unsafe because undefined behavior can result - /// if the caller does not ensure all of the following: - /// - /// * `ptr` must be currently allocated via this allocator, - /// - /// * `layout` must *fit* the `ptr` (see above). (The `new_size` - /// argument need not fit it.) - /// - /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, - /// must not overflow (i.e., the rounded value must be less than `usize::MAX`). - /// - /// (Extension subtraits might provide more specific bounds on - /// behavior, e.g., guarantee a sentinel address or a null pointer - /// in response to a zero-size allocation request.) - /// - /// # Errors - /// - /// Returns `Err` only if the new layout - /// does not meet the allocator's size - /// and alignment constraints of the allocator, or if reallocation - /// otherwise fails. - /// - /// Implementations are encouraged to return `Err` on memory - /// exhaustion rather than panicking or aborting, but this is not - /// a strict requirement. 
(Specifically: it is *legal* to - /// implement this trait atop an underlying native allocation - /// library that aborts on memory exhaustion.) - /// - /// Clients wishing to abort computation in response to a - /// reallocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn realloc( - &mut self, - ptr: NonNull, - layout: Layout, - new_size: usize, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); - - if new_size > old_size { - if let Ok(size) = self.grow_in_place(ptr, layout, new_size) { - return Ok((ptr, size)); - } - } else if new_size < old_size { - if let Ok(size) = self.shrink_in_place(ptr, layout, new_size) { - return Ok((ptr, size)); - } - } else { - return Ok((ptr, new_size)); - } - - // otherwise, fall back on alloc + copy + dealloc. - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - let result = self.alloc(new_layout); - if let Ok((new_ptr, _)) = result { - ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size)); - self.dealloc(ptr, layout); - } - result - } - - /// Behaves like `realloc`, but also ensures that the new contents - /// are set to zero before being returned. - /// - /// # Safety - /// - /// This function is unsafe for the same reasons that `realloc` is. - /// - /// # Errors - /// - /// Returns `Err` only if the new layout - /// does not meet the allocator's size - /// and alignment constraints of the allocator, or if reallocation - /// otherwise fails. - /// - /// Implementations are encouraged to return `Err` on memory - /// exhaustion rather than panicking or aborting, but this is not - /// a strict requirement. (Specifically: it is *legal* to - /// implement this trait atop an underlying native allocation - /// library that aborts on memory exhaustion.) - /// - /// Clients wishing to abort computation in response to a - /// reallocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn realloc_zeroed( - &mut self, - ptr: NonNull, - layout: Layout, - new_size: usize, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); - - if new_size > old_size { - if let Ok(size) = self.grow_in_place_zeroed(ptr, layout, new_size) { - return Ok((ptr, size)); - } - } else if new_size < old_size { - if let Ok(size) = self.shrink_in_place(ptr, layout, new_size) { - return Ok((ptr, size)); - } - } else { - return Ok((ptr, new_size)); - } - - // otherwise, fall back on alloc + copy + dealloc. - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - let result = self.alloc_zeroed(new_layout); - if let Ok((new_ptr, _)) = result { - ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size)); - self.dealloc(ptr, layout); - } - result - } - - /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`. - /// - /// If this returns `Ok`, then the allocator has asserted that the - /// memory block referenced by `ptr` now fits `new_size`, and thus can - /// be used to carry data of a layout of that size and same alignment as - /// `layout`. The returned value is the new size of the allocated block. 
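The default `realloc` above prefers the in-place paths before falling back to alloc + copy + dealloc. A caller can encode the same preference explicitly; a sketch against the pre-patch API (`grow_buffer` is a hypothetical helper, not part of the patch):

```rust
#![feature(allocator_api)]
use std::alloc::{AllocErr, AllocRef, Global, Layout};
use std::ptr::NonNull;

/// Grow a block from `layout` to `new_size`, trying in-place growth first
/// and only then falling back to `realloc`, which may move the block.
unsafe fn grow_buffer(
    ptr: NonNull<u8>,
    layout: Layout,
    new_size: usize,
) -> Result<(NonNull<u8>, usize), AllocErr> {
    match Global.grow_in_place(ptr, layout, new_size) {
        Ok(usable) => Ok((ptr, usable)),
        Err(_) => Global.realloc(ptr, layout, new_size),
    }
}
```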
- /// (The allocator is allowed to expend effort to accomplish this, such - /// as extending the memory block to include successor blocks, or virtual - /// memory tricks.) - /// - /// Regardless of what this method returns, ownership of the - /// memory block referenced by `ptr` has not been transferred, and - /// the contents of the memory block are unaltered. - /// - /// # Safety - /// - /// This function is unsafe because undefined behavior can result - /// if the caller does not ensure all of the following: - /// - /// * `ptr` must be currently allocated via this allocator, - /// - /// * `layout` must *fit* the `ptr` (see above); note the - /// `new_size` argument need not fit it, - /// - /// * `new_size` must not be less than `layout.size()`, - /// - /// # Errors - /// - /// Returns `Err(CannotReallocInPlace)` when the allocator is - /// unable to assert that the memory block referenced by `ptr` - /// could fit `layout`. - /// - /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error` - /// function; clients are expected either to be able to recover from - /// `grow_in_place` failures without aborting, or to fall back on - /// another reallocation method before resorting to an abort. - #[inline] - unsafe fn grow_in_place( - &mut self, - ptr: NonNull, - layout: Layout, - new_size: usize, - ) -> Result { - let _ = ptr; - let _ = layout; - let _ = new_size; - Err(CannotReallocInPlace) - } - - /// Behaves like `grow_in_place`, but also ensures that the new - /// contents are set to zero before being returned. - /// - /// # Safety - /// - /// This function is unsafe for the same reasons that `grow_in_place` is. - /// - /// # Errors - /// - /// Returns `Err(CannotReallocInPlace)` when the allocator is - /// unable to assert that the memory block referenced by `ptr` - /// could fit `layout`. - /// - /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error` - /// function; clients are expected either to be able to recover from - /// `grow_in_place` failures without aborting, or to fall back on - /// another reallocation method before resorting to an abort. - unsafe fn grow_in_place_zeroed( - &mut self, - ptr: NonNull, - layout: Layout, - new_size: usize, - ) -> Result { - let size = self.grow_in_place(ptr, layout, new_size)?; - ptr.as_ptr().add(layout.size()).write_bytes(0, new_size - layout.size()); - Ok(size) - } - - /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`. - /// - /// If this returns `Ok`, then the allocator has asserted that the - /// memory block referenced by `ptr` now fits `new_size`, and - /// thus can only be used to carry data of that smaller - /// layout. The returned value is the new size the allocated block. - /// (The allocator is allowed to take advantage of this, - /// carving off portions of the block for reuse elsewhere.) The - /// truncated contents of the block within the smaller layout are - /// unaltered, and ownership of block has not been transferred. - /// - /// If this returns `Err`, then the memory block is considered to - /// still represent the original (larger) `layout`. None of the - /// block has been carved off for reuse elsewhere, ownership of - /// the memory block has not been transferred, and the contents of - /// the memory block are unaltered. 
- /// - /// # Safety - /// - /// This function is unsafe because undefined behavior can result - /// if the caller does not ensure all of the following: - /// - /// * `ptr` must be currently allocated via this allocator, - /// - /// * `layout` must *fit* the `ptr` (see above); note the - /// `new_size` argument need not fit it, - /// - /// * `new_size` must not be greater than `layout.size()`, - /// - /// # Errors - /// - /// Returns `Err(CannotReallocInPlace)` when the allocator is - /// unable to assert that the memory block referenced by `ptr` - /// could fit `layout`. - /// - /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error` - /// function; clients are expected either to be able to recover from - /// `shrink_in_place` failures without aborting, or to fall back - /// on another reallocation method before resorting to an abort. - #[inline] - unsafe fn shrink_in_place( - &mut self, - ptr: NonNull, - layout: Layout, - new_size: usize, - ) -> Result { - let _ = ptr; - let _ = layout; - let _ = new_size; - Err(CannotReallocInPlace) - } -} diff --git a/src/libcore/alloc/global.rs b/src/libcore/alloc/global.rs new file mode 100644 index 0000000000000..147fe696ac02f --- /dev/null +++ b/src/libcore/alloc/global.rs @@ -0,0 +1,198 @@ +use crate::alloc::Layout; +use crate::cmp; +use crate::ptr; + +/// A memory allocator that can be registered as the standard library’s default +/// through the `#[global_allocator]` attribute. +/// +/// Some of the methods require that a memory block be *currently +/// allocated* via an allocator. This means that: +/// +/// * the starting address for that memory block was previously +/// returned by a previous call to an allocation method +/// such as `alloc`, and +/// +/// * the memory block has not been subsequently deallocated, where +/// blocks are deallocated either by being passed to a deallocation +/// method such as `dealloc` or by being +/// passed to a reallocation method that returns a non-null pointer. +/// +/// +/// # Example +/// +/// ```no_run +/// use std::alloc::{GlobalAlloc, Layout, alloc}; +/// use std::ptr::null_mut; +/// +/// struct MyAllocator; +/// +/// unsafe impl GlobalAlloc for MyAllocator { +/// unsafe fn alloc(&self, _layout: Layout) -> *mut u8 { null_mut() } +/// unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {} +/// } +/// +/// #[global_allocator] +/// static A: MyAllocator = MyAllocator; +/// +/// fn main() { +/// unsafe { +/// assert!(alloc(Layout::new::()).is_null()) +/// } +/// } +/// ``` +/// +/// # Safety +/// +/// The `GlobalAlloc` trait is an `unsafe` trait for a number of reasons, and +/// implementors must ensure that they adhere to these contracts: +/// +/// * It's undefined behavior if global allocators unwind. This restriction may +/// be lifted in the future, but currently a panic from any of these +/// functions may lead to memory unsafety. +/// +/// * `Layout` queries and calculations in general must be correct. Callers of +/// this trait are allowed to rely on the contracts defined on each method, +/// and implementors must ensure such contracts remain true. +#[stable(feature = "global_alloc", since = "1.28.0")] +pub unsafe trait GlobalAlloc { + /// Allocate memory as described by the given `layout`. + /// + /// Returns a pointer to newly-allocated memory, + /// or null to indicate allocation failure. + /// + /// # Safety + /// + /// This function is unsafe because undefined behavior can result + /// if the caller does not ensure that `layout` has non-zero size. 
+ /// + /// (Extension subtraits might provide more specific bounds on + /// behavior, e.g., guarantee a sentinel address or a null pointer + /// in response to a zero-size allocation request.) + /// + /// The allocated block of memory may or may not be initialized. + /// + /// # Errors + /// + /// Returning a null pointer indicates that either memory is exhausted + /// or `layout` does not meet this allocator's size or alignment constraints. + /// + /// Implementations are encouraged to return null on memory + /// exhaustion rather than aborting, but this is not + /// a strict requirement. (Specifically: it is *legal* to + /// implement this trait atop an underlying native allocation + /// library that aborts on memory exhaustion.) + /// + /// Clients wishing to abort computation in response to an + /// allocation error are encouraged to call the [`handle_alloc_error`] function, + /// rather than directly invoking `panic!` or similar. + /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + #[stable(feature = "global_alloc", since = "1.28.0")] + unsafe fn alloc(&self, layout: Layout) -> *mut u8; + + /// Deallocate the block of memory at the given `ptr` pointer with the given `layout`. + /// + /// # Safety + /// + /// This function is unsafe because undefined behavior can result + /// if the caller does not ensure all of the following: + /// + /// * `ptr` must denote a block of memory currently allocated via + /// this allocator, + /// + /// * `layout` must be the same layout that was used + /// to allocate that block of memory, + #[stable(feature = "global_alloc", since = "1.28.0")] + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout); + + /// Behaves like `alloc`, but also ensures that the contents + /// are set to zero before being returned. + /// + /// # Safety + /// + /// This function is unsafe for the same reasons that `alloc` is. + /// However the allocated block of memory is guaranteed to be initialized. + /// + /// # Errors + /// + /// Returning a null pointer indicates that either memory is exhausted + /// or `layout` does not meet allocator's size or alignment constraints, + /// just as in `alloc`. + /// + /// Clients wishing to abort computation in response to an + /// allocation error are encouraged to call the [`handle_alloc_error`] function, + /// rather than directly invoking `panic!` or similar. + /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + #[stable(feature = "global_alloc", since = "1.28.0")] + unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { + let size = layout.size(); + let ptr = self.alloc(layout); + if !ptr.is_null() { + ptr::write_bytes(ptr, 0, size); + } + ptr + } + + /// Shrink or grow a block of memory to the given `new_size`. + /// The block is described by the given `ptr` pointer and `layout`. + /// + /// If this returns a non-null pointer, then ownership of the memory block + /// referenced by `ptr` has been transferred to this allocator. + /// The memory may or may not have been deallocated, + /// and should be considered unusable (unless of course it was + /// transferred back to the caller again via the return value of + /// this method). The new memory block is allocated with `layout`, but + /// with the `size` updated to `new_size`. + /// + /// If this method returns null, then ownership of the memory + /// block has not been transferred to this allocator, and the + /// contents of the memory block are unaltered. 
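The `alloc_zeroed` default above zeroes with `ptr::write_bytes` after a plain `alloc`, and a direct implementation must give the same guarantee. A stable-Rust check of that guarantee through the free functions:

```rust
use std::alloc::{alloc_zeroed, dealloc, Layout};

fn main() {
    let layout = Layout::new::<[u64; 8]>();
    unsafe {
        let ptr = alloc_zeroed(layout);
        assert!(!ptr.is_null());
        // Every byte is guaranteed to be zero, per the contract above.
        let words = std::slice::from_raw_parts(ptr as *const u64, 8);
        assert!(words.iter().all(|&w| w == 0));
        dealloc(ptr, layout);
    }
}
```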
+ /// + /// # Safety + /// + /// This function is unsafe because undefined behavior can result + /// if the caller does not ensure all of the following: + /// + /// * `ptr` must be currently allocated via this allocator, + /// + /// * `layout` must be the same layout that was used + /// to allocate that block of memory, + /// + /// * `new_size` must be greater than zero. + /// + /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, + /// must not overflow (i.e., the rounded value must be less than `usize::MAX`). + /// + /// (Extension subtraits might provide more specific bounds on + /// behavior, e.g., guarantee a sentinel address or a null pointer + /// in response to a zero-size allocation request.) + /// + /// # Errors + /// + /// Returns null if the new layout does not meet the size + /// and alignment constraints of the allocator, or if reallocation + /// otherwise fails. + /// + /// Implementations are encouraged to return null on memory + /// exhaustion rather than panicking or aborting, but this is not + /// a strict requirement. (Specifically: it is *legal* to + /// implement this trait atop an underlying native allocation + /// library that aborts on memory exhaustion.) + /// + /// Clients wishing to abort computation in response to a + /// reallocation error are encouraged to call the [`handle_alloc_error`] function, + /// rather than directly invoking `panic!` or similar. + /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + #[stable(feature = "global_alloc", since = "1.28.0")] + unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); + let new_ptr = self.alloc(new_layout); + if !new_ptr.is_null() { + ptr::copy_nonoverlapping(ptr, new_ptr, cmp::min(layout.size(), new_size)); + self.dealloc(ptr, layout); + } + new_ptr + } +} diff --git a/src/libcore/alloc/layout.rs b/src/libcore/alloc/layout.rs new file mode 100644 index 0000000000000..c798aacc90b51 --- /dev/null +++ b/src/libcore/alloc/layout.rs @@ -0,0 +1,345 @@ +// ignore-tidy-undocumented-unsafe + +use crate::cmp; +use crate::fmt; +use crate::mem; +use crate::num::NonZeroUsize; +use crate::ptr::NonNull; + +const fn size_align() -> (usize, usize) { + (mem::size_of::(), mem::align_of::()) +} + +/// Layout of a block of memory. +/// +/// An instance of `Layout` describes a particular layout of memory. +/// You build a `Layout` up as an input to give to an allocator. +/// +/// All layouts have an associated size and a power-of-two alignment. +/// +/// (Note that layouts are *not* required to have non-zero size, +/// even though `GlobalAlloc` requires that all memory requests +/// be non-zero in size. A caller must either ensure that conditions +/// like this are met, use specific allocators with looser +/// requirements, or use the more lenient `AllocRef` interface.) +#[stable(feature = "alloc_layout", since = "1.28.0")] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[lang = "alloc_layout"] +pub struct Layout { + // size of the requested block of memory, measured in bytes. + size_: usize, + + // alignment of the requested block of memory, measured in bytes. + // we ensure that this is always a power-of-two, because API's + // like `posix_memalign` require it and it is a reasonable + // constraint to impose on Layout constructors. 
+ //
+ // (However, we do not analogously require `align >= sizeof(void*)`,
+ // even though that is *also* a requirement of `posix_memalign`.)
+ align_: NonZeroUsize,
+}
+
+impl Layout {
+ /// Constructs a `Layout` from a given `size` and `align`,
+ /// or returns `LayoutErr` if any of the following conditions
+ /// are not met:
+ ///
+ /// * `align` must not be zero,
+ ///
+ /// * `align` must be a power of two,
+ ///
+ /// * `size`, when rounded up to the nearest multiple of `align`,
+ /// must not overflow (i.e., the rounded value must be less than
+ /// or equal to `usize::MAX`).
+ #[stable(feature = "alloc_layout", since = "1.28.0")]
+ #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
+ #[inline]
+ pub const fn from_size_align(size: usize, align: usize) -> Result<Self, LayoutErr> {
+ if !align.is_power_of_two() {
+ return Err(LayoutErr { private: () });
+ }
+
+ // (power-of-two implies align != 0.)
+
+ // Rounded up size is:
+ // size_rounded_up = (size + align - 1) & !(align - 1);
+ //
+ // We know from above that align != 0. If adding (align - 1)
+ // does not overflow, then rounding up will be fine.
+ //
+ // Conversely, &-masking with !(align - 1) will subtract off
+ // only low-order-bits. Thus if overflow occurs with the sum,
+ // the &-mask cannot subtract enough to undo that overflow.
+ //
+ // Above implies that checking for summation overflow is both
+ // necessary and sufficient.
+ if size > usize::MAX - (align - 1) {
+ return Err(LayoutErr { private: () });
+ }
+
+ unsafe { Ok(Layout::from_size_align_unchecked(size, align)) }
+ }
+
+ /// Creates a layout, bypassing all checks.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe as it does not verify the preconditions from
+ /// [`Layout::from_size_align`](#method.from_size_align).
+ #[stable(feature = "alloc_layout", since = "1.28.0")]
+ #[rustc_const_stable(feature = "alloc_layout", since = "1.28.0")]
+ #[inline]
+ pub const unsafe fn from_size_align_unchecked(size: usize, align: usize) -> Self {
+ Layout { size_: size, align_: NonZeroUsize::new_unchecked(align) }
+ }
+
+ /// The minimum size in bytes for a memory block of this layout.
+ #[stable(feature = "alloc_layout", since = "1.28.0")]
+ #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
+ #[inline]
+ pub const fn size(&self) -> usize {
+ self.size_
+ }
+
+ /// The minimum byte alignment for a memory block of this layout.
+ #[stable(feature = "alloc_layout", since = "1.28.0")]
+ #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
+ #[inline]
+ pub const fn align(&self) -> usize {
+ self.align_.get()
+ }
+
+ /// Constructs a `Layout` suitable for holding a value of type `T`.
+ #[stable(feature = "alloc_layout", since = "1.28.0")]
+ #[rustc_const_stable(feature = "alloc_layout_const_new", since = "1.42.0")]
+ #[inline]
+ pub const fn new<T>() -> Self {
+ let (size, align) = size_align::<T>();
+ // Note that the align is guaranteed by rustc to be a power of two and
+ // the size+align combo is guaranteed to fit in our address space. As a
+ // result use the unchecked constructor here to avoid inserting code
+ // that panics if it isn't optimized well enough.
+ unsafe { Layout::from_size_align_unchecked(size, align) }
+ }
+
+ /// Produces layout describing a record that could be used to
+ /// allocate backing structure for `T` (which could be a trait
+ /// or other unsized type like a slice).
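The three constructor conditions above are easy to exercise directly; a quick stable-Rust check:

```rust
use std::alloc::Layout;

fn main() {
    // `align` must be a (nonzero) power of two:
    assert!(Layout::from_size_align(16, 3).is_err());
    assert!(Layout::from_size_align(16, 0).is_err());
    // `size` rounded up to `align` must not overflow:
    assert!(Layout::from_size_align(std::usize::MAX, 2).is_err());
    // Otherwise the constructor succeeds:
    let layout = Layout::from_size_align(13, 4).unwrap();
    assert_eq!((layout.size(), layout.align()), (13, 4));
}
```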
+ #[stable(feature = "alloc_layout", since = "1.28.0")] + #[inline] + pub fn for_value(t: &T) -> Self { + let (size, align) = (mem::size_of_val(t), mem::align_of_val(t)); + // See rationale in `new` for why this is using an unsafe variant below + debug_assert!(Layout::from_size_align(size, align).is_ok()); + unsafe { Layout::from_size_align_unchecked(size, align) } + } + + /// Creates a `NonNull` that is dangling, but well-aligned for this Layout. + /// + /// Note that the pointer value may potentially represent a valid pointer, + /// which means this must not be used as a "not yet initialized" + /// sentinel value. Types that lazily allocate must track initialization by + /// some other means. + #[unstable(feature = "alloc_layout_extra", issue = "55724")] + pub const fn dangling(&self) -> NonNull { + // align is non-zero and a power of two + unsafe { NonNull::new_unchecked(self.align() as *mut u8) } + } + + /// Creates a layout describing the record that can hold a value + /// of the same layout as `self`, but that also is aligned to + /// alignment `align` (measured in bytes). + /// + /// If `self` already meets the prescribed alignment, then returns + /// `self`. + /// + /// Note that this method does not add any padding to the overall + /// size, regardless of whether the returned layout has a different + /// alignment. In other words, if `K` has size 16, `K.align_to(32)` + /// will *still* have size 16. + /// + /// Returns an error if the combination of `self.size()` and the given + /// `align` violates the conditions listed in + /// [`Layout::from_size_align`](#method.from_size_align). + #[unstable(feature = "alloc_layout_extra", issue = "55724")] + #[inline] + pub fn align_to(&self, align: usize) -> Result { + Layout::from_size_align(self.size(), cmp::max(self.align(), align)) + } + + /// Returns the amount of padding we must insert after `self` + /// to ensure that the following address will satisfy `align` + /// (measured in bytes). + /// + /// e.g., if `self.size()` is 9, then `self.padding_needed_for(4)` + /// returns 3, because that is the minimum number of bytes of + /// padding required to get a 4-aligned address (assuming that the + /// corresponding memory block starts at a 4-aligned address). + /// + /// The return value of this function has no meaning if `align` is + /// not a power-of-two. + /// + /// Note that the utility of the returned value requires `align` + /// to be less than or equal to the alignment of the starting + /// address for the whole allocated block of memory. One way to + /// satisfy this constraint is to ensure `align <= self.align()`. + #[unstable(feature = "alloc_layout_extra", issue = "55724")] + #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")] + #[inline] + pub const fn padding_needed_for(&self, align: usize) -> usize { + let len = self.size(); + + // Rounded up value is: + // len_rounded_up = (len + align - 1) & !(align - 1); + // and then we return the padding difference: `len_rounded_up - len`. + // + // We use modular arithmetic throughout: + // + // 1. align is guaranteed to be > 0, so align - 1 is always + // valid. + // + // 2. `len + align - 1` can overflow by at most `align - 1`, + // so the &-mask with `!(align - 1)` will ensure that in the + // case of overflow, `len_rounded_up` will itself be 0. + // Thus the returned padding, when added to `len`, yields 0, + // which trivially satisfies the alignment `align`. 
+ // + // (Of course, attempts to allocate blocks of memory whose + // size and padding overflow in the above manner should cause + // the allocator to yield an error anyway.) + + let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1); + len_rounded_up.wrapping_sub(len) + } + + /// Creates a layout by rounding the size of this layout up to a multiple + /// of the layout's alignment. + /// + /// This is equivalent to adding the result of `padding_needed_for` + /// to the layout's current size. + #[unstable(feature = "alloc_layout_extra", issue = "55724")] + #[inline] + pub fn pad_to_align(&self) -> Layout { + let pad = self.padding_needed_for(self.align()); + // This cannot overflow. Quoting from the invariant of Layout: + // > `size`, when rounded up to the nearest multiple of `align`, + // > must not overflow (i.e., the rounded value must be less than + // > `usize::MAX`) + let new_size = self.size() + pad; + + Layout::from_size_align(new_size, self.align()).unwrap() + } + + /// Creates a layout describing the record for `n` instances of + /// `self`, with a suitable amount of padding between each to + /// ensure that each instance is given its requested size and + /// alignment. On success, returns `(k, offs)` where `k` is the + /// layout of the array and `offs` is the distance between the start + /// of each element in the array. + /// + /// On arithmetic overflow, returns `LayoutErr`. + #[unstable(feature = "alloc_layout_extra", issue = "55724")] + #[inline] + pub fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutErr> { + // This cannot overflow. Quoting from the invariant of Layout: + // > `size`, when rounded up to the nearest multiple of `align`, + // > must not overflow (i.e., the rounded value must be less than + // > `usize::MAX`) + let padded_size = self.size() + self.padding_needed_for(self.align()); + let alloc_size = padded_size.checked_mul(n).ok_or(LayoutErr { private: () })?; + + unsafe { + // self.align is already known to be valid and alloc_size has been + // padded already. + Ok((Layout::from_size_align_unchecked(alloc_size, self.align()), padded_size)) + } + } + + /// Creates a layout describing the record for `self` followed by + /// `next`, including any necessary padding to ensure that `next` + /// will be properly aligned. Note that the resulting layout will + /// satisfy the alignment properties of both `self` and `next`. + /// + /// The resulting layout will be the same as that of a C struct containing + /// two fields with the layouts of `self` and `next`, in that order. + /// + /// Returns `Some((k, offset))`, where `k` is layout of the concatenated + /// record and `offset` is the relative location, in bytes, of the + /// start of the `next` embedded within the concatenated record + /// (assuming that the record itself starts at offset 0). + /// + /// On arithmetic overflow, returns `LayoutErr`. + #[unstable(feature = "alloc_layout_extra", issue = "55724")] + #[inline] + pub fn extend(&self, next: Self) -> Result<(Self, usize), LayoutErr> { + let new_align = cmp::max(self.align(), next.align()); + let pad = self.padding_needed_for(next.align()); + + let offset = self.size().checked_add(pad).ok_or(LayoutErr { private: () })?; + let new_size = offset.checked_add(next.size()).ok_or(LayoutErr { private: () })?; + + let layout = Layout::from_size_align(new_size, new_align)?; + Ok((layout, offset)) + } + + /// Creates a layout describing the record for `n` instances of + /// `self`, with no padding between each instance. 
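The padding arithmetic above (`padding_needed_for`, `pad_to_align`, and the padded element stride used by `repeat`) in concrete numbers; a sketch behind `alloc_layout_extra`:

```rust
#![feature(alloc_layout_extra)]
use std::alloc::Layout;

fn main() {
    let nine = Layout::from_size_align(9, 1).unwrap();
    // The next 4-aligned offset after 9 is 12, so 3 bytes of padding.
    assert_eq!(nine.padding_needed_for(4), 3);
    assert_eq!(nine.padding_needed_for(8), 7); // 9 rounds up to 16

    // `pad_to_align` rounds the size up to the layout's own alignment...
    let elem = Layout::from_size_align(9, 4).unwrap();
    assert_eq!(elem.pad_to_align().size(), 12);

    // ...which is exactly the element stride that `repeat` uses.
    let (array, stride) = elem.repeat(3).unwrap();
    assert_eq!(stride, 12);
    assert_eq!(array.size(), 36);
}
```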
+ ///
+ /// Note that, unlike `repeat`, `repeat_packed` does not guarantee
+ /// that the repeated instances of `self` will be properly
+ /// aligned, even if a given instance of `self` is properly
+ /// aligned. In other words, if the layout returned by
+ /// `repeat_packed` is used to allocate an array, it is not
+ /// guaranteed that all elements in the array will be properly
+ /// aligned.
+ ///
+ /// On arithmetic overflow, returns `LayoutErr`.
+ #[unstable(feature = "alloc_layout_extra", issue = "55724")]
+ #[inline]
+ pub fn repeat_packed(&self, n: usize) -> Result<Self, LayoutErr> {
+ let size = self.size().checked_mul(n).ok_or(LayoutErr { private: () })?;
+ Layout::from_size_align(size, self.align())
+ }
+
+ /// Creates a layout describing the record for `self` followed by
+ /// `next` with no additional padding between the two. Since no
+ /// padding is inserted, the alignment of `next` is irrelevant,
+ /// and is not incorporated *at all* into the resulting layout.
+ ///
+ /// On arithmetic overflow, returns `LayoutErr`.
+ #[unstable(feature = "alloc_layout_extra", issue = "55724")]
+ #[inline]
+ pub fn extend_packed(&self, next: Self) -> Result<Self, LayoutErr> {
+ let new_size = self.size().checked_add(next.size()).ok_or(LayoutErr { private: () })?;
+ Layout::from_size_align(new_size, self.align())
+ }
+
+ /// Creates a layout describing the record for a `[T; n]`.
+ ///
+ /// On arithmetic overflow, returns `LayoutErr`.
+ #[unstable(feature = "alloc_layout_extra", issue = "55724")]
+ #[inline]
+ pub fn array<T>(n: usize) -> Result<Self, LayoutErr> {
+ Layout::new::<T>().repeat(n).map(|(k, offs)| {
+ debug_assert!(offs == mem::size_of::<T>());
+ k
+ })
+ }
+}
+
+/// The parameters given to `Layout::from_size_align`
+/// or some other `Layout` constructor
+/// do not satisfy its documented constraints.
+#[stable(feature = "alloc_layout", since = "1.28.0")]
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct LayoutErr {
+ private: (),
+}
+
+// (we need this for downstream impl of trait Error)
+#[stable(feature = "alloc_layout", since = "1.28.0")]
+impl fmt::Display for LayoutErr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("invalid parameters to Layout::from_size_align")
+ }
+}
diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs
new file mode 100644
index 0000000000000..0c5a70bee1a56
--- /dev/null
+++ b/src/libcore/alloc/mod.rs
@@ -0,0 +1,376 @@
+//! Memory allocation APIs
+
+#![stable(feature = "alloc_module", since = "1.28.0")]
+
+mod global;
+mod layout;
+
+#[stable(feature = "global_alloc", since = "1.28.0")]
+pub use self::global::GlobalAlloc;
+#[stable(feature = "alloc_layout", since = "1.28.0")]
+pub use self::layout::{Layout, LayoutErr};
+
+use crate::fmt;
+use crate::ptr::{self, NonNull};
+
+/// The `AllocErr` error indicates an allocation failure
+/// that may be due to resource exhaustion or to
+/// something wrong when combining the given input arguments with this
+/// allocator.
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct AllocErr;
+
+// (we need this for downstream impl of trait Error)
+#[unstable(feature = "allocator_api", issue = "32838")]
+impl fmt::Display for AllocErr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("memory allocation failed")
+ }
+}
+
+/// A desired initial state for allocated memory.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[unstable(feature = "allocator_api", issue = "32838")]
+pub enum AllocInit {
+ /// The contents of the new memory are undefined.
+ /// + /// Reading uninitialized memory is Undefined Behavior; it must be initialized before use. + Uninitialized, + /// The new memory is guaranteed to be zeroed. + Zeroed, +} + +impl AllocInit { + /// Initialize the memory block referenced by `ptr` and specified by `Layout`. + /// + /// This behaves like calling [`AllocInit::initialize_offset(ptr, layout, 0)`][off]. + /// + /// [off]: AllocInit::initialize_offset + /// + /// # Safety + /// + /// * `layout` must [*fit*] the block of memory referenced by `ptr` + /// + /// [*fit*]: trait.AllocRef.html#memory-fitting + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn initialize(self, ptr: NonNull, layout: Layout) { + self.initialize_offset(ptr, layout, 0) + } + + /// Initialize the memory block referenced by `ptr` and specified by `Layout` at the specified + /// `offset`. + /// + /// This is a no-op for [`AllocInit::Uninitialized`] and writes zeroes for [`AllocInit::Zeroed`] + /// at `ptr + offset` until `ptr + layout.size()`. + /// + /// # Safety + /// + /// * `layout` must [*fit*] the block of memory referenced by `ptr` + /// + /// * `offset` must be smaller than or equal to `layout.size()` + /// + /// [*fit*]: trait.AllocRef.html#memory-fitting + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn initialize_offset(self, ptr: NonNull, layout: Layout, offset: usize) { + debug_assert!( + offset <= layout.size(), + "`offset` must be smaller than or equal to `layout.size()`" + ); + match self { + AllocInit::Uninitialized => (), + AllocInit::Zeroed => { + let new_ptr = ptr.as_ptr().add(offset); + let size = layout.size() - offset; + ptr::write_bytes(new_ptr, 0, size); + } + } + } +} + +/// A placement constraint when growing or shrinking an existing allocation. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[unstable(feature = "allocator_api", issue = "32838")] +pub enum ReallocPlacement { + /// The allocator is allowed to move the allocation to a different memory address. + // FIXME(wg-allocators#46): Add a section to the module documentation "What is a legal + // allocator" and link it at "valid location". + /// + /// If the allocation _does_ move, it's the responsibility of the allocator + /// to also move the data from the previous location to the new location. + MayMove, + /// The address of the new memory must not change. + /// + /// If the allocation would have to be moved to a new location to fit, the + /// reallocation request will fail. + InPlace, +} + +/// An implementation of `AllocRef` can allocate, grow, shrink, and deallocate arbitrary blocks of +/// data described via [`Layout`][]. +/// +/// `AllocRef` is designed to be implemented on ZSTs, references, or smart pointers because having +/// an allocator like `MyAlloc([u8; N])` cannot be moved, without updating the pointers to the +/// allocated memory. +/// +/// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `AllocRef`. If an underlying +/// allocator does not support this (like jemalloc) or return a null pointer (such as +/// `libc::malloc`), this case must be caught. [`Layout::dangling()`][] then can be used to create +/// an aligned `NonNull`. +/// +/// ### Currently allocated memory +/// +/// Some of the methods require that a memory block be *currently allocated* via an allocator. 
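To make the `initialize_offset` contract above concrete: for `AllocInit::Zeroed` it zeroes the tail of the block starting at `offset`, and for `AllocInit::Uninitialized` it does nothing. A hand-written equivalent as a sketch; `zero_tail` is an illustrative name, not part of the patch:

```rust
use std::ptr::{self, NonNull};

/// What `AllocInit::Zeroed.initialize_offset(ptr, layout, offset)` boils
/// down to: zero every byte from `offset` up to the end of the block.
unsafe fn zero_tail(ptr: NonNull<u8>, block_size: usize, offset: usize) {
    debug_assert!(offset <= block_size);
    ptr::write_bytes(ptr.as_ptr().add(offset), 0, block_size - offset);
}
```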
This
+/// means that:
+///
+/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or
+/// [`shrink`], and
+///
+/// * the memory block has not been subsequently deallocated, where blocks are either deallocated
+/// directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or
+/// [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer
+/// remains valid.
+///
+/// [`alloc`]: AllocRef::alloc
+/// [`grow`]: AllocRef::grow
+/// [`shrink`]: AllocRef::shrink
+/// [`dealloc`]: AllocRef::dealloc
+///
+/// ### Memory fitting
+///
+/// Some of the methods require that a layout *fit* a memory block. What it means for a layout to
+/// "fit" a memory block (or equivalently, for a memory block to "fit" a layout) is that the
+/// following conditions must hold:
+///
+/// * The block must be allocated with the same alignment as [`layout.align()`], and
+///
+/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
+/// - `min` is the size of the layout most recently used to allocate the block, and
+/// - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`].
+///
+/// [`layout.align()`]: Layout::align
+/// [`layout.size()`]: Layout::size
+///
+/// ### Notes
+///
+/// * if a layout `k` fits a memory block (denoted by `ptr`) currently allocated via an allocator
+/// `a`, then it is legal to use that layout to deallocate it, i.e.,
+/// [`a.dealloc(ptr, k);`][`dealloc`], and
+///
+/// * if an allocator does not support overallocating, it is fine to simply return
+/// [`layout.size()`] as the actual size.
+///
+/// # Safety
+///
+/// * Pointers returned from an allocator must point to valid memory and retain their validity until
+/// the instance and all of its clones are dropped,
+///
+/// * cloning or moving the allocator must not invalidate pointers returned from this allocator.
+/// A cloned allocator must behave like the same allocator, and
+///
+/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other
+/// method of the allocator.
+///
+/// [*currently allocated*]: #currently-allocated-memory
+#[unstable(feature = "allocator_api", issue = "32838")]
+pub unsafe trait AllocRef {
+ /// On success, returns a pointer meeting the size and alignment guarantees of `layout` and the
+ /// actual size of the allocated block, which is greater than or equal to `layout.size()`.
+ ///
+ /// The returned block of storage is initialized as specified by [`init`], all the way up to
+ /// the returned `actual_size`.
+ ///
+ /// [`init`]: AllocInit
+ ///
+ /// # Errors
+ ///
+ /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet
+ /// allocator's size or alignment constraints.
+ ///
+ /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
+ /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
+ /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
+ ///
+ /// Clients wishing to abort computation in response to an allocation error are encouraged to
+ /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
+ ///
+ /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+ fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr>;
+
+ /// Deallocates the memory referenced by `ptr`.
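With the new two-argument `alloc` above, zeroing is requested at the call site rather than through a separate `alloc_zeroed` method. A usage sketch, assuming a nightly that carries this patch:

```rust
#![feature(allocator_api)]
use std::alloc::{AllocInit, AllocRef, Global, Layout};

fn main() {
    let layout = Layout::new::<[u32; 4]>();
    unsafe {
        let (ptr, size) = Global
            .alloc(layout, AllocInit::Zeroed)
            .expect("allocation failed");
        assert!(size >= layout.size());
        assert_eq!(*ptr.as_ptr(), 0); // zeroed as requested
        Global.dealloc(ptr, layout);
    }
}
```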
+ /// + /// # Safety + /// + /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator, + /// + /// * `layout` must [*fit*] that block of memory, and + /// + /// * the alignment of the `layout` must match the alignment used to allocate that block of + /// memory. + /// + /// [*currently allocated*]: #currently-allocated-memory + /// [*fit*]: #memory-fitting + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout); + + /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`. + /// + /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for + /// holding data described by a new layout with `layout`’s alignment and a size given by + /// `new_size`. To accomplish this, the allocator may extend the allocation referenced by `ptr` + /// to fit the new layout. + /// + /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been + /// transferred to this allocator. The memory may or may not have been freed, and should be + /// considered unusable (unless of course it was transferred back to the caller again via the + /// return value of this method). + /// + /// If this method returns `Err`, then ownership of the memory block has not been transferred to + /// this allocator, and the contents of the memory block are unaltered. + /// + /// The behavior of how the allocator tries to grow the memory is specified by [`placement`]. + /// The first `layout.size()` bytes of memory are preserved or copied as appropriate from `ptr`, + /// and the remaining bytes, from `layout.size()` to the returned actual size, are initialized + /// according to [`init`]. + /// + /// [`placement`]: ReallocPlacement + /// [`init`]: AllocInit + /// + /// # Safety + /// + /// * `ptr` must be [*currently allocated*] via this allocator, + /// + /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) + /// + // We can't require that `new_size` is strictly greater than `layout.size()` because of ZSTs. + // An alternative would be + // * `new_size must be strictly greater than `layout.size()` or both are zero + /// * `new_size` must be greater than or equal to `layout.size()` + /// + /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow + /// (i.e., the rounded value must be less than `usize::MAX`). + /// + /// [*currently allocated*]: #currently-allocated-memory + /// [*fit*]: #memory-fitting + /// + /// # Errors + /// + /// Returns `Err` if the new layout does not meet the allocator's size and alignment + /// constraints of the allocator, or if growing otherwise fails. + /// + /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or + /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement + /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) + /// + /// Clients wishing to abort computation in response to an allocation error are encouraged to + /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar. 
+ /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + unsafe fn grow( + &mut self, + ptr: NonNull, + layout: Layout, + new_size: usize, + placement: ReallocPlacement, + init: AllocInit, + ) -> Result<(NonNull, usize), AllocErr> { + let old_size = layout.size(); + debug_assert!( + new_size >= old_size, + "`new_size` must be greater than or equal to `layout.size()`" + ); + + if new_size == old_size { + return Ok((ptr, new_size)); + } + + match placement { + ReallocPlacement::MayMove => { + let (new_ptr, alloc_size) = + self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)?; + ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), old_size); + self.dealloc(ptr, layout); + Ok((new_ptr, alloc_size)) + } + ReallocPlacement::InPlace => Err(AllocErr), + } + } + + /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`. + /// + /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for + /// holding data described by a new layout with `layout`’s alignment and a size given by + /// `new_size`. To accomplish this, the allocator may shrink the allocation referenced by `ptr` + /// to fit the new layout. + /// + /// The behavior on how the allocator tries to shrink the memory can be specified by + /// [`placement`]. + /// + /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been + /// transferred to this allocator. The memory may or may not have been freed, and should be + /// considered unusable unless it was transferred back to the caller again via the + /// return value of this method. + /// + /// If this method returns `Err`, then ownership of the memory block has not been transferred to + /// this allocator, and the contents of the memory block are unaltered. + /// + /// [`placement`]: ReallocPlacement + /// + /// # Safety + /// + /// * `ptr` must be [*currently allocated*] via this allocator, + /// + /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) + /// + // We can't require that `new_size` is strictly smaller than `layout.size()` because of ZSTs. + // An alternative would be + // * `new_size must be strictly smaller than `layout.size()` or both are zero + /// * `new_size` must be smaller than or equal to `layout.size()` + /// + /// [*currently allocated*]: #currently-allocated-memory + /// [*fit*]: #memory-fitting + /// + /// # Errors + /// + /// Returns `Err` if the new layout does not meet the allocator's size and alignment + /// constraints of the allocator, or if shrinking otherwise fails. + /// + /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or + /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement + /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) + /// + /// Clients wishing to abort computation in response to an allocation error are encouraged to + /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar. 
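Putting the `grow` contract above together: placement, the preserved prefix, and the `init`-controlled tail. A sketch, again assuming a nightly carrying this patch:

```rust
#![feature(allocator_api)]
use std::alloc::{AllocInit, AllocRef, Global, Layout, ReallocPlacement};

fn main() {
    unsafe {
        let old = Layout::new::<[u8; 16]>();
        let (ptr, _) = Global.alloc(old, AllocInit::Uninitialized).unwrap();

        // Double the block. `MayMove` permits relocation; the first 16 bytes
        // are preserved and the new tail is zeroed per `AllocInit::Zeroed`.
        let (ptr, size) =
            Global.grow(ptr, old, 32, ReallocPlacement::MayMove, AllocInit::Zeroed).unwrap();
        assert!(size >= 32);
        assert_eq!(*ptr.as_ptr().add(16), 0);

        Global.dealloc(ptr, Layout::from_size_align(32, old.align()).unwrap());
    }
}
```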
+ /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + unsafe fn shrink( + &mut self, + ptr: NonNull, + layout: Layout, + new_size: usize, + placement: ReallocPlacement, + ) -> Result<(NonNull, usize), AllocErr> { + let old_size = layout.size(); + debug_assert!( + new_size <= old_size, + "`new_size` must be smaller than or equal to `layout.size()`" + ); + + if new_size == old_size { + return Ok((ptr, new_size)); + } + + match placement { + ReallocPlacement::MayMove => { + let (new_ptr, alloc_size) = self.alloc( + Layout::from_size_align_unchecked(new_size, layout.align()), + AllocInit::Uninitialized, + )?; + ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), new_size); + self.dealloc(ptr, layout); + Ok((new_ptr, alloc_size)) + } + ReallocPlacement::InPlace => Err(AllocErr), + } + } +} diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index 25f3ddcbebab6..9ad0eae705fc8 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -61,6 +61,7 @@ #![stable(feature = "alloc_module", since = "1.28.0")] +use core::intrinsics; use core::ptr::NonNull; use core::sync::atomic::{AtomicPtr, Ordering}; use core::{mem, ptr}; @@ -133,60 +134,106 @@ pub use alloc_crate::alloc::*; #[derive(Debug, Default, Copy, Clone)] pub struct System; -// The AllocRef impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl, -// which is in `std::sys::*::alloc`. #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl AllocRef for System { #[inline] - fn alloc(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr> { - if layout.size() == 0 { + fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull, usize), AllocErr> { + let new_size = layout.size(); + if new_size == 0 { Ok((layout.dangling(), 0)) } else { unsafe { - NonNull::new(GlobalAlloc::alloc(self, layout)) - .ok_or(AllocErr) - .map(|p| (p, layout.size())) + let raw_ptr = match init { + AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout), + AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout), + }; + let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; + Ok((ptr, new_size)) } } } #[inline] - fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr> { - if layout.size() == 0 { - Ok((layout.dangling(), 0)) - } else { - unsafe { - NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)) - .ok_or(AllocErr) - .map(|p| (p, layout.size())) - } + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + if layout.size() != 0 { + GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) } } #[inline] - unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { - if layout.size() != 0 { - GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) + unsafe fn grow( + &mut self, + ptr: NonNull, + layout: Layout, + new_size: usize, + placement: ReallocPlacement, + init: AllocInit, + ) -> Result<(NonNull, usize), AllocErr> { + let old_size = layout.size(); + debug_assert!( + new_size >= old_size, + "`new_size` must be greater than or equal to `layout.size()`" + ); + + if old_size == new_size { + return Ok((ptr, new_size)); + } + + match placement { + ReallocPlacement::MayMove => { + if old_size == 0 { + self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init) + } else { + // `realloc` probably checks for `new_size > old_size` or something similar. 
+ // `new_size` must be greater than or equal to `old_size` due to the safety constraint, + // and `new_size` == `old_size` was caught before + intrinsics::assume(new_size > old_size); + let ptr = + NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)) + .ok_or(AllocErr)?; + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); + init.initialize_offset(ptr, new_layout, old_size); + Ok((ptr, new_size)) + } + } + ReallocPlacement::InPlace => Err(AllocErr), } } #[inline] - unsafe fn realloc( + unsafe fn shrink( &mut self, ptr: NonNull, layout: Layout, new_size: usize, + placement: ReallocPlacement, ) -> Result<(NonNull, usize), AllocErr> { - match (layout.size(), new_size) { - (0, 0) => Ok((layout.dangling(), 0)), - (0, _) => self.alloc(Layout::from_size_align_unchecked(new_size, layout.align())), - (_, 0) => { - self.dealloc(ptr, layout); - Ok((layout.dangling(), 0)) + let old_size = layout.size(); + debug_assert!( + new_size <= old_size, + "`new_size` must be smaller than or equal to `layout.size()`" + ); + + if old_size == new_size { + return Ok((ptr, new_size)); + } + + match placement { + ReallocPlacement::MayMove => { + let ptr = if new_size == 0 { + self.dealloc(ptr, layout); + layout.dangling() + } else { + // `realloc` probably checks for `new_size > old_size` or something similar. + // `new_size` must be smaller than or equal to `old_size` due to the safety constraint, + // and `new_size` == `old_size` was caught before + intrinsics::assume(new_size < old_size); + NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)) + .ok_or(AllocErr)? + }; + Ok((ptr, new_size)) } - (_, _) => NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)) - .ok_or(AllocErr) - .map(|p| (p, new_size)), + ReallocPlacement::InPlace => Err(AllocErr), } } } @@ -238,9 +285,7 @@ pub fn rust_oom(layout: Layout) -> ! 
{ let hook: fn(Layout) = if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } }; hook(layout); - unsafe { - crate::sys::abort_internal(); - } + unsafe { crate::sys::abort_internal() } } #[cfg(not(test))] diff --git a/src/libstd/error.rs b/src/libstd/error.rs index b394f2efc2e35..24b57f12e8df4 100644 --- a/src/libstd/error.rs +++ b/src/libstd/error.rs @@ -15,7 +15,7 @@ use core::array; -use crate::alloc::{AllocErr, CannotReallocInPlace, LayoutErr}; +use crate::alloc::{AllocErr, LayoutErr}; use crate::any::TypeId; use crate::backtrace::Backtrace; use crate::borrow::Cow; @@ -409,13 +409,6 @@ impl Error for AllocErr {} )] impl Error for LayoutErr {} -#[unstable( - feature = "allocator_api", - reason = "the precise API and guarantees it provides may be tweaked.", - issue = "32838" -)] -impl Error for CannotReallocInPlace {} - #[stable(feature = "rust1", since = "1.0.0")] impl Error for str::ParseBoolError { #[allow(deprecated)] diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs index c275db14b427c..a6a03a39b9647 100644 --- a/src/test/ui/allocator/custom.rs +++ b/src/test/ui/allocator/custom.rs @@ -7,7 +7,7 @@ extern crate helper; -use std::alloc::{self, Global, AllocRef, System, Layout}; +use std::alloc::{self, AllocInit, AllocRef, Global, Layout, System}; use std::sync::atomic::{AtomicUsize, Ordering}; static HITS: AtomicUsize = AtomicUsize::new(0); @@ -37,7 +37,7 @@ fn main() { unsafe { let layout = Layout::from_size_align(4, 2).unwrap(); - let (ptr, _) = Global.alloc(layout.clone()).unwrap(); + let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); helper::work_with(&ptr); assert_eq!(HITS.load(Ordering::SeqCst), n + 1); Global.dealloc(ptr, layout.clone()); @@ -49,7 +49,7 @@ fn main() { drop(s); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); - let (ptr, _) = System.alloc(layout.clone()).unwrap(); + let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); helper::work_with(&ptr); System.dealloc(ptr, layout); diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs index e4746d1a7ec09..de47486cc3b26 100644 --- a/src/test/ui/allocator/xcrate-use.rs +++ b/src/test/ui/allocator/xcrate-use.rs @@ -9,8 +9,8 @@ extern crate custom; extern crate helper; -use std::alloc::{Global, AllocRef, System, Layout}; -use std::sync::atomic::{Ordering, AtomicUsize}; +use std::alloc::{AllocInit, AllocRef, Global, Layout, System}; +use std::sync::atomic::{AtomicUsize, Ordering}; #[global_allocator] static GLOBAL: custom::A = custom::A(AtomicUsize::new(0)); @@ -20,13 +20,13 @@ fn main() { let n = GLOBAL.0.load(Ordering::SeqCst); let layout = Layout::from_size_align(4, 2).unwrap(); - let (ptr, _) = Global.alloc(layout.clone()).unwrap(); + let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); helper::work_with(&ptr); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1); Global.dealloc(ptr, layout.clone()); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); - let (ptr, _) = System.alloc(layout.clone()).unwrap(); + let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); helper::work_with(&ptr); System.dealloc(ptr, layout); diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs index eb6224ad1bbb6..59ce2b4cf86f9 100644 --- a/src/test/ui/realloc-16687.rs +++ b/src/test/ui/realloc-16687.rs @@ -6,7 +6,7 @@ 
#![feature(allocator_api)] -use std::alloc::{Global, AllocRef, Layout, handle_alloc_error}; +use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement}; use std::ptr::{self, NonNull}; fn main() { @@ -16,17 +16,17 @@ fn main() { } unsafe fn test_triangle() -> bool { - static COUNT : usize = 16; + static COUNT: usize = 16; let mut ascend = vec![ptr::null_mut(); COUNT]; let ascend = &mut *ascend; - static ALIGN : usize = 1; + static ALIGN: usize = 1; // Checks that `ascend` forms triangle of ascending size formed // from pairs of rows (where each pair of rows is equally sized), // and the elements of the triangle match their row-pair index. unsafe fn sanity_check(ascend: &[*mut u8]) { for i in 0..COUNT / 2 { - let (p0, p1, size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i)); + let (p0, p1, size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i)); for j in 0..size { assert_eq!(*p0.add(j), i as u8); assert_eq!(*p1.add(j), i as u8); @@ -34,14 +34,16 @@ unsafe fn test_triangle() -> bool { } } - static PRINT : bool = false; + static PRINT: bool = false; unsafe fn allocate(layout: Layout) -> *mut u8 { if PRINT { println!("allocate({:?})", layout); } - let (ptr, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); + let (ptr, _) = Global + .alloc(layout, AllocInit::Uninitialized) + .unwrap_or_else(|_| handle_alloc_error(layout)); if PRINT { println!("allocate({:?}) = {:?}", layout, ptr); @@ -63,19 +65,33 @@ unsafe fn test_triangle() -> bool { println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new); } - let (ptr, _) = Global.realloc(NonNull::new_unchecked(ptr), old, new.size()) - .unwrap_or_else(|_| handle_alloc_error( - Layout::from_size_align_unchecked(new.size(), old.align()) - )); + let allocation = if new.size() > old.size() { + Global.grow( + NonNull::new_unchecked(ptr), + old, + new.size(), + ReallocPlacement::MayMove, + AllocInit::Uninitialized, + ) + } else if new.size() < old.size() { + Global.shrink(NonNull::new_unchecked(ptr), old, new.size(), ReallocPlacement::MayMove) + } else { + return ptr; + }; + + let (ptr, _) = allocation.unwrap_or_else(|_| { + handle_alloc_error(Layout::from_size_align_unchecked(new.size(), old.align())) + }); if PRINT { - println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", - ptr, old, new, ptr); + println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, ptr); } ptr.cast().as_ptr() } - fn idx_to_size(i: usize) -> usize { (i+1) * 10 } + fn idx_to_size(i: usize) -> usize { + (i + 1) * 10 + } // Allocate pairs of rows that form a triangle shape. (Hope is // that at least two rows will be allocated near each other, so @@ -83,13 +99,13 @@ unsafe fn test_triangle() -> bool { // way.) for i in 0..COUNT / 2 { let size = idx_to_size(i); - ascend[2*i] = allocate(Layout::from_size_align(size, ALIGN).unwrap()); - ascend[2*i+1] = allocate(Layout::from_size_align(size, ALIGN).unwrap()); + ascend[2 * i] = allocate(Layout::from_size_align(size, ALIGN).unwrap()); + ascend[2 * i + 1] = allocate(Layout::from_size_align(size, ALIGN).unwrap()); } // Initialize each pair of rows to distinct value. 
for i in 0..COUNT / 2 { - let (p0, p1, size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i)); + let (p0, p1, size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i)); for j in 0..size { *p0.add(j) = i as u8; *p1.add(j) = i as u8; @@ -104,8 +120,8 @@ unsafe fn test_triangle() -> bool { for i in 0..COUNT / 2 { let size = idx_to_size(i); - deallocate(ascend[2*i], Layout::from_size_align(size, ALIGN).unwrap()); - deallocate(ascend[2*i+1], Layout::from_size_align(size, ALIGN).unwrap()); + deallocate(ascend[2 * i], Layout::from_size_align(size, ALIGN).unwrap()); + deallocate(ascend[2 * i + 1], Layout::from_size_align(size, ALIGN).unwrap()); } return true; @@ -115,68 +131,68 @@ unsafe fn test_triangle() -> bool { // realloc'ing each row from top to bottom, and checking all the // rows as we go. unsafe fn test_1(ascend: &mut [*mut u8]) { - let new_size = idx_to_size(COUNT-1); + let new_size = idx_to_size(COUNT - 1); let new = Layout::from_size_align(new_size, ALIGN).unwrap(); for i in 0..COUNT / 2 { - let (p0, p1, old_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i)); + let (p0, p1, old_size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i)); assert!(old_size < new_size); let old = Layout::from_size_align(old_size, ALIGN).unwrap(); - ascend[2*i] = reallocate(p0, old.clone(), new.clone()); + ascend[2 * i] = reallocate(p0, old.clone(), new.clone()); sanity_check(&*ascend); - ascend[2*i+1] = reallocate(p1, old.clone(), new.clone()); + ascend[2 * i + 1] = reallocate(p1, old.clone(), new.clone()); sanity_check(&*ascend); } } // Test 2: turn the square back into a triangle, top to bottom. unsafe fn test_2(ascend: &mut [*mut u8]) { - let old_size = idx_to_size(COUNT-1); + let old_size = idx_to_size(COUNT - 1); let old = Layout::from_size_align(old_size, ALIGN).unwrap(); for i in 0..COUNT / 2 { - let (p0, p1, new_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i)); + let (p0, p1, new_size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i)); assert!(new_size < old_size); let new = Layout::from_size_align(new_size, ALIGN).unwrap(); - ascend[2*i] = reallocate(p0, old.clone(), new.clone()); + ascend[2 * i] = reallocate(p0, old.clone(), new.clone()); sanity_check(&*ascend); - ascend[2*i+1] = reallocate(p1, old.clone(), new.clone()); + ascend[2 * i + 1] = reallocate(p1, old.clone(), new.clone()); sanity_check(&*ascend); } } // Test 3: turn triangle into a square, bottom to top. unsafe fn test_3(ascend: &mut [*mut u8]) { - let new_size = idx_to_size(COUNT-1); + let new_size = idx_to_size(COUNT - 1); let new = Layout::from_size_align(new_size, ALIGN).unwrap(); for i in (0..COUNT / 2).rev() { - let (p0, p1, old_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i)); + let (p0, p1, old_size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i)); assert!(old_size < new_size); let old = Layout::from_size_align(old_size, ALIGN).unwrap(); - ascend[2*i+1] = reallocate(p1, old.clone(), new.clone()); + ascend[2 * i + 1] = reallocate(p1, old.clone(), new.clone()); sanity_check(&*ascend); - ascend[2*i] = reallocate(p0, old.clone(), new.clone()); + ascend[2 * i] = reallocate(p0, old.clone(), new.clone()); sanity_check(&*ascend); } } // Test 4: turn the square back into a triangle, bottom to top. 
unsafe fn test_4(ascend: &mut [*mut u8]) { - let old_size = idx_to_size(COUNT-1); + let old_size = idx_to_size(COUNT - 1); let old = Layout::from_size_align(old_size, ALIGN).unwrap(); for i in (0..COUNT / 2).rev() { - let (p0, p1, new_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i)); + let (p0, p1, new_size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i)); assert!(new_size < old_size); let new = Layout::from_size_align(new_size, ALIGN).unwrap(); - ascend[2*i+1] = reallocate(p1, old.clone(), new.clone()); + ascend[2 * i + 1] = reallocate(p1, old.clone(), new.clone()); sanity_check(&*ascend); - ascend[2*i] = reallocate(p0, old.clone(), new.clone()); + ascend[2 * i] = reallocate(p0, old.clone(), new.clone()); sanity_check(&*ascend); } } diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs index fe3a864fe4ba5..7f8f461d57ba1 100644 --- a/src/test/ui/regions/regions-mock-codegen.rs +++ b/src/test/ui/regions/regions-mock-codegen.rs @@ -1,33 +1,33 @@ // run-pass #![allow(dead_code)] #![allow(non_camel_case_types)] - // pretty-expanded FIXME #23616 - #![feature(allocator_api)] -use std::alloc::{AllocRef, Global, Layout, handle_alloc_error}; +use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout}; use std::ptr::NonNull; struct arena(()); struct Bcx<'a> { - fcx: &'a Fcx<'a> + fcx: &'a Fcx<'a>, } struct Fcx<'a> { arena: &'a arena, - ccx: &'a Ccx + ccx: &'a Ccx, } struct Ccx { - x: isize + x: isize, } fn alloc(_bcx: &arena) -> &Bcx<'_> { unsafe { let layout = Layout::new::(); - let (ptr, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); + let (ptr, _) = Global + .alloc(layout, AllocInit::Uninitialized) + .unwrap_or_else(|_| handle_alloc_error(layout)); &*(ptr.as_ptr() as *const _) } } From 2526accdd35c564eee80b6453a0b4965e6a76afd Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Thu, 26 Mar 2020 17:11:47 +0100 Subject: [PATCH 02/23] Fix issues from review and unsoundness of `RawVec::into_box` --- src/liballoc/alloc.rs | 96 +++--- src/liballoc/alloc/tests.rs | 6 +- src/liballoc/boxed.rs | 15 +- src/liballoc/collections/btree/node.rs | 19 +- src/liballoc/lib.rs | 1 + src/liballoc/raw_vec.rs | 228 +++++++------- src/liballoc/raw_vec/tests.rs | 16 +- src/liballoc/rc.rs | 17 +- src/liballoc/sync.rs | 16 +- src/liballoc/tests/heap.rs | 9 +- src/liballoc/vec.rs | 3 +- src/libcore/alloc/mod.rs | 317 +++++++++----------- src/libstd/alloc.rs | 94 +++--- src/test/ui/allocator/custom.rs | 12 +- src/test/ui/allocator/xcrate-use.rs | 12 +- src/test/ui/realloc-16687.rs | 26 +- src/test/ui/regions/regions-mock-codegen.rs | 11 +- 17 files changed, 430 insertions(+), 468 deletions(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 26524f6296221..7eb9e0d5ea3e1 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -4,7 +4,7 @@ use core::intrinsics::{self, min_align_of_val, size_of_val}; use core::ptr::{NonNull, Unique}; -use core::usize; +use core::{mem, usize}; #[stable(feature = "alloc_module", since = "1.28.0")] #[doc(inline)] @@ -165,102 +165,96 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 { #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl AllocRef for Global { #[inline] - fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull, usize), AllocErr> { - let new_size = layout.size(); - if new_size == 0 { - Ok((layout.dangling(), 0)) - } else { - unsafe { + fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result { + unsafe { + if 
layout.size() == 0 { + Ok(MemoryBlock::new(layout.dangling(), layout)) + } else { let raw_ptr = match init { AllocInit::Uninitialized => alloc(layout), AllocInit::Zeroed => alloc_zeroed(layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok((ptr, new_size)) + Ok(MemoryBlock::new(ptr, layout)) } } } #[inline] - unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { - if layout.size() != 0 { - dealloc(ptr.as_ptr(), layout) + unsafe fn dealloc(&mut self, memory: MemoryBlock) { + if memory.size() != 0 { + dealloc(memory.ptr().as_ptr(), memory.layout()) } } #[inline] unsafe fn grow( &mut self, - ptr: NonNull, - layout: Layout, + memory: &mut MemoryBlock, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); + ) -> Result<(), AllocErr> { + let old_size = memory.size(); debug_assert!( new_size >= old_size, - "`new_size` must be greater than or equal to `layout.size()`" + "`new_size` must be greater than or equal to `memory.size()`" ); if old_size == new_size { - return Ok((ptr, new_size)); + return Ok(()); } + let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { + ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::MayMove if memory.size() == 0 => { + *memory = self.alloc(new_layout, init)? + } ReallocPlacement::MayMove => { - if old_size == 0 { - self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init) - } else { - // `realloc` probably checks for `new_size > old_size` or something similar. - // `new_size` must be greater than or equal to `old_size` due to the safety constraint, - // and `new_size` == `old_size` was caught before - intrinsics::assume(new_size > old_size); - let ptr = - NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?; - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - init.initialize_offset(ptr, new_layout, old_size); - Ok((ptr, new_size)) - } + // `realloc` probably checks for `new_size > old_size` or something similar. + intrinsics::assume(new_size > old_size); + let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size); + *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + memory.init_offset(init, old_size); } - ReallocPlacement::InPlace => Err(AllocErr), } + Ok(()) } #[inline] unsafe fn shrink( &mut self, - ptr: NonNull, - layout: Layout, + memory: &mut MemoryBlock, new_size: usize, placement: ReallocPlacement, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); + ) -> Result<(), AllocErr> { + let old_size = memory.size(); debug_assert!( new_size <= old_size, - "`new_size` must be smaller than or equal to `layout.size()`" + "`new_size` must be smaller than or equal to `memory.size()`" ); if old_size == new_size { - return Ok((ptr, new_size)); + return Ok(()); } + let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { + ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::MayMove if new_size == 0 => { + let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout); + let old_memory = mem::replace(memory, new_memory); + self.dealloc(old_memory) + } ReallocPlacement::MayMove => { - let ptr = if new_size == 0 { - self.dealloc(ptr, layout); - layout.dangling() - } else { - // `realloc` probably checks for `new_size > old_size` or something similar. 
- // `new_size` must be smaller than or equal to `old_size` due to the safety constraint, - // and `new_size` == `old_size` was caught before - intrinsics::assume(new_size < old_size); - NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)? - }; - Ok((ptr, new_size)) + // `realloc` probably checks for `new_size < old_size` or something similar. + intrinsics::assume(new_size < old_size); + let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size); + *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); } - ReallocPlacement::InPlace => Err(AllocErr), } + Ok(()) } } @@ -272,7 +266,7 @@ unsafe impl AllocRef for Global { unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { let layout = Layout::from_size_align_unchecked(size, align); match Global.alloc(layout, AllocInit::Uninitialized) { - Ok((ptr, _)) => ptr.as_ptr(), + Ok(memory) => memory.ptr().as_ptr(), Err(_) => handle_alloc_error(layout), } } @@ -288,7 +282,7 @@ pub(crate) unsafe fn box_free(ptr: Unique) { let size = size_of_val(ptr.as_ref()); let align = min_align_of_val(ptr.as_ref()); let layout = Layout::from_size_align_unchecked(size, align); - Global.dealloc(ptr.cast().into(), layout) + Global.dealloc(MemoryBlock::new(ptr.cast().into(), layout)) } /// Abort on memory allocation error or failure. diff --git a/src/liballoc/alloc/tests.rs b/src/liballoc/alloc/tests.rs index 6a2130a7192f0..34380ba41b4fd 100644 --- a/src/liballoc/alloc/tests.rs +++ b/src/liballoc/alloc/tests.rs @@ -8,17 +8,17 @@ use test::Bencher; fn allocate_zeroed() { unsafe { let layout = Layout::from_size_align(1024, 1).unwrap(); - let (ptr, _) = Global + let memory = Global .alloc(layout.clone(), AllocInit::Zeroed) .unwrap_or_else(|_| handle_alloc_error(layout)); - let mut i = ptr.cast::().as_ptr(); + let mut i = memory.ptr().cast::().as_ptr(); let end = i.add(layout.size()); while i < end { assert_eq!(*i, 0); i = i.offset(1); } - Global.dealloc(ptr, layout); + Global.dealloc(memory); } } diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 9690e311e9653..03d759e4a9ae4 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -143,7 +143,6 @@ use core::ops::{ }; use core::pin::Pin; use core::ptr::{self, NonNull, Unique}; -use core::slice; use core::task::{Context, Poll}; use crate::alloc::{self, AllocInit, AllocRef, Global}; @@ -199,7 +198,7 @@ impl Box { let ptr = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .0 + .ptr() .cast(); unsafe { Box::from_raw(ptr.as_ptr()) } } @@ -228,7 +227,7 @@ impl Box { let ptr = Global .alloc(layout, AllocInit::Zeroed) .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .0 + .ptr() .cast(); unsafe { Box::from_raw(ptr.as_ptr()) } } @@ -265,13 +264,7 @@ impl Box<[T]> { /// ``` #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit]> { - let layout = alloc::Layout::array::>(len).unwrap(); - let ptr = Global - .alloc(layout, AllocInit::Uninitialized) - .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .0 - .cast(); - unsafe { Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len)) } + unsafe { RawVec::with_capacity(len).into_box(len) } } } @@ -776,7 +769,7 @@ impl From<&[T]> for Box<[T]> { let buf = RawVec::with_capacity(len); unsafe { ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len); - buf.into_box().assume_init() + buf.into_box(slice.len()).assume_init() } } } diff --git a/src/liballoc/collections/btree/node.rs 
b/src/liballoc/collections/btree/node.rs index 6ebb98c42cd4f..8b4daa28ee8bc 100644 --- a/src/liballoc/collections/btree/node.rs +++ b/src/liballoc/collections/btree/node.rs @@ -31,6 +31,7 @@ // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges. // This implies that even an empty internal node has at least one edge. +use core::alloc::MemoryBlock; use core::cmp::Ordering; use core::marker::PhantomData; use core::mem::{self, MaybeUninit}; @@ -227,7 +228,10 @@ impl Root { } unsafe { - Global.dealloc(NonNull::from(top).cast(), Layout::new::>()); + Global.dealloc(MemoryBlock::new( + NonNull::from(top).cast(), + Layout::new::>(), + )); } } } @@ -392,14 +396,14 @@ impl NodeRef { let height = self.height; let node = self.node; let ret = self.ascend().ok(); - Global.dealloc( + Global.dealloc(MemoryBlock::new( node.cast(), if height > 0 { Layout::new::>() } else { Layout::new::>() }, - ); + )); ret } } @@ -1142,7 +1146,7 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: (*left_node.as_leaf_mut()).len += right_len as u16 + 1; - if self.node.height > 1 { + let layout = if self.node.height > 1 { ptr::copy_nonoverlapping( right_node.cast_unchecked().as_internal().edges.as_ptr(), left_node @@ -1159,10 +1163,11 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: .correct_parent_link(); } - Global.dealloc(right_node.node.cast(), Layout::new::>()); + Layout::new::>() } else { - Global.dealloc(right_node.node.cast(), Layout::new::>()); - } + Layout::new::>() + }; + Global.dealloc(MemoryBlock::new(right_node.node.cast(), layout)); Handle::new_edge(self.node, self.idx) } diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 5857b79d5eeeb..121c1cde548cb 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -100,6 +100,7 @@ #![feature(lang_items)] #![feature(libc)] #![cfg_attr(not(bootstrap), feature(negative_impls))] +#![feature(new_uninit)] #![feature(nll)] #![feature(optin_builtin_traits)] #![feature(pattern)] diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 3a108adb218ce..aee2367bd951b 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -1,6 +1,7 @@ #![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")] #![doc(hidden)] +use core::alloc::MemoryBlock; use core::cmp; use core::mem::{self, MaybeUninit}; use core::ops::Drop; @@ -24,6 +25,9 @@ mod tests; /// involved. This type is excellent for building your own data structures like Vec and VecDeque. /// In particular: /// +/// * Produces `Unique::empty()` on zero-sized types. +/// * Produces `Unique::empty()` on zero-length allocations. +/// * Avoids freeing `Unique::empty()`. /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics). /// * Guards against 32-bit systems allocating more than isize::MAX bytes. /// * Guards against overflowing your length. @@ -44,38 +48,7 @@ mod tests; pub struct RawVec { ptr: Unique, cap: usize, - a: A, -} - -impl RawVec { - /// Like `new`, but parameterized over the choice of allocator for - /// the returned `RawVec`. - pub const fn new_in(a: A) -> Self { - // `cap: 0` means "unallocated". zero-sized allocations are handled by `AllocRef` - Self { ptr: Unique::empty(), cap: 0, a } - } - - /// Like `with_capacity`, but parameterized over the choice of - /// allocator for the returned `RawVec`. 
- #[inline] - pub fn with_capacity_in(capacity: usize, a: A) -> Self { - Self::allocate_in(capacity, Uninitialized, a) - } - - /// Like `with_capacity_zeroed`, but parameterized over the choice - /// of allocator for the returned `RawVec`. - #[inline] - pub fn with_capacity_zeroed_in(capacity: usize, a: A) -> Self { - Self::allocate_in(capacity, Zeroed, a) - } - - fn allocate_in(capacity: usize, init: AllocInit, mut a: A) -> Self { - let layout = Layout::array::(capacity).unwrap_or_else(|_| capacity_overflow()); - alloc_guard(layout.size()).unwrap_or_else(|_| capacity_overflow()); - - let (ptr, excess) = a.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout)); - Self { ptr: ptr.cast().into(), cap: Self::capacity_from_bytes(excess), a } - } + alloc: A, } impl RawVec { @@ -126,23 +99,7 @@ impl RawVec { pub fn with_capacity_zeroed(capacity: usize) -> Self { Self::with_capacity_zeroed_in(capacity, Global) } -} -impl RawVec { - /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator. - /// - /// # Undefined Behavior - /// - /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`. - /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems). - /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed. - #[inline] - pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self { - Self { ptr: Unique::new_unchecked(ptr), cap: capacity, a } - } -} - -impl RawVec { /// Reconstitutes a `RawVec` from a pointer and capacity. /// /// # Undefined Behavior @@ -166,6 +123,55 @@ impl RawVec { } impl RawVec { + /// Like `new`, but parameterized over the choice of allocator for + /// the returned `RawVec`. + pub const fn new_in(alloc: A) -> Self { + // `cap: 0` means "unallocated". zero-sized types are ignored. + Self { ptr: Unique::empty(), cap: 0, alloc } + } + + /// Like `with_capacity`, but parameterized over the choice of + /// allocator for the returned `RawVec`. + #[inline] + pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { + Self::allocate_in(capacity, Uninitialized, alloc) + } + + /// Like `with_capacity_zeroed`, but parameterized over the choice + /// of allocator for the returned `RawVec`. + #[inline] + pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self { + Self::allocate_in(capacity, Zeroed, alloc) + } + + fn allocate_in(capacity: usize, init: AllocInit, mut alloc: A) -> Self { + if mem::size_of::() == 0 { + Self::new_in(alloc) + } else { + let layout = Layout::array::(capacity).unwrap_or_else(|_| capacity_overflow()); + alloc_guard(layout.size()).unwrap_or_else(|_| capacity_overflow()); + + let memory = alloc.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout)); + Self { + ptr: memory.ptr().cast().into(), + cap: Self::capacity_from_bytes(memory.size()), + alloc, + } + } + } + + /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator. + /// + /// # Undefined Behavior + /// + /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`. + /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems). + /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed. + #[inline] + pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self { + Self { ptr: Unique::new_unchecked(ptr), cap: capacity, alloc: a } + } + /// Gets a raw pointer to the start of the allocation. 
Note that this is /// `Unique::empty()` if `capacity == 0` or `T` is zero-sized. In the former case, you must /// be careful. @@ -183,16 +189,16 @@ impl RawVec { /// Returns a shared reference to the allocator backing this `RawVec`. pub fn alloc(&self) -> &A { - &self.a + &self.alloc } /// Returns a mutable reference to the allocator backing this `RawVec`. pub fn alloc_mut(&mut self) -> &mut A { - &mut self.a + &mut self.alloc } - fn current_layout(&self) -> Option { - if self.cap == 0 { + fn current_memory(&self) -> Option { + if mem::size_of::() == 0 || self.cap == 0 { None } else { // We have an allocated chunk of memory, so we can bypass runtime @@ -200,7 +206,8 @@ impl RawVec { unsafe { let align = mem::align_of::(); let size = mem::size_of::() * self.cap; - Some(Layout::from_size_align_unchecked(size, align)) + let layout = Layout::from_size_align_unchecked(size, align); + Some(MemoryBlock::new(self.ptr.cast().into(), layout)) } } } @@ -454,14 +461,19 @@ impl RawVec { /// Returns if the buffer needs to grow to fulfill the needed extra capacity. /// Mainly used to make inlining reserve-calls possible without inlining `grow`. fn needs_to_grow(&self, used_capacity: usize, needed_extra_capacity: usize) -> bool { - needed_extra_capacity > self.capacity().wrapping_sub(used_capacity) + mem::size_of::() != 0 + && needed_extra_capacity > self.capacity().wrapping_sub(used_capacity) } fn capacity_from_bytes(excess: usize) -> usize { - match mem::size_of::() { - 0 => usize::MAX, - elem_size => excess / elem_size, - } + debug_assert_ne!(mem::size_of::(), 0); + excess / mem::size_of::() + } + + fn set_memory(&mut self, memory: MemoryBlock) { + self.ptr = memory.ptr().cast().into(); + self.cap = Self::capacity_from_bytes(memory.size()); + drop(memory); } /// Single method to handle all possibilities of growing the buffer. @@ -471,9 +483,9 @@ impl RawVec { placement: ReallocPlacement, init: AllocInit, ) -> Result<(), TryReserveError> { - let elem_size = mem::size_of::(); - let new_layout = match strategy { + let layout = match strategy { Double => unsafe { + let elem_size = mem::size_of::(); if elem_size == 0 { // Since we return a capacity of `usize::MAX` when `elem_size` is // 0, getting to here necessarily means the `RawVec` is overfull. @@ -511,24 +523,24 @@ impl RawVec { } }; - let allocation = if let Some(old_layout) = self.current_layout() { - debug_assert!(old_layout.align() == new_layout.align()); + let memory = if let Some(mut memory) = self.current_memory() { + debug_assert_eq!(memory.align(), layout.align()); unsafe { - self.a.grow(self.ptr.cast().into(), old_layout, new_layout.size(), placement, init) - } + self.alloc + .grow(&mut memory, layout.size(), placement, init) + .map_err(|_| AllocError { layout, non_exhaustive: () })? + }; + memory } else { match placement { - MayMove => self.a.alloc(new_layout, init), + MayMove => self.alloc.alloc(layout, init), InPlace => Err(AllocErr), } + .map_err(|_| AllocError { layout, non_exhaustive: () })? 
}; - allocation - .map(|(ptr, excess)| { - self.ptr = ptr.cast().into(); - self.cap = Self::capacity_from_bytes(excess); - }) - .map_err(|_| TryReserveError::AllocError { layout: new_layout, non_exhaustive: () }) + self.set_memory(memory); + Ok(()) } fn shrink( @@ -538,64 +550,52 @@ impl RawVec { ) -> Result<(), TryReserveError> { assert!(amount <= self.cap, "Tried to shrink to a larger capacity"); - let elem_size = mem::size_of::(); - let old_layout = - if let Some(layout) = self.current_layout() { layout } else { return Ok(()) }; - let old_ptr = self.ptr.cast().into(); - let new_size = amount * elem_size; - - let allocation = unsafe { - if amount == 0 && placement == MayMove { - self.dealloc_buffer(); - Ok((old_layout.dangling(), 0)) - } else { - self.a.shrink(old_ptr, old_layout, new_size, placement) - } - }; + let mut memory = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; + let new_size = amount * mem::size_of::(); - allocation - .map(|(ptr, excess)| { - self.ptr = ptr.cast().into(); - self.cap = Self::capacity_from_bytes(excess); - }) - .map_err(|_| TryReserveError::AllocError { - layout: unsafe { Layout::from_size_align_unchecked(new_size, old_layout.align()) }, - non_exhaustive: (), - }) + unsafe { + self.alloc.shrink(&mut memory, new_size, placement).map_err(|_| { + TryReserveError::AllocError { + layout: Layout::from_size_align_unchecked(new_size, memory.align()), + non_exhaustive: (), + } + })?; + } + + self.set_memory(memory); + Ok(()) } } impl RawVec { - /// Converts the entire buffer into `Box<[T]>`. + /// Converts the entire buffer into `Box<[T]>` with the specified `len`. /// /// Note that this will correctly reconstitute any `cap` changes /// that may have been performed. (See description of type for details.) - pub fn into_box(self) -> Box<[MaybeUninit]> { - unsafe { - // NOTE: not calling `capacity()` here; actually using the real `cap` field! - let slice = slice::from_raw_parts_mut(self.ptr() as *mut MaybeUninit, self.cap); - let output = Box::from_raw(slice); - mem::forget(self); - output - } - } -} + /// + /// # Safety + /// + /// * `len` must be smaller than or equal to `self.capacity()` + pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit]> { + debug_assert!( + len <= self.capacity(), + "`len` must be smaller than or equal to `self.capacity()`" + ); -impl RawVec { - /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. - pub unsafe fn dealloc_buffer(&mut self) { - if let Some(layout) = self.current_layout() { - self.a.dealloc(self.ptr.cast().into(), layout); - self.ptr = Unique::empty(); - self.cap = 0; - } + // NOTE: not calling `capacity()` here; actually using the real `cap` field! + let slice = slice::from_raw_parts_mut(self.ptr() as *mut MaybeUninit, len); + let output = Box::from_raw(slice); + mem::forget(self); + output } } unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. 
fn drop(&mut self) { - unsafe { self.dealloc_buffer() } + if let Some(memory) = self.current_memory() { + unsafe { self.alloc.dealloc(memory) } + } } } diff --git a/src/liballoc/raw_vec/tests.rs b/src/liballoc/raw_vec/tests.rs index a2d6cc63c92f4..4bdd36ed63a11 100644 --- a/src/liballoc/raw_vec/tests.rs +++ b/src/liballoc/raw_vec/tests.rs @@ -1,5 +1,4 @@ use super::*; -use core::ptr::NonNull; #[test] fn allocator_param() { @@ -13,6 +12,7 @@ fn allocator_param() { // // Instead, this just checks that the `RawVec` methods do at // least go through the Allocator API when it reserves + // storage. // A dumb allocator that consumes a fixed amount of fuel @@ -21,11 +21,7 @@ fn allocator_param() { fuel: usize, } unsafe impl AllocRef for BoundedAlloc { - fn alloc( - &mut self, - layout: Layout, - init: AllocInit, - ) -> Result<(NonNull, usize), AllocErr> { + fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result { let size = layout.size(); if size > self.fuel { return Err(AllocErr); @@ -38,16 +34,16 @@ fn allocator_param() { err @ Err(_) => err, } } - unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { - Global.dealloc(ptr, layout) + unsafe fn dealloc(&mut self, memory: MemoryBlock) { + Global.dealloc(memory) } } let a = BoundedAlloc { fuel: 500 }; let mut v: RawVec = RawVec::with_capacity_in(50, a); - assert_eq!(v.a.fuel, 450); + assert_eq!(v.alloc.fuel, 450); v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel) - assert_eq!(v.a.fuel, 250); + assert_eq!(v.alloc.fuel, 250); } #[test] diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 495e196df40a2..3625caf5f237a 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -234,6 +234,7 @@ use crate::boxed::Box; #[cfg(test)] use std::boxed::Box; +use core::alloc::MemoryBlock; use core::any::Any; use core::array::LengthAtMost32; use core::borrow; @@ -936,12 +937,12 @@ impl Rc { let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); // Allocate for the layout. - let (mem, _) = Global + let mem = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the RcBox - let inner = mem_to_rcbox(mem.as_ptr()); + let inner = mem_to_rcbox(mem.ptr().as_ptr()); debug_assert_eq!(Layout::for_value(&*inner), layout); ptr::write(&mut (*inner).strong, Cell::new(1)); @@ -1031,7 +1032,7 @@ impl Rc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(self.mem, self.layout); + Global.dealloc(MemoryBlock::new(self.mem, self.layout)); } } } @@ -1131,7 +1132,10 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { self.dec_weak(); if self.weak() == 0 { - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + Global.dealloc(MemoryBlock::new( + self.ptr.cast(), + Layout::for_value(self.ptr.as_ref()), + )); } } } @@ -1939,7 +1943,10 @@ impl Drop for Weak { // the strong pointers have disappeared. if inner.weak() == 0 { unsafe { - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + Global.dealloc(MemoryBlock::new( + self.ptr.cast(), + Layout::for_value(self.ptr.as_ref()), + )); } } } diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index 048c89d12809e..b5e6d669f80ed 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -6,6 +6,7 @@ //! //! 
[arc]: struct.Arc.html +use core::alloc::MemoryBlock; use core::any::Any; use core::array::LengthAtMost32; use core::borrow; @@ -770,7 +771,7 @@ impl Arc { if self.inner().weak.fetch_sub(1, Release) == 1 { acquire!(self.inner().weak); - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) + Global.dealloc(MemoryBlock::new(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))) } } @@ -814,12 +815,12 @@ impl Arc { // reference (see #54908). let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - let (mem, _) = Global + let mem = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the ArcInner - let inner = mem_to_arcinner(mem.as_ptr()); + let inner = mem_to_arcinner(mem.ptr().as_ptr()); debug_assert_eq!(Layout::for_value(&*inner), layout); ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); @@ -909,7 +910,7 @@ impl Arc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(self.mem.cast(), self.layout); + Global.dealloc(MemoryBlock::new(self.mem.cast(), self.layout)); } } } @@ -1734,7 +1735,12 @@ impl Drop for Weak { if inner.weak.fetch_sub(1, Release) == 1 { acquire!(inner.weak); - unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } + unsafe { + Global.dealloc(MemoryBlock::new( + self.ptr.cast(), + Layout::for_value(self.ptr.as_ref()), + )) + } } } } diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index 690ae84a5df68..4b0d7bc1f449e 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -1,4 +1,4 @@ -use std::alloc::{AllocInit, AllocRef, Global, Layout, System}; +use std::alloc::{AllocInit, AllocRef, Global, Layout, MemoryBlock, System}; /// Issue #45955 and #62251. #[test] @@ -26,7 +26,7 @@ fn check_overalign_requests(mut allocator: T) { AllocInit::Uninitialized, ) .unwrap() - .0 + .ptr() }) .collect(); for &ptr in &pointers { @@ -39,7 +39,10 @@ fn check_overalign_requests(mut allocator: T) { // Clean up for &ptr in &pointers { - allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap()) + allocator.dealloc(MemoryBlock::new( + ptr, + Layout::from_size_align(size, align).unwrap(), + )) } } } diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index 528a4f732934e..ba49f043d46a7 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -678,8 +678,9 @@ impl Vec { unsafe { self.shrink_to_fit(); let buf = ptr::read(&self.buf); + let len = self.len(); mem::forget(self); - buf.into_box().assume_init() + buf.into_box(len).assume_init() } } diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index 0c5a70bee1a56..e693f50846bdb 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -11,7 +11,8 @@ pub use self::global::GlobalAlloc; pub use self::layout::{Layout, LayoutErr}; use crate::fmt; -use crate::ptr::{self, NonNull}; +use crate::mem; +use crate::ptr::{self, NonNull, Unique}; /// The `AllocErr` error indicates an allocation failure /// that may be due to resource exhaustion or to @@ -41,49 +42,91 @@ pub enum AllocInit { Zeroed, } -impl AllocInit { - /// Initialize the memory block referenced by `ptr` and specified by `Layout`. +/// Represents a block of allocated memory returned by an allocator. 
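+///
+/// # Examples
+///
+/// A sketch of the intended flow: the allocator hands out a `MemoryBlock`, the caller may query
+/// its actual size and alignment, and eventually passes the block back to `dealloc`. `Global` is
+/// used here purely for illustration:
+///
+/// ```
+/// #![feature(allocator_api)]
+/// use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+///
+/// let layout = Layout::new::<[u32; 4]>();
+/// let memory = Global
+///     .alloc(layout, AllocInit::Zeroed)
+///     .unwrap_or_else(|_| handle_alloc_error(layout));
+///
+/// // The allocator may overallocate, so the block can be larger than requested.
+/// assert!(memory.size() >= layout.size());
+/// assert_eq!(memory.align(), layout.align());
+///
+/// unsafe { Global.dealloc(memory) }
+/// ```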
+#[derive(Debug)]
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[must_use = "`MemoryBlock` should be passed to `AllocRef::dealloc`"]
+pub struct MemoryBlock {
+    ptr: Unique<u8>,
+    layout: Layout,
+}
+
+impl MemoryBlock {
+    /// Creates a new `MemoryBlock`.
     ///
-    /// This behaves like calling [`AllocInit::initialize_offset(ptr, layout, 0)`][off].
+    /// # Safety
     ///
-    /// [off]: AllocInit::initialize_offset
+    /// * The block must be allocated with the same alignment as [`layout.align()`], and
+    /// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
+    ///   - `min` is the size requested when allocating the block, and
+    ///   - `max` is the size of the memory block.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const unsafe fn new(ptr: NonNull<u8>, layout: Layout) -> Self {
+        Self { ptr: Unique::new_unchecked(ptr.as_ptr()), layout }
+    }
+
+    /// Acquires the underlying `NonNull<u8>` pointer.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const fn ptr(&self) -> NonNull<u8> {
+        // SAFETY: Unique is always non-null
+        unsafe { NonNull::new_unchecked(self.ptr.as_ptr()) }
+    }
+
+    /// Returns the layout describing the memory block.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const fn layout(&self) -> Layout {
+        self.layout
+    }
+
+    /// Returns the size of the memory block.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const fn size(&self) -> usize {
+        self.layout().size()
+    }
+
+    /// Returns the minimum alignment of the memory block.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const fn align(&self) -> usize {
+        self.layout().align()
+    }
+
+    /// Initializes the memory block as specified by `init`.
     ///
-    /// # Safety
+    /// This behaves like calling [`MemoryBlock::init_offset(init, 0)`][off].
     ///
-    /// * `layout` must [*fit*] the block of memory referenced by `ptr`
+    /// [off]: MemoryBlock::init_offset
     ///
     /// [*fit*]: trait.AllocRef.html#memory-fitting
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
-    pub unsafe fn initialize(self, ptr: NonNull, layout: Layout) {
-        self.initialize_offset(ptr, layout, 0)
+    pub fn init(&mut self, init: AllocInit) {
+        // SAFETY: 0 is always smaller than or equal to the size
+        unsafe { self.init_offset(init, 0) }
     }
 
-    /// Initialize the memory block referenced by `ptr` and specified by `Layout` at the specified
-    /// `offset`.
+    /// Initializes the memory block as specified by `init`, starting at the given `offset`.
     ///
     /// This is a no-op for [`AllocInit::Uninitialized`] and writes zeroes for [`AllocInit::Zeroed`]
-    /// at `ptr + offset` until `ptr + layout.size()`.
+    /// from `ptr + offset` until `ptr + size()`.
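+    ///
+    /// # Examples
+    ///
+    /// A sketch of the typical use after growing a block without initialization: only the newly
+    /// added tail needs zeroing, so initialization starts at the block's previous size
+    /// (`old_size` is assumed to hold that value):
+    ///
+    /// ```ignore (illustrative fragment)
+    /// // `memory` grew from `old_size` bytes to its current size; zero just the new bytes.
+    /// unsafe { memory.init_offset(AllocInit::Zeroed, old_size) }
+    /// ```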
/// /// # Safety /// - /// * `layout` must [*fit*] the block of memory referenced by `ptr` - /// - /// * `offset` must be smaller than or equal to `layout.size()` + /// * `offset` must be smaller than or equal to `size()` /// /// [*fit*]: trait.AllocRef.html#memory-fitting + #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub unsafe fn initialize_offset(self, ptr: NonNull, layout: Layout, offset: usize) { - debug_assert!( - offset <= layout.size(), - "`offset` must be smaller than or equal to `layout.size()`" - ); - match self { + pub unsafe fn init_offset(&mut self, init: AllocInit, offset: usize) { + debug_assert!(offset <= self.size(), "`offset` must be smaller than or equal to `size()`"); + match init { AllocInit::Uninitialized => (), AllocInit::Zeroed => { - let new_ptr = ptr.as_ptr().add(offset); - let size = layout.size() - offset; - ptr::write_bytes(new_ptr, 0, size); + self.ptr().as_ptr().add(offset).write_bytes(0, self.size() - offset) } } } @@ -116,70 +159,23 @@ pub enum ReallocPlacement { /// /// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `AllocRef`. If an underlying /// allocator does not support this (like jemalloc) or return a null pointer (such as -/// `libc::malloc`), this case must be caught. [`Layout::dangling()`][] then can be used to create -/// an aligned `NonNull`. -/// -/// ### Currently allocated memory -/// -/// Some of the methods require that a memory block be *currently allocated* via an allocator. This -/// means that: -/// -/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or -/// [`shrink`], and -/// -/// * the memory block has not been subsequently deallocated, where blocks are either deallocated -/// directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or -/// [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer -/// remains valid. -/// -/// [`alloc`]: AllocRef::alloc -/// [`grow`]: AllocRef::grow -/// [`shrink`]: AllocRef::shrink -/// [`dealloc`]: AllocRef::dealloc -/// -/// ### Memory fitting -/// -/// Some of the methods require that a layout *fit* a memory block. What it means for a layout to -/// "fit" a memory block means (or equivalently, for a memory block to "fit" a layout) is that the -/// following conditions must hold: -/// -/// * The block must be allocated with the same alignment as [`layout.align()`], and -/// -/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where: -/// - `min` is the size of the layout most recently used to allocate the block, and -/// - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`]. -/// -/// [`layout.align()`]: Layout::align -/// [`layout.size()`]: Layout::size -/// -/// ### Notes -/// -/// * if a layout `k` fits a memory block (denoted by `ptr`) currently allocated via an allocator -/// `a`, then it is legal to use that layout to deallocate it, i.e., -/// [`a.dealloc(ptr, k);`][`dealloc`], and -/// -/// * if an allocator does not support overallocating, it is fine to simply return -/// [`layout.size()`] as the actual size. +/// `libc::malloc`), this case must be caught. /// /// # Safety /// -/// * Pointers returned from an allocator must point to valid memory and retain their validity until -/// the instance and all of its clones are dropped, -/// -/// * cloning or moving the allocator must not invalidate pointers returned from this allocator. 
-///   A cloned allocator must behave like the same allocator, and
+/// * Memory blocks returned from an allocator must point to valid memory and retain their validity
+///   until the instance and all of its clones are dropped, and
 ///
-/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other
-///   method of the allocator.
+/// * cloning or moving the allocator must not invalidate memory blocks returned from this
+///   allocator. A cloned allocator must behave like the same allocator.
 ///
 /// [*currently allocated*]: #currently-allocated-memory
 #[unstable(feature = "allocator_api", issue = "32838")]
 pub unsafe trait AllocRef {
-    /// On success, returns a pointer meeting the size and alignment guarantees of `layout` and the
-    /// actual size of the allocated block, which is greater than or equal to `layout.size()`.
+    /// On success, returns a memory block meeting the size and alignment guarantees of `layout`.
     ///
-    /// The returned block of storage is initialized as specified by [`init`], all the way up to
-    /// the returned `actual_size`.
+    /// The returned block may have a larger size than specified by `layout.size()` and is
+    /// initialized as specified by [`init`], all the way up to the returned size of the block.
     ///
     /// [`init`]: AllocInit
     ///
@@ -196,58 +192,32 @@ pub unsafe trait AllocRef {
     /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
     ///
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull, usize), AllocErr>;
+    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr>;
 
-    /// Deallocates the memory referenced by `ptr`.
+    /// Deallocates the memory denoted by `memory`.
     ///
     /// # Safety
     ///
-    /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator,
-    ///
-    /// * `layout` must [*fit*] that block of memory, and
-    ///
-    /// * the alignment of the `layout` must match the alignment used to allocate that block of
-    ///   memory.
-    ///
-    /// [*currently allocated*]: #currently-allocated-memory
-    /// [*fit*]: #memory-fitting
-    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);
+    /// `memory` must be a memory block returned by this allocator.
+    unsafe fn dealloc(&mut self, memory: MemoryBlock);
 
-    /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.
-    ///
-    /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for
-    /// holding data described by a new layout with `layout`’s alignment and a size given by
-    /// `new_size`. To accomplish this, the allocator may extend the allocation referenced by `ptr`
-    /// to fit the new layout.
-    ///
-    /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
-    /// transferred to this allocator. The memory may or may not have been freed, and should be
-    /// considered unusable (unless of course it was transferred back to the caller again via the
-    /// return value of this method).
-    ///
-    /// If this method returns `Err`, then ownership of the memory block has not been transferred to
-    /// this allocator, and the contents of the memory block are unaltered.
+    /// Attempts to extend the memory block.
     ///
     /// The behavior of how the allocator tries to grow the memory is specified by [`placement`].
-    /// The first `layout.size()` bytes of memory are preserved or copied as appropriate from `ptr`,
-    /// and the remaining bytes, from `layout.size()` to the returned actual size, are initialized
-    /// according to [`init`].
+    /// The first `memory.size()` bytes are preserved or copied as appropriate from `ptr`, and the
+    /// remaining bytes up to the new `memory.size()` are initialized according to [`init`].
     ///
     /// [`placement`]: ReallocPlacement
     /// [`init`]: AllocInit
     ///
     /// # Safety
     ///
-    /// * `ptr` must be [*currently allocated*] via this allocator,
-    ///
-    /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.)
-    ///
-    // We can't require that `new_size` is strictly greater than `layout.size()` because of ZSTs.
+    /// * `memory` must be a memory block returned by this allocator.
+    // We can't require that `new_size` is strictly greater than `memory.size()` because of ZSTs.
    // An alternative would be
-    // * `new_size` must be strictly greater than `layout.size()`, or both are zero
-    /// * `new_size` must be greater than or equal to `layout.size()`
-    ///
-    /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow
+    // * `new_size` must be strictly greater than `memory.size()`, or both are zero
+    /// * `new_size` must be greater than or equal to `memory.size()`
+    /// * `new_size`, when rounded up to the nearest multiple of `memory.align()`, must not overflow
     ///   (i.e., the rounded value must be less than `usize::MAX`).
     ///
     /// [*currently allocated*]: #currently-allocated-memory
@@ -268,64 +238,50 @@ pub unsafe trait AllocRef {
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
     unsafe fn grow(
         &mut self,
-        ptr: NonNull<u8>,
-        layout: Layout,
+        memory: &mut MemoryBlock,
         new_size: usize,
         placement: ReallocPlacement,
         init: AllocInit,
-    ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let old_size = layout.size();
-        debug_assert!(
-            new_size >= old_size,
-            "`new_size` must be greater than or equal to `layout.size()`"
-        );
-
-        if new_size == old_size {
-            return Ok((ptr, new_size));
-        }
-
+    ) -> Result<(), AllocErr> {
         match placement {
+            ReallocPlacement::InPlace => Err(AllocErr),
             ReallocPlacement::MayMove => {
-                let (new_ptr, alloc_size) =
-                    self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)?;
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), old_size);
-                self.dealloc(ptr, layout);
-                Ok((new_ptr, alloc_size))
+                let old_size = memory.size();
+                debug_assert!(
+                    new_size >= old_size,
+                    "`new_size` must be greater than or equal to `memory.size()`"
+                );
+
+                if new_size == old_size {
+                    return Ok(());
+                }
+
+                let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
+                let new_memory = self.alloc(new_layout, init)?;
+                ptr::copy_nonoverlapping(
+                    memory.ptr().as_ptr(),
+                    new_memory.ptr().as_ptr(),
+                    old_size,
+                );
+                self.dealloc(mem::replace(memory, new_memory));
+                Ok(())
             }
-            ReallocPlacement::InPlace => Err(AllocErr),
         }
     }
 
-    /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.
+    /// Attempts to shrink the memory block.
     ///
-    /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for
-    /// holding data described by a new layout with `layout`’s alignment and a size given by
-    /// `new_size`. To accomplish this, the allocator may shrink the allocation referenced by `ptr`
-    /// to fit the new layout.
-    ///
-    /// The behavior of how the allocator tries to shrink the memory is specified by
-    /// [`placement`].
-    ///
-    /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
-    /// transferred to this allocator. The memory may or may not have been freed, and should be
-    /// considered unusable unless it was transferred back to the caller again via the
-    /// return value of this method.
-    ///
-    /// If this method returns `Err`, then ownership of the memory block has not been transferred to
-    /// this allocator, and the contents of the memory block are unaltered.
+    /// The behavior of how the allocator tries to shrink the memory is specified by [`placement`].
     ///
     /// [`placement`]: ReallocPlacement
     ///
     /// # Safety
     ///
-    /// * `ptr` must be [*currently allocated*] via this allocator,
-    ///
-    /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.)
-    ///
-    // We can't require that `new_size` is strictly smaller than `layout.size()` because of ZSTs.
+    /// * `memory` must be a memory block returned by this allocator.
+    // We can't require that `new_size` is strictly smaller than `memory.size()` because of ZSTs.
     // An alternative would be
-    // * `new_size must be strictly smaller than `layout.size()` or both are zero
-    /// * `new_size` must be smaller than or equal to `layout.size()`
+    // * `new_size must be strictly smaller than `memory.size()` or both are zero
+    /// * `new_size` must be smaller than or equal to `memory.size()`
     ///
     /// [*currently allocated*]: #currently-allocated-memory
     /// [*fit*]: #memory-fitting
@@ -333,7 +289,7 @@ pub unsafe trait AllocRef {
     /// # Errors
     ///
     /// Returns `Err` if the new layout does not meet the allocator's size and alignment
-    /// constraints of the allocator, or if shrinking otherwise fails.
+    /// constraints, or if shrinking otherwise fails.
     ///
     /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
     /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
@@ -345,32 +301,33 @@ pub unsafe trait AllocRef {
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
     unsafe fn shrink(
         &mut self,
-        ptr: NonNull<u8>,
-        layout: Layout,
+        memory: &mut MemoryBlock,
         new_size: usize,
         placement: ReallocPlacement,
-    ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let old_size = layout.size();
-        debug_assert!(
-            new_size <= old_size,
-            "`new_size` must be smaller than or equal to `layout.size()`"
-        );
-
-        if new_size == old_size {
-            return Ok((ptr, new_size));
-        }
-
+    ) -> Result<(), AllocErr> {
         match placement {
+            ReallocPlacement::InPlace => Err(AllocErr),
             ReallocPlacement::MayMove => {
-                let (new_ptr, alloc_size) = self.alloc(
-                    Layout::from_size_align_unchecked(new_size, layout.align()),
-                    AllocInit::Uninitialized,
-                )?;
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), new_size);
-                self.dealloc(ptr, layout);
-                Ok((new_ptr, alloc_size))
+                let old_size = memory.size();
+                debug_assert!(
+                    new_size <= old_size,
+                    "`new_size` must be smaller than or equal to `layout.size()`"
+                );
+
+                if new_size == old_size {
+                    return Ok(());
+                }
+
+                let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
+                let new_memory = self.alloc(new_layout, AllocInit::Uninitialized)?;
+                ptr::copy_nonoverlapping(
+                    memory.ptr().as_ptr(),
+                    new_memory.ptr().as_ptr(),
+                    new_size,
+                );
+                self.dealloc(mem::replace(memory, new_memory));
+                Ok(())
             }
-            ReallocPlacement::InPlace => Err(AllocErr),
         }
     }
 }
diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs
index 9ad0eae705fc8..f295565bec348 100644
--- a/src/libstd/alloc.rs
+++ b/src/libstd/alloc.rs
@@ -137,104 +137,98 @@ pub struct System;
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for System {
     #[inline]
-    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let new_size = layout.size();
-        if new_size == 0 {
-            Ok((layout.dangling(), 0))
-        } else {
-            unsafe {
+    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+        unsafe {
+            if layout.size() == 0 {
+                Ok(MemoryBlock::new(layout.dangling(), layout))
+            } else {
                 let raw_ptr = match init {
                     AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout),
                     AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout),
                 };
                 let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
-                Ok((ptr, new_size))
+                Ok(MemoryBlock::new(ptr, layout))
             }
         }
     }

     #[inline]
-    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
-        if layout.size() != 0 {
-            GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
+    unsafe fn dealloc(&mut self, memory: MemoryBlock) {
+        if memory.size() != 0 {
+            GlobalAlloc::dealloc(self, memory.ptr().as_ptr(), memory.layout())
         }
     }

     #[inline]
     unsafe fn grow(
         &mut self,
-        ptr: NonNull<u8>,
-        layout: Layout,
+        memory: &mut MemoryBlock,
         new_size: usize,
         placement: ReallocPlacement,
         init: AllocInit,
-    ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let old_size = layout.size();
+    ) -> Result<(), AllocErr> {
+        let old_size = memory.size();
         debug_assert!(
             new_size >= old_size,
-            "`new_size` must be greater than or equal to `layout.size()`"
+            "`new_size` must be greater than or equal to `memory.size()`"
         );

         if old_size == new_size {
-            return Ok((ptr, new_size));
+            return Ok(());
         }

+        let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
         match placement {
+            ReallocPlacement::InPlace => return Err(AllocErr),
+            ReallocPlacement::MayMove if memory.size() == 0 => {
+                *memory = self.alloc(new_layout, init)?
+ } ReallocPlacement::MayMove => { - if old_size == 0 { - self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init) - } else { - // `realloc` probably checks for `new_size > old_size` or something similar. - // `new_size` must be greater than or equal to `old_size` due to the safety constraint, - // and `new_size` == `old_size` was caught before - intrinsics::assume(new_size > old_size); - let ptr = - NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)) - .ok_or(AllocErr)?; - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - init.initialize_offset(ptr, new_layout, old_size); - Ok((ptr, new_size)) - } + // `realloc` probably checks for `new_size > old_size` or something similar. + intrinsics::assume(new_size > old_size); + let ptr = + GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size); + *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + memory.init_offset(init, old_size); } - ReallocPlacement::InPlace => Err(AllocErr), } + Ok(()) } #[inline] unsafe fn shrink( &mut self, - ptr: NonNull, - layout: Layout, + memory: &mut MemoryBlock, new_size: usize, placement: ReallocPlacement, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); + ) -> Result<(), AllocErr> { + let old_size = memory.size(); debug_assert!( new_size <= old_size, - "`new_size` must be smaller than or equal to `layout.size()`" + "`new_size` must be smaller than or equal to `memory.size()`" ); if old_size == new_size { - return Ok((ptr, new_size)); + return Ok(()); } + let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { + ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::MayMove if new_size == 0 => { + let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout); + let old_memory = mem::replace(memory, new_memory); + self.dealloc(old_memory) + } ReallocPlacement::MayMove => { - let ptr = if new_size == 0 { - self.dealloc(ptr, layout); - layout.dangling() - } else { - // `realloc` probably checks for `new_size > old_size` or something similar. - // `new_size` must be smaller than or equal to `old_size` due to the safety constraint, - // and `new_size` == `old_size` was caught before - intrinsics::assume(new_size < old_size); - NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)) - .ok_or(AllocErr)? - }; - Ok((ptr, new_size)) + // `realloc` probably checks for `new_size < old_size` or something similar. 
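+                // Equality was handled by the early return above and the safety contract
+                // requires `new_size <= old_size`, so the strict inequality holds here.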
+ intrinsics::assume(new_size < old_size); + let ptr = + GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size); + *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); } - ReallocPlacement::InPlace => Err(AllocErr), } + Ok(()) } } diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs index a6a03a39b9647..63b1b2fbb8bef 100644 --- a/src/test/ui/allocator/custom.rs +++ b/src/test/ui/allocator/custom.rs @@ -37,10 +37,10 @@ fn main() { unsafe { let layout = Layout::from_size_align(4, 2).unwrap(); - let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); - helper::work_with(&ptr); + let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); + helper::work_with(&memory.ptr()); assert_eq!(HITS.load(Ordering::SeqCst), n + 1); - Global.dealloc(ptr, layout.clone()); + Global.dealloc(memory); assert_eq!(HITS.load(Ordering::SeqCst), n + 2); let s = String::with_capacity(10); @@ -49,10 +49,10 @@ fn main() { drop(s); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); - let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); + let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); - helper::work_with(&ptr); - System.dealloc(ptr, layout); + helper::work_with(&memory.ptr()); + System.dealloc(memory); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); } } diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs index de47486cc3b26..d4f8b4247b1e5 100644 --- a/src/test/ui/allocator/xcrate-use.rs +++ b/src/test/ui/allocator/xcrate-use.rs @@ -20,16 +20,16 @@ fn main() { let n = GLOBAL.0.load(Ordering::SeqCst); let layout = Layout::from_size_align(4, 2).unwrap(); - let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); - helper::work_with(&ptr); + let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); + helper::work_with(&memory.ptr()); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1); - Global.dealloc(ptr, layout.clone()); + Global.dealloc(memory); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); - let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); + let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); - helper::work_with(&ptr); - System.dealloc(ptr, layout); + helper::work_with(&memory.ptr()); + System.dealloc(memory); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); } } diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs index 59ce2b4cf86f9..3fe8ed224c270 100644 --- a/src/test/ui/realloc-16687.rs +++ b/src/test/ui/realloc-16687.rs @@ -6,7 +6,9 @@ #![feature(allocator_api)] -use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement}; +use std::alloc::{ + handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock, ReallocPlacement, +}; use std::ptr::{self, NonNull}; fn main() { @@ -41,15 +43,15 @@ unsafe fn test_triangle() -> bool { println!("allocate({:?})", layout); } - let (ptr, _) = Global + let memory = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); if PRINT { - println!("allocate({:?}) = {:?}", layout, ptr); + println!("allocate({:?}) = {:?}", layout, memory.ptr()); } - ptr.cast().as_ptr() + memory.ptr().cast().as_ptr() } unsafe fn deallocate(ptr: *mut u8, layout: Layout) { @@ -57,7 +59,7 @@ unsafe fn 
test_triangle() -> bool { println!("deallocate({:?}, {:?}", ptr, layout); } - Global.dealloc(NonNull::new_unchecked(ptr), layout); + Global.dealloc(MemoryBlock::new(NonNull::new_unchecked(ptr), layout)); } unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 { @@ -65,28 +67,28 @@ unsafe fn test_triangle() -> bool { println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new); } - let allocation = if new.size() > old.size() { + let mut memory = MemoryBlock::new(NonNull::new_unchecked(ptr), old); + let result = if new.size() > old.size() { Global.grow( - NonNull::new_unchecked(ptr), - old, + &mut memory, new.size(), ReallocPlacement::MayMove, AllocInit::Uninitialized, ) } else if new.size() < old.size() { - Global.shrink(NonNull::new_unchecked(ptr), old, new.size(), ReallocPlacement::MayMove) + Global.shrink(&mut memory, new.size(), ReallocPlacement::MayMove) } else { return ptr; }; - let (ptr, _) = allocation.unwrap_or_else(|_| { + result.unwrap_or_else(|_| { handle_alloc_error(Layout::from_size_align_unchecked(new.size(), old.align())) }); if PRINT { - println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, ptr); + println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, memory.ptr()); } - ptr.cast().as_ptr() + memory.ptr().cast().as_ptr() } fn idx_to_size(i: usize) -> usize { diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs index 7f8f461d57ba1..b9bd2988b6ee7 100644 --- a/src/test/ui/regions/regions-mock-codegen.rs +++ b/src/test/ui/regions/regions-mock-codegen.rs @@ -4,7 +4,7 @@ // pretty-expanded FIXME #23616 #![feature(allocator_api)] -use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout}; +use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock}; use std::ptr::NonNull; struct arena(()); @@ -25,10 +25,10 @@ struct Ccx { fn alloc(_bcx: &arena) -> &Bcx<'_> { unsafe { let layout = Layout::new::(); - let (ptr, _) = Global + let memory = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); - &*(ptr.as_ptr() as *const _) + &*(memory.ptr().as_ptr() as *const _) } } @@ -40,7 +40,10 @@ fn g(fcx: &Fcx) { let bcx = Bcx { fcx }; let bcx2 = h(&bcx); unsafe { - Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::()); + Global.dealloc(MemoryBlock::new( + NonNull::new_unchecked(bcx2 as *const _ as *mut _), + Layout::new::(), + )); } } From 2f215b61b6368dad7b10295267a8f1a5a1bfafe8 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Wed, 25 Mar 2020 18:26:38 +0100 Subject: [PATCH 03/23] Use `NonNull` instead of `Unique` in `MemoryBlock` --- src/libcore/alloc/mod.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index e693f50846bdb..4af1a8254a3db 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -47,7 +47,7 @@ pub enum AllocInit { #[unstable(feature = "allocator_api", issue = "32838")] #[must_use = "`MemoryBlock` should be passed to `AllocRef::dealloc`"] pub struct MemoryBlock { - ptr: Unique, + ptr: NonNull, layout: Layout, } @@ -63,15 +63,14 @@ impl MemoryBlock { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub const unsafe fn new(ptr: NonNull, layout: Layout) -> Self { - Self { ptr: Unique::new_unchecked(ptr.as_ptr()), layout } + Self { ptr, layout } } /// Acquires the underlying `NonNull` pointer. 
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub const fn ptr(&self) -> NonNull<u8> {
-        // SAFETY: Unique is always non-null
-        unsafe { NonNull::new_unchecked(self.ptr.as_ptr()) }
+        self.ptr
     }

     /// Returns the layout describing the memory block.

From d9d35cc6967501818863adcb556991357098557d Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Wed, 25 Mar 2020 18:30:14 +0100
Subject: [PATCH 04/23] Add comment to `AllocRef` implementation for `System`

---
 src/libstd/alloc.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs
index f295565bec348..6e8ac7c90363f 100644
--- a/src/libstd/alloc.rs
+++ b/src/libstd/alloc.rs
@@ -134,6 +134,8 @@ pub use alloc_crate::alloc::*;
 #[derive(Debug, Default, Copy, Clone)]
 pub struct System;

+// The AllocRef impl checks that the layout size is non-zero and forwards to the GlobalAlloc impl,
+// which is in `std::sys::*::alloc`.
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for System {
     #[inline]

From c1fa02331ad60e73569f8351401f183089ff89bf Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Wed, 25 Mar 2020 18:36:03 +0100
Subject: [PATCH 05/23] Fix ZST handling for `RawVec`

---
 src/liballoc/raw_vec.rs | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index aee2367bd951b..a51d30448d132 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -483,14 +483,14 @@ impl<T, A: AllocRef> RawVec<T, A> {
         placement: ReallocPlacement,
         init: AllocInit,
     ) -> Result<(), TryReserveError> {
+        let elem_size = mem::size_of::<T>();
+        if elem_size == 0 {
+            // Since we return a capacity of `usize::MAX` when `elem_size` is
+            // 0, getting to here necessarily means the `RawVec` is overfull.
+            return Err(CapacityOverflow);
+        }
         let layout = match strategy {
             Double => unsafe {
-                let elem_size = mem::size_of::<T>();
-                if elem_size == 0 {
-                    // Since we return a capacity of `usize::MAX` when `elem_size` is
-                    // 0, getting to here necessarily means the `RawVec` is overfull.
-                    return Err(CapacityOverflow);
-                }
                 // Since we guarantee that we never allocate more than `isize::MAX` bytes,
                 // `elem_size * self.cap <= isize::MAX` as a precondition, so this can't overflow.
                 // Additionally the alignment will never be too large as to "not be satisfiable",

From 42a8547038bde637d050b1b2688c540b04baed9e Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Wed, 25 Mar 2020 18:39:52 +0100
Subject: [PATCH 06/23] Fix comment in `RawVec::into_box()`

---
 src/liballoc/raw_vec.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index a51d30448d132..f1b96c9dd9ded 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -568,7 +568,7 @@ impl<T> RawVec<T, Global> {
-    /// Converts the entire buffer into `Box<[T]>` with the specified `len`.
+    /// Converts the entire buffer into `Box<[MaybeUninit<T>]>` with the specified `len`.
     ///
     /// Note that this will correctly reconstitute any `cap` changes
     /// that may have been performed. (See description of type for details.)
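A note on the invariant patch 05 relies on: for zero-sized element types `RawVec` never
allocates, and `capacity()` reports `usize::MAX`, so reaching `grow` at all means the
vector is already overfull. A minimal sketch of that behavior (illustrative test code,
not part of this series):

    // With a zero-sized element type, no allocation ever happens and the
    // reported capacity is effectively infinite.
    let v: RawVec<()> = RawVec::new();
    assert_eq!(v.capacity(), usize::MAX);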
From ba26a9e9579f4906a782635f3261781935dadee2 Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Wed, 25 Mar 2020 18:41:25 +0100
Subject: [PATCH 07/23] Fix assertion in `shrink` to use `capacity()` instead

---
 src/liballoc/raw_vec.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index f1b96c9dd9ded..3bf481a291c53 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -548,7 +548,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
         amount: usize,
         placement: ReallocPlacement,
     ) -> Result<(), TryReserveError> {
-        assert!(amount <= self.cap, "Tried to shrink to a larger capacity");
+        assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity");

         let mut memory = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
         let new_size = amount * mem::size_of::<T>();

From aae3c52c7aaa9115ba8f34604c34a11f7d4f5e2e Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Wed, 25 Mar 2020 18:42:31 +0100
Subject: [PATCH 08/23] Remove the note on the internal capacity field in
 `RawVec`

---
 src/liballoc/raw_vec.rs | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 3bf481a291c53..ba810bf5faf98 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -41,9 +41,7 @@ mod tests;
 ///
 /// Note that the excess of a zero-sized types is always infinite, so `capacity()` always returns
 /// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
-/// `Box<[T]>`, since `capacity()` won't yield the length. However, `with_capacity`,
-/// `shrink_to_fit`, and `from_box` will actually set `RawVec`'s private capacity field. This allows
-/// zero-sized types to not be special-cased by consumers of this type.
+/// `Box<[T]>`, since `capacity()` won't yield the length.
 #[allow(missing_debug_implementations)]
 pub struct RawVec<T, A: AllocRef = Global> {
     ptr: Unique<T>,

From ad7de67a32d40360ad36d03845c5a7004ba68150 Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Wed, 25 Mar 2020 18:44:29 +0100
Subject: [PATCH 09/23] Refine docs for `RawVec::from_raw_parts(_in)`

---
 src/liballoc/raw_vec.rs | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index ba810bf5faf98..79dfa7b6b453e 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -100,10 +100,11 @@ impl<T> RawVec<T, Global> {
     /// Reconstitutes a `RawVec` from a pointer and capacity.
     ///
-    /// # Undefined Behavior
+    /// # Safety
     ///
     /// The `ptr` must be allocated (on the system heap), and with the given `capacity`.
-    /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
+    /// The `capacity` cannot exceed `isize::MAX` for sized types (only a concern on 32-bit
+    /// systems). ZSTs may have a capacity up to `usize::MAX`.
     /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
     #[inline]
     pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
@@ -160,10 +161,11 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
     ///
-    /// # Undefined Behavior
+    /// # Safety
     ///
     /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`.
-    /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
+    /// The `capacity` cannot exceed `isize::MAX` for sized types (only a concern on 32-bit
+    /// systems). ZSTs may have a capacity up to `usize::MAX`.
     /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
     #[inline]
     pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {

From b02e53f197566a88e175cddbf1e9c338744fd51a Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Wed, 25 Mar 2020 18:46:01 +0100
Subject: [PATCH 10/23] Remove check for ZST in `RawVec::needs_to_grow`

---
 src/liballoc/raw_vec.rs | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 79dfa7b6b453e..efb7d4cae5da3 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -461,8 +461,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// Returns if the buffer needs to grow to fulfill the needed extra capacity.
     /// Mainly used to make inlining reserve-calls possible without inlining `grow`.
     fn needs_to_grow(&self, used_capacity: usize, needed_extra_capacity: usize) -> bool {
-        mem::size_of::<T>() != 0
-            && needed_extra_capacity > self.capacity().wrapping_sub(used_capacity)
+        needed_extra_capacity > self.capacity().wrapping_sub(used_capacity)
     }

     fn capacity_from_bytes(excess: usize) -> usize {

From cbbdca059404a102db9daf2fa50ca8c1c9fc5ad2 Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Wed, 25 Mar 2020 19:08:56 +0100
Subject: [PATCH 11/23] Fix wording in `RawVec::from_raw_parts(_in)`

---
 src/liballoc/raw_vec.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index efb7d4cae5da3..cbafeae6417a0 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -104,7 +104,7 @@ impl<T> RawVec<T, Global> {
     ///
     /// The `ptr` must be allocated (on the system heap), and with the given `capacity`.
     /// The `capacity` cannot exceed `isize::MAX` for sized types (only a concern on 32-bit
-    /// systems). ZSTs may have a capacity up to `usize::MAX`.
+    /// systems). ZST vectors may have a capacity up to `usize::MAX`.
     /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
     #[inline]
     pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
@@ -165,7 +165,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     ///
     /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`.
     /// The `capacity` cannot exceed `isize::MAX` for sized types (only a concern on 32-bit
-    /// systems). ZSTs may have a capacity up to `usize::MAX`.
+    /// systems). ZST vectors may have a capacity up to `usize::MAX`.
     /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
     #[inline]
     pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {

From fed3d6e64614adbd7ab4c73b561e48bc2800c899 Mon Sep 17 00:00:00 2001
From: Tim Diekmann
Date: Wed, 25 Mar 2020 19:12:06 +0100
Subject: [PATCH 12/23] Fix safety section of `RawVec::into_box`

---
 src/liballoc/raw_vec.rs | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index cbafeae6417a0..baa642580579e 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -574,7 +574,8 @@ impl<T> RawVec<T, Global> {
     ///
     /// # Safety
     ///
-    /// * `len` must be smaller than or equal to `self.capacity()`
+    /// `shrink_to_fit(len)` must be called immediately prior to calling this function. This
+    /// implies that `len` must be smaller than or equal to `self.capacity()`.
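+    ///
+    /// A sketch of the intended call sequence (illustrative only; `buf` is a `RawVec<T>`
+    /// whose first `len` elements have been initialized):
+    ///
+    /// ```ignore
+    /// buf.shrink_to_fit(len);
+    /// let slice: Box<[MaybeUninit<T>]> = buf.into_box(len);
+    /// ```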
pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit]> { debug_assert!( len <= self.capacity(), From bfbdb5f06fcff7939825c33ce573b8f92b362c40 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Wed, 25 Mar 2020 19:24:25 +0100 Subject: [PATCH 13/23] Remove unused import from libcore/alloc --- src/libcore/alloc/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index 4af1a8254a3db..7c104dac0fd6c 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -12,7 +12,7 @@ pub use self::layout::{Layout, LayoutErr}; use crate::fmt; use crate::mem; -use crate::ptr::{self, NonNull, Unique}; +use crate::ptr::{self, NonNull}; /// The `AllocErr` error indicates an allocation failure /// that may be due to resource exhaustion or to From 03b055b0b4dcf304cd3c5e7a1c6e68fea91584a9 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Wed, 25 Mar 2020 21:12:12 +0100 Subject: [PATCH 14/23] Remove alignment from `MemoryBlock` --- src/liballoc/alloc.rs | 62 ++++---- src/liballoc/alloc/tests.rs | 2 +- src/liballoc/collections/btree/node.rs | 12 +- src/liballoc/raw_vec.rs | 42 +++-- src/liballoc/raw_vec/tests.rs | 4 +- src/liballoc/rc.rs | 13 +- src/liballoc/sync.rs | 12 +- src/liballoc/tests/heap.rs | 7 +- src/libcore/alloc/mod.rs | 165 ++++++++++++-------- src/libstd/alloc.rs | 61 ++++---- src/test/ui/allocator/custom.rs | 4 +- src/test/ui/allocator/xcrate-use.rs | 4 +- src/test/ui/realloc-16687.rs | 18 +-- src/test/ui/regions/regions-mock-codegen.rs | 7 +- 14 files changed, 211 insertions(+), 202 deletions(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 7eb9e0d5ea3e1..b044202686645 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -4,7 +4,7 @@ use core::intrinsics::{self, min_align_of_val, size_of_val}; use core::ptr::{NonNull, Unique}; -use core::{mem, usize}; +use core::usize; #[stable(feature = "alloc_module", since = "1.28.0")] #[doc(inline)] @@ -167,94 +167,94 @@ unsafe impl AllocRef for Global { #[inline] fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result { unsafe { - if layout.size() == 0 { - Ok(MemoryBlock::new(layout.dangling(), layout)) + let size = layout.size(); + if size == 0 { + Ok(MemoryBlock::new(layout.dangling(), 0)) } else { let raw_ptr = match init { AllocInit::Uninitialized => alloc(layout), AllocInit::Zeroed => alloc_zeroed(layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok(MemoryBlock::new(ptr, layout)) + Ok(MemoryBlock::new(ptr, size)) } } } #[inline] - unsafe fn dealloc(&mut self, memory: MemoryBlock) { - if memory.size() != 0 { - dealloc(memory.ptr().as_ptr(), memory.layout()) + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + if layout.size() != 0 { + dealloc(ptr.as_ptr(), layout) } } #[inline] unsafe fn grow( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(), AllocErr> { - let old_size = memory.size(); + ) -> Result { + let old_size = layout.size(); debug_assert!( new_size >= old_size, "`new_size` must be greater than or equal to `memory.size()`" ); if old_size == new_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { - ReallocPlacement::InPlace => return Err(AllocErr), - ReallocPlacement::MayMove if memory.size() == 0 => { - *memory = self.alloc(new_layout, init)? 
+ ReallocPlacement::InPlace => Err(AllocErr), + ReallocPlacement::MayMove if layout.size() == 0 => { + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); + self.alloc(new_layout, init) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > old_size` or something similar. intrinsics::assume(new_size > old_size); - let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size); - *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + let ptr = realloc(ptr.as_ptr(), layout, new_size); + let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size); memory.init_offset(init, old_size); + Ok(memory) } } - Ok(()) } #[inline] unsafe fn shrink( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, - ) -> Result<(), AllocErr> { - let old_size = memory.size(); + ) -> Result { + let old_size = layout.size(); debug_assert!( new_size <= old_size, "`new_size` must be smaller than or equal to `memory.size()`" ); if old_size == new_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { - ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if new_size == 0 => { - let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout); - let old_memory = mem::replace(memory, new_memory); - self.dealloc(old_memory) + self.dealloc(ptr, layout); + Ok(MemoryBlock::new(layout.dangling(), 0)) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < old_size` or something similar. intrinsics::assume(new_size < old_size); - let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size); - *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + let ptr = realloc(ptr.as_ptr(), layout, new_size); + Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size)) } } - Ok(()) } } @@ -282,7 +282,7 @@ pub(crate) unsafe fn box_free(ptr: Unique) { let size = size_of_val(ptr.as_ref()); let align = min_align_of_val(ptr.as_ref()); let layout = Layout::from_size_align_unchecked(size, align); - Global.dealloc(MemoryBlock::new(ptr.cast().into(), layout)) + Global.dealloc(ptr.cast().into(), layout) } /// Abort on memory allocation error or failure. diff --git a/src/liballoc/alloc/tests.rs b/src/liballoc/alloc/tests.rs index 34380ba41b4fd..7fa71f72ee779 100644 --- a/src/liballoc/alloc/tests.rs +++ b/src/liballoc/alloc/tests.rs @@ -18,7 +18,7 @@ fn allocate_zeroed() { assert_eq!(*i, 0); i = i.offset(1); } - Global.dealloc(memory); + Global.dealloc(memory.ptr(), layout); } } diff --git a/src/liballoc/collections/btree/node.rs b/src/liballoc/collections/btree/node.rs index 8b4daa28ee8bc..11c1429957326 100644 --- a/src/liballoc/collections/btree/node.rs +++ b/src/liballoc/collections/btree/node.rs @@ -31,7 +31,6 @@ // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges. // This implies that even an empty internal node has at least one edge. 
-use core::alloc::MemoryBlock;
 use core::cmp::Ordering;
 use core::marker::PhantomData;
 use core::mem::{self, MaybeUninit};
@@ -228,10 +227,7 @@ impl<K, V> Root<K, V> {
         }

         unsafe {
-            Global.dealloc(MemoryBlock::new(
-                NonNull::from(top).cast(),
-                Layout::new::<InternalNode<K, V>>(),
-            ));
+            Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -396,14 +392,14 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
         let height = self.height;
         let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(MemoryBlock::new(
+        Global.dealloc(
             node.cast(),
             if height > 0 {
                 Layout::new::<InternalNode<K, V>>()
             } else {
                 Layout::new::<LeafNode<K, V>>()
             },
-        ));
+        );
         ret
     }
 }
@@ -1167,7 +1163,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
         } else {
             Layout::new::<LeafNode<K, V>>()
         };
-        Global.dealloc(MemoryBlock::new(right_node.node.cast(), layout));
+        Global.dealloc(right_node.node.cast(), layout);

         Handle::new_edge(self.node, self.idx)
     }
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index baa642580579e..a1f9a9291af4a 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -5,7 +5,7 @@ use core::alloc::MemoryBlock;
 use core::cmp;
 use core::mem::{self, MaybeUninit};
 use core::ops::Drop;
-use core::ptr::Unique;
+use core::ptr::{NonNull, Unique};
 use core::slice;

 use crate::alloc::{
@@ -197,7 +197,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
         &mut self.alloc
     }

-    fn current_memory(&self) -> Option<MemoryBlock> {
+    fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
         if mem::size_of::<T>() == 0 || self.cap == 0 {
             None
         } else {
@@ -207,7 +207,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
             let align = mem::align_of::<T>();
             let size = mem::size_of::<T>() * self.cap;
             let layout = Layout::from_size_align_unchecked(size, align);
-            Some(MemoryBlock::new(self.ptr.cast().into(), layout))
+            Some((self.ptr.cast().into(), layout))
         }
     }
@@ -472,7 +472,6 @@ impl<T, A: AllocRef> RawVec<T, A> {
     fn set_memory(&mut self, memory: MemoryBlock) {
         self.ptr = memory.ptr().cast().into();
         self.cap = Self::capacity_from_bytes(memory.size());
-        drop(memory);
     }

     /// Single method to handle all possibilities of growing the buffer.
@@ -488,7 +487,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
             // 0, getting to here necessarily means the `RawVec` is overfull.
             return Err(CapacityOverflow);
         }
-        let layout = match strategy {
+        let new_layout = match strategy {
             Double => unsafe {
                 // Since we guarantee that we never allocate more than `isize::MAX` bytes,
                 // `elem_size * self.cap <= isize::MAX` as a precondition, so this can't overflow.
                 // Additionally the alignment will never be too large as to "not be satisfiable",
@@ -522,22 +521,20 @@ impl<T, A: AllocRef> RawVec<T, A> {
             }
         };

-        let memory = if let Some(mut memory) = self.current_memory() {
-            debug_assert_eq!(memory.align(), layout.align());
+        let memory = if let Some((ptr, old_layout)) = self.current_memory() {
+            debug_assert_eq!(old_layout.align(), new_layout.align());
             unsafe {
                 self.alloc
-                    .grow(&mut memory, layout.size(), placement, init)
-                    .map_err(|_| AllocError { layout, non_exhaustive: () })?
-            };
-            memory
+                    .grow(ptr, old_layout, new_layout.size(), placement, init)
+                    .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
+            }
         } else {
             match placement {
-                MayMove => self.alloc.alloc(layout, init),
+                MayMove => self.alloc.alloc(new_layout, init),
                 InPlace => Err(AllocErr),
             }
-            .map_err(|_| AllocError { layout, non_exhaustive: () })?
+            .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
}; - self.set_memory(memory); Ok(()) } @@ -549,18 +546,17 @@ impl RawVec { ) -> Result<(), TryReserveError> { assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity"); - let mut memory = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; + let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; let new_size = amount * mem::size_of::(); - unsafe { - self.alloc.shrink(&mut memory, new_size, placement).map_err(|_| { + let memory = unsafe { + self.alloc.shrink(ptr, layout, new_size, placement).map_err(|_| { TryReserveError::AllocError { - layout: Layout::from_size_align_unchecked(new_size, memory.align()), + layout: Layout::from_size_align_unchecked(new_size, layout.align()), non_exhaustive: (), } - })?; - } - + })? + }; self.set_memory(memory); Ok(()) } @@ -593,8 +589,8 @@ impl RawVec { unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. fn drop(&mut self) { - if let Some(memory) = self.current_memory() { - unsafe { self.alloc.dealloc(memory) } + if let Some((ptr, layout)) = self.current_memory() { + unsafe { self.alloc.dealloc(ptr, layout) } } } } diff --git a/src/liballoc/raw_vec/tests.rs b/src/liballoc/raw_vec/tests.rs index 4bdd36ed63a11..e7ab8a305d279 100644 --- a/src/liballoc/raw_vec/tests.rs +++ b/src/liballoc/raw_vec/tests.rs @@ -34,8 +34,8 @@ fn allocator_param() { err @ Err(_) => err, } } - unsafe fn dealloc(&mut self, memory: MemoryBlock) { - Global.dealloc(memory) + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + Global.dealloc(ptr, layout) } } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 3625caf5f237a..ab344be12de1b 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -234,7 +234,6 @@ use crate::boxed::Box; #[cfg(test)] use std::boxed::Box; -use core::alloc::MemoryBlock; use core::any::Any; use core::array::LengthAtMost32; use core::borrow; @@ -1032,7 +1031,7 @@ impl Rc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(MemoryBlock::new(self.mem, self.layout)); + Global.dealloc(self.mem, self.layout); } } } @@ -1132,10 +1131,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { self.dec_weak(); if self.weak() == 0 { - Global.dealloc(MemoryBlock::new( - self.ptr.cast(), - Layout::for_value(self.ptr.as_ref()), - )); + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } @@ -1943,10 +1939,7 @@ impl Drop for Weak { // the strong pointers have disappeared. if inner.weak() == 0 { unsafe { - Global.dealloc(MemoryBlock::new( - self.ptr.cast(), - Layout::for_value(self.ptr.as_ref()), - )); + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index b5e6d669f80ed..1adc7fa3040b3 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -6,7 +6,6 @@ //! //! 
[arc]: struct.Arc.html -use core::alloc::MemoryBlock; use core::any::Any; use core::array::LengthAtMost32; use core::borrow; @@ -771,7 +770,7 @@ impl Arc { if self.inner().weak.fetch_sub(1, Release) == 1 { acquire!(self.inner().weak); - Global.dealloc(MemoryBlock::new(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))) + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } @@ -910,7 +909,7 @@ impl Arc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(MemoryBlock::new(self.mem.cast(), self.layout)); + Global.dealloc(self.mem.cast(), self.layout); } } } @@ -1735,12 +1734,7 @@ impl Drop for Weak { if inner.weak.fetch_sub(1, Release) == 1 { acquire!(inner.weak); - unsafe { - Global.dealloc(MemoryBlock::new( - self.ptr.cast(), - Layout::for_value(self.ptr.as_ref()), - )) - } + unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } } } diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index 4b0d7bc1f449e..709e8c148d506 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -1,4 +1,4 @@ -use std::alloc::{AllocInit, AllocRef, Global, Layout, MemoryBlock, System}; +use std::alloc::{AllocInit, AllocRef, Global, Layout, System}; /// Issue #45955 and #62251. #[test] @@ -39,10 +39,7 @@ fn check_overalign_requests(mut allocator: T) { // Clean up for &ptr in &pointers { - allocator.dealloc(MemoryBlock::new( - ptr, - Layout::from_size_align(size, align).unwrap(), - )) + allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap()) } } } diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index 7c104dac0fd6c..cdb213fe10448 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -11,7 +11,6 @@ pub use self::global::GlobalAlloc; pub use self::layout::{Layout, LayoutErr}; use crate::fmt; -use crate::mem; use crate::ptr::{self, NonNull}; /// The `AllocErr` error indicates an allocation failure @@ -45,25 +44,17 @@ pub enum AllocInit { /// Represents a block of allocated memory returned by an allocator. #[derive(Debug)] #[unstable(feature = "allocator_api", issue = "32838")] -#[must_use = "`MemoryBlock` should be passed to `AllocRef::dealloc`"] pub struct MemoryBlock { ptr: NonNull, - layout: Layout, + size: usize, } impl MemoryBlock { - /// Creates a new `MemoryBlock`. - /// - /// # Safety - /// - /// * The block must be allocated with the same alignment as [`layout.align()`], and - /// * The provided [`layout.size()`] must fall in the range `min ..= max`, where: - /// - `min` is the size requested size when allocating the block, and - /// - `max` is the size of the memory block. + /// Creates a new `MemoryBlock` from the specified `ptr` and `size`. #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub const unsafe fn new(ptr: NonNull, layout: Layout) -> Self { - Self { ptr, layout } + pub const fn new(ptr: NonNull, size: usize) -> Self { + Self { ptr, size } } /// Acquires the underlying `NonNull` pointer. @@ -73,25 +64,11 @@ impl MemoryBlock { self.ptr } - /// Returns the layout describing the memory block. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn layout(&self) -> Layout { - self.layout - } - /// Returns the size of the memory block. #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub const fn size(&self) -> usize { - self.layout().size() - } - - /// Returns the minimum alignment of the memory block. 
-    #[inline]
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    pub const fn align(&self) -> usize {
-        self.layout().align()
+        self.size
     }

     /// Initialize the memory block like specified by `init`.
@@ -160,6 +137,39 @@ pub enum ReallocPlacement {
 /// allocator does not support this (like jemalloc) or return a null pointer (such as
 /// `libc::malloc`), this case must be caught.
 ///
+/// ### Currently allocated memory
+///
+/// Some of the methods require that a memory block be *currently allocated* via an allocator. This
+/// means that:
+///
+/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or
+/// [`shrink`], and
+///
+/// * the memory block has not been subsequently deallocated, where blocks are either deallocated
+/// directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or
+/// [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer
+/// remains valid.
+///
+/// [`alloc`]: AllocRef::alloc
+/// [`grow`]: AllocRef::grow
+/// [`shrink`]: AllocRef::shrink
+/// [`dealloc`]: AllocRef::dealloc
+///
+/// ### Memory fitting
+///
+/// Some of the methods require that a layout *fit* a memory block. What it means for a layout to
+/// "fit" a memory block (or equivalently, for a memory block to "fit" a layout) is that the
+/// following conditions must hold:
+///
+/// * The block must be allocated with the same alignment as [`layout.align()`], and
+///
+/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
+///   - `min` is the size of the layout most recently used to allocate the block, and
+///   - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`].
+///
+/// [`layout.align()`]: Layout::align
+/// [`layout.size()`]: Layout::size
+///
 /// # Safety
 ///
 /// * Memory blocks returned from an allocator must point to valid memory and retain their validity
@@ -168,6 +178,9 @@ pub enum ReallocPlacement {
 /// * cloning or moving the allocator must not invalidate memory blocks returned from this
 /// allocator. A cloned allocator must behave like the same allocator.
 ///
+/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other
+/// method of the allocator.
+///
 /// [*currently allocated*]: #currently-allocated-memory
 #[unstable(feature = "allocator_api", issue = "32838")]
 pub unsafe trait AllocRef {
@@ -198,25 +211,45 @@ pub unsafe trait AllocRef {
     /// # Safety
     ///
     /// `memory` must be a memory block returned by this allocator.
-    unsafe fn dealloc(&mut self, memory: MemoryBlock);
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);

     /// Attempts to extend the memory block.
     ///
-    /// The behavior of how the allocator tries to grow the memory is specified by [`placement`].
-    /// The first `memory.size()` bytes are preserved or copied as appropriate from the old
-    /// memory block, and the remaining bytes up to the new `memory.size()` are initialized
-    /// according to [`init`].
+    /// Returns a new memory block containing a pointer and the actual size of the allocated
+    /// block. The pointer is suitable for holding data described by a new layout with `layout`’s
+    /// alignment and a size given by `new_size`. To accomplish this, the allocator may extend the
+    /// allocation referenced by `ptr` to fit the new layout. If the [`placement`] is
+    /// [`InPlace`], the returned pointer is guaranteed to be the same as the passed `ptr`.
+ /// + /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been + /// transferred to this allocator. The memory may or may not have been freed, and should be + /// considered unusable (unless of course it was transferred back to the caller again via the + /// return value of this method). /// + /// If this method returns `Err`, then ownership of the memory block has not been transferred to + /// this allocator, and the contents of the memory block are unaltered. + /// + /// The behavior of how the allocator tries to grow the memory is specified by [`placement`]. + /// After growing a memory block, the new memory can be separated into three regions: + /// 1. `0..layout.size()`. This region is preserved or copied as appropriate from `ptr`. + /// 2. `layout.size()..allocated_size` where `allocated_size` is the latest returned + /// size of the allocator. The new content is implementation defined. Allocators may + /// initialize it according to [`init`] or leave them as is. + /// 3. `allocated_size..returned_size` is initialized according to [`init`]. + /// + /// [`InPlace`]: ReallocPlacement::InPlace /// [`placement`]: ReallocPlacement /// [`init`]: AllocInit /// /// # Safety /// - /// * `memory` must be a memory block returned by this allocator. + /// * `ptr` must be [*currently allocated*] via this allocator, + /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) // We can't require that `new_size` is strictly greater than `memory.size()` because of ZSTs. // An alternative would be // * `new_size must be strictly greater than `memory.size()` or both are zero - /// * `new_size` must be greater than or equal to `memory.size()` - /// * `new_size`, when rounded up to the nearest multiple of `memory.align()`, must not overflow + /// * `new_size` must be greater than or equal to `layout.size()` + /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow /// (i.e., the rounded value must be less than `usize::MAX`). /// /// [*currently allocated*]: #currently-allocated-memory @@ -237,46 +270,59 @@ pub unsafe trait AllocRef { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn grow( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(), AllocErr> { + ) -> Result { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let old_size = memory.size(); + let old_size = layout.size(); debug_assert!( new_size >= old_size, - "`new_size` must be greater than or equal to `memory.size()`" + "`new_size` must be greater than or equal to `layout.size()`" ); if new_size == old_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let new_memory = self.alloc(new_layout, init)?; - ptr::copy_nonoverlapping( - memory.ptr().as_ptr(), - new_memory.ptr().as_ptr(), - old_size, - ); - self.dealloc(mem::replace(memory, new_memory)); - Ok(()) + ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), old_size); + self.dealloc(ptr, layout); + Ok(new_memory) } } } /// Attempts to shrink the memory block. /// + /// Returns a new memory block containing a pointer and the actual size of the allocated + /// block. 
The pointer is suitable for holding data described by a new layout with `layout`’s + /// alignment and a size given by `new_size`. To accomplish this, the allocator may shrink the + /// allocation referenced by `ptr` to fit the new layout. If the [`placement`] is + /// [`InPlace`], the returned pointer is guaranteed to be the same as the passed `ptr`. + /// + /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been + /// transferred to this allocator. The memory may or may not have been freed, and should be + /// considered unusable unless it was transferred back to the caller again via the + /// return value of this method. + /// + /// If this method returns `Err`, then ownership of the memory block has not been transferred to + /// this allocator, and the contents of the memory block are unaltered. + /// /// The behavior of how the allocator tries to shrink the memory is specified by [`placement`]. /// + /// [`InPlace`]: ReallocPlacement::InPlace /// [`placement`]: ReallocPlacement /// /// # Safety /// - /// * `memory` must be a memory block returned by this allocator. + /// * `ptr` must be [*currently allocated*] via this allocator, + /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) // We can't require that `new_size` is strictly smaller than `memory.size()` because of ZSTs. // An alternative would be // * `new_size must be strictly smaller than `memory.size()` or both are zero @@ -300,32 +346,29 @@ pub unsafe trait AllocRef { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn shrink( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, - ) -> Result<(), AllocErr> { + ) -> Result { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let old_size = memory.size(); + let old_size = layout.size(); debug_assert!( new_size <= old_size, "`new_size` must be smaller than or equal to `layout.size()`" ); if new_size == old_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let new_memory = self.alloc(new_layout, AllocInit::Uninitialized)?; - ptr::copy_nonoverlapping( - memory.ptr().as_ptr(), - new_memory.ptr().as_ptr(), - new_size, - ); - self.dealloc(mem::replace(memory, new_memory)); - Ok(()) + ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), new_size); + self.dealloc(ptr, layout); + Ok(new_memory) } } } diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index 6e8ac7c90363f..7f3a5d2849bd7 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -141,99 +141,96 @@ unsafe impl AllocRef for System { #[inline] fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result { unsafe { - if layout.size() == 0 { - Ok(MemoryBlock::new(layout.dangling(), layout)) + let size = layout.size(); + if size == 0 { + Ok(MemoryBlock::new(layout.dangling(), 0)) } else { let raw_ptr = match init { AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout), AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok(MemoryBlock::new(ptr, layout)) + Ok(MemoryBlock::new(ptr, size)) } } } #[inline] - unsafe fn dealloc(&mut self, memory: MemoryBlock) { - if memory.size() != 0 { - GlobalAlloc::dealloc(self, memory.ptr().as_ptr(), memory.layout()) + unsafe 
fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + if layout.size() != 0 { + GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) } } #[inline] unsafe fn grow( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(), AllocErr> { - let old_size = memory.size(); + ) -> Result { + let old_size = layout.size(); debug_assert!( new_size >= old_size, "`new_size` must be greater than or equal to `memory.size()`" ); if old_size == new_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { - ReallocPlacement::InPlace => return Err(AllocErr), - ReallocPlacement::MayMove if memory.size() == 0 => { - *memory = self.alloc(new_layout, init)? + ReallocPlacement::InPlace => Err(AllocErr), + ReallocPlacement::MayMove if layout.size() == 0 => { + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); + self.alloc(new_layout, init) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > old_size` or something similar. intrinsics::assume(new_size > old_size); - let ptr = - GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size); - *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); + let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size); memory.init_offset(init, old_size); + Ok(memory) } } - Ok(()) } #[inline] unsafe fn shrink( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, - ) -> Result<(), AllocErr> { - let old_size = memory.size(); + ) -> Result { + let old_size = layout.size(); debug_assert!( new_size <= old_size, "`new_size` must be smaller than or equal to `memory.size()`" ); if old_size == new_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { - ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if new_size == 0 => { - let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout); - let old_memory = mem::replace(memory, new_memory); - self.dealloc(old_memory) + self.dealloc(ptr, layout); + Ok(MemoryBlock::new(layout.dangling(), 0)) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < old_size` or something similar. intrinsics::assume(new_size < old_size); - let ptr = - GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size); - *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); + Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size)) } } - Ok(()) } } - static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut()); /// Registers a custom allocation error hook, replacing any that was previously registered. 
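To summarize the call shape after this patch, here is a sketch of allocating, growing,
and freeing through `AllocRef` (illustrative only; the function is hypothetical and
error handling is collapsed into `?`):

    #![feature(allocator_api)]
    use std::alloc::{AllocErr, AllocInit, AllocRef, Layout, ReallocPlacement, System};

    unsafe fn alloc_grow_free(a: &mut System) -> Result<(), AllocErr> {
        let layout = Layout::new::<[u8; 16]>();
        let memory = a.alloc(layout, AllocInit::Uninitialized)?;
        // `grow` now takes the old pointer and layout and returns a fresh `MemoryBlock`;
        // the newly added bytes are zeroed because of `AllocInit::Zeroed`.
        let grown = a.grow(memory.ptr(), layout, 32, ReallocPlacement::MayMove, AllocInit::Zeroed)?;
        // `dealloc` takes the pointer plus a layout that fits the block, e.g. one built
        // from the actual size returned by `grow`.
        let new_layout = Layout::from_size_align_unchecked(grown.size(), layout.align());
        a.dealloc(grown.ptr(), new_layout);
        Ok(())
    }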
diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs index 63b1b2fbb8bef..8f894c5db5d9f 100644 --- a/src/test/ui/allocator/custom.rs +++ b/src/test/ui/allocator/custom.rs @@ -40,7 +40,7 @@ fn main() { let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); helper::work_with(&memory.ptr()); assert_eq!(HITS.load(Ordering::SeqCst), n + 1); - Global.dealloc(memory); + Global.dealloc(memory.ptr(), layout); assert_eq!(HITS.load(Ordering::SeqCst), n + 2); let s = String::with_capacity(10); @@ -52,7 +52,7 @@ fn main() { let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); helper::work_with(&memory.ptr()); - System.dealloc(memory); + System.dealloc(memory.ptr(), layout); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); } } diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs index d4f8b4247b1e5..689804bde86fd 100644 --- a/src/test/ui/allocator/xcrate-use.rs +++ b/src/test/ui/allocator/xcrate-use.rs @@ -23,13 +23,13 @@ fn main() { let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); helper::work_with(&memory.ptr()); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1); - Global.dealloc(memory); + Global.dealloc(memory.ptr(), layout); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); helper::work_with(&memory.ptr()); - System.dealloc(memory); + System.dealloc(memory.ptr(), layout); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); } } diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs index 3fe8ed224c270..8c419185f5101 100644 --- a/src/test/ui/realloc-16687.rs +++ b/src/test/ui/realloc-16687.rs @@ -6,9 +6,7 @@ #![feature(allocator_api)] -use std::alloc::{ - handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock, ReallocPlacement, -}; +use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement}; use std::ptr::{self, NonNull}; fn main() { @@ -59,7 +57,7 @@ unsafe fn test_triangle() -> bool { println!("deallocate({:?}, {:?}", ptr, layout); } - Global.dealloc(MemoryBlock::new(NonNull::new_unchecked(ptr), layout)); + Global.dealloc(NonNull::new_unchecked(ptr), layout); } unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 { @@ -67,21 +65,19 @@ unsafe fn test_triangle() -> bool { println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new); } - let mut memory = MemoryBlock::new(NonNull::new_unchecked(ptr), old); - let result = if new.size() > old.size() { + let memory = if new.size() > old.size() { Global.grow( - &mut memory, + NonNull::new_unchecked(ptr), + old, new.size(), ReallocPlacement::MayMove, AllocInit::Uninitialized, ) - } else if new.size() < old.size() { - Global.shrink(&mut memory, new.size(), ReallocPlacement::MayMove) } else { - return ptr; + Global.shrink(NonNull::new_unchecked(ptr), old, new.size(), ReallocPlacement::MayMove) }; - result.unwrap_or_else(|_| { + let memory = memory.unwrap_or_else(|_| { handle_alloc_error(Layout::from_size_align_unchecked(new.size(), old.align())) }); diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs index b9bd2988b6ee7..148b0a86a0589 100644 --- a/src/test/ui/regions/regions-mock-codegen.rs +++ b/src/test/ui/regions/regions-mock-codegen.rs @@ -4,7 +4,7 @@ // pretty-expanded FIXME #23616 #![feature(allocator_api)] 
-use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock}; +use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout}; use std::ptr::NonNull; struct arena(()); @@ -40,10 +40,7 @@ fn g(fcx: &Fcx) { let bcx = Bcx { fcx }; let bcx2 = h(&bcx); unsafe { - Global.dealloc(MemoryBlock::new( - NonNull::new_unchecked(bcx2 as *const _ as *mut _), - Layout::new::(), - )); + Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::()); } } From 717e0c7b9d4b9fd8a1a3c7451255d30c194c7b3e Mon Sep 17 00:00:00 2001 From: Tim Diekmann <21277928+TimDiekmann@users.noreply.github.com> Date: Thu, 26 Mar 2020 10:02:36 +0100 Subject: [PATCH 15/23] Apply suggestions from code review Co-Authored-By: Amanieu d'Antras --- src/libcore/alloc/mod.rs | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index cdb213fe10448..d5e89f333f15f 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -221,21 +221,22 @@ pub unsafe trait AllocRef { /// allocation referenced by `ptr` to fit the new layout. If the [`placement`] is /// [`InPlace`], the returned pointer is guaranteed to be the same as the passed `ptr`. /// - /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been - /// transferred to this allocator. The memory may or may not have been freed, and should be - /// considered unusable (unless of course it was transferred back to the caller again via the + /// If `ReallocPlacement::MayMove` is used then ownership of the memory block referenced by `ptr` + /// is transferred to this allocator. The memory may or may not be freed, and should be + /// considered unusable (unless of course it is transferred back to the caller again via the /// return value of this method). /// /// If this method returns `Err`, then ownership of the memory block has not been transferred to /// this allocator, and the contents of the memory block are unaltered. /// - /// The behavior of how the allocator tries to grow the memory is specified by [`placement`]. - /// After growing a memory block, the new memory can be separated into three regions: - /// 1. `0..layout.size()`. This region is preserved or copied as appropriate from `ptr`. - /// 2. `layout.size()..allocated_size` where `allocated_size` is the latest returned - /// size of the allocator. The new content is implementation defined. Allocators may - /// initialize it according to [`init`] or leave them as is. - /// 3. `allocated_size..returned_size` is initialized according to [`init`]. + /// The memory block will contain the following contents after a successful call to `grow`: + /// * Bytes `0..layout.size()` are preserved from the original allocation. + /// * Bytes `layout.size()..old_size` will either be preserved or initialized according to + /// [`init`], depending on the allocator implementation. `old_size` refers to the size of + /// the `MemoryBlock` prior to the `grow` call, which may be larger than the size + /// that was originally requested when it was allocated. + /// * Bytes `old_size..new_size` are initialized according to [`init`]. `new_size` refers to + /// the size of the `MemoryBlock` returned by the `grow` call. 
/// /// [`InPlace`]: ReallocPlacement::InPlace /// [`placement`]: ReallocPlacement From db15fe6b383741ae1cc13cfb62fc1c18f8ff5241 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Thu, 26 Mar 2020 11:32:22 +0100 Subject: [PATCH 16/23] Mark `Layout::dangling` inline --- src/libcore/alloc/layout.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libcore/alloc/layout.rs b/src/libcore/alloc/layout.rs index c798aacc90b51..fa644cfe99ed6 100644 --- a/src/libcore/alloc/layout.rs +++ b/src/libcore/alloc/layout.rs @@ -141,6 +141,7 @@ impl Layout { /// sentinel value. Types that lazily allocate must track initialization by /// some other means. #[unstable(feature = "alloc_layout_extra", issue = "55724")] + #[inline] pub const fn dangling(&self) -> NonNull { // align is non-zero and a power of two unsafe { NonNull::new_unchecked(self.align() as *mut u8) } From bf6a46db3129b0bf31dc67f06af2e52ece52701a Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Sat, 28 Mar 2020 20:21:26 +0100 Subject: [PATCH 17/23] Make fields in `MemoryBlock` public --- src/liballoc/alloc.rs | 35 ++++++------ src/liballoc/alloc/tests.rs | 4 +- src/liballoc/boxed.rs | 4 +- src/liballoc/raw_vec.rs | 8 +-- src/liballoc/rc.rs | 2 +- src/liballoc/sync.rs | 2 +- src/liballoc/tests/heap.rs | 2 +- src/libcore/alloc/mod.rs | 63 +++++++-------------- src/libstd/alloc.rs | 33 +++++------ src/test/ui/allocator/custom.rs | 8 +-- src/test/ui/allocator/xcrate-use.rs | 8 +-- src/test/ui/realloc-16687.rs | 8 +-- src/test/ui/regions/regions-mock-codegen.rs | 2 +- 13 files changed, 79 insertions(+), 100 deletions(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index b044202686645..67927629ed3b6 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -169,14 +169,14 @@ unsafe impl AllocRef for Global { unsafe { let size = layout.size(); if size == 0 { - Ok(MemoryBlock::new(layout.dangling(), 0)) + Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } else { let raw_ptr = match init { AllocInit::Uninitialized => alloc(layout), AllocInit::Zeroed => alloc_zeroed(layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok(MemoryBlock::new(ptr, size)) + Ok(MemoryBlock { ptr, size }) } } } @@ -197,14 +197,14 @@ unsafe impl AllocRef for Global { placement: ReallocPlacement, init: AllocInit, ) -> Result { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size >= old_size, + new_size >= size, "`new_size` must be greater than or equal to `memory.size()`" ); - if old_size == new_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if size == new_size { + return Ok(MemoryBlock { ptr, size }); } match placement { @@ -215,10 +215,11 @@ unsafe impl AllocRef for Global { } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > old_size` or something similar. 
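// Aside, not part of the patch: `intrinsics::assume` compiles to nothing at
// runtime. It only licenses the optimizer to treat the predicate as true, and
// it is undefined behavior if the predicate is false, so the `debug_assert!`
// earlier in this function is the sole guard, and only in debug builds.
// The pattern in isolation:
unsafe fn assume_hint(new_size: usize, size: usize) {
    debug_assert!(new_size > size); // verified only in debug builds
    intrinsics::assume(new_size > size); // optimizer hint; UB if violated
}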
- intrinsics::assume(new_size > old_size); + intrinsics::assume(new_size > size); let ptr = realloc(ptr.as_ptr(), layout, new_size); - let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size); - memory.init_offset(init, old_size); + let mut memory = + MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; + memory.init_offset(init, size); Ok(memory) } } @@ -232,27 +233,27 @@ unsafe impl AllocRef for Global { new_size: usize, placement: ReallocPlacement, ) -> Result { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size <= old_size, + new_size <= size, "`new_size` must be smaller than or equal to `memory.size()`" ); - if old_size == new_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if size == new_size { + return Ok(MemoryBlock { ptr, size }); } match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if new_size == 0 => { self.dealloc(ptr, layout); - Ok(MemoryBlock::new(layout.dangling(), 0)) + Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < old_size` or something similar. - intrinsics::assume(new_size < old_size); + intrinsics::assume(new_size < size); let ptr = realloc(ptr.as_ptr(), layout, new_size); - Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size)) + Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) } } } @@ -266,7 +267,7 @@ unsafe impl AllocRef for Global { unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { let layout = Layout::from_size_align_unchecked(size, align); match Global.alloc(layout, AllocInit::Uninitialized) { - Ok(memory) => memory.ptr().as_ptr(), + Ok(memory) => memory.ptr.as_ptr(), Err(_) => handle_alloc_error(layout), } } diff --git a/src/liballoc/alloc/tests.rs b/src/liballoc/alloc/tests.rs index 7fa71f72ee779..1ad40eca93b69 100644 --- a/src/liballoc/alloc/tests.rs +++ b/src/liballoc/alloc/tests.rs @@ -12,13 +12,13 @@ fn allocate_zeroed() { .alloc(layout.clone(), AllocInit::Zeroed) .unwrap_or_else(|_| handle_alloc_error(layout)); - let mut i = memory.ptr().cast::().as_ptr(); + let mut i = memory.ptr.cast::().as_ptr(); let end = i.add(layout.size()); while i < end { assert_eq!(*i, 0); i = i.offset(1); } - Global.dealloc(memory.ptr(), layout); + Global.dealloc(memory.ptr, layout); } } diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 03d759e4a9ae4..5406956a52886 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -198,7 +198,7 @@ impl Box { let ptr = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .ptr() + .ptr .cast(); unsafe { Box::from_raw(ptr.as_ptr()) } } @@ -227,7 +227,7 @@ impl Box { let ptr = Global .alloc(layout, AllocInit::Zeroed) .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .ptr() + .ptr .cast(); unsafe { Box::from_raw(ptr.as_ptr()) } } diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index a1f9a9291af4a..590e82357fb71 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -152,8 +152,8 @@ impl RawVec { let memory = alloc.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout)); Self { - ptr: memory.ptr().cast().into(), - cap: Self::capacity_from_bytes(memory.size()), + ptr: memory.ptr.cast().into(), + cap: Self::capacity_from_bytes(memory.size), alloc, } } @@ -470,8 +470,8 @@ impl RawVec { } fn set_memory(&mut self, memory: MemoryBlock) { - self.ptr = 
memory.ptr().cast().into(); - self.cap = Self::capacity_from_bytes(memory.size()); + self.ptr = memory.ptr.cast().into(); + self.cap = Self::capacity_from_bytes(memory.size); } /// Single method to handle all possibilities of growing the buffer. diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index ab344be12de1b..6a78a7398a692 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -941,7 +941,7 @@ impl Rc { .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the RcBox - let inner = mem_to_rcbox(mem.ptr().as_ptr()); + let inner = mem_to_rcbox(mem.ptr.as_ptr()); debug_assert_eq!(Layout::for_value(&*inner), layout); ptr::write(&mut (*inner).strong, Cell::new(1)); diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index 1adc7fa3040b3..111a7651b5e77 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -819,7 +819,7 @@ impl Arc { .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the ArcInner - let inner = mem_to_arcinner(mem.ptr().as_ptr()); + let inner = mem_to_arcinner(mem.ptr.as_ptr()); debug_assert_eq!(Layout::for_value(&*inner), layout); ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index 709e8c148d506..62f062b83d75d 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -26,7 +26,7 @@ fn check_overalign_requests(mut allocator: T) { AllocInit::Uninitialized, ) .unwrap() - .ptr() + .ptr }) .collect(); for &ptr in &pointers { diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index d5e89f333f15f..f2f12a98fa61b 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -42,35 +42,14 @@ pub enum AllocInit { } /// Represents a block of allocated memory returned by an allocator. -#[derive(Debug)] +#[derive(Debug, Copy, Clone)] #[unstable(feature = "allocator_api", issue = "32838")] pub struct MemoryBlock { - ptr: NonNull, - size: usize, + pub ptr: NonNull, + pub size: usize, } impl MemoryBlock { - /// Creates a new `MemoryBlock` from the specified `ptr` and `size`. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn new(ptr: NonNull, size: usize) -> Self { - Self { ptr, size } - } - - /// Acquires the underlying `NonNull` pointer. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn ptr(&self) -> NonNull { - self.ptr - } - - /// Returns the size of the memory block. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn size(&self) -> usize { - self.size - } - /// Initialize the memory block like specified by `init`. /// /// This behaves like calling [`MemoryBlock::initialize_offset(ptr, layout, 0)`][off]. @@ -98,12 +77,10 @@ impl MemoryBlock { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn init_offset(&mut self, init: AllocInit, offset: usize) { - debug_assert!(offset <= self.size(), "`offset` must be smaller than or equal to `size()`"); + debug_assert!(offset <= self.size, "`offset` must be smaller than or equal to `size()`"); match init { AllocInit::Uninitialized => (), - AllocInit::Zeroed => { - self.ptr().as_ptr().add(offset).write_bytes(0, self.size() - offset) - } + AllocInit::Zeroed => self.ptr.as_ptr().add(offset).write_bytes(0, self.size - offset), } } } @@ -246,9 +223,9 @@ pub unsafe trait AllocRef { /// /// * `ptr` must be [*currently allocated*] via this allocator, /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) 
- // We can't require that `new_size` is strictly greater than `memory.size()` because of ZSTs. + // We can't require that `new_size` is strictly greater than `memory.size` because of ZSTs. // An alternative would be - // * `new_size must be strictly greater than `memory.size()` or both are zero + // * `new_size must be strictly greater than `memory.size` or both are zero /// * `new_size` must be greater than or equal to `layout.size()` /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow /// (i.e., the rounded value must be less than `usize::MAX`). @@ -280,19 +257,19 @@ pub unsafe trait AllocRef { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size >= old_size, + new_size >= size, "`new_size` must be greater than or equal to `layout.size()`" ); - if new_size == old_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if new_size == size { + return Ok(MemoryBlock { ptr, size }); } let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let new_memory = self.alloc(new_layout, init)?; - ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), old_size); + ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr.as_ptr(), size); self.dealloc(ptr, layout); Ok(new_memory) } @@ -324,10 +301,10 @@ pub unsafe trait AllocRef { /// /// * `ptr` must be [*currently allocated*] via this allocator, /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) - // We can't require that `new_size` is strictly smaller than `memory.size()` because of ZSTs. + // We can't require that `new_size` is strictly smaller than `memory.size` because of ZSTs. 
// An alternative would be - // * `new_size must be strictly smaller than `memory.size()` or both are zero - /// * `new_size` must be smaller than or equal to `memory.size()` + // * `new_size must be strictly smaller than `memory.size` or both are zero + /// * `new_size` must be smaller than or equal to `layout.size()` /// /// [*currently allocated*]: #currently-allocated-memory /// [*fit*]: #memory-fitting @@ -355,19 +332,19 @@ pub unsafe trait AllocRef { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size <= old_size, + new_size <= size, "`new_size` must be smaller than or equal to `layout.size()`" ); - if new_size == old_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if new_size == size { + return Ok(MemoryBlock { ptr, size }); } let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let new_memory = self.alloc(new_layout, AllocInit::Uninitialized)?; - ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), new_size); + ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr.as_ptr(), new_size); self.dealloc(ptr, layout); Ok(new_memory) } diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index 7f3a5d2849bd7..843c46775af0c 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -143,14 +143,14 @@ unsafe impl AllocRef for System { unsafe { let size = layout.size(); if size == 0 { - Ok(MemoryBlock::new(layout.dangling(), 0)) + Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } else { let raw_ptr = match init { AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout), AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok(MemoryBlock::new(ptr, size)) + Ok(MemoryBlock { ptr, size }) } } } @@ -171,14 +171,14 @@ unsafe impl AllocRef for System { placement: ReallocPlacement, init: AllocInit, ) -> Result { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size >= old_size, + new_size >= size, "`new_size` must be greater than or equal to `memory.size()`" ); - if old_size == new_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if size == new_size { + return Ok(MemoryBlock { ptr, size }); } match placement { @@ -189,10 +189,11 @@ unsafe impl AllocRef for System { } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > old_size` or something similar. 
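// Aside, not part of the patch: with both fields public and the struct `Copy`,
// `MemoryBlock` is now a plain (pointer, actual size) pair, so literal and
// pattern syntax replace the removed `new`/`ptr()`/`size()` accessors.
// A small sketch under that assumption:
fn usable_bytes(memory: MemoryBlock) -> usize {
    let MemoryBlock { ptr: _, size } = memory; // plain struct, destructurable
    size // the size actually allocated, which may exceed the requested layout.size()
}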
- intrinsics::assume(new_size > old_size); + intrinsics::assume(new_size > size); let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); - let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size); - memory.init_offset(init, old_size); + let mut memory = + MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; + memory.init_offset(init, size); Ok(memory) } } @@ -206,27 +207,27 @@ unsafe impl AllocRef for System { new_size: usize, placement: ReallocPlacement, ) -> Result { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size <= old_size, + new_size <= size, "`new_size` must be smaller than or equal to `memory.size()`" ); - if old_size == new_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if size == new_size { + return Ok(MemoryBlock { ptr, size }); } match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if new_size == 0 => { self.dealloc(ptr, layout); - Ok(MemoryBlock::new(layout.dangling(), 0)) + Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < old_size` or something similar. - intrinsics::assume(new_size < old_size); + intrinsics::assume(new_size < size); let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); - Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size)) + Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) } } } diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs index 8f894c5db5d9f..184e4706a4c86 100644 --- a/src/test/ui/allocator/custom.rs +++ b/src/test/ui/allocator/custom.rs @@ -38,9 +38,9 @@ fn main() { let layout = Layout::from_size_align(4, 2).unwrap(); let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); - helper::work_with(&memory.ptr()); + helper::work_with(&memory.ptr); assert_eq!(HITS.load(Ordering::SeqCst), n + 1); - Global.dealloc(memory.ptr(), layout); + Global.dealloc(memory.ptr, layout); assert_eq!(HITS.load(Ordering::SeqCst), n + 2); let s = String::with_capacity(10); @@ -51,8 +51,8 @@ fn main() { let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); - helper::work_with(&memory.ptr()); - System.dealloc(memory.ptr(), layout); + helper::work_with(&memory.ptr); + System.dealloc(memory.ptr, layout); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); } } diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs index 689804bde86fd..7de1ab7a55315 100644 --- a/src/test/ui/allocator/xcrate-use.rs +++ b/src/test/ui/allocator/xcrate-use.rs @@ -21,15 +21,15 @@ fn main() { let layout = Layout::from_size_align(4, 2).unwrap(); let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); - helper::work_with(&memory.ptr()); + helper::work_with(&memory.ptr); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1); - Global.dealloc(memory.ptr(), layout); + Global.dealloc(memory.ptr, layout); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); - helper::work_with(&memory.ptr()); - System.dealloc(memory.ptr(), layout); + helper::work_with(&memory.ptr); + System.dealloc(memory.ptr, layout); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); } } diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs index 
8c419185f5101..0687a9ce454cc 100644 --- a/src/test/ui/realloc-16687.rs +++ b/src/test/ui/realloc-16687.rs @@ -46,10 +46,10 @@ unsafe fn test_triangle() -> bool { .unwrap_or_else(|_| handle_alloc_error(layout)); if PRINT { - println!("allocate({:?}) = {:?}", layout, memory.ptr()); + println!("allocate({:?}) = {:?}", layout, memory.ptr); } - memory.ptr().cast().as_ptr() + memory.ptr.cast().as_ptr() } unsafe fn deallocate(ptr: *mut u8, layout: Layout) { @@ -82,9 +82,9 @@ unsafe fn test_triangle() -> bool { }); if PRINT { - println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, memory.ptr()); + println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, memory.ptr); } - memory.ptr().cast().as_ptr() + memory.ptr.cast().as_ptr() } fn idx_to_size(i: usize) -> usize { diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs index 148b0a86a0589..380310190be01 100644 --- a/src/test/ui/regions/regions-mock-codegen.rs +++ b/src/test/ui/regions/regions-mock-codegen.rs @@ -28,7 +28,7 @@ fn alloc(_bcx: &arena) -> &Bcx<'_> { let memory = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); - &*(memory.ptr().as_ptr() as *const _) + &*(memory.ptr.as_ptr() as *const _) } } From 3ade8ae6608a9d371580e5e8d68c26a4e3e897fb Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Sun, 29 Mar 2020 01:47:05 +0100 Subject: [PATCH 18/23] Implement `init` and `init_offset` on `AllocInit` and mark it unsafe --- src/liballoc/alloc.rs | 8 +++--- src/libcore/alloc/mod.rs | 53 +++++++++++++++++++++++----------------- src/libstd/alloc.rs | 8 +++--- 3 files changed, 39 insertions(+), 30 deletions(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 67927629ed3b6..66575e3ef5517 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -214,12 +214,12 @@ unsafe impl AllocRef for Global { self.alloc(new_layout, init) } ReallocPlacement::MayMove => { - // `realloc` probably checks for `new_size > old_size` or something similar. + // `realloc` probably checks for `new_size > size` or something similar. intrinsics::assume(new_size > size); let ptr = realloc(ptr.as_ptr(), layout, new_size); - let mut memory = + let memory = MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; - memory.init_offset(init, size); + init.init_offset(memory, size); Ok(memory) } } @@ -250,7 +250,7 @@ unsafe impl AllocRef for Global { Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } ReallocPlacement::MayMove => { - // `realloc` probably checks for `new_size < old_size` or something similar. + // `realloc` probably checks for `new_size < size` or something similar. intrinsics::assume(new_size < size); let ptr = realloc(ptr.as_ptr(), layout, new_size); Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index f2f12a98fa61b..cc8c730b63a94 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -41,27 +41,22 @@ pub enum AllocInit { Zeroed, } -/// Represents a block of allocated memory returned by an allocator. -#[derive(Debug, Copy, Clone)] -#[unstable(feature = "allocator_api", issue = "32838")] -pub struct MemoryBlock { - pub ptr: NonNull, - pub size: usize, -} - -impl MemoryBlock { - /// Initialize the memory block like specified by `init`. +impl AllocInit { + /// Initialize the specified memory block. + /// + /// This behaves like calling [`AllocInit::initialize_offset(ptr, layout, 0)`][off]. 
+ /// + /// [off]: AllocInit::init_offset /// - /// This behaves like calling [`MemoryBlock::initialize_offset(ptr, layout, 0)`][off]. + /// # Safety /// - /// [off]: MemoryBlock::init_offset + /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes. /// - /// [*fit*]: trait.AllocRef.html#memory-fitting + /// [valid]: ../ptr/index.html#safety #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub fn init(&mut self, init: AllocInit) { - // SAFETY: 0 is always smaller or equal to the size - unsafe { self.init_offset(init, 0) } + pub unsafe fn init(self, memory: MemoryBlock) { + self.init_offset(memory, 0) } /// Initialize the memory block like specified by `init` at the specified `offset`. @@ -71,20 +66,34 @@ impl MemoryBlock { /// /// # Safety /// - /// * `offset` must be smaller than or equal to `size()` + /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes. + /// * `offset` must be smaller than or equal to `memory.size` /// - /// [*fit*]: trait.AllocRef.html#memory-fitting + /// [valid]: ../ptr/index.html#safety #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub unsafe fn init_offset(&mut self, init: AllocInit, offset: usize) { - debug_assert!(offset <= self.size, "`offset` must be smaller than or equal to `size()`"); - match init { + pub unsafe fn init_offset(self, memory: MemoryBlock, offset: usize) { + debug_assert!( + offset <= memory.size, + "`offset` must be smaller than or equal to `memory.size`" + ); + match self { AllocInit::Uninitialized => (), - AllocInit::Zeroed => self.ptr.as_ptr().add(offset).write_bytes(0, self.size - offset), + AllocInit::Zeroed => { + memory.ptr.as_ptr().add(offset).write_bytes(0, memory.size - offset) + } } } } +/// Represents a block of allocated memory returned by an allocator. +#[derive(Debug, Copy, Clone)] +#[unstable(feature = "allocator_api", issue = "32838")] +pub struct MemoryBlock { + pub ptr: NonNull, + pub size: usize, +} + /// A placement constraint when growing or shrinking an existing allocation. #[derive(Debug, Copy, Clone, PartialEq, Eq)] #[unstable(feature = "allocator_api", issue = "32838")] diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index 843c46775af0c..8df4666c53601 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -188,12 +188,12 @@ unsafe impl AllocRef for System { self.alloc(new_layout, init) } ReallocPlacement::MayMove => { - // `realloc` probably checks for `new_size > old_size` or something similar. + // `realloc` probably checks for `new_size > size` or something similar. intrinsics::assume(new_size > size); let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); - let mut memory = + let memory = MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; - memory.init_offset(init, size); + init.init_offset(memory, size); Ok(memory) } } @@ -224,7 +224,7 @@ unsafe impl AllocRef for System { Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } ReallocPlacement::MayMove => { - // `realloc` probably checks for `new_size < old_size` or something similar. + // `realloc` probably checks for `new_size < size` or something similar. 
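// Aside, not part of the patch: initialization now lives on `AllocInit`
// itself, so zeroing only the newly exposed tail of a grown block, as `grow`
// does above via `init.init_offset(memory, size)`, is a single unsafe call.
// A minimal sketch, assuming the signatures introduced in this patch:
unsafe fn zero_tail(memory: MemoryBlock, old_size: usize) {
    // No-op for `Uninitialized`; for `Zeroed`, bytes `old_size..memory.size`
    // are written to zero. Caller must ensure `memory.ptr` is writable and
    // `old_size <= memory.size`.
    AllocInit::Zeroed.init_offset(memory, old_size);
}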
intrinsics::assume(new_size < size); let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) From 4309f6d9a165d892c1174582f02a067b98bca2bd Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Sun, 29 Mar 2020 01:52:21 +0100 Subject: [PATCH 19/23] Minor doc fixes in `AllocInit::init` --- src/libcore/alloc/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index cc8c730b63a94..e6364b1e01c21 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -44,7 +44,7 @@ pub enum AllocInit { impl AllocInit { /// Initialize the specified memory block. /// - /// This behaves like calling [`AllocInit::initialize_offset(ptr, layout, 0)`][off]. + /// This behaves like calling [`AllocInit::init_offset(memory, 0)`][off]. /// /// [off]: AllocInit::init_offset /// @@ -61,8 +61,8 @@ impl AllocInit { /// Initialize the memory block like specified by `init` at the specified `offset`. /// - /// This is a no-op for [`AllocInit::Uninitialized`] and writes zeroes for [`AllocInit::Zeroed`] - /// at `ptr + offset` until `ptr + layout.size()`. + /// This is a no-op for [`AllocInit::Uninitialized`][] and writes zeroes for + /// [`AllocInit::Zeroed`][] at `ptr + offset` until `ptr + layout.size()`. /// /// # Safety /// From d241db2d4e620277ddb47dd26779982709f851d8 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Sun, 29 Mar 2020 03:16:23 +0100 Subject: [PATCH 20/23] Fix links for `AllocInit` methods --- src/libcore/alloc/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index e6364b1e01c21..d98d585c05de4 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -52,7 +52,7 @@ impl AllocInit { /// /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes. /// - /// [valid]: ../ptr/index.html#safety + /// [valid]: ../../ptr/index.html#safety #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn init(self, memory: MemoryBlock) { @@ -69,7 +69,7 @@ impl AllocInit { /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes. /// * `offset` must be smaller than or equal to `memory.size` /// - /// [valid]: ../ptr/index.html#safety + /// [valid]: ../../ptr/index.html#safety #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn init_offset(self, memory: MemoryBlock, offset: usize) { From fcd7092ae1e871cb5c76844347814f307f4cb547 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Sun, 29 Mar 2020 03:16:23 +0100 Subject: [PATCH 21/23] Revert "Fix links for `AllocInit` methods" This reverts commit d241db2d4e620277ddb47dd26779982709f851d8. --- src/libcore/alloc/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index d98d585c05de4..e6364b1e01c21 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -52,7 +52,7 @@ impl AllocInit { /// /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes. /// - /// [valid]: ../../ptr/index.html#safety + /// [valid]: ../ptr/index.html#safety #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn init(self, memory: MemoryBlock) { @@ -69,7 +69,7 @@ impl AllocInit { /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes. 
/// * `offset` must be smaller than or equal to `memory.size` /// - /// [valid]: ../../ptr/index.html#safety + /// [valid]: ../ptr/index.html#safety #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn init_offset(self, memory: MemoryBlock, offset: usize) { From c49f28005d8f636272f3103e6ecaa1affb7bf31b Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Sun, 29 Mar 2020 12:00:35 +0200 Subject: [PATCH 22/23] Fix links for `AllocInit` methods --- src/libcore/alloc/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index e6364b1e01c21..e8c4b68c64890 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -52,7 +52,7 @@ impl AllocInit { /// /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes. /// - /// [valid]: ../ptr/index.html#safety + /// [valid]: ../../core/ptr/index.html#safety #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn init(self, memory: MemoryBlock) { @@ -69,7 +69,7 @@ impl AllocInit { /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes. /// * `offset` must be smaller than or equal to `memory.size` /// - /// [valid]: ../ptr/index.html#safety + /// [valid]: ../../core/ptr/index.html#safety #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn init_offset(self, memory: MemoryBlock, offset: usize) { From 89ed59d8841a2b6057f61a3469c10bb2e6242160 Mon Sep 17 00:00:00 2001 From: Tim Diekmann <21277928+TimDiekmann@users.noreply.github.com> Date: Wed, 1 Apr 2020 10:26:30 +0200 Subject: [PATCH 23/23] Add missing allocation guard in `RawVec::grow` --- src/liballoc/raw_vec.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 590e82357fb71..2bf40490e7819 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -520,6 +520,7 @@ impl RawVec { Layout::array::(cap).map_err(|_| CapacityOverflow)? } }; + alloc_guard(new_layout.size())?; let memory = if let Some((ptr, old_layout)) = self.current_memory() { debug_assert_eq!(old_layout.align(), new_layout.align());
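A closing sketch, not part of the series: the guard added in PATCH 23/23 matters because `RawVec::grow` computes `new_layout` on a path that previously reached the allocator without the capacity check, so oversized requests now fail early with `CapacityOverflow` instead of being handed to `alloc`/`grow`. For reference, `alloc_guard` at the time had roughly the following shape; this is an assumption reconstructed from context, not quoted from these diffs, and it presumes raw_vec.rs's usual imports (`core::mem` and `TryReserveError::{self, CapacityOverflow}`):

#[inline]
fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
    // On 64-bit targets the explicit check is skipped: allocating more than
    // `isize::MAX` bytes fails at the allocator anyway. 16- and 32-bit
    // targets need the guard, since all of the address space may be usable.
    if mem::size_of::<usize>() < 8 && alloc_size > core::isize::MAX as usize {
        Err(CapacityOverflow)
    } else {
        Ok(())
    }
}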