diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e7c5fbd5aa..1b6eb17b58 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -105,7 +105,7 @@ jobs: toolchain: - stable - nightly - - 1.36.0 + - 1.51.0 features: - serde buildtype: @@ -242,7 +242,7 @@ jobs: - name: Install Rust uses: actions-rs/toolchain@v1 with: - toolchain: 1.36.0 + toolchain: 1.51.0 target: x86_64-unknown-linux-gnu override: true diff --git a/CHANGELOG.md b/CHANGELOG.md index 387bbf5d41..d25d920d8f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,19 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ## [Unreleased] +### Changed + +- [breaking-change] Converted all data structures to use the `const generics` MVP +- [breaking-change] `HistoryBuffer` is now working with const constructors and non-`Copy` data +- [breaking-change] `HistoryBuffer::as_slice` and others now only return initialized values +- [breaking-change] `MultiCore`/`SingleCore` is now removed from `spsc::Queue` +- [breaking-change] `spsc::Queue` is now `usize` only +- [breaking-change] `spsc::Queue` now sacrifices one element for correctness (see issue #207), i.e. 
it creates an `N - 1` sized queue instead of the old that generated an size `N` queue +- `Pool` and `MPMC` now works on `thumbv6m` +- [breaking-change] `String` has had `utf8` related methods removed as this can be done via `str` +- [breaking-change] No data structures implement `AsSlice` traits any more, now using `AsRef` and `AsMut` +- `IndexMap::new()` is now a `const-fn` + ## [v0.6.1] - 2021-03-02 ### Fixed diff --git a/Cargo.toml b/Cargo.toml index f39d0fefb7..f4c66eb04a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,6 +2,7 @@ authors = [ "Jorge Aparicio ", "Per Lindgren ", + "Emil Fresk ", ] categories = [ "data-structures", @@ -31,10 +32,11 @@ __trybuild = [] [target.x86_64-unknown-linux-gnu.dev-dependencies] scoped_threadpool = "0.1.8" +[target.thumbv6m-none-eabi.dependencies] +atomic-polyfill = "0.1.2" + [dependencies] -as-slice = "0.1.5" -generic-array = "0.14.4" -hash32 = "0.1.0" +hash32 = "0.2.1" [dependencies.serde] version = "1" diff --git a/build.rs b/build.rs index 8a29d9e517..094b6cd262 100644 --- a/build.rs +++ b/build.rs @@ -24,10 +24,7 @@ fn main() -> Result<(), Box> { // built-in targets with no atomic / CAS support as of nightly-2019-12-17 // see the `no-atomics.sh` / `no-cas.sh` script sitting next to this file match &target[..] 
{ - "thumbv6m-none-eabi" - | "msp430-none-elf" - | "riscv32i-unknown-none-elf" - | "riscv32imc-unknown-none-elf" => {} + "msp430-none-elf" | "riscv32i-unknown-none-elf" | "riscv32imc-unknown-none-elf" => {} _ => { println!("cargo:rustc-cfg=has_cas"); diff --git a/cfail/ui/freeze.rs b/cfail/ui/freeze.rs index d1adc1f1f7..12bbbcc238 100644 --- a/cfail/ui/freeze.rs +++ b/cfail/ui/freeze.rs @@ -1,7 +1,7 @@ -use heapless::{consts, spsc::Queue}; +use heapless::spsc::Queue; fn main() { - let mut q: Queue = Queue::new(); + let mut q: Queue = Queue::new(); let (_p, mut _c) = q.split(); q.enqueue(0).unwrap(); diff --git a/cfail/ui/not-send.rs b/cfail/ui/not-send.rs index cdd1359412..0c8559d9d3 100644 --- a/cfail/ui/not-send.rs +++ b/cfail/ui/not-send.rs @@ -3,8 +3,8 @@ use core::marker::PhantomData; use heapless::{ - consts, spsc::{Consumer, Producer, Queue}, + HistoryBuffer, Vec, }; type NotSend = PhantomData<*const ()>; @@ -16,8 +16,9 @@ where } fn main() { - is_send::>(); - is_send::>(); - is_send::>(); - is_send::>(); + is_send::>(); + is_send::>(); + is_send::>(); + is_send::>(); + is_send::>(); } diff --git a/cfail/ui/not-send.stderr b/cfail/ui/not-send.stderr index a1813b629b..97ed4e8687 100644 --- a/cfail/ui/not-send.stderr +++ b/cfail/ui/not-send.stderr @@ -1,83 +1,89 @@ error[E0277]: `*const ()` cannot be sent between threads safely --> $DIR/not-send.rs:19:5 | -19 | is_send::>(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `*const ()` cannot be sent between threads safely +12 | fn is_send() + | ------- required by a bound in this +13 | where +14 | T: Send, + | ---- required by this bound in `is_send` +... 
+19 | is_send::>(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `*const ()` cannot be sent between threads safely | - = help: within `std::marker::PhantomData<*const ()>`, the trait `std::marker::Send` is not implemented for `*const ()` - = note: required because it appears within the type `std::marker::PhantomData<*const ()>` - = note: required because of the requirements on the impl of `std::marker::Send` for `heapless::spsc::split::Consumer<'_, std::marker::PhantomData<*const ()>, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>` -note: required by `is_send` - --> $DIR/not-send.rs:12:1 - | -12 | / fn is_send() -13 | | where -14 | | T: Send, -15 | | { -16 | | } - | |_^ + = help: within `PhantomData<*const ()>`, the trait `Send` is not implemented for `*const ()` + = note: required because it appears within the type `PhantomData<*const ()>` + = note: required because of the requirements on the impl of `Send` for `Consumer<'_, PhantomData<*const ()>, 4_usize>` error[E0277]: `*const ()` cannot be sent between threads safely --> $DIR/not-send.rs:20:5 | -20 | is_send::>(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `*const ()` cannot be sent between threads safely - | - = help: within `std::marker::PhantomData<*const ()>`, the trait `std::marker::Send` is not implemented for `*const ()` - = note: required because it appears within the type `std::marker::PhantomData<*const ()>` - = note: required because of the requirements on the impl of `std::marker::Send` for `heapless::spsc::split::Producer<'_, std::marker::PhantomData<*const ()>, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>` -note: required by `is_send` - --> $DIR/not-send.rs:12:1 +12 | fn is_send() + | ------- required by a bound in this +13 | where +14 | T: Send, + | ---- required by this bound in `is_send` +... 
+20 | is_send::>(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `*const ()` cannot be sent between threads safely | -12 | / fn is_send() -13 | | where -14 | | T: Send, -15 | | { -16 | | } - | |_^ + = help: within `PhantomData<*const ()>`, the trait `Send` is not implemented for `*const ()` + = note: required because it appears within the type `PhantomData<*const ()>` + = note: required because of the requirements on the impl of `Send` for `Producer<'_, PhantomData<*const ()>, 4_usize>` error[E0277]: `*const ()` cannot be sent between threads safely --> $DIR/not-send.rs:21:5 | -21 | is_send::>(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `*const ()` cannot be sent between threads safely - | - = help: within `std::marker::PhantomData<*const ()>`, the trait `std::marker::Send` is not implemented for `*const ()` - = note: required because it appears within the type `std::marker::PhantomData<*const ()>` - = note: required because of the requirements on the impl of `std::marker::Send` for `generic_array::GenericArray, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>` - = note: required because it appears within the type `std::mem::ManuallyDrop, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>>` - = note: required because it appears within the type `std::mem::MaybeUninit, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>>` - = note: required because it appears within the type `heapless::i::Queue, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>>` - = note: required because it appears within the type `heapless::spsc::Queue, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>` -note: required by `is_send` - --> $DIR/not-send.rs:12:1 +12 | fn is_send() + | ------- required by a bound in this +13 | where +14 | T: Send, + | ---- required by this bound in `is_send` +... 
+21 | is_send::>(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `*const ()` cannot be sent between threads safely | -12 | / fn is_send() -13 | | where -14 | | T: Send, -15 | | { -16 | | } - | |_^ + = help: within `Queue, 4_usize>`, the trait `Send` is not implemented for `*const ()` + = note: required because it appears within the type `PhantomData<*const ()>` + = note: required because it appears within the type `ManuallyDrop>` + = note: required because it appears within the type `MaybeUninit>` + = note: required because it appears within the type `UnsafeCell>>` + = note: required because it appears within the type `[UnsafeCell>>; 4]` + = note: required because it appears within the type `Queue, 4_usize>` error[E0277]: `*const ()` cannot be sent between threads safely --> $DIR/not-send.rs:22:5 | -22 | is_send::>(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `*const ()` cannot be sent between threads safely +12 | fn is_send() + | ------- required by a bound in this +13 | where +14 | T: Send, + | ---- required by this bound in `is_send` +... 
+22 | is_send::>(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ `*const ()` cannot be sent between threads safely + | + = help: within `heapless::Vec, 4_usize>`, the trait `Send` is not implemented for `*const ()` + = note: required because it appears within the type `PhantomData<*const ()>` + = note: required because it appears within the type `[PhantomData<*const ()>; 4]` + = note: required because it appears within the type `ManuallyDrop<[PhantomData<*const ()>; 4]>` + = note: required because it appears within the type `MaybeUninit<[PhantomData<*const ()>; 4]>` + = note: required because it appears within the type `heapless::Vec, 4_usize>` + +error[E0277]: `*const ()` cannot be sent between threads safely + --> $DIR/not-send.rs:23:5 | - = help: within `std::marker::PhantomData<*const ()>`, the trait `std::marker::Send` is not implemented for `*const ()` - = note: required because it appears within the type `std::marker::PhantomData<*const ()>` - = note: required because of the requirements on the impl of `std::marker::Send` for `generic_array::GenericArray, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>` - = note: required because it appears within the type `std::mem::ManuallyDrop, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>>` - = note: required because it appears within the type `std::mem::MaybeUninit, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>>` - = note: required because it appears within the type `heapless::i::Vec, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>>` - = note: required because it appears within the type `heapless::vec::Vec, typenum::uint::UInt, typenum::bit::B0>, typenum::bit::B0>>` -note: required by `is_send` - --> $DIR/not-send.rs:12:1 +12 | fn is_send() + | ------- required by a bound in this +13 | where +14 | T: Send, + | ---- required by this bound in `is_send` +... 
+23 | is_send::>(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `*const ()` cannot be sent between threads safely | -12 | / fn is_send() -13 | | where -14 | | T: Send, -15 | | { -16 | | } - | |_^ + = help: within `HistoryBuffer, 4_usize>`, the trait `Send` is not implemented for `*const ()` + = note: required because it appears within the type `PhantomData<*const ()>` + = note: required because it appears within the type `ManuallyDrop>` + = note: required because it appears within the type `MaybeUninit>` + = note: required because it appears within the type `[MaybeUninit>; 4]` + = note: required because it appears within the type `HistoryBuffer, 4_usize>` diff --git a/src/binary_heap.rs b/src/binary_heap.rs index aa56c57b72..caddf46d08 100644 --- a/src/binary_heap.rs +++ b/src/binary_heap.rs @@ -16,9 +16,8 @@ use core::{ ptr, slice, }; -use generic_array::{ArrayLength, GenericArray}; - use crate::sealed::binary_heap::Kind; +use crate::vec::Vec; /// Min-heap pub enum Min {} @@ -26,17 +25,6 @@ pub enum Min {} /// Max-heap pub enum Max {} -impl crate::i::BinaryHeap { - /// `BinaryHeap` `const` constructor; wrap the returned value in - /// [`BinaryHeap`](../struct.BinaryHeap.html) - pub const fn new() -> Self { - Self { - _kind: PhantomData, - data: crate::i::Vec::new(), - } - } -} - /// A priority queue implemented with a binary heap. /// /// This can be either a min-heap or a max-heap. @@ -47,9 +35,8 @@ impl crate::i::BinaryHeap { /// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; -/// use heapless::consts::*; /// -/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); +/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// /// // We can use peek to look at the next item in the heap. In this case, /// // there's no items in there yet so we get None. @@ -84,51 +71,51 @@ impl crate::i::BinaryHeap { /// // The heap should now be empty. 
/// assert!(heap.is_empty()) /// ``` -pub struct BinaryHeap( - #[doc(hidden)] pub crate::i::BinaryHeap, KIND>, -) -where - T: Ord, - N: ArrayLength, - KIND: Kind; -impl BinaryHeap -where - T: Ord, - N: ArrayLength, - K: Kind, -{ +pub struct BinaryHeap { + pub(crate) _kind: PhantomData, + pub(crate) data: Vec, +} + +impl BinaryHeap { /* Constructors */ /// Creates an empty BinaryHeap as a $K-heap. /// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; - /// use heapless::consts::*; /// /// // allocate the binary heap on the stack - /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); + /// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// heap.push(4).unwrap(); /// /// // allocate the binary heap in a static variable - /// static mut HEAP: BinaryHeap = BinaryHeap(heapless::i::BinaryHeap::new()); + /// static mut HEAP: BinaryHeap = BinaryHeap::new(); /// ``` - pub fn new() -> Self { - BinaryHeap(crate::i::BinaryHeap::new()) + pub const fn new() -> Self { + Self { + _kind: PhantomData, + data: Vec::new(), + } } +} +impl BinaryHeap +where + T: Ord, + K: Kind, +{ /* Public API */ /// Returns the capacity of the binary heap. pub fn capacity(&self) -> usize { - self.0.data.capacity() + self.data.capacity() } /// Drops all items from the binary heap. /// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; - /// use heapless::consts::*; /// - /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); + /// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// heap.push(1).unwrap(); /// heap.push(3).unwrap(); /// @@ -139,32 +126,30 @@ where /// assert!(heap.is_empty()); /// ``` pub fn clear(&mut self) { - self.0.data.clear() + self.data.clear() } /// Returns the length of the binary heap. 
/// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; - /// use heapless::consts::*; /// - /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); + /// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// heap.push(1).unwrap(); /// heap.push(3).unwrap(); /// /// assert_eq!(heap.len(), 2); /// ``` pub fn len(&self) -> usize { - self.0.data.len + self.data.len() } /// Checks if the binary heap is empty. /// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; - /// use heapless::consts::*; /// - /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); + /// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// /// assert!(heap.is_empty()); /// @@ -182,9 +167,8 @@ where /// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; - /// use heapless::consts::*; /// - /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); + /// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// heap.push(1).unwrap(); /// heap.push(2).unwrap(); /// heap.push(3).unwrap(); @@ -197,7 +181,7 @@ where /// } /// ``` pub fn iter(&self) -> slice::Iter<'_, T> { - self.0.data.as_slice().iter() + self.data.as_slice().iter() } /// Returns a mutable iterator visiting all values in the underlying vector, in arbitrary order. @@ -205,7 +189,7 @@ where /// **WARNING** Mutating the items in the binary heap can leave the heap in an inconsistent /// state. 
pub fn iter_mut(&mut self) -> slice::IterMut<'_, T> { - self.0.data.as_mut_slice().iter_mut() + self.data.as_mut_slice().iter_mut() } /// Returns the *top* (greatest if max-heap, smallest if min-heap) item in the binary heap, or @@ -213,9 +197,8 @@ where /// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; - /// use heapless::consts::*; /// - /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); + /// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// assert_eq!(heap.peek(), None); /// /// heap.push(1).unwrap(); @@ -224,7 +207,7 @@ where /// assert_eq!(heap.peek(), Some(&5)); /// ``` pub fn peek(&self) -> Option<&T> { - self.0.data.as_slice().get(0) + self.data.as_slice().get(0) } /// Returns a mutable reference to the greatest item in the binary heap, or @@ -239,9 +222,8 @@ where /// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; - /// use heapless::consts::*; /// - /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); + /// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// assert!(heap.peek_mut().is_none()); /// /// heap.push(1); @@ -254,7 +236,7 @@ where /// /// assert_eq!(heap.peek(), Some(&2)); /// ``` - pub fn peek_mut(&mut self) -> Option> { + pub fn peek_mut(&mut self) -> Option> { if self.is_empty() { None } else { @@ -270,9 +252,8 @@ where /// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; - /// use heapless::consts::*; /// - /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); + /// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// heap.push(1).unwrap(); /// heap.push(3).unwrap(); /// @@ -291,10 +272,10 @@ where /// Removes the *top* (greatest if max-heap, smallest if min-heap) item from the binary heap and /// returns it, without checking if the binary heap is empty. 
pub unsafe fn pop_unchecked(&mut self) -> T { - let mut item = self.0.data.pop_unchecked(); + let mut item = self.data.pop_unchecked(); if !self.is_empty() { - mem::swap(&mut item, self.0.data.as_mut_slice().get_unchecked_mut(0)); + mem::swap(&mut item, self.data.as_mut_slice().get_unchecked_mut(0)); self.sift_down_to_bottom(0); } item @@ -304,9 +285,8 @@ where /// /// ``` /// use heapless::binary_heap::{BinaryHeap, Max}; - /// use heapless::consts::*; /// - /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new(); + /// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new(); /// heap.push(3).unwrap(); /// heap.push(5).unwrap(); /// heap.push(1).unwrap(); @@ -315,7 +295,7 @@ where /// assert_eq!(heap.peek(), Some(&5)); /// ``` pub fn push(&mut self, item: T) -> Result<(), T> { - if self.0.data.is_full() { + if self.data.is_full() { return Err(item); } @@ -326,7 +306,7 @@ where /// Pushes an item onto the binary heap without first checking if it's full. pub unsafe fn push_unchecked(&mut self, item: T) { let old_len = self.len(); - self.0.data.push_unchecked(item); + self.data.push_unchecked(item); self.sift_up(0, old_len); } @@ -335,7 +315,7 @@ where let end = self.len(); let start = pos; unsafe { - let mut hole = Hole::new(self.0.data.as_mut_slice(), pos); + let mut hole = Hole::new(self.data.as_mut_slice(), pos); let mut child = 2 * pos + 1; while child < end { let right = child + 1; @@ -354,7 +334,7 @@ where fn sift_up(&mut self, start: usize, pos: usize) -> usize { unsafe { // Take out the value at `pos` and create a hole. 
- let mut hole = Hole::new(self.0.data.as_mut_slice(), pos); + let mut hole = Hole::new(self.data.as_mut_slice(), pos); while hole.pos() > start { let parent = (hole.pos() - 1) / 2; @@ -437,20 +417,18 @@ impl<'a, T> Hole<'a, T> { /// /// [`peek_mut`]: struct.BinaryHeap.html#method.peek_mut /// [`BinaryHeap`]: struct.BinaryHeap.html -pub struct PeekMut<'a, T, N, K> +pub struct PeekMut<'a, T, K, const N: usize> where T: Ord, - N: ArrayLength, K: Kind, { - heap: &'a mut BinaryHeap, + heap: &'a mut BinaryHeap, sift: bool, } -impl Drop for PeekMut<'_, T, N, K> +impl Drop for PeekMut<'_, T, K, N> where T: Ord, - N: ArrayLength, K: Kind, { fn drop(&mut self) { @@ -460,41 +438,38 @@ where } } -impl Deref for PeekMut<'_, T, N, K> +impl Deref for PeekMut<'_, T, K, N> where T: Ord, - N: ArrayLength, K: Kind, { type Target = T; fn deref(&self) -> &T { debug_assert!(!self.heap.is_empty()); // SAFE: PeekMut is only instantiated for non-empty heaps - unsafe { self.heap.0.data.as_slice().get_unchecked(0) } + unsafe { self.heap.data.as_slice().get_unchecked(0) } } } -impl DerefMut for PeekMut<'_, T, N, K> +impl DerefMut for PeekMut<'_, T, K, N> where T: Ord, - N: ArrayLength, K: Kind, { fn deref_mut(&mut self) -> &mut T { debug_assert!(!self.heap.is_empty()); // SAFE: PeekMut is only instantiated for non-empty heaps - unsafe { self.heap.0.data.as_mut_slice().get_unchecked_mut(0) } + unsafe { self.heap.data.as_mut_slice().get_unchecked_mut(0) } } } -impl<'a, T, N, K> PeekMut<'a, T, N, K> +impl<'a, T, K, const N: usize> PeekMut<'a, T, K, N> where T: Ord, - N: ArrayLength, K: Kind, { /// Removes the peeked value from the heap and returns it. 
- pub fn pop(mut this: PeekMut<'a, T, N, K>) -> T { + pub fn pop(mut this: PeekMut<'a, T, K, N>) -> T { let value = this.heap.pop().unwrap(); this.sift = false; value @@ -512,10 +487,9 @@ impl<'a, T> Drop for Hole<'a, T> { } } -impl Default for BinaryHeap +impl Default for BinaryHeap where T: Ord, - N: ArrayLength, K: Kind, { fn default() -> Self { @@ -523,34 +497,27 @@ where } } -impl Clone for BinaryHeap +impl Clone for BinaryHeap where - N: ArrayLength, K: Kind, T: Ord + Clone, { fn clone(&self) -> Self { - BinaryHeap(crate::i::BinaryHeap { - _kind: self.0._kind, - data: self.0.data.clone(), - }) + Self { + _kind: self._kind, + data: self.data.clone(), + } } } -impl Drop for BinaryHeap -where - N: ArrayLength, - K: Kind, - T: Ord, -{ +impl Drop for BinaryHeap { fn drop(&mut self) { - unsafe { ptr::drop_in_place(self.0.data.as_mut_slice()) } + unsafe { ptr::drop_in_place(self.data.as_mut_slice()) } } } -impl fmt::Debug for BinaryHeap +impl fmt::Debug for BinaryHeap where - N: ArrayLength, K: Kind, T: Ord + fmt::Debug, { @@ -559,9 +526,8 @@ where } } -impl<'a, T, N, K> IntoIterator for &'a BinaryHeap +impl<'a, T, K, const N: usize> IntoIterator for &'a BinaryHeap where - N: ArrayLength, K: Kind, T: Ord, { @@ -577,19 +543,16 @@ where mod tests { use std::vec::Vec; - use crate::{ - binary_heap::{self, BinaryHeap, Min}, - consts::*, - }; + use crate::binary_heap::{BinaryHeap, Max, Min}; #[test] fn static_new() { - static mut _B: BinaryHeap = BinaryHeap(crate::i::BinaryHeap::new()); + static mut _B: BinaryHeap = BinaryHeap::new(); } #[test] fn min() { - let mut heap = BinaryHeap::<_, U16, Min>::new(); + let mut heap = BinaryHeap::<_, Min, 16>::new(); heap.push(1).unwrap(); heap.push(2).unwrap(); heap.push(3).unwrap(); @@ -641,7 +604,7 @@ mod tests { #[test] fn max() { - let mut heap = BinaryHeap::<_, U16, binary_heap::Max>::new(); + let mut heap = BinaryHeap::<_, Max, 16>::new(); heap.push(1).unwrap(); heap.push(2).unwrap(); heap.push(3).unwrap(); diff --git 
a/src/de.rs b/src/de.rs index af74019357..32a4d333db 100644 --- a/src/de.rs +++ b/src/de.rs @@ -1,36 +1,31 @@ +use crate::{ + sealed::binary_heap::Kind as BinaryHeapKind, BinaryHeap, IndexMap, IndexSet, LinearMap, String, + Vec, +}; use core::{fmt, marker::PhantomData}; - -use generic_array::{typenum::PowerOfTwo, ArrayLength}; use hash32::{BuildHasherDefault, Hash, Hasher}; use serde::de::{self, Deserialize, Deserializer, Error, MapAccess, SeqAccess}; -use crate::{ - indexmap::{Bucket, Pos}, - sealed::binary_heap::Kind as BinaryHeapKind, - BinaryHeap, IndexMap, IndexSet, LinearMap, String, Vec, -}; - // Sequential containers -impl<'de, T, N, KIND> Deserialize<'de> for BinaryHeap +impl<'de, T, KIND, const N: usize> Deserialize<'de> for BinaryHeap where T: Ord + Deserialize<'de>, - N: ArrayLength, + KIND: BinaryHeapKind, { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { - struct ValueVisitor<'de, T, N, KIND>(PhantomData<(&'de (), T, N, KIND)>); + struct ValueVisitor<'de, T, KIND, const N: usize>(PhantomData<(&'de (), T, KIND)>); - impl<'de, T, N, KIND> de::Visitor<'de> for ValueVisitor<'de, T, N, KIND> + impl<'de, T, KIND, const N: usize> de::Visitor<'de> for ValueVisitor<'de, T, KIND, N> where T: Ord + Deserialize<'de>, - N: ArrayLength, KIND: BinaryHeapKind, { - type Value = BinaryHeap; + type Value = BinaryHeap; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("a sequence") @@ -55,25 +50,23 @@ where } } -impl<'de, T, N, S> Deserialize<'de> for IndexSet> +impl<'de, T, S, const N: usize> Deserialize<'de> for IndexSet, N> where T: Eq + Hash + Deserialize<'de>, S: Hasher + Default, - N: ArrayLength> + ArrayLength> + PowerOfTwo, { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { - struct ValueVisitor<'de, T, N, S>(PhantomData<(&'de (), T, N, S)>); + struct ValueVisitor<'de, T, S, const N: usize>(PhantomData<(&'de (), T, S)>); - impl<'de, T, N, S> de::Visitor<'de> for 
ValueVisitor<'de, T, N, S> + impl<'de, T, S, const N: usize> de::Visitor<'de> for ValueVisitor<'de, T, S, N> where T: Eq + Hash + Deserialize<'de>, S: Hasher + Default, - N: ArrayLength> + ArrayLength> + PowerOfTwo, { - type Value = IndexSet>; + type Value = IndexSet, N>; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("a sequence") @@ -98,20 +91,18 @@ where } } -impl<'de, T, N> Deserialize<'de> for Vec +impl<'de, T, const N: usize> Deserialize<'de> for Vec where - N: ArrayLength, T: Deserialize<'de>, { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { - struct ValueVisitor<'de, T, N>(PhantomData<(&'de (), T, N)>); + struct ValueVisitor<'de, T, const N: usize>(PhantomData<(&'de (), T)>); - impl<'de, T, N> de::Visitor<'de> for ValueVisitor<'de, T, N> + impl<'de, T, const N: usize> serde::de::Visitor<'de> for ValueVisitor<'de, T, N> where - N: ArrayLength, T: Deserialize<'de>, { type Value = Vec; @@ -141,27 +132,25 @@ where // Dictionaries -impl<'de, K, V, N, S> Deserialize<'de> for IndexMap> +impl<'de, K, V, S, const N: usize> Deserialize<'de> for IndexMap, N> where K: Eq + Hash + Deserialize<'de>, V: Deserialize<'de>, - N: ArrayLength> + ArrayLength> + PowerOfTwo, S: Default + Hasher, { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { - struct ValueVisitor<'de, K, V, N, S>(PhantomData<(&'de (), K, V, N, S)>); + struct ValueVisitor<'de, K, V, S, const N: usize>(PhantomData<(&'de (), K, V, S)>); - impl<'de, K, V, N, S> de::Visitor<'de> for ValueVisitor<'de, K, V, N, S> + impl<'de, K, V, S, const N: usize> de::Visitor<'de> for ValueVisitor<'de, K, V, S, N> where K: Eq + Hash + Deserialize<'de>, V: Deserialize<'de>, - N: ArrayLength> + ArrayLength> + PowerOfTwo, S: Default + Hasher, { - type Value = IndexMap>; + type Value = IndexMap, N>; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("a map") @@ -186,23 +175,21 @@ where } } -impl<'de, 
K, V, N> Deserialize<'de> for LinearMap +impl<'de, K, V, const N: usize> Deserialize<'de> for LinearMap where K: Eq + Deserialize<'de>, V: Deserialize<'de>, - N: ArrayLength<(K, V)>, { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { - struct ValueVisitor<'de, K, V, N>(PhantomData<(&'de (), K, V, N)>); + struct ValueVisitor<'de, K, V, const N: usize>(PhantomData<(&'de (), K, V)>); - impl<'de, K, V, N> de::Visitor<'de> for ValueVisitor<'de, K, V, N> + impl<'de, K, V, const N: usize> de::Visitor<'de> for ValueVisitor<'de, K, V, N> where K: Eq + Deserialize<'de>, V: Deserialize<'de>, - N: ArrayLength<(K, V)>, { type Value = LinearMap; @@ -231,28 +218,18 @@ where // String containers -impl<'de, N> Deserialize<'de> for String -where - N: ArrayLength, -{ +impl<'de, const N: usize> Deserialize<'de> for String { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { - struct ValueVisitor<'de, N>(PhantomData<(&'de (), N)>); + struct ValueVisitor<'de, const N: usize>(PhantomData<&'de ()>); - impl<'de, N> de::Visitor<'de> for ValueVisitor<'de, N> - where - N: ArrayLength, - { + impl<'de, const N: usize> de::Visitor<'de> for ValueVisitor<'de, N> { type Value = String; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - formatter, - "a string no more than {} bytes long", - N::to_u64() - ) + write!(formatter, "a string no more than {} bytes long", N as u64) } fn visit_str(self, v: &str) -> Result @@ -269,13 +246,15 @@ where where E: de::Error, { - let mut bytes = Vec::new(); - if bytes.extend_from_slice(v).is_err() { - return Err(E::invalid_value(de::Unexpected::Bytes(v), &self)); - } + let mut s = String::new(); + + s.push_str( + core::str::from_utf8(v) + .map_err(|_| E::invalid_value(de::Unexpected::Bytes(v), &self))?, + ) + .map_err(|_| E::invalid_length(v.len(), &self))?; - String::from_utf8(bytes) - .map_err(|_| E::invalid_value(de::Unexpected::Bytes(v), &self)) + Ok(s) } } diff --git a/src/histbuf.rs 
b/src/histbuf.rs index 975fa200f9..2a6ce44ad9 100644 --- a/src/histbuf.rs +++ b/src/histbuf.rs @@ -1,4 +1,6 @@ -use generic_array::{sequence::GenericSequence, ArrayLength, GenericArray}; +use core::mem::MaybeUninit; +use core::ptr; +use core::slice; /// A "history buffer", similar to a write-only ring buffer of fixed length. /// @@ -6,67 +8,58 @@ use generic_array::{sequence::GenericSequence, ArrayLength, GenericArray}; /// is overwritten. Thus, the buffer is useful to keep a history of values with /// some desired depth, and for example calculate a rolling average. /// -/// The buffer is always fully initialized; depending on the constructor, the -/// initial value is either the default value for the element type or a supplied -/// initial value. This simplifies the API and is mostly irrelevant for the -/// intended use case. -/// /// # Examples /// ``` /// use heapless::HistoryBuffer; -/// use heapless::consts::*; /// -/// // Initialize a new buffer with 8 elements, all initially zero. -/// let mut buf = HistoryBuffer::<_, U8>::new(); +/// // Initialize a new buffer with 8 elements. +/// let mut buf = HistoryBuffer::<_, 8>::new(); +/// +/// // Starts with no data +/// assert_eq!(buf.recent(), None); /// /// buf.write(3); /// buf.write(5); /// buf.extend(&[4, 4]); /// /// // The most recent written element is a four. -/// assert_eq!(buf.recent(), &4); +/// assert_eq!(buf.recent(), Some(&4)); /// /// // To access all elements in an unspecified order, use `as_slice()`. /// for el in buf.as_slice() { println!("{:?}", el); } /// -/// // Now we can prepare an average of all values, which comes out to 2. +/// // Now we can prepare an average of all values, which comes out to 4. 
/// let avg = buf.as_slice().iter().sum::() / buf.len(); -/// assert_eq!(avg, 2); +/// assert_eq!(avg, 4); /// ``` -#[derive(Clone)] -pub struct HistoryBuffer -where - N: ArrayLength, -{ - data: GenericArray, +pub struct HistoryBuffer { + data: [MaybeUninit; N], write_at: usize, + filled: bool, } -impl HistoryBuffer -where - N: ArrayLength, - T: Default, -{ - /// Constructs a new history buffer, where every element is filled with the - /// default value of the type `T`. +impl HistoryBuffer { + const INIT: MaybeUninit = MaybeUninit::uninit(); + + /// Constructs a new history buffer. /// - /// `HistoryBuffer` currently cannot be constructed in `const` context. + /// The construction of a `HistoryBuffer` works in `const` contexts. /// /// # Examples /// /// ``` /// use heapless::HistoryBuffer; - /// use heapless::consts::*; /// /// // Allocate a 16-element buffer on the stack - /// let mut x: HistoryBuffer = HistoryBuffer::new(); - /// // All elements are zero - /// assert_eq!(x.as_slice(), [0; 16]); + /// let x: HistoryBuffer = HistoryBuffer::new(); + /// assert_eq!(x.len(), 0); /// ``` - pub fn new() -> Self { + #[inline] + pub const fn new() -> Self { Self { - data: Default::default(), + data: [Self::INIT; N], write_at: 0, + filled: false, } } @@ -77,10 +70,9 @@ where } } -impl HistoryBuffer +impl HistoryBuffer where - N: ArrayLength, - T: Clone, + T: Copy + Clone, { /// Constructs a new history buffer, where every element is the given value. 
/// @@ -88,17 +80,18 @@ where /// /// ``` /// use heapless::HistoryBuffer; - /// use heapless::consts::*; /// /// // Allocate a 16-element buffer on the stack - /// let mut x: HistoryBuffer = HistoryBuffer::new_with(4); + /// let mut x: HistoryBuffer = HistoryBuffer::new_with(4); /// // All elements are four /// assert_eq!(x.as_slice(), [4; 16]); /// ``` + #[inline] pub fn new_with(t: T) -> Self { Self { - data: GenericArray::generate(|_| t.clone()), + data: [MaybeUninit::new(t); N], write_at: 0, + filled: true, } } @@ -108,22 +101,36 @@ where } } -impl HistoryBuffer -where - N: ArrayLength, -{ +impl HistoryBuffer { + /// Returns the current fill level of the buffer. + #[inline] + pub fn len(&self) -> usize { + if self.filled { + N + } else { + self.write_at + } + } + /// Returns the capacity of the buffer, which is the length of the /// underlying backing array. - pub fn len(&self) -> usize { - self.data.len() + #[inline] + pub fn capacity(&self) -> usize { + N } /// Writes an element to the buffer, overwriting the oldest value. pub fn write(&mut self, t: T) { - self.data[self.write_at] = t; + if self.filled { + // Drop the old before we overwrite it. 
+ unsafe { ptr::drop_in_place(self.data[self.write_at].as_mut_ptr()) } + } + self.data[self.write_at] = MaybeUninit::new(t); + self.write_at += 1; - if self.write_at == self.len() { + if self.write_at == self.capacity() { self.write_at = 0; + self.filled = true; } } @@ -146,32 +153,32 @@ where /// /// ``` /// use heapless::HistoryBuffer; - /// use heapless::consts::*; /// - /// let mut x: HistoryBuffer = HistoryBuffer::new(); + /// let mut x: HistoryBuffer = HistoryBuffer::new(); /// x.write(4); /// x.write(10); - /// assert_eq!(x.recent(), &10); + /// assert_eq!(x.recent(), Some(&10)); /// ``` - pub fn recent(&self) -> &T { + pub fn recent(&self) -> Option<&T> { if self.write_at == 0 { - &self.data[self.len() - 1] + if self.filled { + Some(unsafe { &*self.data[self.capacity() - 1].as_ptr() }) + } else { + None + } } else { - &self.data[self.write_at - 1] + Some(unsafe { &*self.data[self.write_at - 1].as_ptr() }) } } /// Returns the array slice backing the buffer, without keeping track /// of the write position. Therefore, the element order is unspecified. 
pub fn as_slice(&self) -> &[T] { - &self.data + unsafe { slice::from_raw_parts(self.data.as_ptr() as *const _, self.len()) } } } -impl Extend for HistoryBuffer -where - N: ArrayLength, -{ +impl Extend for HistoryBuffer { fn extend(&mut self, iter: I) where I: IntoIterator, @@ -182,10 +189,9 @@ where } } -impl<'a, T, N> Extend<&'a T> for HistoryBuffer +impl<'a, T, const N: usize> Extend<&'a T> for HistoryBuffer where T: 'a + Clone, - N: ArrayLength, { fn extend(&mut self, iter: I) where @@ -195,26 +201,37 @@ where } } +impl Drop for HistoryBuffer { + fn drop(&mut self) { + unsafe { + ptr::drop_in_place(ptr::slice_from_raw_parts_mut( + self.data.as_mut_ptr() as *mut T, + self.len(), + )) + } + } +} + #[cfg(test)] mod tests { - use crate::{consts::*, HistoryBuffer}; + use crate::HistoryBuffer; #[test] fn new() { - let x: HistoryBuffer = HistoryBuffer::new_with(1); + let x: HistoryBuffer = HistoryBuffer::new_with(1); assert_eq!(x.len(), 4); assert_eq!(x.as_slice(), [1; 4]); - let x: HistoryBuffer = HistoryBuffer::new(); - assert_eq!(x.as_slice(), [0; 4]); + let x: HistoryBuffer = HistoryBuffer::new(); + assert_eq!(x.as_slice(), []); } #[test] fn write() { - let mut x: HistoryBuffer = HistoryBuffer::new(); + let mut x: HistoryBuffer = HistoryBuffer::new(); x.write(1); x.write(4); - assert_eq!(x.as_slice(), [1, 4, 0, 0]); + assert_eq!(x.as_slice(), [1, 4]); x.write(5); x.write(6); @@ -227,33 +244,35 @@ mod tests { #[test] fn clear() { - let mut x: HistoryBuffer = HistoryBuffer::new_with(1); + let mut x: HistoryBuffer = HistoryBuffer::new_with(1); x.clear(); - assert_eq!(x.as_slice(), [0; 4]); + assert_eq!(x.as_slice(), []); - let mut x: HistoryBuffer = HistoryBuffer::new(); + let mut x: HistoryBuffer = HistoryBuffer::new(); x.clear_with(1); assert_eq!(x.as_slice(), [1; 4]); } #[test] fn recent() { - let mut x: HistoryBuffer = HistoryBuffer::new(); - assert_eq!(x.recent(), &0); + let mut x: HistoryBuffer = HistoryBuffer::new(); + assert_eq!(x.recent(), None); x.write(1); 
x.write(4); - assert_eq!(x.recent(), &4); + assert_eq!(x.recent(), Some(&4)); x.write(5); x.write(6); x.write(10); - assert_eq!(x.recent(), &10); + assert_eq!(x.recent(), Some(&10)); } #[test] fn as_slice() { - let mut x: HistoryBuffer = HistoryBuffer::new(); + let mut x: HistoryBuffer = HistoryBuffer::new(); + + assert_eq!(x.as_slice(), []); x.extend([1, 2, 3, 4, 5].iter()); diff --git a/src/i.rs b/src/i.rs deleted file mode 100644 index b8a88a907c..0000000000 --- a/src/i.rs +++ /dev/null @@ -1,40 +0,0 @@ -//! Unfortunate implementation detail required to construct `heapless` types in const context - -use core::{marker::PhantomData, mem::MaybeUninit}; - -#[cfg(has_atomics)] -use crate::spsc::{Atomic, MultiCore}; - -/// `const-fn` version of [`BinaryHeap`](../binary_heap/struct.BinaryHeap.html) -pub struct BinaryHeap { - pub(crate) _kind: PhantomData, - pub(crate) data: Vec, -} - -/// `const-fn` version of [`LinearMap`](../struct.LinearMap.html) -pub struct LinearMap { - pub(crate) buffer: Vec, -} - -/// `const-fn` version of [`spsc::Queue`](../spsc/struct.Queue.html) -#[cfg(has_atomics)] -pub struct Queue { - // this is from where we dequeue items - pub(crate) head: Atomic, - - // this is where we enqueue new items - pub(crate) tail: Atomic, - - pub(crate) buffer: MaybeUninit, -} - -/// `const-fn` version of [`String`](../struct.String.html) -pub struct String { - pub(crate) vec: Vec, -} - -/// `const-fn` version of [`Vec`](../struct.Vec.html) -pub struct Vec { - pub(crate) buffer: MaybeUninit, - pub(crate) len: usize, -} diff --git a/src/indexmap.rs b/src/indexmap.rs index 6014df6791..100d700903 100644 --- a/src/indexmap.rs +++ b/src/indexmap.rs @@ -1,13 +1,5 @@ -use core::{ - borrow::Borrow, - fmt, - iter::FromIterator, - mem::{self, MaybeUninit}, - num::NonZeroU32, - ops, slice, -}; - -use generic_array::{typenum::PowerOfTwo, ArrayLength, GenericArray}; +use core::{borrow::Borrow, fmt, iter::FromIterator, mem, num::NonZeroU32, ops, slice}; + use 
hash32::{BuildHasher, BuildHasherDefault, FnvHasher, Hash, Hasher}; use crate::Vec; @@ -20,10 +12,9 @@ use crate::Vec; /// # Examples /// ``` /// use heapless::FnvIndexMap; -/// use heapless::consts::*; /// /// // A hash map with a capacity of 16 key-value pairs allocated on the stack -/// let mut book_reviews = FnvIndexMap::<_, _, U16>::new(); +/// let mut book_reviews = FnvIndexMap::<_, _, 16>::new(); /// /// // review some books. /// book_reviews.insert("Adventures of Huckleberry Finn", "My favorite book.").unwrap(); @@ -54,7 +45,7 @@ use crate::Vec; /// println!("{}: \"{}\"", book, review); /// } /// ``` -pub type FnvIndexMap = IndexMap>; +pub type FnvIndexMap = IndexMap, N>; #[derive(Clone, Copy, Eq, PartialEq)] struct HashValue(u16); @@ -126,30 +117,28 @@ macro_rules! probe_loop { } } -struct CoreMap -where - K: Eq + Hash, - N: ArrayLength> + ArrayLength>, -{ +struct CoreMap { entries: Vec, N>, - indices: GenericArray, N>, + indices: [Option; N], } -impl CoreMap -where - K: Eq + Hash, - N: ArrayLength> + ArrayLength>, -{ - // TODO turn into a `const fn`; needs `mem::zeroed` to be a `const fn` - fn new() -> Self { +impl CoreMap { + const fn new() -> Self { + const INIT: Option = None; + CoreMap { entries: Vec::new(), - indices: unsafe { MaybeUninit::zeroed().assume_init() }, + indices: [INIT; N], } } +} +impl CoreMap +where + K: Eq + Hash, +{ fn capacity() -> usize { - N::to_usize() + N } fn mask() -> usize { @@ -311,11 +300,10 @@ where } } -impl Clone for CoreMap +impl Clone for CoreMap where K: Eq + Hash + Clone, V: Clone, - N: ArrayLength> + ArrayLength>, { fn clone(&self) -> Self { Self { @@ -339,10 +327,9 @@ where /// /// ``` /// use heapless::FnvIndexMap; -/// use heapless::consts::*; /// /// // A hash map with a capacity of 16 key-value pairs allocated on the stack -/// let mut book_reviews = FnvIndexMap::<_, _, U16>::new(); +/// let mut book_reviews = FnvIndexMap::<_, _, 16>::new(); /// /// // review some books. 
/// book_reviews.insert("Adventures of Huckleberry Finn", "My favorite book.").unwrap(); @@ -373,52 +360,38 @@ where /// println!("{}: \"{}\"", book, review); /// } /// ``` -pub struct IndexMap -where - K: Eq + Hash, - N: ArrayLength> + ArrayLength>, -{ +pub struct IndexMap { core: CoreMap, build_hasher: S, } -impl IndexMap> -where - K: Eq + Hash, - S: Default + Hasher, - N: ArrayLength> + ArrayLength> + PowerOfTwo, -{ - // TODO turn into a `const fn`; needs `mem::zeroed` to be a `const fn` +impl IndexMap, N> { /// Creates an empty `IndexMap`. - /// - /// **NOTE** This constructor will become a `const fn` in the future - pub fn new() -> Self { + pub const fn new() -> Self { IndexMap { - build_hasher: BuildHasherDefault::default(), + build_hasher: BuildHasherDefault::new(), core: CoreMap::new(), } } } -impl IndexMap +impl IndexMap where K: Eq + Hash, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { /* Public API */ /// Returns the number of elements the map can hold pub fn capacity(&self) -> usize { - N::to_usize() + N } /// Return an iterator over the keys of the map, in their order /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U16>::new(); + /// let mut map = FnvIndexMap::<_, _, 16>::new(); /// map.insert("a", 1).unwrap(); /// map.insert("b", 2).unwrap(); /// map.insert("c", 3).unwrap(); @@ -435,9 +408,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U16>::new(); + /// let mut map = FnvIndexMap::<_, _, 16>::new(); /// map.insert("a", 1).unwrap(); /// map.insert("b", 2).unwrap(); /// map.insert("c", 3).unwrap(); @@ -454,9 +426,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U16>::new(); + /// let mut map = FnvIndexMap::<_, _, 16>::new(); /// map.insert("a", 1).unwrap(); /// map.insert("b", 2).unwrap(); /// map.insert("c", 
3).unwrap(); @@ -477,9 +448,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U16>::new(); + /// let mut map = FnvIndexMap::<_, _, 16>::new(); /// map.insert("a", 1).unwrap(); /// map.insert("b", 2).unwrap(); /// map.insert("c", 3).unwrap(); @@ -498,9 +468,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U16>::new(); + /// let mut map = FnvIndexMap::<_, _, 16>::new(); /// map.insert("a", 1).unwrap(); /// map.insert("b", 2).unwrap(); /// map.insert("c", 3).unwrap(); @@ -528,9 +497,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut a = FnvIndexMap::<_, _, U16>::new(); + /// let mut a = FnvIndexMap::<_, _, 16>::new(); /// assert_eq!(a.len(), 0); /// a.insert(1, "a").unwrap(); /// assert_eq!(a.len(), 1); @@ -545,9 +513,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut a = FnvIndexMap::<_, _, U16>::new(); + /// let mut a = FnvIndexMap::<_, _, 16>::new(); /// assert!(a.is_empty()); /// a.insert(1, "a"); /// assert!(!a.is_empty()); @@ -562,9 +529,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut a = FnvIndexMap::<_, _, U16>::new(); + /// let mut a = FnvIndexMap::<_, _, 16>::new(); /// a.insert(1, "a"); /// a.clear(); /// assert!(a.is_empty()); @@ -585,9 +551,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U16>::new(); + /// let mut map = FnvIndexMap::<_, _, 16>::new(); /// map.insert(1, "a").unwrap(); /// assert_eq!(map.get(&1), Some(&"a")); /// assert_eq!(map.get(&2), None); @@ -612,9 +577,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U8>::new(); + /// let mut map = FnvIndexMap::<_, _, 8>::new(); 
/// map.insert(1, "a").unwrap(); /// assert_eq!(map.contains_key(&1), true); /// assert_eq!(map.contains_key(&2), false); @@ -638,9 +602,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U8>::new(); + /// let mut map = FnvIndexMap::<_, _, 8>::new(); /// map.insert(1, "a").unwrap(); /// if let Some(x) = map.get_mut(&1) { /// *x = "b"; @@ -677,9 +640,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U8>::new(); + /// let mut map = FnvIndexMap::<_, _, 8>::new(); /// assert_eq!(map.insert(37, "a"), Ok(None)); /// assert_eq!(map.is_empty(), false); /// @@ -710,9 +672,8 @@ where /// /// ``` /// use heapless::FnvIndexMap; - /// use heapless::consts::*; /// - /// let mut map = FnvIndexMap::<_, _, U8>::new(); + /// let mut map = FnvIndexMap::<_, _, 8>::new(); /// map.insert(1, "a").unwrap(); /// assert_eq!(map.remove(&1), Some("a")); /// assert_eq!(map.remove(&1), None); @@ -762,12 +723,11 @@ where } } -impl<'a, K, Q, V, N, S> ops::Index<&'a Q> for IndexMap +impl<'a, K, Q, V, S, const N: usize> ops::Index<&'a Q> for IndexMap where K: Eq + Hash + Borrow, Q: ?Sized + Eq + Hash, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { type Output = V; @@ -776,24 +736,22 @@ where } } -impl<'a, K, Q, V, N, S> ops::IndexMut<&'a Q> for IndexMap +impl<'a, K, Q, V, S, const N: usize> ops::IndexMut<&'a Q> for IndexMap where K: Eq + Hash + Borrow, Q: ?Sized + Eq + Hash, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { fn index_mut(&mut self, key: &Q) -> &mut V { self.get_mut(key).expect("key not found") } } -impl Clone for IndexMap +impl Clone for IndexMap where K: Eq + Hash + Clone, V: Clone, S: Clone, - N: ArrayLength> + ArrayLength>, { fn clone(&self) -> Self { Self { @@ -803,23 +761,21 @@ where } } -impl fmt::Debug for IndexMap +impl fmt::Debug for IndexMap where K: Eq + Hash + fmt::Debug, V: fmt::Debug, S: BuildHasher, - N: 
ArrayLength> + ArrayLength>, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_map().entries(self.iter()).finish() } } -impl Default for IndexMap +impl Default for IndexMap where K: Eq + Hash, S: BuildHasher + Default, - N: ArrayLength> + ArrayLength>, { fn default() -> Self { IndexMap { @@ -829,16 +785,15 @@ where } } -impl PartialEq> for IndexMap +impl PartialEq> + for IndexMap where K: Eq + Hash, V: Eq, S: BuildHasher, - N: ArrayLength> + ArrayLength>, S2: BuildHasher, - N2: ArrayLength> + ArrayLength>, { - fn eq(&self, other: &IndexMap) -> bool { + fn eq(&self, other: &IndexMap) -> bool { self.len() == other.len() && self .iter() @@ -846,20 +801,18 @@ where } } -impl Eq for IndexMap +impl Eq for IndexMap where K: Eq + Hash, V: Eq, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { } -impl Extend<(K, V)> for IndexMap +impl Extend<(K, V)> for IndexMap where K: Eq + Hash, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { fn extend(&mut self, iterable: I) where @@ -871,12 +824,11 @@ where } } -impl<'a, K, V, N, S> Extend<(&'a K, &'a V)> for IndexMap +impl<'a, K, V, S, const N: usize> Extend<(&'a K, &'a V)> for IndexMap where K: Eq + Hash + Copy, V: Copy, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { fn extend(&mut self, iterable: I) where @@ -886,11 +838,10 @@ where } } -impl FromIterator<(K, V)> for IndexMap +impl FromIterator<(K, V)> for IndexMap where K: Eq + Hash, S: BuildHasher + Default, - N: ArrayLength> + ArrayLength>, { fn from_iter(iterable: I) -> Self where @@ -902,11 +853,10 @@ where } } -impl<'a, K, V, N, S> IntoIterator for &'a IndexMap +impl<'a, K, V, S, const N: usize> IntoIterator for &'a IndexMap where K: Eq + Hash, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; @@ -916,11 +866,10 @@ where } } -impl<'a, K, V, N, S> IntoIterator for &'a mut IndexMap +impl<'a, K, V, S, const N: usize> IntoIterator for &'a mut IndexMap where K: Eq + Hash, S: BuildHasher, - N: 
ArrayLength> + ArrayLength>, { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; @@ -976,21 +925,16 @@ where #[cfg(test)] mod tests { + use crate::FnvIndexMap; use core::mem; - use generic_array::typenum::Unsigned; - - use crate::{consts::*, FnvIndexMap}; - #[test] fn size() { - type Cap = U4; - - let cap = Cap::to_usize(); + const CAP: usize = 4; assert_eq!( - mem::size_of::>(), - cap * mem::size_of::() + // indices - cap * (mem::size_of::() + // key + mem::size_of::>(), + CAP * mem::size_of::() + // indices + CAP * (mem::size_of::() + // key mem::size_of::() + // value mem::size_of::() // hash ) + // buckets @@ -1001,10 +945,10 @@ mod tests { #[test] fn partial_eq() { { - let mut a: FnvIndexMap<_, _, U4> = FnvIndexMap::new(); + let mut a: FnvIndexMap<_, _, 4> = FnvIndexMap::new(); a.insert("k1", "v1").unwrap(); - let mut b: FnvIndexMap<_, _, U4> = FnvIndexMap::new(); + let mut b: FnvIndexMap<_, _, 4> = FnvIndexMap::new(); b.insert("k1", "v1").unwrap(); assert!(a == b); @@ -1015,11 +959,11 @@ mod tests { } { - let mut a: FnvIndexMap<_, _, U4> = FnvIndexMap::new(); + let mut a: FnvIndexMap<_, _, 4> = FnvIndexMap::new(); a.insert("k1", "v1").unwrap(); a.insert("k2", "v2").unwrap(); - let mut b: FnvIndexMap<_, _, U4> = FnvIndexMap::new(); + let mut b: FnvIndexMap<_, _, 4> = FnvIndexMap::new(); b.insert("k2", "v2").unwrap(); b.insert("k1", "v1").unwrap(); diff --git a/src/indexset.rs b/src/indexset.rs index 3613941bfa..232564717a 100644 --- a/src/indexset.rs +++ b/src/indexset.rs @@ -1,10 +1,7 @@ +use crate::indexmap::{self, IndexMap}; use core::{borrow::Borrow, fmt, iter::FromIterator}; - -use generic_array::{typenum::PowerOfTwo, ArrayLength}; use hash32::{BuildHasher, BuildHasherDefault, FnvHasher, Hash, Hasher}; -use crate::indexmap::{self, Bucket, IndexMap, Pos}; - /// A [`heapless::IndexSet`](./struct.IndexSet.html) using the /// default FNV hasher. 
/// A list of all Methods and Traits available for `FnvIndexSet` can be found in @@ -13,10 +10,9 @@ use crate::indexmap::{self, Bucket, IndexMap, Pos}; /// # Examples /// ``` /// use heapless::FnvIndexSet; -/// use heapless::consts::*; /// /// // A hash set with a capacity of 16 elements allocated on the stack -/// let mut books = FnvIndexSet::<_, U16>::new(); +/// let mut books = FnvIndexSet::<_, 16>::new(); /// /// // Add some books. /// books.insert("A Dance With Dragons").unwrap(); @@ -38,7 +34,7 @@ use crate::indexmap::{self, Bucket, IndexMap, Pos}; /// println!("{}", book); /// } /// ``` -pub type FnvIndexSet = IndexSet>; +pub type FnvIndexSet = IndexSet, N>; /// Fixed capacity [`IndexSet`](https://docs.rs/indexmap/1/indexmap/set/struct.IndexSet.html). /// @@ -54,10 +50,9 @@ pub type FnvIndexSet = IndexSet>; /// /// ``` /// use heapless::FnvIndexSet; -/// use heapless::consts::*; /// /// // A hash set with a capacity of 16 elements allocated on the stack -/// let mut books = FnvIndexSet::<_, U16>::new(); +/// let mut books = FnvIndexSet::<_, 16>::new(); /// /// // Add some books. 
/// books.insert("A Dance With Dragons").unwrap(); @@ -79,33 +74,32 @@ pub type FnvIndexSet = IndexSet>; /// println!("{}", book); /// } /// ``` -pub struct IndexSet +pub struct IndexSet where T: Eq + Hash, - N: ArrayLength> + ArrayLength>, { - map: IndexMap, + map: IndexMap, } -impl IndexSet> +impl IndexSet, N> where T: Eq + Hash, S: Default + Hasher, - N: ArrayLength> + ArrayLength> + PowerOfTwo, { /// Creates an empty `IndexSet` pub fn new() -> Self { + assert!(N.is_power_of_two()); + IndexSet { map: IndexMap::new(), } } } -impl IndexSet +impl IndexSet where T: Eq + Hash, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { /// Returns the number of elements the set can hold /// @@ -113,9 +107,8 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let set = FnvIndexSet::::new(); + /// let set = FnvIndexSet::::new(); /// assert_eq!(set.capacity(), 16); /// ``` pub fn capacity(&self) -> usize { @@ -128,9 +121,8 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut set = FnvIndexSet::<_, U16>::new(); + /// let mut set = FnvIndexSet::<_, 16>::new(); /// set.insert("a").unwrap(); /// set.insert("b").unwrap(); /// @@ -152,30 +144,28 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect(); - /// let mut b: FnvIndexSet<_, U16> = [4, 2, 3, 4].iter().cloned().collect(); + /// let mut a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect(); + /// let mut b: FnvIndexSet<_, 16> = [4, 2, 3, 4].iter().cloned().collect(); /// /// // Can be seen as `a - b`. 
/// for x in a.difference(&b) { /// println!("{}", x); // Print 1 /// } /// - /// let diff: FnvIndexSet<_, U16> = a.difference(&b).collect(); - /// assert_eq!(diff, [1].iter().collect::>()); + /// let diff: FnvIndexSet<_, 16> = a.difference(&b).collect(); + /// assert_eq!(diff, [1].iter().collect::>()); /// /// // Note that difference is not symmetric, /// // and `b - a` means something else: - /// let diff: FnvIndexSet<_, U16> = b.difference(&a).collect(); - /// assert_eq!(diff, [4].iter().collect::>()); + /// let diff: FnvIndexSet<_, 16> = b.difference(&a).collect(); + /// assert_eq!(diff, [4].iter().collect::>()); /// ``` - pub fn difference<'a, N2, S2>( + pub fn difference<'a, S2, const N2: usize>( &'a self, - other: &'a IndexSet, - ) -> Difference<'a, T, N2, S2> + other: &'a IndexSet, + ) -> Difference<'a, T, S2, N2> where - N2: ArrayLength> + ArrayLength>, S2: BuildHasher, { Difference { @@ -191,28 +181,26 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect(); - /// let mut b: FnvIndexSet<_, U16> = [4, 2, 3, 4].iter().cloned().collect(); + /// let mut a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect(); + /// let mut b: FnvIndexSet<_, 16> = [4, 2, 3, 4].iter().cloned().collect(); /// /// // Print 1, 4 in that order order. 
/// for x in a.symmetric_difference(&b) { /// println!("{}", x); /// } /// - /// let diff1: FnvIndexSet<_, U16> = a.symmetric_difference(&b).collect(); - /// let diff2: FnvIndexSet<_, U16> = b.symmetric_difference(&a).collect(); + /// let diff1: FnvIndexSet<_, 16> = a.symmetric_difference(&b).collect(); + /// let diff2: FnvIndexSet<_, 16> = b.symmetric_difference(&a).collect(); /// /// assert_eq!(diff1, diff2); - /// assert_eq!(diff1, [1, 4].iter().collect::>()); + /// assert_eq!(diff1, [1, 4].iter().collect::>()); /// ``` - pub fn symmetric_difference<'a, N2, S2>( + pub fn symmetric_difference<'a, S2, const N2: usize>( &'a self, - other: &'a IndexSet, + other: &'a IndexSet, ) -> impl Iterator where - N2: ArrayLength> + ArrayLength>, S2: BuildHasher, { self.difference(other).chain(other.difference(self)) @@ -225,25 +213,23 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect(); - /// let mut b: FnvIndexSet<_, U16> = [4, 2, 3, 4].iter().cloned().collect(); + /// let mut a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect(); + /// let mut b: FnvIndexSet<_, 16> = [4, 2, 3, 4].iter().cloned().collect(); /// /// // Print 2, 3 in that order. 
/// for x in a.intersection(&b) { /// println!("{}", x); /// } /// - /// let intersection: FnvIndexSet<_, U16> = a.intersection(&b).collect(); - /// assert_eq!(intersection, [2, 3].iter().collect::>()); + /// let intersection: FnvIndexSet<_, 16> = a.intersection(&b).collect(); + /// assert_eq!(intersection, [2, 3].iter().collect::>()); /// ``` - pub fn intersection<'a, N2, S2>( + pub fn intersection<'a, S2, const N2: usize>( &'a self, - other: &'a IndexSet, - ) -> Intersection<'a, T, N2, S2> + other: &'a IndexSet, + ) -> Intersection<'a, T, S2, N2> where - N2: ArrayLength> + ArrayLength>, S2: BuildHasher, { Intersection { @@ -259,25 +245,23 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect(); - /// let mut b: FnvIndexSet<_, U16> = [4, 2, 3, 4].iter().cloned().collect(); + /// let mut a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect(); + /// let mut b: FnvIndexSet<_, 16> = [4, 2, 3, 4].iter().cloned().collect(); /// /// // Print 1, 2, 3, 4 in that order. 
/// for x in a.union(&b) { /// println!("{}", x); /// } /// - /// let union: FnvIndexSet<_, U16> = a.union(&b).collect(); - /// assert_eq!(union, [1, 2, 3, 4].iter().collect::>()); + /// let union: FnvIndexSet<_, 16> = a.union(&b).collect(); + /// assert_eq!(union, [1, 2, 3, 4].iter().collect::>()); /// ``` - pub fn union<'a, N2, S2>( + pub fn union<'a, S2, const N2: usize>( &'a self, - other: &'a IndexSet, + other: &'a IndexSet, ) -> impl Iterator where - N2: ArrayLength> + ArrayLength>, S2: BuildHasher, { self.iter().chain(other.difference(self)) @@ -289,9 +273,8 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut v: FnvIndexSet<_, U16> = FnvIndexSet::new(); + /// let mut v: FnvIndexSet<_, 16> = FnvIndexSet::new(); /// assert_eq!(v.len(), 0); /// v.insert(1).unwrap(); /// assert_eq!(v.len(), 1); @@ -306,9 +289,8 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut v: FnvIndexSet<_, U16> = FnvIndexSet::new(); + /// let mut v: FnvIndexSet<_, 16> = FnvIndexSet::new(); /// assert!(v.is_empty()); /// v.insert(1).unwrap(); /// assert!(!v.is_empty()); @@ -323,9 +305,8 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut v: FnvIndexSet<_, U16> = FnvIndexSet::new(); + /// let mut v: FnvIndexSet<_, 16> = FnvIndexSet::new(); /// v.insert(1).unwrap(); /// v.clear(); /// assert!(v.is_empty()); @@ -343,9 +324,8 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let set: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect(); + /// let set: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect(); /// assert_eq!(set.contains(&1), true); /// assert_eq!(set.contains(&4), false); /// ``` @@ -364,10 +344,9 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect(); - /// let mut b = 
FnvIndexSet::<_, U16>::new(); + /// let a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect(); + /// let mut b = FnvIndexSet::<_, 16>::new(); /// /// assert_eq!(a.is_disjoint(&b), true); /// b.insert(4).unwrap(); @@ -375,9 +354,8 @@ where /// b.insert(1).unwrap(); /// assert_eq!(a.is_disjoint(&b), false); /// ``` - pub fn is_disjoint(&self, other: &IndexSet) -> bool + pub fn is_disjoint(&self, other: &IndexSet) -> bool where - N2: ArrayLength> + ArrayLength>, S2: BuildHasher, { self.iter().all(|v| !other.contains(v)) @@ -390,10 +368,9 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let sup: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect(); - /// let mut set = FnvIndexSet::<_, U16>::new(); + /// let sup: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect(); + /// let mut set = FnvIndexSet::<_, 16>::new(); /// /// assert_eq!(set.is_subset(&sup), true); /// set.insert(2).unwrap(); @@ -401,9 +378,8 @@ where /// set.insert(4).unwrap(); /// assert_eq!(set.is_subset(&sup), false); /// ``` - pub fn is_subset(&self, other: &IndexSet) -> bool + pub fn is_subset(&self, other: &IndexSet) -> bool where - N2: ArrayLength> + ArrayLength>, S2: BuildHasher, { self.iter().all(|v| other.contains(v)) @@ -416,10 +392,9 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let sub: FnvIndexSet<_, U16> = [1, 2].iter().cloned().collect(); - /// let mut set = FnvIndexSet::<_, U16>::new(); + /// let sub: FnvIndexSet<_, 16> = [1, 2].iter().cloned().collect(); + /// let mut set = FnvIndexSet::<_, 16>::new(); /// /// assert_eq!(set.is_superset(&sub), false); /// @@ -430,9 +405,8 @@ where /// set.insert(2).unwrap(); /// assert_eq!(set.is_superset(&sub), true); /// ``` - pub fn is_superset(&self, other: &IndexSet) -> bool + pub fn is_superset(&self, other: &IndexSet) -> bool where - N2: ArrayLength> + ArrayLength>, S2: BuildHasher, { other.is_subset(self) @@ -448,9 +422,8 @@ where /// /// ``` 
/// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut set = FnvIndexSet::<_, U16>::new(); + /// let mut set = FnvIndexSet::<_, 16>::new(); /// /// assert_eq!(set.insert(2).unwrap(), true); /// assert_eq!(set.insert(2).unwrap(), false); @@ -472,9 +445,8 @@ where /// /// ``` /// use heapless::FnvIndexSet; - /// use heapless::consts::*; /// - /// let mut set = FnvIndexSet::<_, U16>::new(); + /// let mut set = FnvIndexSet::<_, 16>::new(); /// /// set.insert(2).unwrap(); /// assert_eq!(set.remove(&2), true); @@ -489,11 +461,10 @@ where } } -impl Clone for IndexSet +impl Clone for IndexSet where T: Eq + Hash + Clone, S: Clone, - N: ArrayLength> + ArrayLength>, { fn clone(&self) -> Self { Self { @@ -502,22 +473,20 @@ where } } -impl fmt::Debug for IndexSet +impl fmt::Debug for IndexSet where T: Eq + Hash + fmt::Debug, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_set().entries(self.iter()).finish() } } -impl Default for IndexSet +impl Default for IndexSet where T: Eq + Hash, S: BuildHasher + Default, - N: ArrayLength> + ArrayLength>, { fn default() -> Self { IndexSet { @@ -526,24 +495,22 @@ where } } -impl PartialEq> for IndexSet +impl PartialEq> + for IndexSet where T: Eq + Hash, S1: BuildHasher, S2: BuildHasher, - N1: ArrayLength> + ArrayLength>, - N2: ArrayLength> + ArrayLength>, { - fn eq(&self, other: &IndexSet) -> bool { + fn eq(&self, other: &IndexSet) -> bool { self.len() == other.len() && self.is_subset(other) } } -impl Extend for IndexSet +impl Extend for IndexSet where T: Eq + Hash, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { fn extend(&mut self, iterable: I) where @@ -553,11 +520,10 @@ where } } -impl<'a, T, N, S> Extend<&'a T> for IndexSet +impl<'a, T, S, const N: usize> Extend<&'a T> for IndexSet where T: 'a + Eq + Hash + Copy, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { fn extend(&mut self, iterable: I) where @@ -567,11 +533,10 @@ where } } 
-impl FromIterator for IndexSet +impl FromIterator for IndexSet where T: Eq + Hash, S: BuildHasher + Default, - N: ArrayLength> + ArrayLength>, { fn from_iter(iter: I) -> Self where @@ -583,11 +548,10 @@ where } } -impl<'a, T, N, S> IntoIterator for &'a IndexSet +impl<'a, T, S, const N: usize> IntoIterator for &'a IndexSet where T: Eq + Hash, S: BuildHasher, - N: ArrayLength> + ArrayLength>, { type Item = &'a T; type IntoIter = Iter<'a, T>; @@ -617,21 +581,19 @@ impl<'a, T> Clone for Iter<'a, T> { } } -pub struct Difference<'a, T, N, S> +pub struct Difference<'a, T, S, const N: usize> where S: BuildHasher, T: Eq + Hash, - N: ArrayLength> + ArrayLength>, { iter: Iter<'a, T>, - other: &'a IndexSet, + other: &'a IndexSet, } -impl<'a, T, N, S> Iterator for Difference<'a, T, N, S> +impl<'a, T, S, const N: usize> Iterator for Difference<'a, T, S, N> where S: BuildHasher, T: Eq + Hash, - N: ArrayLength> + ArrayLength>, { type Item = &'a T; @@ -645,21 +607,19 @@ where } } -pub struct Intersection<'a, T, N, S> +pub struct Intersection<'a, T, S, const N: usize> where S: BuildHasher, T: Eq + Hash, - N: ArrayLength> + ArrayLength>, { iter: Iter<'a, T>, - other: &'a IndexSet, + other: &'a IndexSet, } -impl<'a, T, N, S> Iterator for Intersection<'a, T, N, S> +impl<'a, T, S, const N: usize> Iterator for Intersection<'a, T, S, N> where S: BuildHasher, T: Eq + Hash, - N: ArrayLength> + ArrayLength>, { type Item = &'a T; diff --git a/src/lib.rs b/src/lib.rs index c8db02a29a..54b4f65319 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -10,17 +10,14 @@ //! //! ``` //! use heapless::Vec; // fixed capacity `std::Vec` -//! use heapless::consts::U8; // type level integer used to specify capacity //! //! // on the stack -//! let mut xs: Vec = Vec::new(); // can hold up to 8 elements +//! let mut xs: Vec = Vec::new(); // can hold up to 8 elements //! xs.push(42).unwrap(); //! assert_eq!(xs.pop(), Some(42)); //! //! // in a `static` variable -//! 
// (because `const-fn` has not been fully stabilized you need to use the helper structs in -//! // the `i` module, which must be wrapped in a tuple struct) -//! static mut XS: Vec = Vec(heapless::i::Vec::new()); +//! static mut XS: Vec = Vec::new(); //! //! let xs = unsafe { &mut XS }; //! @@ -28,7 +25,7 @@ //! assert_eq!(xs.pop(), Some(42)); //! //! // in the heap (though kind of pointless because no reallocation) -//! let mut ys: Box> = Box::new(Vec::new()); +//! let mut ys: Box> = Box::new(Vec::new()); //! ys.push(42).unwrap(); //! assert_eq!(ys.pop(), Some(42)); //! ``` @@ -66,7 +63,7 @@ //! //! # Minimum Supported Rust Version (MSRV) //! -//! This crate is guaranteed to compile on stable Rust 1.36 and up with its default set of features. +//! This crate is guaranteed to compile on stable Rust 1.51 and up with its default set of features. //! It *might* compile on older versions but that may change in any new patch release. #![cfg_attr(not(test), no_std)] @@ -76,8 +73,6 @@ #![deny(warnings)] pub use binary_heap::BinaryHeap; -pub use generic_array::typenum::{consts, PowerOfTwo}; -pub use generic_array::ArrayLength; pub use histbuf::HistoryBuffer; pub use indexmap::{Bucket, FnvIndexMap, IndexMap, Pos}; pub use indexset::{FnvIndexSet, IndexSet}; @@ -99,7 +94,6 @@ mod de; mod ser; pub mod binary_heap; -pub mod i; #[cfg(all(has_cas, feature = "cas"))] pub mod mpmc; #[cfg(all(has_cas, feature = "cas"))] diff --git a/src/linear_map.rs b/src/linear_map.rs index a1b8d9515b..d096f210a2 100644 --- a/src/linear_map.rs +++ b/src/linear_map.rs @@ -1,56 +1,37 @@ -use core::{ - borrow::Borrow, - fmt, - iter::FromIterator, - mem::{self, MaybeUninit}, - ops, ptr, slice, -}; - -use generic_array::{ArrayLength, GenericArray}; - use crate::Vec; +use core::{borrow::Borrow, fmt, iter::FromIterator, mem, ops, slice}; /// A fixed capacity map / dictionary that performs lookups via linear search /// /// Note that as this map doesn't use hashing so most operations are **O(N)** instead of 
O(1) -pub struct LinearMap(#[doc(hidden)] pub crate::i::LinearMap>) -where - N: ArrayLength<(K, V)>, - K: Eq; -impl crate::i::LinearMap { - /// `LinearMap` `const` constructor; wrap the returned value in - /// [`LinearMap`](../struct.LinearMap.html) - pub const fn new() -> Self { - Self { - buffer: crate::i::Vec::new(), - } - } +pub struct LinearMap { + pub(crate) buffer: Vec<(K, V), N>, } -impl LinearMap -where - N: ArrayLength<(K, V)>, - K: Eq, -{ +impl LinearMap { /// Creates an empty `LinearMap` /// /// # Examples /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// /// // allocate the map on the stack - /// let mut map: LinearMap<&str, isize, U8> = LinearMap::new(); + /// let mut map: LinearMap<&str, isize, 8> = LinearMap::new(); /// /// // allocate the map in a static variable - /// static mut MAP: LinearMap<&str, isize, U8> = LinearMap(heapless::i::LinearMap::new()); + /// static mut MAP: LinearMap<&str, isize, 8> = LinearMap::new(); /// ``` - pub fn new() -> Self { - LinearMap(crate::i::LinearMap::new()) + pub const fn new() -> Self { + Self { buffer: Vec::new() } } +} +impl LinearMap +where + K: Eq, +{ /// Returns the number of elements that the map can hold /// /// Computes in **O(1)** time @@ -59,13 +40,12 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let map: LinearMap<&str, isize, U8> = LinearMap::new(); + /// let map: LinearMap<&str, isize, 8> = LinearMap::new(); /// assert_eq!(map.capacity(), 8); /// ``` pub fn capacity(&self) -> usize { - N::to_usize() + N } /// Clears the map, removing all key-value pairs @@ -76,15 +56,14 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert(1, "a").unwrap(); /// map.clear(); /// assert!(map.is_empty()); /// ``` pub fn clear(&mut self) { - self.0.buffer.clear() + self.buffer.clear() } /// 
Returns true if the map contains a value for the specified key. @@ -95,9 +74,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert(1, "a").unwrap(); /// assert_eq!(map.contains_key(&1), true); /// assert_eq!(map.contains_key(&2), false); @@ -114,9 +92,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert(1, "a").unwrap(); /// assert_eq!(map.get(&1), Some(&"a")); /// assert_eq!(map.get(&2), None); @@ -139,9 +116,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert(1, "a").unwrap(); /// if let Some(x) = map.get_mut(&1) { /// *x = "b"; @@ -166,15 +142,14 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut a: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut a: LinearMap<_, _, 8> = LinearMap::new(); /// assert_eq!(a.len(), 0); /// a.insert(1, "a").unwrap(); /// assert_eq!(a.len(), 1); /// ``` pub fn len(&self) -> usize { - self.0.buffer.len + self.buffer.len() } /// Inserts a key-value pair into the map. 
@@ -189,9 +164,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// assert_eq!(map.insert(37, "a").unwrap(), None); /// assert_eq!(map.is_empty(), false); /// @@ -205,7 +179,7 @@ where return Ok(Some(value)); } - self.0.buffer.push((key, value))?; + self.buffer.push((key, value))?; Ok(None) } @@ -217,9 +191,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut a: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut a: LinearMap<_, _, 8> = LinearMap::new(); /// assert!(a.is_empty()); /// a.insert(1, "a").unwrap(); /// assert!(!a.is_empty()); @@ -234,9 +207,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert("a", 1).unwrap(); /// map.insert("b", 2).unwrap(); /// map.insert("c", 3).unwrap(); @@ -247,7 +219,7 @@ where /// ``` pub fn iter(&self) -> Iter<'_, K, V> { Iter { - iter: self.0.buffer.as_slice().iter(), + iter: self.buffer.as_slice().iter(), } } @@ -258,9 +230,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert("a", 1).unwrap(); /// map.insert("b", 2).unwrap(); /// map.insert("c", 3).unwrap(); @@ -276,7 +247,7 @@ where /// ``` pub fn iter_mut(&mut self) -> IterMut<'_, K, V> { IterMut { - iter: self.0.buffer.as_mut_slice().iter_mut(), + iter: self.buffer.as_mut_slice().iter_mut(), } } @@ -286,9 +257,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert("a", 1).unwrap(); /// 
map.insert("b", 2).unwrap(); /// map.insert("c", 3).unwrap(); @@ -310,9 +280,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert(1, "a").unwrap(); /// assert_eq!(map.remove(&1), Some("a")); /// assert_eq!(map.remove(&1), None); @@ -328,7 +297,7 @@ where .find(|&(_, k)| k.borrow() == key) .map(|(idx, _)| idx); - idx.map(|idx| self.0.buffer.swap_remove(idx).1) + idx.map(|idx| self.buffer.swap_remove(idx).1) } /// An iterator visiting all values in arbitrary order @@ -337,9 +306,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert("a", 1).unwrap(); /// map.insert("b", 2).unwrap(); /// map.insert("c", 3).unwrap(); @@ -358,9 +326,8 @@ where /// /// ``` /// use heapless::LinearMap; - /// use heapless::consts::*; /// - /// let mut map: LinearMap<_, _, U8> = LinearMap::new(); + /// let mut map: LinearMap<_, _, 8> = LinearMap::new(); /// map.insert("a", 1).unwrap(); /// map.insert("b", 2).unwrap(); /// map.insert("c", 3).unwrap(); @@ -378,9 +345,8 @@ where } } -impl<'a, K, V, N, Q> ops::Index<&'a Q> for LinearMap +impl<'a, K, V, Q, const N: usize> ops::Index<&'a Q> for LinearMap where - N: ArrayLength<(K, V)>, K: Borrow + Eq, Q: Eq + ?Sized, { @@ -391,9 +357,8 @@ where } } -impl<'a, K, V, N, Q> ops::IndexMut<&'a Q> for LinearMap +impl<'a, K, V, Q, const N: usize> ops::IndexMut<&'a Q> for LinearMap where - N: ArrayLength<(K, V)>, K: Borrow + Eq, Q: Eq + ?Sized, { @@ -402,9 +367,8 @@ where } } -impl Default for LinearMap +impl Default for LinearMap where - N: ArrayLength<(K, V)>, K: Eq, { fn default() -> Self { @@ -412,22 +376,20 @@ where } } -impl Clone for LinearMap +impl Clone for LinearMap where - N: ArrayLength<(K, V)>, K: Eq + Clone, V: Clone, 
{ fn clone(&self) -> Self { - Self(crate::i::LinearMap { - buffer: self.0.buffer.clone(), - }) + Self { + buffer: self.buffer.clone(), + } } } -impl fmt::Debug for LinearMap +impl fmt::Debug for LinearMap where - N: ArrayLength<(K, V)>, K: Eq + fmt::Debug, V: fmt::Debug, { @@ -436,9 +398,8 @@ where } } -impl FromIterator<(K, V)> for LinearMap +impl FromIterator<(K, V)> for LinearMap where - N: ArrayLength<(K, V)>, K: Eq, { fn from_iter(iter: I) -> Self @@ -446,22 +407,20 @@ where I: IntoIterator, { let mut out = Self::new(); - out.0.buffer.extend(iter); + out.buffer.extend(iter); out } } -pub struct IntoIter +pub struct IntoIter where - N: ArrayLength<(K, V)>, K: Eq, { inner: as IntoIterator>::IntoIter, } -impl Iterator for IntoIter +impl Iterator for IntoIter where - N: ArrayLength<(K, V)>, K: Eq, { type Item = (K, V); @@ -470,28 +429,8 @@ where } } -impl IntoIterator for LinearMap +impl<'a, K, V, const N: usize> IntoIterator for &'a LinearMap where - N: ArrayLength<(K, V)>, - K: Eq, -{ - type Item = (K, V); - type IntoIter = IntoIter; - - fn into_iter(mut self) -> Self::IntoIter { - // FIXME this may result in a memcpy at runtime - let lm = mem::replace(&mut self.0, unsafe { MaybeUninit::uninit().assume_init() }); - mem::forget(self); - - Self::IntoIter { - inner: crate::Vec(lm.buffer).into_iter(), - } - } -} - -impl<'a, K, V, N> IntoIterator for &'a LinearMap -where - N: ArrayLength<(K, V)>, K: Eq, { type Item = (&'a K, &'a V); @@ -522,13 +461,12 @@ impl<'a, K, V> Clone for Iter<'a, K, V> { } } -impl Drop for LinearMap -where - N: ArrayLength<(K, V)>, - K: Eq, -{ +impl Drop for LinearMap { fn drop(&mut self) { - unsafe { ptr::drop_in_place(self.0.buffer.as_mut_slice()) } + // heapless::Vec implements drop right? 
+ drop(&self.buffer); + // original code below + // unsafe { ptr::drop_in_place(self.buffer.as_mut_slice()) } } } @@ -544,12 +482,10 @@ impl<'a, K, V> Iterator for IterMut<'a, K, V> { } } -impl PartialEq> for LinearMap +impl PartialEq> for LinearMap where K: Eq, V: PartialEq, - N: ArrayLength<(K, V)>, - N2: ArrayLength<(K, V)>, { fn eq(&self, other: &LinearMap) -> bool { self.len() == other.len() @@ -559,30 +495,29 @@ where } } -impl Eq for LinearMap +impl Eq for LinearMap where K: Eq, V: PartialEq, - N: ArrayLength<(K, V)>, { } #[cfg(test)] mod test { - use crate::{consts::*, LinearMap}; + use crate::LinearMap; #[test] fn static_new() { - static mut _L: LinearMap = LinearMap(crate::i::LinearMap::new()); + static mut _L: LinearMap = LinearMap::new(); } #[test] fn partial_eq() { { - let mut a = LinearMap::<_, _, U1>::new(); + let mut a = LinearMap::<_, _, 1>::new(); a.insert("k1", "v1").unwrap(); - let mut b = LinearMap::<_, _, U2>::new(); + let mut b = LinearMap::<_, _, 2>::new(); b.insert("k1", "v1").unwrap(); assert!(a == b); @@ -593,15 +528,17 @@ mod test { } { - let mut a = LinearMap::<_, _, U2>::new(); + let mut a = LinearMap::<_, _, 2>::new(); a.insert("k1", "v1").unwrap(); a.insert("k2", "v2").unwrap(); - let mut b = LinearMap::<_, _, U2>::new(); + let mut b = LinearMap::<_, _, 2>::new(); b.insert("k2", "v2").unwrap(); b.insert("k1", "v1").unwrap(); assert!(a == b); } } + + // TODO: drop test } diff --git a/src/mpmc.rs b/src/mpmc.rs index 023d55835d..06b496b591 100644 --- a/src/mpmc.rs +++ b/src/mpmc.rs @@ -82,11 +82,13 @@ //! //! 
[0]: http://www.1024cores.net/home/lock-free-algorithms/queues/bounded-mpmc-queue -use core::{ - cell::UnsafeCell, - mem::MaybeUninit, - sync::atomic::{AtomicU8, Ordering}, -}; +use core::{cell::UnsafeCell, mem::MaybeUninit}; + +#[cfg(armv6m)] +use atomic_polyfill::{AtomicU8, Ordering}; + +#[cfg(not(armv6m))] +use core::sync::atomic::{AtomicU8, Ordering}; /// MPMC queue with a capacity for 2 elements pub struct Q2 { diff --git a/src/pool/llsc.rs b/src/pool/llsc.rs index 1aec52761c..83081521d4 100644 --- a/src/pool/llsc.rs +++ b/src/pool/llsc.rs @@ -1,11 +1,13 @@ //! Stack based on LL/SC atomics pub use core::ptr::NonNull as Ptr; -use core::{ - cell::UnsafeCell, - ptr, - sync::atomic::{AtomicPtr, Ordering}, -}; +use core::{cell::UnsafeCell, ptr}; + +#[cfg(armv6m)] +use atomic_polyfill::{AtomicPtr, Ordering}; + +#[cfg(not(armv6m))] +use core::sync::atomic::{AtomicPtr, Ordering}; /// Unfortunate implementation detail required to use the /// [`Pool.grow_exact`](struct.Pool.html#method.grow_exact) method diff --git a/src/pool/mod.rs b/src/pool/mod.rs index 739165f070..3e72086071 100644 --- a/src/pool/mod.rs +++ b/src/pool/mod.rs @@ -236,8 +236,6 @@ use core::{ ptr, }; -use as_slice::{AsMutSlice, AsSlice}; - pub use stack::Node; use stack::{Ptr, Stack}; @@ -384,13 +382,13 @@ impl Pool { /// memory block pub fn grow_exact(&self, memory: &'static mut MaybeUninit) -> usize where - A: AsMutSlice>, + A: AsMut<[Node]>, { if mem::size_of::() == 0 { return usize::max_value(); } - let nodes = unsafe { (*memory.as_mut_ptr()).as_mut_slice() }; + let nodes = unsafe { (*memory.as_mut_ptr()).as_mut() }; let cap = nodes.len(); for p in nodes { match () { @@ -441,23 +439,21 @@ unsafe impl Sync for Box where T: Sync {} unsafe impl stable_deref_trait::StableDeref for Box {} -impl AsSlice for Box +impl AsRef<[T]> for Box where - A: AsSlice, + A: AsRef<[T]>, { - type Element = A::Element; - - fn as_slice(&self) -> &[A::Element] { - self.deref().as_slice() + fn as_ref(&self) -> &[T] { + 
self.deref().as_ref() } } -impl AsMutSlice for Box +impl AsMut<[T]> for Box where - A: AsMutSlice, + A: AsMut<[T]>, { - fn as_mut_slice(&mut self) -> &mut [A::Element] { - self.deref_mut().as_mut_slice() + fn as_mut(&mut self) -> &mut [T] { + self.deref_mut().as_mut() } } diff --git a/src/pool/singleton.rs b/src/pool/singleton.rs index 17763df436..62de8c8f20 100644 --- a/src/pool/singleton.rs +++ b/src/pool/singleton.rs @@ -10,17 +10,17 @@ use core::{ ptr, }; -use as_slice::{AsMutSlice, AsSlice}; - use super::{Init, Node, Uninit}; /// Instantiates a pool as a global singleton +// NOTE(any(test)) makes testing easier (no need to enable Cargo features for testing) #[cfg(any( armv7a, armv7r, armv7m, armv8m_main, all(target_arch = "x86_64", feature = "x86-sync-pool"), + test ))] #[macro_export] macro_rules! pool { @@ -78,7 +78,7 @@ pub trait Pool { /// memory block fn grow_exact(memory: &'static mut MaybeUninit) -> usize where - A: AsMutSlice>, + A: AsMut<[Node]>, { Self::ptr().grow_exact(memory) } @@ -121,7 +121,7 @@ where impl

Box where P: Pool, - P::Data: AsSlice, + P::Data: AsRef<[u8]>, { /// Freezes the contents of this memory block /// @@ -244,25 +244,23 @@ where { } -impl AsSlice for Box

+impl AsRef<[T]> for Box

where P: Pool, - P::Data: AsSlice, + P::Data: AsRef<[T]>, { - type Element = T; - - fn as_slice(&self) -> &[T] { - self.deref().as_slice() + fn as_ref(&self) -> &[T] { + self.deref().as_ref() } } -impl AsMutSlice for Box

+impl AsMut<[T]> for Box

where P: Pool, - P::Data: AsMutSlice, + P::Data: AsMut<[T]>, { - fn as_mut_slice(&mut self) -> &mut [T] { - self.deref_mut().as_mut_slice() + fn as_mut(&mut self) -> &mut [T] { + self.deref_mut().as_mut() } } diff --git a/src/sealed.rs b/src/sealed.rs index 3f68564b55..0dc6b17683 100644 --- a/src/sealed.rs +++ b/src/sealed.rs @@ -1,28 +1,7 @@ /// Sealed traits and implementations for `spsc` pub mod spsc { #[cfg(has_atomics)] - use crate::spsc::{MultiCore, SingleCore}; - - #[cfg(has_atomics)] - use core::sync::atomic::{self, AtomicU16, AtomicU8, AtomicUsize, Ordering}; - - pub unsafe trait XCore { - fn is_multi_core() -> bool; - } - - #[cfg(has_atomics)] - unsafe impl XCore for SingleCore { - fn is_multi_core() -> bool { - false - } - } - - #[cfg(has_atomics)] - unsafe impl XCore for MultiCore { - fn is_multi_core() -> bool { - true - } - } + use core::sync::atomic::{AtomicU16, AtomicU8, AtomicUsize, Ordering}; pub unsafe trait Uxx: Into + Send { #[doc(hidden)] @@ -33,9 +12,7 @@ pub mod spsc { #[cfg(has_atomics)] #[doc(hidden)] - unsafe fn load_acquire(x: *const Self) -> Self - where - C: XCore; + unsafe fn load_acquire(x: *const Self) -> Self; #[cfg(has_atomics)] #[doc(hidden)] @@ -43,9 +20,7 @@ pub mod spsc { #[cfg(has_atomics)] #[doc(hidden)] - unsafe fn store_release(x: *const Self, val: Self) - where - C: XCore; + unsafe fn store_release(x: *const Self, val: Self); } unsafe impl Uxx for u8 { @@ -63,17 +38,8 @@ pub mod spsc { } #[cfg(has_atomics)] - unsafe fn load_acquire(x: *const Self) -> Self - where - C: XCore, - { - if C::is_multi_core() { - (*(x as *const AtomicU8)).load(Ordering::Acquire) - } else { - let y = (*(x as *const AtomicU8)).load(Ordering::Relaxed); // read - atomic::compiler_fence(Ordering::Acquire); // ▼ - y - } + unsafe fn load_acquire(x: *const Self) -> Self { + (*(x as *const AtomicU8)).load(Ordering::Acquire) } #[cfg(has_atomics)] @@ -82,16 +48,8 @@ pub mod spsc { } #[cfg(has_atomics)] - unsafe fn store_release(x: *const Self, val: Self) - 
where - C: XCore, - { - if C::is_multi_core() { - (*(x as *const AtomicU8)).store(val, Ordering::Release) - } else { - atomic::compiler_fence(Ordering::Release); // ▲ - (*(x as *const AtomicU8)).store(val, Ordering::Relaxed) // write - } + unsafe fn store_release(x: *const Self, val: Self) { + (*(x as *const AtomicU8)).store(val, Ordering::Release) } } @@ -110,17 +68,8 @@ pub mod spsc { } #[cfg(has_atomics)] - unsafe fn load_acquire(x: *const Self) -> Self - where - C: XCore, - { - if C::is_multi_core() { - (*(x as *const AtomicU16)).load(Ordering::Acquire) - } else { - let y = (*(x as *const AtomicU16)).load(Ordering::Relaxed); // read - atomic::compiler_fence(Ordering::Acquire); // ▼ - y - } + unsafe fn load_acquire(x: *const Self) -> Self { + (*(x as *const AtomicU16)).load(Ordering::Acquire) } #[cfg(has_atomics)] @@ -129,16 +78,8 @@ pub mod spsc { } #[cfg(has_atomics)] - unsafe fn store_release(x: *const Self, val: Self) - where - C: XCore, - { - if C::is_multi_core() { - (*(x as *const AtomicU16)).store(val, Ordering::Release) - } else { - atomic::compiler_fence(Ordering::Release); // ▲ - (*(x as *const AtomicU16)).store(val, Ordering::Relaxed) // write - } + unsafe fn store_release(x: *const Self, val: Self) { + (*(x as *const AtomicU16)).store(val, Ordering::Release) } } @@ -152,17 +93,8 @@ pub mod spsc { } #[cfg(has_atomics)] - unsafe fn load_acquire(x: *const Self) -> Self - where - C: XCore, - { - if C::is_multi_core() { - (*(x as *const AtomicUsize)).load(Ordering::Acquire) - } else { - let y = (*(x as *const AtomicUsize)).load(Ordering::Relaxed); // read - atomic::compiler_fence(Ordering::Acquire); // ▼ - y - } + unsafe fn load_acquire(x: *const Self) -> Self { + (*(x as *const AtomicUsize)).load(Ordering::Acquire) } #[cfg(has_atomics)] @@ -171,16 +103,8 @@ pub mod spsc { } #[cfg(has_atomics)] - unsafe fn store_release(x: *const Self, val: Self) - where - C: XCore, - { - if C::is_multi_core() { - (*(x as *const AtomicUsize)).store(val, 
Ordering::Release) - } else { - atomic::compiler_fence(Ordering::Release); // ▲ - (*(x as *const AtomicUsize)).store(val, Ordering::Relaxed); // write - } + unsafe fn store_release(x: *const Self, val: Self) { + (*(x as *const AtomicUsize)).store(val, Ordering::Release) } } } diff --git a/src/ser.rs b/src/ser.rs index b050346462..7214f096db 100644 --- a/src/ser.rs +++ b/src/ser.rs @@ -1,19 +1,15 @@ -use generic_array::{typenum::PowerOfTwo, ArrayLength}; -use hash32::{BuildHasher, Hash}; -use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer}; - use crate::{ - indexmap::{Bucket, Pos}, - sealed::binary_heap::Kind as BinaryHeapKind, - BinaryHeap, IndexMap, IndexSet, LinearMap, String, Vec, + sealed::binary_heap::Kind as BinaryHeapKind, BinaryHeap, IndexMap, IndexSet, LinearMap, String, + Vec, }; +use hash32::{BuildHasher, Hash}; +use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer}; // Sequential containers -impl Serialize for BinaryHeap +impl Serialize for BinaryHeap where T: Ord + Serialize, - N: ArrayLength, KIND: BinaryHeapKind, { fn serialize(&self, serializer: S) -> Result @@ -28,11 +24,10 @@ where } } -impl Serialize for IndexSet +impl Serialize for IndexSet where T: Eq + Hash + Serialize, S: BuildHasher, - N: ArrayLength> + ArrayLength> + PowerOfTwo, { fn serialize(&self, serializer: SER) -> Result where @@ -46,10 +41,9 @@ where } } -impl Serialize for Vec +impl Serialize for Vec where T: Serialize, - N: ArrayLength, { fn serialize(&self, serializer: S) -> Result where @@ -65,10 +59,9 @@ where // Dictionaries -impl Serialize for IndexMap +impl Serialize for IndexMap where K: Eq + Hash + Serialize, - N: ArrayLength> + ArrayLength>, S: BuildHasher, V: Serialize, { @@ -84,9 +77,8 @@ where } } -impl Serialize for LinearMap +impl Serialize for LinearMap where - N: ArrayLength<(K, V)>, K: Eq + Serialize, V: Serialize, { @@ -104,10 +96,7 @@ where // String containers -impl Serialize for String -where - N: ArrayLength, -{ +impl Serialize 
for String { fn serialize(&self, serializer: S) -> Result where S: Serializer, diff --git a/src/spsc.rs b/src/spsc.rs new file mode 100644 index 0000000000..df46ef69c7 --- /dev/null +++ b/src/spsc.rs @@ -0,0 +1,897 @@ +//! Fixed capacity Single Producer Single Consumer (SPSC) queue +//! +//! Implementation based on https://www.codeproject.com/Articles/43510/Lock-Free-Single-Producer-Single-Consumer-Circular +//! +//! NOTE: This module is not available on targets that do *not* support atomic loads, e.g. RISC-V +//! cores w/o the A (Atomic) extension +//! +//! # Examples +//! +//! - `Queue` can be used as a plain queue +//! +//! ``` +//! use heapless::spsc::Queue; +//! +//! let mut rb: Queue = Queue::new(); +//! +//! assert!(rb.enqueue(0).is_ok()); +//! assert!(rb.enqueue(1).is_ok()); +//! assert!(rb.enqueue(2).is_ok()); +//! assert!(rb.enqueue(3).is_err()); // full +//! +//! assert_eq!(rb.dequeue(), Some(0)); +//! ``` +//! +//! - `Queue` can be `split` and then be used in Single Producer Single Consumer mode +//! +//! ``` +//! use heapless::spsc::Queue; +//! +//! // Notice, type signature needs to be explicit for now. +//! // (min_const_eval, does not allow for default type assignments) +//! static mut Q: Queue = Queue::new(); +//! +//! enum Event { A, B } +//! +//! fn main() { +//! // NOTE(unsafe) beware of aliasing the `consumer` end point +//! let mut consumer = unsafe { Q.split().1 }; +//! +//! loop { +//! // `dequeue` is a lockless operation +//! match consumer.dequeue() { +//! Some(Event::A) => { /* .. */ }, +//! Some(Event::B) => { /* .. */ }, +//! None => { /* sleep */ }, +//! } +//! # break +//! } +//! } +//! +//! // this is a different execution context that can preempt `main` +//! fn interrupt_handler() { +//! // NOTE(unsafe) beware of aliasing the `producer` end point +//! let mut producer = unsafe { Q.split().0 }; +//! # let condition = true; +//! +//! // .. +//! +//! if condition { +//! producer.enqueue(Event::A).ok().unwrap(); +//! } else { +//! 
producer.enqueue(Event::B).ok().unwrap(); +//! } +//! +//! // .. +//! } +//! ``` +//! +//! # Benchmarks +//! +//! Measured on a ARM Cortex-M3 core running at 8 MHz and with zero Flash wait cycles +//! +//! `-C opt-level` |`3`| +//! -----------------------|---| +//! `Consumer::dequeue`| 15| +//! `Queue::dequeue` | 12| +//! `Producer::enqueue`| 16| +//! `Queue::enqueue` | 14| +//! +//! - All execution times are in clock cycles. 1 clock cycle = 125 ns. +//! - Execution time is *dependent* of `mem::size_of::()`. Both operations include one +//! `memcpy(T)` in their successful path. +//! - The optimization level is indicated in the first row. +//! - The numbers reported correspond to the successful path (i.e. `Some` is returned by `dequeue` +//! and `Ok` is returned by `enqueue`). + +use core::{ + cell::UnsafeCell, + fmt, hash, + mem::MaybeUninit, + ptr, + sync::atomic::{AtomicUsize, Ordering}, +}; + +/// A statically allocated single producer single consumer queue with a capacity of `N - 1` elements +/// +/// *IMPORTANT*: To get better performance use a capacity that is a power of 2 (e.g. `16`, `32`, +/// etc.). 
+pub struct Queue { + // this is from where we dequeue items + pub(crate) head: AtomicUsize, + + // this is where we enqueue new items + pub(crate) tail: AtomicUsize, + + pub(crate) buffer: [UnsafeCell>; N], +} + +impl Queue { + const INIT: UnsafeCell> = UnsafeCell::new(MaybeUninit::uninit()); + + #[inline] + fn increment(val: usize) -> usize { + (val + 1) % N + } + + /// Creates an empty queue with a fixed capacity of `N - 1` + pub const fn new() -> Self { + Queue { + head: AtomicUsize::new(0), + tail: AtomicUsize::new(0), + buffer: [Self::INIT; N], + } + } + + /// Returns the maximum number of elements the queue can hold + #[inline] + pub const fn capacity(&self) -> usize { + N - 1 + } + + /// Returns the number of elements in the queue + #[inline] + pub fn len(&self) -> usize { + let current_head = self.head.load(Ordering::Relaxed); + let current_tail = self.tail.load(Ordering::Relaxed); + + current_tail.wrapping_sub(current_head).wrapping_add(N) % N + } + + /// Returns `true` if the queue is empty + #[inline] + pub fn is_empty(&self) -> bool { + self.head.load(Ordering::Relaxed) == self.tail.load(Ordering::Relaxed) + } + + /// Returns `true` if the queue is full + #[inline] + pub fn is_full(&self) -> bool { + Self::increment(self.tail.load(Ordering::Relaxed)) == self.head.load(Ordering::Relaxed) + } + + /// Iterates from the front of the queue to the back + pub fn iter(&self) -> Iter<'_, T, N> { + Iter { + rb: self, + index: 0, + len: self.len(), + } + } + + /// Returns an iterator that allows modifying each value + pub fn iter_mut(&mut self) -> IterMut<'_, T, N> { + let len = self.len(); + IterMut { + rb: self, + index: 0, + len, + } + } + + /// Adds an `item` to the end of the queue + /// + /// Returns back the `item` if the queue is full + #[inline] + pub fn enqueue(&mut self, val: T) -> Result<(), T> { + unsafe { self.inner_enqueue(val) } + } + + /// Returns the item in the front of the queue, or `None` if the queue is empty + #[inline] + pub fn 
dequeue(&mut self) -> Option { + unsafe { self.inner_dequeue() } + } + + /// Returns a reference to the item in the front of the queue without dequeuing, or + /// `None` if the queue is empty. + /// + /// # Examples + /// ``` + /// use heapless::spsc::Queue; + /// + /// let mut queue: Queue = Queue::new(); + /// let (mut producer, mut consumer) = queue.split(); + /// assert_eq!(None, consumer.peek()); + /// producer.enqueue(1); + /// assert_eq!(Some(&1), consumer.peek()); + /// assert_eq!(Some(1), consumer.dequeue()); + /// assert_eq!(None, consumer.peek()); + /// ``` + pub fn peek(&self) -> Option<&T> { + if !self.is_empty() { + let head = self.head.load(Ordering::Relaxed); + Some(unsafe { &*(self.buffer.get_unchecked(head).get() as *const T) }) + } else { + None + } + } + + // The memory for enqueueing is "owned" by the tail pointer. + // NOTE: This internal function uses internal mutability to allow the [`Producer`] to enqueue + // items without doing pointer arithmetic and accessing internal fields of this type. + unsafe fn inner_enqueue(&self, val: T) -> Result<(), T> { + let current_tail = self.tail.load(Ordering::Relaxed); + let next_tail = Self::increment(current_tail); + + if next_tail != self.head.load(Ordering::Acquire) { + (self.buffer.get_unchecked(current_tail).get()).write(MaybeUninit::new(val)); + self.tail.store(next_tail, Ordering::Release); + + Ok(()) + } else { + Err(val) + } + } + + // The memory for enqueueing is "owned" by the tail pointer. + // NOTE: This internal function uses internal mutability to allow the [`Producer`] to enqueue + // items without doing pointer arithmetic and accessing internal fields of this type. 
+ unsafe fn inner_enqueue_unchecked(&self, val: T) { + let current_tail = self.tail.load(Ordering::Relaxed); + + (self.buffer.get_unchecked(current_tail).get()).write(MaybeUninit::new(val)); + self.tail + .store(Self::increment(current_tail), Ordering::Release); + } + + /// Adds an `item` to the end of the queue, without checking if it's full + /// + /// # Unsafety + /// + /// If the queue is full this operation will leak a value (T's destructor won't run on + /// the value that got overwritten by `item`), *and* will allow the `dequeue` operation + /// to create a copy of `item`, which could result in `T`'s destructor running on `item` + /// twice. + pub unsafe fn enqueue_unchecked(&mut self, val: T) { + self.inner_enqueue_unchecked(val) + } + + // The memory for dequeuing is "owned" by the head pointer,. + // NOTE: This internal function uses internal mutability to allow the [`Consumer`] to dequeue + // items without doing pointer arithmetic and accessing internal fields of this type. + unsafe fn inner_dequeue(&self) -> Option { + let current_head = self.head.load(Ordering::Relaxed); + + if current_head == self.tail.load(Ordering::Acquire) { + None + } else { + let v = (self.buffer.get_unchecked(current_head).get() as *const T).read(); + + self.head + .store(Self::increment(current_head), Ordering::Release); + + Some(v) + } + } + + // The memory for dequeuing is "owned" by the head pointer,. + // NOTE: This internal function uses internal mutability to allow the [`Consumer`] to dequeue + // items without doing pointer arithmetic and accessing internal fields of this type. 
+ unsafe fn inner_dequeue_unchecked(&self) -> T { + let current_head = self.head.load(Ordering::Relaxed); + let v = (self.buffer.get_unchecked(current_head).get() as *const T).read(); + + self.head + .store(Self::increment(current_head), Ordering::Release); + + v + } + + /// Returns the item in the front of the queue, without checking if there is something in the + /// queue + /// + /// # Unsafety + /// + /// If the queue is empty this operation will return uninitialized memory. + pub unsafe fn dequeue_unchecked(&mut self) -> T { + self.inner_dequeue_unchecked() + } + + /// Splits a queue into producer and consumer endpoints + pub fn split(&mut self) -> (Producer<'_, T, N>, Consumer<'_, T, N>) { + (Producer { rb: self }, Consumer { rb: self }) + } +} + +impl Clone for Queue +where + T: Clone, +{ + fn clone(&self) -> Self { + let mut new: Queue = Queue::new(); + + for s in self.iter() { + unsafe { + // NOTE(unsafe) new.capacity() == self.capacity() <= self.len() + // no overflow possible + new.enqueue_unchecked(s.clone()); + } + } + + new + } +} + +impl PartialEq> for Queue +where + T: PartialEq, +{ + fn eq(&self, other: &Queue) -> bool { + self.len() == other.len() && self.iter().zip(other.iter()).all(|(v1, v2)| v1 == v2) + } +} + +impl Eq for Queue where T: Eq {} + +/// An iterator over the items of a queue +pub struct Iter<'a, T, const N: usize> { + rb: &'a Queue, + index: usize, + len: usize, +} + +impl<'a, T, const N: usize> Clone for Iter<'a, T, N> { + fn clone(&self) -> Self { + Self { + rb: self.rb, + index: self.index, + len: self.len, + } + } +} + +/// A mutable iterator over the items of a queue +pub struct IterMut<'a, T, const N: usize> { + rb: &'a mut Queue, + index: usize, + len: usize, +} + +impl<'a, T, const N: usize> Iterator for Iter<'a, T, N> { + type Item = &'a T; + + fn next(&mut self) -> Option { + if self.index < self.len { + let head = self.rb.head.load(Ordering::Relaxed); + + let i = (head + self.index) % N; + self.index += 1; + + 
Some(unsafe { &*(self.rb.buffer.get_unchecked(i).get() as *const T) }) + } else { + None + } + } +} + +impl<'a, T, const N: usize> Iterator for IterMut<'a, T, N> { + type Item = &'a mut T; + + fn next(&mut self) -> Option { + if self.index < self.len { + let head = self.rb.head.load(Ordering::Relaxed); + + let i = (head + self.index) % N; + self.index += 1; + + Some(unsafe { &mut *(self.rb.buffer.get_unchecked(i).get() as *mut T) }) + } else { + None + } + } +} + +impl<'a, T, const N: usize> DoubleEndedIterator for Iter<'a, T, N> { + fn next_back(&mut self) -> Option { + if self.index < self.len { + let head = self.rb.head.load(Ordering::Relaxed); + + // self.len > 0, since it's larger than self.index > 0 + let i = (head + self.len - 1) % N; + self.len -= 1; + Some(unsafe { &*(self.rb.buffer.get_unchecked(i).get() as *const T) }) + } else { + None + } + } +} + +impl<'a, T, const N: usize> DoubleEndedIterator for IterMut<'a, T, N> { + fn next_back(&mut self) -> Option { + if self.index < self.len { + let head = self.rb.head.load(Ordering::Relaxed); + + // self.len > 0, since it's larger than self.index > 0 + let i = (head + self.len - 1) % N; + self.len -= 1; + Some(unsafe { &mut *(self.rb.buffer.get_unchecked(i).get() as *mut T) }) + } else { + None + } + } +} + +impl Drop for Queue { + fn drop(&mut self) { + for item in self { + unsafe { + ptr::drop_in_place(item); + } + } + } +} + +impl fmt::Debug for Queue +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.iter()).finish() + } +} + +impl hash::Hash for Queue +where + T: hash::Hash, +{ + fn hash(&self, state: &mut H) { + // iterate over self in order + for t in self.iter() { + hash::Hash::hash(t, state); + } + } +} + +impl hash32::Hash for Queue +where + T: hash32::Hash, +{ + fn hash(&self, state: &mut H) { + // iterate over self in order + for t in self.iter() { + hash32::Hash::hash(t, state); + } + } +} + +impl<'a, T, const N: usize> 
IntoIterator for &'a Queue { + type Item = &'a T; + type IntoIter = Iter<'a, T, N>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a, T, const N: usize> IntoIterator for &'a mut Queue { + type Item = &'a mut T; + type IntoIter = IterMut<'a, T, N>; + + fn into_iter(self) -> Self::IntoIter { + self.iter_mut() + } +} + +/// A queue "consumer"; it can dequeue items from the queue +/// NOTE the consumer semantically owns the `head` pointer of the queue +pub struct Consumer<'a, T, const N: usize> { + rb: &'a Queue, +} + +unsafe impl<'a, T, const N: usize> Send for Consumer<'a, T, N> where T: Send {} + +/// A queue "producer"; it can enqueue items into the queue +/// NOTE the producer semantically owns the `tail` pointer of the queue +pub struct Producer<'a, T, const N: usize> { + rb: &'a Queue, +} + +unsafe impl<'a, T, const N: usize> Send for Producer<'a, T, N> where T: Send {} + +impl<'a, T, const N: usize> Consumer<'a, T, N> { + /// Returns the item in the front of the queue, or `None` if the queue is empty + #[inline] + pub fn dequeue(&mut self) -> Option { + unsafe { self.rb.inner_dequeue() } + } + + /// Returns the item in the front of the queue, without checking if there are elements in the + /// queue + /// + /// See [`Queue::dequeue_unchecked`] for safety + #[inline] + pub unsafe fn dequeue_unchecked(&mut self) -> T { + self.rb.inner_dequeue_unchecked() + } + + /// Returns if there are any items to dequeue. 
When this returns `true`, at least the + /// first subsequent dequeue will succeed + #[inline] + pub fn ready(&self) -> bool { + !self.rb.is_empty() + } + + /// Returns the number of elements in the queue + #[inline] + pub fn len(&self) -> usize { + self.rb.len() + } + + /// Returns the maximum number of elements the queue can hold + #[inline] + pub fn capacity(&self) -> usize { + self.rb.capacity() + } + + /// Returns the item in the front of the queue without dequeuing, or `None` if the queue is + /// empty + /// + /// # Examples + /// ``` + /// use heapless::spsc::Queue; + /// + /// let mut queue: Queue = Queue::new(); + /// let (mut producer, mut consumer) = queue.split(); + /// assert_eq!(None, consumer.peek()); + /// producer.enqueue(1); + /// assert_eq!(Some(&1), consumer.peek()); + /// assert_eq!(Some(1), consumer.dequeue()); + /// assert_eq!(None, consumer.peek()); + /// ``` + #[inline] + pub fn peek(&self) -> Option<&T> { + self.rb.peek() + } +} + +impl<'a, T, const N: usize> Producer<'a, T, N> { + /// Adds an `item` to the end of the queue, returns back the `item` if the queue is full + #[inline] + pub fn enqueue(&mut self, val: T) -> Result<(), T> { + unsafe { self.rb.inner_enqueue(val) } + } + + /// Adds an `item` to the end of the queue, without checking if the queue is full + /// + /// See [`Queue::enqueue_unchecked`] for safety + #[inline] + pub unsafe fn enqueue_unchecked(&mut self, val: T) { + self.rb.inner_enqueue_unchecked(val) + } + + /// Returns if there is any space to enqueue a new item. When this returns true, at + /// least the first subsequent enqueue will succeed. 
+ #[inline] + pub fn ready(&self) -> bool { + !self.rb.is_full() + } + + /// Returns the number of elements in the queue + #[inline] + pub fn len(&self) -> usize { + self.rb.len() + } + + /// Returns the maximum number of elements the queue can hold + #[inline] + pub fn capacity(&self) -> usize { + self.rb.capacity() + } +} + +#[cfg(test)] +mod tests { + use crate::spsc::Queue; + use hash32::Hasher; + + #[test] + fn full() { + let mut rb: Queue = Queue::new(); + + assert_eq!(rb.is_full(), false); + + rb.enqueue(1).unwrap(); + assert_eq!(rb.is_full(), false); + + rb.enqueue(2).unwrap(); + assert_eq!(rb.is_full(), true); + } + + #[test] + fn empty() { + let mut rb: Queue = Queue::new(); + + assert_eq!(rb.is_empty(), true); + + rb.enqueue(1).unwrap(); + assert_eq!(rb.is_empty(), false); + + rb.enqueue(2).unwrap(); + assert_eq!(rb.is_empty(), false); + } + + #[test] + fn len() { + let mut rb: Queue = Queue::new(); + + assert_eq!(rb.len(), 0); + + rb.enqueue(1).unwrap(); + assert_eq!(rb.len(), 1); + + rb.enqueue(2).unwrap(); + assert_eq!(rb.len(), 2); + + for _ in 0..1_000_000 { + let v = rb.dequeue().unwrap(); + println!("{}", v); + rb.enqueue(v).unwrap(); + assert_eq!(rb.len(), 2); + } + } + + #[test] + fn try_overflow() { + const N: usize = 23; + let mut rb: Queue = Queue::new(); + + for i in 0..N as i32 - 1 { + rb.enqueue(i).unwrap(); + } + + for _ in 0..1_000_000 { + for i in 0..N as i32 - 1 { + let d = rb.dequeue().unwrap(); + assert_eq!(d, i); + rb.enqueue(i).unwrap(); + } + } + } + + #[test] + fn sanity() { + let mut rb: Queue = Queue::new(); + + let (mut p, mut c) = rb.split(); + + assert_eq!(p.ready(), true); + + assert_eq!(c.ready(), false); + + assert_eq!(c.dequeue(), None); + + p.enqueue(0).unwrap(); + + assert_eq!(c.dequeue(), Some(0)); + } + + #[test] + fn static_new() { + static mut _Q: Queue = Queue::new(); + } + + #[test] + fn drop() { + struct Droppable; + impl Droppable { + fn new() -> Self { + unsafe { + COUNT += 1; + } + Droppable + } + } + + impl 
Drop for Droppable { + fn drop(&mut self) { + unsafe { + COUNT -= 1; + } + } + } + + static mut COUNT: i32 = 0; + + { + let mut v: Queue = Queue::new(); + v.enqueue(Droppable::new()).ok().unwrap(); + v.enqueue(Droppable::new()).ok().unwrap(); + v.dequeue().unwrap(); + } + + assert_eq!(unsafe { COUNT }, 0); + + { + let mut v: Queue = Queue::new(); + v.enqueue(Droppable::new()).ok().unwrap(); + v.enqueue(Droppable::new()).ok().unwrap(); + } + + assert_eq!(unsafe { COUNT }, 0); + } + + #[test] + fn iter() { + let mut rb: Queue = Queue::new(); + + rb.enqueue(0).unwrap(); + rb.dequeue().unwrap(); + rb.enqueue(1).unwrap(); + rb.enqueue(2).unwrap(); + rb.enqueue(3).unwrap(); + + let mut items = rb.iter(); + + // assert_eq!(items.next(), Some(&0)); + assert_eq!(items.next(), Some(&1)); + assert_eq!(items.next(), Some(&2)); + assert_eq!(items.next(), Some(&3)); + assert_eq!(items.next(), None); + } + + #[test] + fn iter_double_ended() { + let mut rb: Queue = Queue::new(); + + rb.enqueue(0).unwrap(); + rb.enqueue(1).unwrap(); + rb.enqueue(2).unwrap(); + + let mut items = rb.iter(); + + assert_eq!(items.next(), Some(&0)); + assert_eq!(items.next_back(), Some(&2)); + assert_eq!(items.next(), Some(&1)); + assert_eq!(items.next(), None); + assert_eq!(items.next_back(), None); + } + + #[test] + fn iter_mut() { + let mut rb: Queue = Queue::new(); + + rb.enqueue(0).unwrap(); + rb.enqueue(1).unwrap(); + rb.enqueue(2).unwrap(); + + let mut items = rb.iter_mut(); + + assert_eq!(items.next(), Some(&mut 0)); + assert_eq!(items.next(), Some(&mut 1)); + assert_eq!(items.next(), Some(&mut 2)); + assert_eq!(items.next(), None); + } + + #[test] + fn iter_mut_double_ended() { + let mut rb: Queue = Queue::new(); + + rb.enqueue(0).unwrap(); + rb.enqueue(1).unwrap(); + rb.enqueue(2).unwrap(); + + let mut items = rb.iter_mut(); + + assert_eq!(items.next(), Some(&mut 0)); + assert_eq!(items.next_back(), Some(&mut 2)); + assert_eq!(items.next(), Some(&mut 1)); + assert_eq!(items.next(), None); + 
assert_eq!(items.next_back(), None); + } + + #[test] + fn wrap_around() { + let mut rb: Queue = Queue::new(); + + rb.enqueue(0).unwrap(); + rb.enqueue(1).unwrap(); + rb.enqueue(2).unwrap(); + rb.dequeue().unwrap(); + rb.dequeue().unwrap(); + rb.dequeue().unwrap(); + rb.enqueue(3).unwrap(); + rb.enqueue(4).unwrap(); + + assert_eq!(rb.len(), 2); + } + + #[test] + fn ready_flag() { + let mut rb: Queue = Queue::new(); + let (mut p, mut c) = rb.split(); + assert_eq!(c.ready(), false); + assert_eq!(p.ready(), true); + + p.enqueue(0).unwrap(); + + assert_eq!(c.ready(), true); + assert_eq!(p.ready(), true); + + p.enqueue(1).unwrap(); + + assert_eq!(c.ready(), true); + assert_eq!(p.ready(), false); + + c.dequeue().unwrap(); + + assert_eq!(c.ready(), true); + assert_eq!(p.ready(), true); + + c.dequeue().unwrap(); + + assert_eq!(c.ready(), false); + assert_eq!(p.ready(), true); + } + + #[test] + fn clone() { + let mut rb1: Queue = Queue::new(); + rb1.enqueue(0).unwrap(); + rb1.enqueue(0).unwrap(); + rb1.dequeue().unwrap(); + rb1.enqueue(0).unwrap(); + let rb2 = rb1.clone(); + assert_eq!(rb1.capacity(), rb2.capacity()); + assert_eq!(rb1.len(), rb2.len()); + assert!(rb1.iter().zip(rb2.iter()).all(|(v1, v2)| v1 == v2)); + } + + #[test] + fn eq() { + // generate two queues with same content + // but different buffer alignment + let mut rb1: Queue = Queue::new(); + rb1.enqueue(0).unwrap(); + rb1.enqueue(0).unwrap(); + rb1.dequeue().unwrap(); + rb1.enqueue(0).unwrap(); + let mut rb2: Queue = Queue::new(); + rb2.enqueue(0).unwrap(); + rb2.enqueue(0).unwrap(); + assert!(rb1 == rb2); + // test for symmetry + assert!(rb2 == rb1); + // test for changes in content + rb1.enqueue(0).unwrap(); + assert!(rb1 != rb2); + rb2.enqueue(1).unwrap(); + assert!(rb1 != rb2); + // test for reflexive relation + assert!(rb1 == rb1); + assert!(rb2 == rb2); + } + + #[test] + fn hash_equality() { + // generate two queues with same content + // but different buffer alignment + let rb1 = { + let mut rb1:
Queue = Queue::new(); + rb1.enqueue(0).unwrap(); + rb1.enqueue(0).unwrap(); + rb1.dequeue().unwrap(); + rb1.enqueue(0).unwrap(); + rb1 + }; + let rb2 = { + let mut rb2: Queue = Queue::new(); + rb2.enqueue(0).unwrap(); + rb2.enqueue(0).unwrap(); + rb2 + }; + let hash1 = { + let mut hasher1 = hash32::FnvHasher::default(); + hash32::Hash::hash(&rb1, &mut hasher1); + let hash1 = hasher1.finish(); + hash1 + }; + let hash2 = { + let mut hasher2 = hash32::FnvHasher::default(); + hash32::Hash::hash(&rb2, &mut hasher2); + let hash2 = hasher2.finish(); + hash2 + }; + assert_eq!(hash1, hash2); + } +} diff --git a/src/spsc/mod.rs b/src/spsc_old/mod.rs similarity index 68% rename from src/spsc/mod.rs rename to src/spsc_old/mod.rs index 2ec3d72f76..435c1443a8 100644 --- a/src/spsc/mod.rs +++ b/src/spsc_old/mod.rs @@ -9,9 +9,8 @@ //! //! ``` //! use heapless::spsc::Queue; -//! use heapless::consts::*; //! -//! let mut rb: Queue = Queue::new(); +//! let mut rb: Queue = Queue::new(); //! //! assert!(rb.enqueue(0).is_ok()); //! assert!(rb.enqueue(1).is_ok()); @@ -25,10 +24,11 @@ //! - `Queue` can be `split` and then be used in Single Producer Single Consumer mode //! //! ``` -//! use heapless::spsc::Queue; -//! use heapless::consts::*; +//! use heapless::spsc::{Queue}; //! -//! static mut Q: Queue = Queue(heapless::i::Queue::new()); +//! // Notice, type signature needs to be explicit for now. +//! // (min_const_eval, does not allow for default type assignments) +//! static mut Q: Queue = Queue::new(); //! //! enum Event { A, B } //! @@ -83,9 +83,8 @@ //! - The numbers reported correspond to the successful path (i.e. `Some` is returned by `dequeue` //! and `Ok` is returned by `enqueue`). 
-use core::{cell::UnsafeCell, fmt, hash, marker::PhantomData, mem::MaybeUninit, ptr}; +use core::{cell::UnsafeCell, fmt, hash, mem::MaybeUninit, ptr}; -use generic_array::{ArrayLength, GenericArray}; use hash32; use crate::sealed::spsc as sealed; @@ -93,32 +92,23 @@ pub use split::{Consumer, Producer}; mod split; -/// Multi core synchronization - a memory barrier is used for synchronization -pub struct MultiCore; - -/// Single core synchronization - no memory barrier synchronization, just a compiler fence -pub struct SingleCore; - -// Atomic{U8,U16, Usize} with no CAS operations that works on targets that have "no atomic support" +// Atomic{U8, U16, Usize} with no CAS operations that works on targets that have "no atomic support" // according to their specification -pub(crate) struct Atomic { +pub(crate) struct Atomic { v: UnsafeCell, - c: PhantomData, } -impl Atomic { +impl Atomic { pub(crate) const fn new(v: U) -> Self { Atomic { v: UnsafeCell::new(v), - c: PhantomData, } } } -impl Atomic +impl Atomic where U: sealed::Uxx, - C: sealed::XCore, { fn get(&self) -> &U { unsafe { &*self.v.get() } @@ -129,7 +119,7 @@ where } fn load_acquire(&self) -> U { - unsafe { U::load_acquire::(self.v.get()) } + unsafe { U::load_acquire(self.v.get()) } } fn load_relaxed(&self) -> U { @@ -137,13 +127,13 @@ where } fn store_release(&self, val: U) { - unsafe { U::store_release::(self.v.get(), val) } + unsafe { U::store_release(self.v.get(), val) } } } /// A statically allocated single producer single consumer queue with a capacity of `N` elements /// -/// *IMPORTANT*: To get better performance use a capacity that is a power of 2 (e.g. `U16`, `U32`, +/// *IMPORTANT*: To get better performance use a capacity that is a power of 2 (e.g. `16`, `32`, /// etc.). /// /// By default `spsc::Queue` will use `usize` integers to hold the indices to its head and tail. 
For @@ -154,30 +144,30 @@ where /// [`u8`]: struct.Queue.html#method.u8 /// [`u16`]: struct.Queue.html#method.u16 /// -/// *IMPORTANT*: `spsc::Queue<_, _, u8>` has a maximum capacity of 255 elements; `spsc::Queue<_, _, -/// u16>` has a maximum capacity of 65535 elements. -/// -/// `spsc::Queue` also comes in a single core variant. This variant can be created using the -/// following constructors: `u8_sc`, `u16_sc`, `usize_sc` and `new_sc`. This variant is `unsafe` to -/// create because the programmer must make sure that the queue's consumer and producer endpoints -/// (if split) are kept on a single core for their entire lifetime. -pub struct Queue( - #[doc(hidden)] pub crate::i::Queue, U, C>, -) +/// *IMPORTANT*: `spsc::Queue<_, u8, N>` has a maximum capacity of 255 elements; `spsc::Queue<_, +/// u16, N>` has a maximum capacity of 65535 elements. + +#[cfg(has_atomics)] +pub struct Queue where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore; +{ + // this is from where we dequeue items + pub(crate) head: Atomic, + + // this is where we enqueue new items + pub(crate) tail: Atomic, -impl Queue + pub(crate) buffer: MaybeUninit<[T; N]>, +} + +impl Queue where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { /// Returns the maximum number of elements the queue can hold pub fn capacity(&self) -> U { - U::saturate(N::to_usize()) + U::saturate(N) } /// Returns `true` if the queue has a length of 0 @@ -186,7 +176,7 @@ where } /// Iterates from the front of the queue to the back - pub fn iter(&self) -> Iter<'_, T, N, U, C> { + pub fn iter(&self) -> Iter<'_, T, U, N> { Iter { rb: self, index: 0, @@ -195,7 +185,7 @@ where } /// Returns an iterator that allows modifying each value. 
- pub fn iter_mut(&mut self) -> IterMut<'_, T, N, U, C> { + pub fn iter_mut(&mut self) -> IterMut<'_, T, U, N> { let len = self.len_usize(); IterMut { rb: self, @@ -205,18 +195,16 @@ where } fn len_usize(&self) -> usize { - let head = self.0.head.load_relaxed().into(); - let tail = self.0.tail.load_relaxed().into(); + let head = self.head.load_relaxed().into(); + let tail = self.tail.load_relaxed().into(); U::truncate(tail.wrapping_sub(head)).into() } } -impl Drop for Queue +impl Drop for Queue where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { fn drop(&mut self) { for item in self { @@ -227,24 +215,20 @@ where } } -impl fmt::Debug for Queue +impl fmt::Debug for Queue where - N: ArrayLength, T: fmt::Debug, U: sealed::Uxx, - C: sealed::XCore, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } -impl hash::Hash for Queue +impl hash::Hash for Queue where - N: ArrayLength, T: hash::Hash, U: sealed::Uxx, - C: sealed::XCore, { fn hash(&self, state: &mut H) { // iterate over self in order @@ -254,12 +238,10 @@ where } } -impl hash32::Hash for Queue +impl hash32::Hash for Queue where - N: ArrayLength, T: hash32::Hash, U: sealed::Uxx, - C: sealed::XCore, { fn hash(&self, state: &mut H) { // iterate over self in order @@ -269,28 +251,24 @@ where } } -impl<'a, T, N, U, C> IntoIterator for &'a Queue +impl<'a, T, U, const N: usize> IntoIterator for &'a Queue where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { type Item = &'a T; - type IntoIter = Iter<'a, T, N, U, C>; + type IntoIter = Iter<'a, T, U, N>; fn into_iter(self) -> Self::IntoIter { self.iter() } } -impl<'a, T, N, U, C> IntoIterator for &'a mut Queue +impl<'a, T, U, const N: usize> IntoIterator for &'a mut Queue where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { type Item = &'a mut T; - type IntoIter = IterMut<'a, T, N, U, C>; + type IntoIter = IterMut<'a, T, U, N>; fn into_iter(self) -> Self::IntoIter { self.iter_mut() @@ -298,65 
+276,27 @@ where } macro_rules! impl_ { - ($uxx:ident, $uxx_sc:ident) => { - impl Queue - where - N: ArrayLength, - { - /// Creates an empty queue with a fixed capacity of `N` - pub fn $uxx() -> Self { - Queue(crate::i::Queue::$uxx()) - } - } - - impl crate::i::Queue { - /// `spsc::Queue` `const` constructor; wrap the returned value in - /// [`spsc::Queue`](struct.Queue.html) - pub const fn $uxx() -> Self { - crate::i::Queue { - buffer: MaybeUninit::uninit(), + ($uxx:ident, $doc:tt $(,$unsf:ident)?) => { + impl Queue { + #[doc = $doc] + pub const $($unsf)* fn $uxx() -> Self { + Self { head: Atomic::new(0), tail: Atomic::new(0), - } - } - } - - impl Queue - where - N: ArrayLength, - { - /// Creates an empty queue with a fixed capacity of `N` (single core variant) - pub unsafe fn $uxx_sc() -> Self { - Queue(crate::i::Queue::$uxx_sc()) - } - } - - impl crate::i::Queue { - /// `spsc::Queue` `const` constructor; wrap the returned value in - /// [`spsc::Queue`](struct.Queue.html) - pub const unsafe fn $uxx_sc() -> Self { - crate::i::Queue { buffer: MaybeUninit::uninit(), - head: Atomic::new(0), - tail: Atomic::new(0), } } } - impl Queue - where - N: ArrayLength, - C: sealed::XCore, - { + impl Queue { /// Returns a reference to the item in the front of the queue without dequeuing, or /// `None` if the queue is empty. /// /// # Examples /// ``` /// use heapless::spsc::Queue; - /// use heapless::consts::*; /// - /// let mut queue: Queue = Queue::u8(); + /// let mut queue: Queue = Queue::new(); /// let (mut producer, mut consumer) = queue.split(); /// assert_eq!(None, consumer.peek()); /// producer.enqueue(1); @@ -367,10 +307,10 @@ macro_rules! 
impl_ { pub fn peek(&self) -> Option<&T> { let cap = self.capacity(); - let head = self.0.head.get(); - let tail = self.0.tail.get(); + let head = self.head.get(); + let tail = self.tail.get(); - let p = self.0.buffer.as_ptr(); + let p = self.buffer.as_ptr(); if *head != *tail { let item = unsafe { &*(p as *const T).add(usize::from(*head % cap)) }; @@ -384,10 +324,10 @@ macro_rules! impl_ { pub fn dequeue(&mut self) -> Option { let cap = self.capacity(); - let head = self.0.head.get_mut(); - let tail = self.0.tail.get_mut(); + let head = self.head.get_mut(); + let tail = self.tail.get_mut(); - let p = self.0.buffer.as_ptr(); + let p = self.buffer.as_ptr(); if *head != *tail { let item = unsafe { (p as *const T).add(usize::from(*head % cap)).read() }; @@ -403,8 +343,8 @@ macro_rules! impl_ { /// Returns back the `item` if the queue is full pub fn enqueue(&mut self, item: T) -> Result<(), T> { let cap = self.capacity(); - let head = *self.0.head.get_mut(); - let tail = *self.0.tail.get_mut(); + let head = *self.head.get_mut(); + let tail = *self.tail.get_mut(); if tail.wrapping_sub(head) > cap - 1 { Err(item) @@ -424,12 +364,12 @@ macro_rules! impl_ { /// twice. pub unsafe fn enqueue_unchecked(&mut self, item: T) { let cap = self.capacity(); - let tail = self.0.tail.get_mut(); + let tail = self.tail.get_mut(); // NOTE(ptr::write) the memory slot that we are about to write to is // uninitialized. We use `ptr::write` to avoid running `T`'s destructor on the // uninitialized memory - (self.0.buffer.as_mut_ptr() as *mut T) + (self.buffer.as_mut_ptr() as *mut T) .add(usize::from(*tail % cap)) .write(item); *tail = tail.wrapping_add(1); @@ -437,24 +377,22 @@ macro_rules! 
impl_ { /// Returns the number of elements in the queue pub fn len(&self) -> $uxx { - let head = self.0.head.load_relaxed(); - let tail = self.0.tail.load_relaxed(); + let head = self.head.load_relaxed(); + let tail = self.tail.load_relaxed(); tail.wrapping_sub(head) } } - impl Clone for Queue + impl Clone for Queue where T: Clone, - N: ArrayLength, - C: sealed::XCore, { fn clone(&self) -> Self { - let mut new: Queue = Queue(crate::i::Queue { + let mut new: Queue = Queue { buffer: MaybeUninit::uninit(), head: Atomic::new(0), tail: Atomic::new(0), - }); + }; for s in self.iter() { unsafe { @@ -469,88 +407,57 @@ macro_rules! impl_ { }; } -impl crate::i::Queue { - /// `spsc::Queue` `const` constructor; wrap the returned value in - /// [`spsc::Queue`](struct.Queue.html) - pub const fn new() -> Self { - crate::i::Queue::usize() - } -} - -impl Queue -where - N: ArrayLength, -{ +impl Queue { /// Alias for [`spsc::Queue::usize`](struct.Queue.html#method.usize) - pub fn new() -> Self { - Queue(crate::i::Queue::new()) - } -} - -impl crate::i::Queue { - /// `spsc::Queue` `const` constructor; wrap the returned value in - /// [`spsc::Queue`](struct.Queue.html) - pub const unsafe fn new_sc() -> Self { - crate::i::Queue::usize_sc() - } -} - -impl Queue -where - N: ArrayLength, -{ - /// Alias for [`spsc::Queue::usize_sc`](struct.Queue.html#method.usize_sc) - pub unsafe fn new_sc() -> Self { - Queue(crate::i::Queue::new_sc()) + pub const fn new() -> Self { + Queue::usize() } } -impl_!(u8, u8_sc); -impl_!(u16, u16_sc); -impl_!(usize, usize_sc); - -impl PartialEq> for Queue +impl_!( + u8, + "Creates an empty queue with a fixed capacity of `N`. **Safety**: Assumes `N <= u8::MAX`.", + unsafe +); +impl_!( + u16, + "Creates an empty queue with a fixed capacity of `N`. 
**Safety**: Assumes `N <= u16::MAX`.", + unsafe +); +impl_!(usize, "Creates an empty queue with a fixed capacity of `N`"); + +impl PartialEq> for Queue where T: PartialEq, - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, - N2: ArrayLength, U2: sealed::Uxx, - C2: sealed::XCore, { - fn eq(&self, other: &Queue) -> bool { + fn eq(&self, other: &Queue) -> bool { self.len_usize() == other.len_usize() && self.iter().zip(other.iter()).all(|(v1, v2)| v1 == v2) } } -impl Eq for Queue +impl Eq for Queue where T: Eq, - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { } /// An iterator over the items of a queue -pub struct Iter<'a, T, N, U, C> +pub struct Iter<'a, T, U, const N: usize> where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { - rb: &'a Queue, + rb: &'a Queue, index: usize, len: usize, } -impl<'a, T, N, U, C> Clone for Iter<'a, T, N, U, C> +impl<'a, T, U, const N: usize> Clone for Iter<'a, T, U, N> where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { fn clone(&self) -> Self { Self { @@ -562,33 +469,29 @@ where } /// A mutable iterator over the items of a queue -pub struct IterMut<'a, T, N, U, C> +pub struct IterMut<'a, T, U, const N: usize> where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { - rb: &'a mut Queue, + rb: &'a mut Queue, index: usize, len: usize, } macro_rules! 
iterator { (struct $name:ident -> $elem:ty, $ptr:ty, $asptr:ident, $mkref:ident) => { - impl<'a, T, N, U, C> Iterator for $name<'a, T, N, U, C> + impl<'a, T, U, const N: usize> Iterator for $name<'a, T, U, N> where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { type Item = $elem; fn next(&mut self) -> Option<$elem> { if self.index < self.len { - let head = self.rb.0.head.load_relaxed().into(); + let head = self.rb.head.load_relaxed().into(); let cap = self.rb.capacity().into(); - let ptr = self.rb.0.buffer.$asptr() as $ptr; + let ptr = self.rb.buffer.$asptr() as $ptr; let i = (head + self.index) % cap; self.index += 1; Some(unsafe { $mkref!(*ptr.offset(i as isize)) }) @@ -598,18 +501,16 @@ macro_rules! iterator { } } - impl<'a, T, N, U, C> DoubleEndedIterator for $name<'a, T, N, U, C> + impl<'a, T, U, const N: usize> DoubleEndedIterator for $name<'a, T, U, N> where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { fn next_back(&mut self) -> Option<$elem> { if self.index < self.len { - let head = self.rb.0.head.load_relaxed().into(); + let head = self.rb.head.load_relaxed().into(); let cap = self.rb.capacity().into(); - let ptr = self.rb.0.buffer.$asptr() as $ptr; + let ptr = self.rb.buffer.$asptr() as $ptr; // self.len > 0, since it's larger than self.index > 0 let i = (head + self.len - 1) % cap; self.len -= 1; @@ -641,11 +542,16 @@ iterator!(struct IterMut -> &'a mut T, *mut T, as_mut_ptr, make_ref_mut); mod tests { use hash32::Hasher; - use crate::{consts::*, spsc::Queue}; + use crate::spsc::Queue; + + #[test] + fn static_usize() { + static mut _Q: Queue = Queue::usize(); + } #[test] fn static_new() { - static mut _Q: Queue = Queue(crate::i::Queue::new()); + static mut _Q: Queue = Queue::new(); } #[test] @@ -671,7 +577,7 @@ mod tests { static mut COUNT: i32 = 0; { - let mut v: Queue = Queue::new(); + let mut v: Queue = Queue::new(); v.enqueue(Droppable::new()).ok().unwrap(); v.enqueue(Droppable::new()).ok().unwrap(); v.dequeue().unwrap(); @@ 
-680,7 +586,7 @@ mod tests { assert_eq!(unsafe { COUNT }, 0); { - let mut v: Queue = Queue::new(); + let mut v: Queue = Queue::new(); v.enqueue(Droppable::new()).ok().unwrap(); v.enqueue(Droppable::new()).ok().unwrap(); } @@ -690,7 +596,7 @@ mod tests { #[test] fn full() { - let mut rb: Queue = Queue::new(); + let mut rb: Queue = Queue::new(); rb.enqueue(0).unwrap(); rb.enqueue(1).unwrap(); @@ -702,7 +608,7 @@ mod tests { #[test] fn iter() { - let mut rb: Queue = Queue::new(); + let mut rb: Queue = Queue::new(); rb.enqueue(0).unwrap(); rb.enqueue(1).unwrap(); @@ -718,7 +624,7 @@ mod tests { #[test] fn iter_double_ended() { - let mut rb: Queue = Queue::new(); + let mut rb: Queue = Queue::new(); rb.enqueue(0).unwrap(); rb.enqueue(1).unwrap(); @@ -735,7 +641,7 @@ mod tests { #[test] fn iter_overflow() { - let mut rb: Queue = Queue::u8(); + let mut rb: Queue = unsafe { Queue::u8() }; rb.enqueue(0).unwrap(); for _ in 0..300 { @@ -749,7 +655,7 @@ mod tests { #[test] fn iter_mut() { - let mut rb: Queue = Queue::new(); + let mut rb: Queue = Queue::new(); rb.enqueue(0).unwrap(); rb.enqueue(1).unwrap(); @@ -765,7 +671,7 @@ mod tests { #[test] fn iter_mut_double_ended() { - let mut rb: Queue = Queue::new(); + let mut rb: Queue = Queue::new(); rb.enqueue(0).unwrap(); rb.enqueue(1).unwrap(); @@ -782,21 +688,17 @@ mod tests { #[test] fn sanity() { - let mut rb: Queue = Queue::new(); - + let mut rb: Queue = Queue::new(); assert_eq!(rb.dequeue(), None); - rb.enqueue(0).unwrap(); - assert_eq!(rb.dequeue(), Some(0)); - assert_eq!(rb.dequeue(), None); } #[test] #[cfg(feature = "smaller-atomics")] fn u8() { - let mut rb: Queue = Queue::u8(); + let mut rb: Queue = Queue::u8(); for _ in 0..255 { rb.enqueue(0).unwrap(); @@ -807,7 +709,7 @@ mod tests { #[test] fn wrap_around() { - let mut rb: Queue = Queue::new(); + let mut rb: Queue = Queue::new(); rb.enqueue(0).unwrap(); rb.enqueue(1).unwrap(); @@ -823,7 +725,7 @@ mod tests { #[test] fn ready_flag() { - let mut rb: Queue = Queue::new(); 
+ let mut rb: Queue = Queue::new(); let (mut p, mut c) = rb.split(); assert_eq!(c.ready(), false); assert_eq!(p.ready(), true); @@ -851,7 +753,7 @@ mod tests { #[test] fn clone() { - let mut rb1: Queue = Queue::new(); + let mut rb1: Queue = Queue::new(); rb1.enqueue(0).unwrap(); rb1.enqueue(0).unwrap(); rb1.dequeue().unwrap(); @@ -866,12 +768,12 @@ mod tests { fn eq() { // generate two queues with same content // but different buffer alignment - let mut rb1: Queue = Queue::new(); + let mut rb1: Queue = Queue::new(); rb1.enqueue(0).unwrap(); rb1.enqueue(0).unwrap(); rb1.dequeue().unwrap(); rb1.enqueue(0).unwrap(); - let mut rb2: Queue = Queue::new(); + let mut rb2: Queue = Queue::new(); rb2.enqueue(0).unwrap(); rb2.enqueue(0).unwrap(); assert!(rb1 == rb2); @@ -892,7 +794,7 @@ mod tests { // generate two queues with same content // but different buffer alignment let rb1 = { - let mut rb1: Queue = Queue::new(); + let mut rb1: Queue = Queue::new(); rb1.enqueue(0).unwrap(); rb1.enqueue(0).unwrap(); rb1.dequeue().unwrap(); @@ -900,7 +802,7 @@ mod tests { rb1 }; let rb2 = { - let mut rb2: Queue = Queue::new(); + let mut rb2: Queue = Queue::new(); rb2.enqueue(0).unwrap(); rb2.enqueue(0).unwrap(); rb2 diff --git a/src/spsc/split.rs b/src/spsc_old/split.rs similarity index 71% rename from src/spsc/split.rs rename to src/spsc_old/split.rs index 2a88bebe15..ee6b28815c 100644 --- a/src/spsc/split.rs +++ b/src/spsc_old/split.rs @@ -1,20 +1,13 @@ use core::{marker::PhantomData, ptr::NonNull}; -use generic_array::ArrayLength; +use crate::{sealed::spsc as sealed, spsc::Queue}; -use crate::{ - sealed::spsc as sealed, - spsc::{MultiCore, Queue}, -}; - -impl Queue +impl Queue where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { /// Splits a statically allocated queue into producer and consumer end points - pub fn split<'rb>(&'rb mut self) -> (Producer<'rb, T, N, U, C>, Consumer<'rb, T, N, U, C>) { + pub fn split<'rb>(&'rb mut self) -> (Producer<'rb, T, U, N>, Consumer<'rb, T, 
U, N>) { ( Producer { rb: unsafe { NonNull::new_unchecked(self) }, @@ -30,58 +23,46 @@ where /// A queue "consumer"; it can dequeue items from the queue // NOTE the consumer semantically owns the `head` pointer of the queue -pub struct Consumer<'a, T, N, U = usize, C = MultiCore> +pub struct Consumer<'a, T, U, const N: usize> where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { - rb: NonNull>, + rb: NonNull>, _marker: PhantomData<&'a ()>, } -unsafe impl<'a, T, N, U, C> Send for Consumer<'a, T, N, U, C> +unsafe impl<'a, T, U, const N: usize> Send for Consumer<'a, T, U, N> where - N: ArrayLength, T: Send, U: sealed::Uxx, - C: sealed::XCore, { } /// A queue "producer"; it can enqueue items into the queue // NOTE the producer semantically owns the `tail` pointer of the queue -pub struct Producer<'a, T, N, U = usize, C = MultiCore> +pub struct Producer<'a, T, U, const N: usize> where - N: ArrayLength, U: sealed::Uxx, - C: sealed::XCore, { - rb: NonNull>, + rb: NonNull>, _marker: PhantomData<&'a ()>, } -unsafe impl<'a, T, N, U, C> Send for Producer<'a, T, N, U, C> +unsafe impl<'a, T, U, const N: usize> Send for Producer<'a, T, U, N> where - N: ArrayLength, T: Send, U: sealed::Uxx, - C: sealed::XCore, { } macro_rules! impl_ { ($uxx:ident) => { - impl<'a, T, N, C> Consumer<'a, T, N, $uxx, C> - where - N: ArrayLength, - C: sealed::XCore, - { + impl<'a, T, const N: usize> Consumer<'a, T, $uxx, N> { /// Returns if there are any items to dequeue. When this returns true, at least the /// first subsequent dequeue will succeed. pub fn ready(&self) -> bool { - let head = unsafe { self.rb.as_ref().0.head.load_relaxed() }; - let tail = unsafe { self.rb.as_ref().0.tail.load_acquire() }; // ▼ + let head = unsafe { self.rb.as_ref().head.load_relaxed() }; + let tail = unsafe { self.rb.as_ref().tail.load_acquire() }; // ▼ return head != tail; } @@ -90,9 +71,8 @@ macro_rules! 
impl_ { /// # Examples /// ``` /// use heapless::spsc::Queue; - /// use heapless::consts::*; /// - /// let mut queue: Queue = Queue::u8(); + /// let mut queue: Queue = Queue::new(); /// let (mut producer, mut consumer) = queue.split(); /// assert_eq!(None, consumer.peek()); /// producer.enqueue(1); @@ -101,8 +81,8 @@ macro_rules! impl_ { /// assert_eq!(None, consumer.peek()); /// ``` pub fn peek(&self) -> Option<&T> { - let head = unsafe { self.rb.as_ref().0.head.load_relaxed() }; - let tail = unsafe { self.rb.as_ref().0.tail.load_acquire() }; + let head = unsafe { self.rb.as_ref().head.load_relaxed() }; + let tail = unsafe { self.rb.as_ref().tail.load_acquire() }; if head != tail { Some(unsafe { self._peek(head) }) @@ -113,8 +93,8 @@ macro_rules! impl_ { /// Returns the item in the front of the queue, or `None` if the queue is empty pub fn dequeue(&mut self) -> Option { - let head = unsafe { self.rb.as_ref().0.head.load_relaxed() }; - let tail = unsafe { self.rb.as_ref().0.tail.load_acquire() }; // ▼ + let head = unsafe { self.rb.as_ref().head.load_relaxed() }; + let tail = unsafe { self.rb.as_ref().tail.load_acquire() }; // ▼ if head != tail { Some(unsafe { self._dequeue(head) }) // ▲ @@ -129,8 +109,8 @@ macro_rules! impl_ { /// /// If the queue is empty this is equivalent to calling `mem::uninitialized` pub unsafe fn dequeue_unchecked(&mut self) -> T { - let head = self.rb.as_ref().0.head.load_relaxed(); - debug_assert_ne!(head, self.rb.as_ref().0.tail.load_acquire()); + let head = self.rb.as_ref().head.load_relaxed(); + debug_assert_ne!(head, self.rb.as_ref().tail.load_acquire()); self._dequeue(head) // ▲ } @@ -146,8 +126,8 @@ macro_rules! impl_ { /// This is a conservative estimate. Interrupt during this function /// might cause that the `Consumer` actually has more than N items available. 
pub fn len(&self) -> $uxx { - let head = unsafe { self.rb.as_ref().0.head.load_relaxed() }; - let tail = unsafe { self.rb.as_ref().0.tail.load_acquire() }; + let head = unsafe { self.rb.as_ref().head.load_relaxed() }; + let tail = unsafe { self.rb.as_ref().tail.load_acquire() }; tail.wrapping_sub(head) } @@ -156,7 +136,7 @@ macro_rules! impl_ { let cap = rb.capacity(); - let item = (rb.0.buffer.as_ptr() as *const T).add(usize::from(head % cap)); + let item = (rb.buffer.as_ptr() as *const T).add(usize::from(head % cap)); &*item } @@ -165,31 +145,27 @@ macro_rules! impl_ { let cap = rb.capacity(); - let item = (rb.0.buffer.as_ptr() as *const T) + let item = (rb.buffer.as_ptr() as *const T) .add(usize::from(head % cap)) .read(); - rb.0.head.store_release(head.wrapping_add(1)); // ▲ + rb.head.store_release(head.wrapping_add(1)); // ▲ item } } - impl<'a, T, N, C> Producer<'a, T, N, $uxx, C> - where - N: ArrayLength, - C: sealed::XCore, - { + impl<'a, T, const N: usize> Producer<'a, T, $uxx, N> { /// Returns if there is any space to enqueue a new item. When this returns true, at /// least the first subsequent enqueue will succeed. pub fn ready(&self) -> bool { let cap = unsafe { self.rb.as_ref().capacity() }; - let tail = unsafe { self.rb.as_ref().0.tail.load_relaxed() }; + let tail = unsafe { self.rb.as_ref().tail.load_relaxed() }; // NOTE we could replace this `load_acquire` with a `load_relaxed` and this method // would be sound on most architectures but that change would result in UB according // to the C++ memory model, which is what Rust currently uses, so we err on the side // of caution and stick to `load_acquire`. Check issue google#sanitizers#882 for // more details. - let head = unsafe { self.rb.as_ref().0.head.load_acquire() }; + let head = unsafe { self.rb.as_ref().head.load_acquire() }; return head.wrapping_add(cap) != tail; } @@ -198,13 +174,13 @@ macro_rules! 
impl_ { /// Returns back the `item` if the queue is full pub fn enqueue(&mut self, item: T) -> Result<(), T> { let cap = unsafe { self.rb.as_ref().capacity() }; - let tail = unsafe { self.rb.as_ref().0.tail.load_relaxed() }; + let tail = unsafe { self.rb.as_ref().tail.load_relaxed() }; // NOTE we could replace this `load_acquire` with a `load_relaxed` and this method // would be sound on most architectures but that change would result in UB according // to the C++ memory model, which is what Rust currently uses, so we err on the side // of caution and stick to `load_acquire`. Check issue google#sanitizers#882 for // more details. - let head = unsafe { self.rb.as_ref().0.head.load_acquire() }; // ▼ + let head = unsafe { self.rb.as_ref().head.load_acquire() }; // ▼ if tail.wrapping_sub(head) > cap - 1 { Err(item) @@ -226,8 +202,8 @@ macro_rules! impl_ { /// This is a conservative estimate. Interrupt during this function /// might cause that the `Producer` actually has more than N items of available space. pub fn len(&self) -> $uxx { - let head = unsafe { self.rb.as_ref().0.head.load_acquire() }; - let tail = unsafe { self.rb.as_ref().0.tail.load_relaxed() }; + let head = unsafe { self.rb.as_ref().head.load_acquire() }; + let tail = unsafe { self.rb.as_ref().tail.load_relaxed() }; tail.wrapping_sub(head) } @@ -240,8 +216,8 @@ macro_rules! impl_ { /// to create a copy of `item`, which could result in `T`'s destructor running on `item` /// twice. pub unsafe fn enqueue_unchecked(&mut self, item: T) { - let tail = self.rb.as_ref().0.tail.load_relaxed(); - debug_assert_ne!(tail.wrapping_add(1), self.rb.as_ref().0.head.load_acquire()); + let tail = self.rb.as_ref().tail.load_relaxed(); + debug_assert_ne!(tail.wrapping_add(1), self.rb.as_ref().head.load_acquire()); self._enqueue(tail, item); // ▲ } @@ -253,10 +229,10 @@ macro_rules! impl_ { // NOTE(ptr::write) the memory slot that we are about to write to is // uninitialized. 
We use `ptr::write` to avoid running `T`'s destructor on the // uninitialized memory - (rb.0.buffer.as_mut_ptr() as *mut T) + (rb.buffer.as_mut_ptr() as *mut T) .add(usize::from(tail % cap)) .write(item); - rb.0.tail.store_release(tail.wrapping_add(1)); // ▲ + rb.tail.store_release(tail.wrapping_add(1)); // ▲ } } }; @@ -268,11 +244,11 @@ impl_!(usize); #[cfg(test)] mod tests { - use crate::{consts::*, spsc::Queue}; + use crate::spsc::Queue; #[test] fn sanity() { - let mut rb: Queue = Queue::new(); + let mut rb: Queue = Queue::new(); let (mut p, mut c) = rb.split(); diff --git a/src/string.rs b/src/string.rs index 90dc54e66d..5848095d0b 100644 --- a/src/string.rs +++ b/src/string.rs @@ -1,38 +1,15 @@ -use core::{ - fmt, - fmt::Write, - hash, - mem::{self, MaybeUninit}, - ops, str, - str::Utf8Error, -}; - -use generic_array::{ - typenum::{consts::*, IsGreaterOrEqual}, - ArrayLength, GenericArray, -}; +use core::{fmt, fmt::Write, hash, ops, str}; + use hash32; use crate::Vec; /// A fixed capacity [`String`](https://doc.rust-lang.org/std/string/struct.String.html) -pub struct String(#[doc(hidden)] pub crate::i::String>) -where - N: ArrayLength; - -impl crate::i::String { - /// `String` `const` constructor; wrap the returned value in [`String`](../struct.String.html) - pub const fn new() -> Self { - Self { - vec: crate::i::Vec::new(), - } - } +pub struct String { + vec: Vec, } -impl String -where - N: ArrayLength, -{ +impl String { /// Constructs a new, empty `String` with a fixed capacity of `N` /// /// # Examples @@ -41,78 +18,16 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// /// // allocate the string on the stack - /// let mut s: String = String::new(); + /// let mut s: String<4> = String::new(); /// /// // allocate the string in a static variable - /// static mut S: String = String(heapless::i::String::new()); - /// ``` - #[inline] - pub fn new() -> Self { - String(crate::i::String::new()) - } - - /// Converts a vector of bytes 
into a `String`. - /// - /// A string slice ([`&str`]) is made of bytes ([`u8`]), and a vector of bytes - /// ([`Vec`]) is made of bytes, so this function converts between the - /// two. Not all byte slices are valid `String`s, however: `String` - /// requires that it is valid UTF-8. `from_utf8()` checks to ensure that - /// the bytes are valid UTF-8, and then does the conversion. - /// - /// See std::String for further information. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use heapless::{String, Vec}; - /// use heapless::consts::*; - /// - /// let mut v: Vec = Vec::new(); - /// v.push('a' as u8).unwrap(); - /// v.push('b' as u8).unwrap(); - /// - /// let s = String::from_utf8(v).unwrap(); - /// assert!(s.len() == 2); + /// static mut S: String<4> = String::new(); /// ``` - /// - /// Incorrect bytes: - /// - /// ``` - /// use heapless::{String, Vec}; - /// use heapless::consts::*; - /// - /// // some invalid bytes, in a vector - /// - /// let mut v: Vec = Vec::new(); - /// v.push(0).unwrap(); - /// v.push(159).unwrap(); - /// v.push(146).unwrap(); - /// v.push(150).unwrap(); - /// assert!(String::from_utf8(v).is_err()); - /// ``` - #[inline] - pub fn from_utf8(vec: Vec) -> Result, Utf8Error> { - // validate input - str::from_utf8(&*vec)?; - - Ok(unsafe { String::from_utf8_unchecked(vec) }) - } - - /// Converts a vector of bytes to a `String` without checking that the - /// string contains valid UTF-8. - /// - /// See the safe version, `from_utf8`, for more details. #[inline] - pub unsafe fn from_utf8_unchecked(mut vec: Vec) -> String { - // FIXME this may result in a memcpy at runtime - let vec_ = mem::replace(&mut vec.0, MaybeUninit::uninit().assume_init()); - mem::forget(vec); - String(crate::i::String { vec: vec_ }) + pub const fn new() -> Self { + Self { vec: Vec::new() } } /// Converts a `String` into a byte vector. 
@@ -125,9 +40,8 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// - /// let s: String = String::from("ab"); + /// let s: String<4> = String::from("ab"); /// let b = s.into_bytes(); /// assert!(b.len() == 2); /// @@ -135,7 +49,7 @@ where /// ``` #[inline] pub fn into_bytes(self) -> Vec { - Vec(self.0.vec) + self.vec } /// Extracts a string slice containing the entire string. @@ -146,9 +60,8 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// - /// let mut s: String = String::from("ab"); + /// let mut s: String<4> = String::from("ab"); /// assert!(s.as_str() == "ab"); /// /// let _s = s.as_str(); @@ -156,7 +69,7 @@ where /// ``` #[inline] pub fn as_str(&self) -> &str { - unsafe { str::from_utf8_unchecked(self.0.vec.as_slice()) } + unsafe { str::from_utf8_unchecked(self.vec.as_slice()) } } /// Converts a `String` into a mutable string slice. @@ -167,15 +80,14 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// - /// let mut s: String = String::from("ab"); + /// let mut s: String<4> = String::from("ab"); /// let s = s.as_mut_str(); /// s.make_ascii_uppercase(); /// ``` #[inline] pub fn as_mut_str(&mut self) -> &mut str { - unsafe { str::from_utf8_unchecked_mut(self.0.vec.as_mut_slice()) } + unsafe { str::from_utf8_unchecked_mut(self.vec.as_mut_slice()) } } /// Returns a mutable reference to the contents of this `String`. @@ -203,7 +115,7 @@ where /// assert_eq!(s, "olleh"); /// ``` pub unsafe fn as_mut_vec(&mut self) -> &mut Vec { - &mut *(&mut self.0.vec as *mut crate::i::Vec> as *mut Vec) + &mut self.vec } /// Appends a given string slice onto the end of this `String`. 
@@ -214,9 +126,8 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// - /// let mut s: String = String::from("foo"); + /// let mut s: String<8> = String::from("foo"); /// /// assert!(s.push_str("bar").is_ok()); /// @@ -226,7 +137,7 @@ where /// ``` #[inline] pub fn push_str(&mut self, string: &str) -> Result<(), ()> { - self.0.vec.extend_from_slice(string.as_bytes()) + self.vec.extend_from_slice(string.as_bytes()) } /// Returns the maximum number of elements the String can hold @@ -237,14 +148,13 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// - /// let mut s: String = String::new(); + /// let mut s: String<4> = String::new(); /// assert!(s.capacity() == 4); /// ``` #[inline] pub fn capacity(&self) -> usize { - self.0.vec.capacity() + self.vec.capacity() } /// Appends the given [`char`] to the end of this `String`. @@ -257,9 +167,8 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// - /// let mut s: String = String::from("abc"); + /// let mut s: String<8> = String::from("abc"); /// /// s.push('1').unwrap(); /// s.push('2').unwrap(); @@ -272,9 +181,8 @@ where #[inline] pub fn push(&mut self, c: char) -> Result<(), ()> { match c.len_utf8() { - 1 => self.0.vec.push(c as u8).map_err(|_| {}), + 1 => self.vec.push(c as u8).map_err(|_| {}), _ => self - .0 .vec .extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes()), } @@ -300,9 +208,8 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// - /// let mut s: String = String::from("hello"); + /// let mut s: String<8> = String::from("hello"); /// /// s.truncate(2); /// @@ -312,7 +219,7 @@ where pub fn truncate(&mut self, new_len: usize) { if new_len <= self.len() { assert!(self.is_char_boundary(new_len)); - self.0.vec.truncate(new_len) + self.vec.truncate(new_len) } } @@ -328,9 +235,8 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// - /// let mut s: String = 
String::from("foo"); + /// let mut s: String<8> = String::from("foo"); /// /// assert_eq!(s.pop(), Some('o')); /// assert_eq!(s.pop(), Some('o')); @@ -344,7 +250,7 @@ where // pop bytes that correspond to `ch` for _ in 0..ch.len_utf8() { unsafe { - self.0.vec.pop_unchecked(); + self.vec.pop_unchecked(); } } @@ -362,9 +268,8 @@ where /// /// ``` /// use heapless::String; - /// use heapless::consts::*; /// - /// let mut s: String = String::from("foo"); + /// let mut s: String<8> = String::from("foo"); /// /// s.clear(); /// @@ -374,23 +279,17 @@ where /// ``` #[inline] pub fn clear(&mut self) { - self.0.vec.clear() + self.vec.clear() } } -impl Default for String -where - N: ArrayLength, -{ +impl Default for String { fn default() -> Self { Self::new() } } -impl<'a, N> From<&'a str> for String -where - N: ArrayLength, -{ +impl<'a, const N: usize> From<&'a str> for String { fn from(s: &'a str) -> Self { let mut new = String::new(); new.push_str(s).unwrap(); @@ -398,10 +297,7 @@ where } } -impl str::FromStr for String -where - N: ArrayLength, -{ +impl str::FromStr for String { type Err = (); fn from_str(s: &str) -> Result { @@ -411,59 +307,41 @@ where } } -impl Clone for String -where - N: ArrayLength, -{ +impl Clone for String { fn clone(&self) -> Self { - Self(crate::i::String { - vec: self.0.vec.clone(), - }) + Self { + vec: self.vec.clone(), + } } } -impl fmt::Debug for String -where - N: ArrayLength, -{ +impl fmt::Debug for String { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { ::fmt(self, f) } } -impl fmt::Display for String -where - N: ArrayLength, -{ +impl fmt::Display for String { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { ::fmt(self, f) } } -impl hash::Hash for String -where - N: ArrayLength, -{ +impl hash::Hash for String { #[inline] fn hash(&self, hasher: &mut H) { ::hash(self, hasher) } } -impl hash32::Hash for String -where - N: ArrayLength, -{ +impl hash32::Hash for String { #[inline] fn hash(&self, hasher: &mut H) { 
::hash(self, hasher) } } -impl fmt::Write for String -where - N: ArrayLength, -{ +impl fmt::Write for String { fn write_str(&mut self, s: &str) -> Result<(), fmt::Error> { self.push_str(s).map_err(|_| fmt::Error) } @@ -473,10 +351,7 @@ where } } -impl ops::Deref for String -where - N: ArrayLength, -{ +impl ops::Deref for String { type Target = str; fn deref(&self) -> &str { @@ -484,40 +359,27 @@ where } } -impl ops::DerefMut for String -where - N: ArrayLength, -{ +impl ops::DerefMut for String { fn deref_mut(&mut self) -> &mut str { self.as_mut_str() } } -impl AsRef for String -where - N: ArrayLength, -{ +impl AsRef for String { #[inline] fn as_ref(&self) -> &str { self } } -impl AsRef<[u8]> for String -where - N: ArrayLength, -{ +impl AsRef<[u8]> for String { #[inline] fn as_ref(&self) -> &[u8] { self.as_bytes() } } -impl PartialEq> for String -where - N1: ArrayLength, - N2: ArrayLength, -{ +impl PartialEq> for String { fn eq(&self, rhs: &String) -> bool { str::eq(&**self, &**rhs) } @@ -527,49 +389,59 @@ where } } -macro_rules! 
impl_eq { - ($lhs:ty, $rhs:ty) => { - impl<'a, 'b, N> PartialEq<$rhs> for $lhs - where - N: ArrayLength, - { - #[inline] - fn eq(&self, other: &$rhs) -> bool { - str::eq(&self[..], &other[..]) - } - #[inline] - fn ne(&self, other: &$rhs) -> bool { - str::ne(&self[..], &other[..]) - } - } +// String == str +impl PartialEq for String { + #[inline] + fn eq(&self, other: &str) -> bool { + str::eq(&self[..], &other[..]) + } + #[inline] + fn ne(&self, other: &str) -> bool { + str::ne(&self[..], &other[..]) + } +} - impl<'a, 'b, N> PartialEq<$lhs> for $rhs - where - N: ArrayLength, - { - #[inline] - fn eq(&self, other: &$lhs) -> bool { - str::eq(&self[..], &other[..]) - } - #[inline] - fn ne(&self, other: &$lhs) -> bool { - str::ne(&self[..], &other[..]) - } - } - }; +// String == &'str +impl PartialEq<&str> for String { + #[inline] + fn eq(&self, other: &&str) -> bool { + str::eq(&self[..], &other[..]) + } + #[inline] + fn ne(&self, other: &&str) -> bool { + str::ne(&self[..], &other[..]) + } } -impl_eq! { String, str } -impl_eq! { String, &'a str } +// str == String +impl PartialEq> for str { + #[inline] + fn eq(&self, other: &String) -> bool { + str::eq(&self[..], &other[..]) + } + #[inline] + fn ne(&self, other: &String) -> bool { + str::ne(&self[..], &other[..]) + } +} -impl Eq for String where N: ArrayLength {} +// &'str == String +impl PartialEq> for &str { + #[inline] + fn eq(&self, other: &String) -> bool { + str::eq(&self[..], &other[..]) + } + #[inline] + fn ne(&self, other: &String) -> bool { + str::ne(&self[..], &other[..]) + } +} + +impl Eq for String {} macro_rules! impl_from_num { - ($num:ty, $size:ty) => { - impl From<$num> for String - where - N: ArrayLength + IsGreaterOrEqual<$size, Output = True>, - { + ($num:ty, $size:expr) => { + impl From<$num> for String { fn from(s: $num) -> Self { let mut new = String::new(); write!(&mut new, "{}", s).unwrap(); @@ -579,28 +451,28 @@ macro_rules! 
impl_from_num { }; } -impl_from_num!(i8, U4); -impl_from_num!(i16, U6); -impl_from_num!(i32, U11); -impl_from_num!(i64, U20); +impl_from_num!(i8, 4); +impl_from_num!(i16, 6); +impl_from_num!(i32, 11); +impl_from_num!(i64, 20); -impl_from_num!(u8, U3); -impl_from_num!(u16, U5); -impl_from_num!(u32, U10); -impl_from_num!(u64, U20); +impl_from_num!(u8, 3); +impl_from_num!(u16, 5); +impl_from_num!(u32, 10); +impl_from_num!(u64, 20); #[cfg(test)] mod tests { - use crate::{consts::*, String, Vec}; + use crate::{String, Vec}; #[test] fn static_new() { - static mut _S: String = String(crate::i::String::new()); + static mut _S: String<8> = String::new(); } #[test] fn clone() { - let s1: String = String::from("abcd"); + let s1: String<20> = String::from("abcd"); let mut s2 = s1.clone(); s2.push_str(" efgh").unwrap(); @@ -612,7 +484,7 @@ mod tests { fn debug() { use core::fmt::Write; - let s: String = String::from("abcd"); + let s: String<8> = String::from("abcd"); let mut std_s = std::string::String::new(); write!(std_s, "{:?}", s).unwrap(); assert_eq!("\"abcd\"", std_s); @@ -622,7 +494,7 @@ mod tests { fn display() { use core::fmt::Write; - let s: String = String::from("abcd"); + let s: String<8> = String::from("abcd"); let mut std_s = std::string::String::new(); write!(std_s, "{}", s).unwrap(); assert_eq!("abcd", std_s); @@ -630,7 +502,7 @@ mod tests { #[test] fn empty() { - let s: String = String::new(); + let s: String<4> = String::new(); assert!(s.capacity() == 4); assert_eq!(s, ""); assert_eq!(s.len(), 0); @@ -639,7 +511,7 @@ mod tests { #[test] fn from() { - let s: String = String::from("123"); + let s: String<4> = String::from("123"); assert!(s.len() == 3); assert_eq!(s, "123"); } @@ -648,84 +520,37 @@ mod tests { fn from_str() { use core::str::FromStr; - let s: String = String::::from_str("123").unwrap(); + let s: String<4> = String::<4>::from_str("123").unwrap(); assert!(s.len() == 3); assert_eq!(s, "123"); - let e: () = String::::from_str("123").unwrap_err(); + 
let e: () = String::<2>::from_str("123").unwrap_err(); assert_eq!(e, ()); } #[test] #[should_panic] fn from_panic() { - let _: String = String::from("12345"); - } - - #[test] - fn from_utf8() { - let mut v: Vec = Vec::new(); - v.push('a' as u8).unwrap(); - v.push('b' as u8).unwrap(); - - let s = String::from_utf8(v).unwrap(); - assert_eq!(s, "ab"); - } - - #[test] - fn from_utf8_uenc() { - let mut v: Vec = Vec::new(); - v.push(240).unwrap(); - v.push(159).unwrap(); - v.push(146).unwrap(); - v.push(150).unwrap(); - - assert!(String::from_utf8(v).is_ok()); - } - - #[test] - fn from_utf8_uenc_err() { - let mut v: Vec = Vec::new(); - v.push(0).unwrap(); - v.push(159).unwrap(); - v.push(146).unwrap(); - v.push(150).unwrap(); - - assert!(String::from_utf8(v).is_err()); - } - - #[test] - fn from_utf8_unchecked() { - let mut v: Vec = Vec::new(); - v.push(104).unwrap(); - v.push(101).unwrap(); - v.push(108).unwrap(); - v.push(108).unwrap(); - v.push(111).unwrap(); - - let s = unsafe { String::from_utf8_unchecked(v) }; - - assert_eq!(s, "hello"); + let _: String<4> = String::from("12345"); } #[test] fn from_num() { - let v = String::::from(18446744073709551615 as u64); - + let v: String<20> = String::from(18446744073709551615 as u64); assert_eq!(v, "18446744073709551615"); } #[test] fn into_bytes() { - let s: String = String::from("ab"); - let b: Vec = s.into_bytes(); + let s: String<4> = String::from("ab"); + let b: Vec = s.into_bytes(); assert_eq!(b.len(), 2); assert_eq!(&['a' as u8, 'b' as u8], &b[..]); } #[test] fn as_str() { - let s: String = String::from("ab"); + let s: String<4> = String::from("ab"); assert_eq!(s.as_str(), "ab"); // should be moved to fail test @@ -735,7 +560,7 @@ mod tests { #[test] fn as_mut_str() { - let mut s: String = String::from("ab"); + let mut s: String<4> = String::from("ab"); let s = s.as_mut_str(); s.make_ascii_uppercase(); assert_eq!(s, "AB"); @@ -743,16 +568,18 @@ mod tests { #[test] fn push_str() { - let mut s: String = 
String::from("foo"); + let mut s: String<8> = String::from("foo"); assert!(s.push_str("bar").is_ok()); assert_eq!("foobar", s); + assert_eq!(s, "foobar"); assert!(s.push_str("tender").is_err()); assert_eq!("foobar", s); + assert_eq!(s, "foobar"); } #[test] fn push() { - let mut s: String = String::from("abc"); + let mut s: String<6> = String::from("abc"); assert!(s.push('1').is_ok()); assert!(s.push('2').is_ok()); assert!(s.push('3').is_ok()); @@ -762,13 +589,13 @@ mod tests { #[test] fn as_bytes() { - let s: String = String::from("hello"); + let s: String<8> = String::from("hello"); assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes()); } #[test] fn truncate() { - let mut s: String = String::from("hello"); + let mut s: String<8> = String::from("hello"); s.truncate(6); assert_eq!(s.len(), 5); s.truncate(2); @@ -779,7 +606,7 @@ mod tests { #[test] fn pop() { - let mut s: String = String::from("foo"); + let mut s: String<8> = String::from("foo"); assert_eq!(s.pop(), Some('o')); assert_eq!(s.pop(), Some('o')); assert_eq!(s.pop(), Some('f')); @@ -788,7 +615,7 @@ mod tests { #[test] fn pop_uenc() { - let mut s: String = String::from("é"); + let mut s: String<8> = String::from("é"); assert_eq!(s.len(), 3); match s.pop() { Some(c) => { @@ -802,7 +629,7 @@ mod tests { #[test] fn is_empty() { - let mut v: String = String::new(); + let mut v: String<8> = String::new(); assert!(v.is_empty()); let _ = v.push('a'); assert!(!v.is_empty()); @@ -810,7 +637,7 @@ mod tests { #[test] fn clear() { - let mut s: String = String::from("foo"); + let mut s: String<8> = String::from("foo"); s.clear(); assert!(s.is_empty()); assert_eq!(0, s.len()); diff --git a/src/ufmt.rs b/src/ufmt.rs index 9e8ca37701..30da99acd4 100644 --- a/src/ufmt.rs +++ b/src/ufmt.rs @@ -1,21 +1,14 @@ +use crate::{string::String, vec::Vec}; use ufmt_write::uWrite; -use crate::{string::String, vec::Vec, ArrayLength}; - -impl uWrite for String -where - N: ArrayLength, -{ +impl uWrite for String { type Error = (); fn 
write_str(&mut self, s: &str) -> Result<(), Self::Error> { self.push_str(s) } } -impl uWrite for Vec -where - N: ArrayLength, -{ +impl uWrite for Vec { type Error = (); fn write_str(&mut self, s: &str) -> Result<(), Self::Error> { self.extend_from_slice(s.as_bytes()) @@ -28,8 +21,6 @@ mod tests { use ufmt::{derive::uDebug, uwrite}; - use crate::consts::*; - #[derive(uDebug)] struct Pair { x: u32, @@ -41,7 +32,7 @@ mod tests { let a = 123; let b = Pair { x: 0, y: 1234 }; - let mut s = String::::new(); + let mut s = String::<32>::new(); uwrite!(s, "{} -> {:?}", a, b).unwrap(); assert_eq!(s, "123 -> Pair { x: 0, y: 1234 }"); @@ -50,7 +41,7 @@ mod tests { #[test] fn test_string_err() { let p = Pair { x: 0, y: 1234 }; - let mut s = String::::new(); + let mut s = String::<4>::new(); assert!(uwrite!(s, "{:?}", p).is_err()); } @@ -59,7 +50,7 @@ mod tests { let a = 123; let b = Pair { x: 0, y: 1234 }; - let mut v = Vec::::new(); + let mut v = Vec::::new(); uwrite!(v, "{} -> {:?}", a, b).unwrap(); assert_eq!(v, b"123 -> Pair { x: 0, y: 1234 }"); diff --git a/src/vec.rs b/src/vec.rs index 12403ddbbf..f15aaeb7be 100644 --- a/src/vec.rs +++ b/src/vec.rs @@ -1,146 +1,16 @@ use core::{fmt, hash, iter::FromIterator, mem::MaybeUninit, ops, ptr, slice}; - -use generic_array::{ArrayLength, GenericArray}; use hash32; -impl crate::i::Vec { - /// `Vec` `const` constructor; wrap the returned value in [`Vec`](../struct.Vec.html) - pub const fn new() -> Self { - Self { - buffer: MaybeUninit::uninit(), - len: 0, - } - } -} - -impl crate::i::Vec> -where - N: ArrayLength, -{ - pub(crate) fn as_slice(&self) -> &[T] { - // NOTE(unsafe) avoid bound checks in the slicing operation - // &buffer[..self.len] - unsafe { slice::from_raw_parts(self.buffer.as_ptr() as *const T, self.len) } - } - - pub(crate) fn as_mut_slice(&mut self) -> &mut [T] { - // NOTE(unsafe) avoid bound checks in the slicing operation - // &mut buffer[..len] - unsafe { slice::from_raw_parts_mut(self.buffer.as_mut_ptr() as *mut 
T, self.len) } - } - - pub(crate) fn capacity(&self) -> usize { - N::to_usize() - } - - pub(crate) fn clear(&mut self) { - self.truncate(0); - } - - pub(crate) fn clone(&self) -> Self - where - T: Clone, - { - let mut new = Self::new(); - new.extend_from_slice(self.as_slice()).unwrap(); - new - } - - pub(crate) fn extend(&mut self, iter: I) - where - I: IntoIterator, - { - for elem in iter { - self.push(elem).ok().unwrap() - } - } - - pub(crate) fn extend_from_slice(&mut self, other: &[T]) -> Result<(), ()> - where - T: Clone, - { - if self.len + other.len() > self.capacity() { - // won't fit in the `Vec`; don't modify anything and return an error - Err(()) - } else { - for elem in other { - unsafe { - self.push_unchecked(elem.clone()); - } - } - Ok(()) - } - } - - pub(crate) fn is_full(&self) -> bool { - self.len == self.capacity() - } - - pub(crate) unsafe fn pop_unchecked(&mut self) -> T { - debug_assert!(!self.as_slice().is_empty()); - - self.len -= 1; - (self.buffer.as_ptr() as *const T).add(self.len).read() - } - - pub(crate) fn push(&mut self, item: T) -> Result<(), T> { - if self.len < self.capacity() { - unsafe { self.push_unchecked(item) } - Ok(()) - } else { - Err(item) - } - } - - pub(crate) unsafe fn push_unchecked(&mut self, item: T) { - // NOTE(ptr::write) the memory slot that we are about to write to is uninitialized. 
We - // use `ptr::write` to avoid running `T`'s destructor on the uninitialized memory - (self.buffer.as_mut_ptr() as *mut T) - .add(self.len) - .write(item); - - self.len += 1; - } - - unsafe fn swap_remove_unchecked(&mut self, index: usize) -> T { - let length = self.len; - debug_assert!(index < length); - ptr::swap( - self.as_mut_slice().get_unchecked_mut(index), - self.as_mut_slice().get_unchecked_mut(length - 1), - ); - self.pop_unchecked() - } - - pub(crate) fn swap_remove(&mut self, index: usize) -> T { - assert!(index < self.len); - unsafe { self.swap_remove_unchecked(index) } - } - - pub(crate) fn truncate(&mut self, len: usize) { - unsafe { - // drop any extra elements - while len < self.len { - // decrement len before the drop_in_place(), so a panic on Drop - // doesn't re-drop the just-failed value. - self.len -= 1; - let len = self.len; - ptr::drop_in_place(self.as_mut_slice().get_unchecked_mut(len)); - } - } - } -} - /// A fixed capacity [`Vec`](https://doc.rust-lang.org/std/vec/struct.Vec.html) /// /// # Examples /// /// ``` /// use heapless::Vec; -/// use heapless::consts::*; +/// /// /// // A vector with a fixed capacity of 8 elements allocated on the stack -/// let mut vec = Vec::<_, U8>::new(); +/// let mut vec = Vec::<_, 8>::new(); /// vec.push(1); /// vec.push(2); /// @@ -158,45 +28,33 @@ where /// for x in &vec { /// println!("{}", x); /// } -/// assert_eq!(vec, [7, 1, 2, 3]); +/// assert_eq!(*vec, [7, 1, 2, 3]); /// ``` -// repr(transparent) is needed for [`String::as_mut_vec`] -#[repr(transparent)] -pub struct Vec(#[doc(hidden)] pub crate::i::Vec>) -where - N: ArrayLength; - -impl Clone for Vec -where - N: ArrayLength, - T: Clone, -{ - fn clone(&self) -> Self { - Vec(self.0.clone()) - } +pub struct Vec { + buffer: MaybeUninit<[T; N]>, + len: usize, } -impl Vec -where - N: ArrayLength, -{ - /* Constructors */ +impl Vec { /// Constructs a new, empty vector with a fixed capacity of `N` /// /// # Examples /// /// ``` /// use heapless::Vec; - /// 
use heapless::consts::*; /// /// // allocate the vector on the stack - /// let mut x: Vec = Vec::new(); + /// let mut x: Vec = Vec::new(); /// /// // allocate the vector in a static variable - /// static mut X: Vec = Vec(heapless::i::Vec::new()); + /// static mut X: Vec = Vec::new(); /// ``` - pub fn new() -> Self { - Vec(crate::i::Vec::new()) + /// `Vec` `const` constructor; wrap the returned value in [`Vec`](../struct.Vec.html) + pub const fn new() -> Self { + Self { + buffer: MaybeUninit::uninit(), + len: 0, + } } /// Constructs a new vector with a fixed capacity of `N` and fills it @@ -206,9 +64,8 @@ where /// /// ``` /// use heapless::Vec; - /// use heapless::consts::*; /// - /// let mut v: Vec = Vec::new(); + /// let mut v: Vec = Vec::new(); /// v.extend_from_slice(&[1, 2, 3]).unwrap(); /// ``` #[inline] @@ -221,15 +78,74 @@ where Ok(v) } - /* Public API */ - /// Returns the maximum number of elements the vector can hold - pub fn capacity(&self) -> usize { - self.0.capacity() + /// Clones a vec into a new vec + pub(crate) fn clone(&self) -> Self + where + T: Clone, + { + let mut new = Self::new(); + new.extend_from_slice(self.as_slice()).unwrap(); + new + } + + /// Extracts a slice containing the entire vector. + /// + /// Equivalent to `&s[..]`. + /// + /// # Examples + /// + /// ``` + /// use heapless::Vec; + /// let buffer: Vec = Vec::from_slice(&[1, 2, 3, 5, 8]).unwrap(); + /// assert_eq!(buffer.as_slice(), &[1, 2, 3, 5, 8]); + /// ``` + pub fn as_slice(&self) -> &[T] { + // NOTE(unsafe) avoid bound checks in the slicing operation + // &buffer[..self.len] + unsafe { slice::from_raw_parts(self.buffer.as_ptr() as *const T, self.len) } + } + + /// Extracts a mutable slice containing the entire vector. + /// + /// Equivalent to `&s[..]`. 
+ /// + /// # Examples + /// + /// ``` + /// use heapless::Vec; + /// let mut buffer: Vec = Vec::from_slice(&[1, 2, 3, 5, 8]).unwrap(); + /// buffer[0] = 9; + /// assert_eq!(buffer.as_slice(), &[9, 2, 3, 5, 8]); + /// ``` + pub(crate) fn as_mut_slice(&mut self) -> &mut [T] { + // NOTE(unsafe) avoid bound checks in the slicing operation + // &mut buffer[..self.len] + unsafe { slice::from_raw_parts_mut(self.buffer.as_mut_ptr() as *mut T, self.len) } + } + + /// Returns the maximum number of elements the vector can hold. + pub const fn capacity(&self) -> usize { + N } /// Clears the vector, removing all values. + // PER: Check if non drop types correctly optimized. pub fn clear(&mut self) { - self.0.clear() + self.truncate(0); + } + + /// Extends the vec from an iterator. + /// + /// # Panic + /// + /// Panics if the vec cannot hold all elements of the iterator. + pub fn extend(&mut self, iter: I) + where + I: IntoIterator, + { + for elem in iter { + self.push(elem).ok().unwrap() + } } /// Clones and appends all elements in a slice to the `Vec`. 
@@ -241,9 +157,8 @@ where /// /// ``` /// use heapless::Vec; - /// use heapless::consts::*; /// - /// let mut vec = Vec::::new(); + /// let mut vec = Vec::::new(); /// vec.push(1).unwrap(); /// vec.extend_from_slice(&[2, 3, 4]).unwrap(); /// assert_eq!(*vec, [1, 2, 3, 4]); @@ -252,13 +167,23 @@ where where T: Clone, { - self.0.extend_from_slice(other) + if self.len + other.len() > self.capacity() { + // won't fit in the `Vec`; don't modify anything and return an error + Err(()) + } else { + for elem in other { + unsafe { + self.push_unchecked(elem.clone()); + } + } + Ok(()) + } } - /// Removes the last element from a vector and return it, or `None` if it's empty + /// Removes the last element from a vector and returns it, or `None` if it's empty pub fn pop(&mut self) -> Option { - if self.0.len != 0 { - Some(unsafe { self.0.pop_unchecked() }) + if self.len != 0 { + Some(unsafe { self.pop_unchecked() }) } else { None } @@ -268,23 +193,53 @@ where /// /// Returns back the `item` if the vector is full pub fn push(&mut self, item: T) -> Result<(), T> { - self.0.push(item) + if self.len < self.capacity() { + unsafe { self.push_unchecked(item) } + Ok(()) + } else { + Err(item) + } } - pub(crate) unsafe fn push_unchecked(&mut self, item: T) { - self.0.push_unchecked(item) + /// Removes the last element from a vector and returns it + /// + /// # Safety + /// + /// This assumes the vec to have at least one element. + pub(crate) unsafe fn pop_unchecked(&mut self) -> T { + debug_assert!(!self.as_slice().is_empty()); + + self.len -= 1; + (self.buffer.as_ptr() as *const T).add(self.len).read() + } + + /// Appends an `item` to the back of the collection + /// + /// # Safety + /// + /// This assumes the vec is not full. + pub unsafe fn push_unchecked(&mut self, item: T) { + // NOTE(ptr::write) the memory slot that we are about to write to is uninitialized. 
We + // use `ptr::write` to avoid running `T`'s destructor on the uninitialized memory + debug_assert!(!self.is_full()); + (self.buffer.as_mut_ptr() as *mut T) + .add(self.len) + .write(item); + + self.len += 1; } /// Shortens the vector, keeping the first `len` elements and dropping the rest. + // PER: Check that non drop types are correctly optimized pub fn truncate(&mut self, len: usize) { unsafe { // drop any extra elements - while len < self.len() { + while len < self.len { // decrement len before the drop_in_place(), so a panic on Drop // doesn't re-drop the just-failed value. - self.0.len -= 1; - let len = self.len(); - ptr::drop_in_place(self.get_unchecked_mut(len)); + self.len -= 1; + let len = self.len; + ptr::drop_in_place(self.as_mut_slice().get_unchecked_mut(len)); } } } @@ -304,8 +259,8 @@ where return Err(()); } - if new_len > self.len() { - while self.len() < new_len { + if new_len > self.len { + while self.len < new_len { self.push(value.clone()).ok(); } } else { @@ -356,7 +311,6 @@ where /// ```no_run /// # #![allow(dead_code)] /// use heapless::Vec; - /// use heapless::consts::*; /// /// # // This is just a minimal skeleton for the doc example; /// # // don't use this as a starting point for a real library. @@ -370,7 +324,7 @@ where /// # ) -> i32; /// # } /// # impl StreamWrapper { - /// pub fn get_dictionary(&self) -> Option> { + /// pub fn get_dictionary(&self) -> Option> { /// // Per the FFI method's docs, "32768 bytes is always enough". 
/// let mut dict = Vec::new(); /// let mut dict_length = 0; @@ -399,9 +353,8 @@ where /// ``` /// use core::iter::FromIterator; /// use heapless::Vec; - /// use heapless::consts::*; /// - /// let mut vec = Vec::, U3>::from_iter( + /// let mut vec = Vec::, 3>::from_iter( /// [ /// Vec::from_iter([1, 0, 0].iter().cloned()), /// Vec::from_iter([0, 1, 0].iter().cloned()), @@ -423,7 +376,7 @@ where pub unsafe fn set_len(&mut self, new_len: usize) { debug_assert!(new_len <= self.capacity()); - self.0.len = new_len + self.len = new_len } /// Removes an element from the vector and returns it. @@ -440,9 +393,9 @@ where /// /// ``` /// use heapless::Vec; - /// use heapless::consts::*; + ///// use heapless::consts::*; /// - /// let mut v: Vec<_, U8> = Vec::new(); + /// let mut v: Vec<_, 8> = Vec::new(); /// v.push("foo").unwrap(); /// v.push("bar").unwrap(); /// v.push("baz").unwrap(); @@ -455,15 +408,51 @@ where /// assert_eq!(&*v, ["baz", "qux"]); /// ``` pub fn swap_remove(&mut self, index: usize) -> T { - self.0.swap_remove(index) + assert!(index < self.len); + unsafe { self.swap_remove_unchecked(index) } } - pub(crate) unsafe fn swap_remove_unchecked(&mut self, index: usize) -> T { - self.0.swap_remove_unchecked(index) + /// Removes an element from the vector and returns it. + /// + /// The removed element is replaced by the last element of the vector. + /// + /// This does not preserve ordering, but is O(1). + /// + /// # Safety + /// + /// Assumes `index` within bounds. 
+ /// + /// # Examples + /// + /// ``` + /// use heapless::Vec; + /// + /// let mut v: Vec<_, 8> = Vec::new(); + /// v.push("foo").unwrap(); + /// v.push("bar").unwrap(); + /// v.push("baz").unwrap(); + /// v.push("qux").unwrap(); + /// + /// assert_eq!(unsafe { v.swap_remove_unchecked(1) }, "bar"); + /// assert_eq!(&*v, ["foo", "qux", "baz"]); + /// + /// assert_eq!(unsafe { v.swap_remove_unchecked(0) }, "foo"); + /// assert_eq!(&*v, ["baz", "qux"]); + /// ``` + pub unsafe fn swap_remove_unchecked(&mut self, index: usize) -> T { + let length = self.len(); + debug_assert!(index < length); + ptr::swap( + self.as_mut_slice().get_unchecked_mut(index), + self.as_mut_slice().get_unchecked_mut(length - 1), + ); + self.pop_unchecked() } - pub(crate) fn is_full(&self) -> bool { - self.0.is_full() + /// Returns true if the vec is full + #[inline] + pub fn is_full(&self) -> bool { + self.len == self.capacity() } /// Returns `true` if `needle` is a prefix of the Vec. @@ -474,9 +463,8 @@ where /// /// ``` /// use heapless::Vec; - /// use heapless::consts::*; /// - /// let v: Vec<_, U8> = Vec::from_slice(b"abc").unwrap(); + /// let v: Vec<_, 8> = Vec::from_slice(b"abc").unwrap(); /// assert_eq!(v.starts_with(b""), true); /// assert_eq!(v.starts_with(b"ab"), true); /// assert_eq!(v.starts_with(b"bc"), false); @@ -487,7 +475,7 @@ where T: PartialEq, { let n = needle.len(); - self.len() >= n && needle == &self[..n] + self.len >= n && needle == &self[..n] } /// Returns `true` if `needle` is a suffix of the Vec. 
@@ -498,9 +486,8 @@ where /// /// ``` /// use heapless::Vec; - /// use heapless::consts::*; /// - /// let v: Vec<_, U8> = Vec::from_slice(b"abc").unwrap(); + /// let v: Vec<_, 8> = Vec::from_slice(b"abc").unwrap(); /// assert_eq!(v.ends_with(b""), true); /// assert_eq!(v.ends_with(b"ab"), false); /// assert_eq!(v.ends_with(b"bc"), true); @@ -515,29 +502,24 @@ where } } -impl Default for Vec -where - N: ArrayLength, -{ +// Trait implementations + +impl Default for Vec { fn default() -> Self { Self::new() } } -impl fmt::Debug for Vec +impl fmt::Debug for Vec where T: fmt::Debug, - N: ArrayLength, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { <[T] as fmt::Debug>::fmt(self, f) } } -impl fmt::Write for Vec -where - N: ArrayLength, -{ +impl fmt::Write for Vec { fn write_str(&mut self, s: &str) -> fmt::Result { match self.extend_from_slice(s.as_bytes()) { Ok(()) => Ok(()), @@ -546,31 +528,28 @@ where } } -impl Drop for Vec -where - N: ArrayLength, -{ +// PER: Please check if non drop types are correctly optimized +impl Drop for Vec { fn drop(&mut self) { - unsafe { ptr::drop_in_place(&mut self[..]) } + // We drop each element used in the vector by turning into a &mut[T] + unsafe { + ptr::drop_in_place(self.as_mut_slice()); + } } } -impl Extend for Vec -where - N: ArrayLength, -{ +impl Extend for Vec { fn extend(&mut self, iter: I) where I: IntoIterator, { - self.0.extend(iter) + self.extend(iter) } } -impl<'a, T, N> Extend<&'a T> for Vec +impl<'a, T, const N: usize> Extend<&'a T> for Vec where T: 'a + Copy, - N: ArrayLength, { fn extend(&mut self, iter: I) where @@ -580,30 +559,25 @@ where } } -impl hash::Hash for Vec +impl hash::Hash for Vec where T: core::hash::Hash, - N: ArrayLength, { fn hash(&self, state: &mut H) { <[T] as hash::Hash>::hash(self, state) } } -impl hash32::Hash for Vec +impl hash32::Hash for Vec where T: hash32::Hash, - N: ArrayLength, { fn hash(&self, state: &mut H) { <[T] as hash32::Hash>::hash(self, state) } } -impl<'a, T, N> 
IntoIterator for &'a Vec -where - N: ArrayLength, -{ +impl<'a, T, const N: usize> IntoIterator for &'a Vec { type Item = &'a T; type IntoIter = slice::Iter<'a, T>; @@ -612,10 +586,7 @@ where } } -impl<'a, T, N> IntoIterator for &'a mut Vec -where - N: ArrayLength, -{ +impl<'a, T, const N: usize> IntoIterator for &'a mut Vec { type Item = &'a mut T; type IntoIter = slice::IterMut<'a, T>; @@ -624,10 +595,7 @@ where } } -impl FromIterator for Vec -where - N: ArrayLength, -{ +impl FromIterator for Vec { fn from_iter(iter: I) -> Self where I: IntoIterator, @@ -646,26 +614,16 @@ where /// /// [`Vec`]: (https://doc.rust-lang.org/std/vec/struct.Vec.html) /// -pub struct IntoIter -where - N: ArrayLength, -{ +pub struct IntoIter { vec: Vec, next: usize, } -impl Iterator for IntoIter -where - N: ArrayLength, -{ +impl Iterator for IntoIter { type Item = T; fn next(&mut self) -> Option { if self.next < self.vec.len() { - let item = unsafe { - (self.vec.0.buffer.as_ptr() as *const T) - .add(self.next) - .read() - }; + let item = unsafe { (self.vec.buffer.as_ptr() as *const T).add(self.next).read() }; self.next += 1; Some(item) } else { @@ -674,10 +632,9 @@ where } } -impl Clone for IntoIter +impl Clone for IntoIter where T: Clone, - N: ArrayLength, { fn clone(&self) -> Self { let mut vec = Vec::new(); @@ -685,7 +642,7 @@ where if self.next < self.vec.len() { let s = unsafe { slice::from_raw_parts( - (self.vec.0.buffer.as_ptr() as *const T).add(self.next), + (self.vec.buffer.as_ptr() as *const T).add(self.next), self.vec.len() - self.next, ) }; @@ -696,24 +653,19 @@ where } } -impl Drop for IntoIter -where - N: ArrayLength, -{ +// PER: is this correct +impl Drop for IntoIter { fn drop(&mut self) { unsafe { // Drop all the elements that have not been moved out of vec - ptr::drop_in_place(&mut self.vec[self.next..]); + ptr::drop_in_place(&mut self.vec.as_mut_slice()[self.next..]); // Prevent dropping of other elements - self.vec.0.len = 0; + self.vec.len = 0; } } } -impl 
IntoIterator for Vec -where - N: ArrayLength, -{ +impl IntoIterator for Vec { type Item = T; type IntoIter = IntoIter; @@ -722,10 +674,8 @@ where } } -impl PartialEq> for Vec +impl PartialEq> for Vec where - N1: ArrayLength, - N2: ArrayLength, A: PartialEq, { fn eq(&self, other: &Vec) -> bool { @@ -733,114 +683,125 @@ where } } -macro_rules! eq { - ($Lhs:ty, $Rhs:ty) => { - impl<'a, 'b, A, B, N> PartialEq<$Rhs> for $Lhs - where - A: PartialEq, - N: ArrayLength, - { - fn eq(&self, other: &$Rhs) -> bool { - <[A]>::eq(self, &other[..]) - } - } - }; +// Vec == [B] +impl PartialEq<[B]> for Vec +where + A: PartialEq, +{ + fn eq(&self, other: &[B]) -> bool { + <[A]>::eq(self, &other[..]) + } } -eq!(Vec, [B]); -eq!(Vec, &'a [B]); -eq!(Vec, &'a mut [B]); - -macro_rules! array { - ($($N:expr),+) => { - $( - eq!(Vec, [B; $N]); - eq!(Vec, &'a [B; $N]); - )+ +// Vec == &[B] +impl PartialEq<&[B]> for Vec +where + A: PartialEq, +{ + fn eq(&self, other: &&[B]) -> bool { + <[A]>::eq(self, &other[..]) } } -array!( - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, - 26, 27, 28, 29, 30, 31, 32 -); +// Vec == &mut [B] +impl PartialEq<&mut [B]> for Vec +where + A: PartialEq, +{ + fn eq(&self, other: &&mut [B]) -> bool { + <[A]>::eq(self, &other[..]) + } +} -impl Eq for Vec +// Vec == [B; M] +// Equality does not require equal capacity +impl PartialEq<[B; M]> for Vec where - N: ArrayLength, - T: Eq, + A: PartialEq, { + fn eq(&self, other: &[B; M]) -> bool { + <[A]>::eq(self, &other[..]) + } } -impl ops::Deref for Vec +// Vec == &[B; M] +// Equality does not require equal capacity +impl PartialEq<&[B; M]> for Vec where - N: ArrayLength, + A: PartialEq, { + fn eq(&self, other: &&[B; M]) -> bool { + <[A]>::eq(self, &other[..]) + } +} + +// Implements Eq if underlying data is Eq +impl Eq for Vec where T: Eq {} + +impl ops::Deref for Vec { type Target = [T]; fn deref(&self) -> &[T] { - self.0.as_slice() + self.as_slice() } } -impl ops::DerefMut 
for Vec -where - N: ArrayLength, -{ +impl ops::DerefMut for Vec { fn deref_mut(&mut self) -> &mut [T] { - self.0.as_mut_slice() + self.as_mut_slice() } } -impl AsRef> for Vec -where - N: ArrayLength, -{ +impl AsRef> for Vec { #[inline] fn as_ref(&self) -> &Self { self } } -impl AsMut> for Vec -where - N: ArrayLength, -{ +impl AsMut> for Vec { #[inline] fn as_mut(&mut self) -> &mut Self { self } } -impl AsRef<[T]> for Vec -where - N: ArrayLength, -{ +impl AsRef<[T]> for Vec { #[inline] fn as_ref(&self) -> &[T] { self } } -impl AsMut<[T]> for Vec -where - N: ArrayLength, -{ +impl AsMut<[T]> for Vec { #[inline] fn as_mut(&mut self) -> &mut [T] { self } } +impl Clone for Vec +where + T: Clone, +{ + fn clone(&self) -> Self { + self.clone() + } +} + #[cfg(test)] mod tests { - use crate::{consts::*, Vec}; - use as_slice::AsSlice; + use crate::Vec; use core::fmt::Write; #[test] fn static_new() { - static mut _V: Vec = Vec(crate::i::Vec::new()); + static mut _V: Vec = Vec::new(); + } + + #[test] + fn stack_new() { + static mut _V: Vec = Vec::new(); } macro_rules! 
droppable { @@ -871,7 +832,7 @@ mod tests { droppable!(); { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); v.push(Droppable::new()).ok().unwrap(); v.push(Droppable::new()).ok().unwrap(); v.pop().unwrap(); @@ -880,7 +841,7 @@ mod tests { assert_eq!(unsafe { COUNT }, 0); { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); v.push(Droppable::new()).ok().unwrap(); v.push(Droppable::new()).ok().unwrap(); } @@ -890,8 +851,8 @@ mod tests { #[test] fn eq() { - let mut xs: Vec = Vec::new(); - let mut ys: Vec = Vec::new(); + let mut xs: Vec = Vec::new(); + let mut ys: Vec = Vec::new(); assert_eq!(xs, ys); @@ -903,7 +864,7 @@ mod tests { #[test] fn full() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); v.push(0).unwrap(); v.push(1).unwrap(); @@ -915,7 +876,7 @@ mod tests { #[test] fn iter() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); v.push(0).unwrap(); v.push(1).unwrap(); @@ -933,7 +894,7 @@ mod tests { #[test] fn iter_mut() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); v.push(0).unwrap(); v.push(1).unwrap(); @@ -952,20 +913,21 @@ mod tests { #[test] fn collect_from_iter() { let slice = &[1, 2, 3]; - let vec = slice.iter().cloned().collect::>(); - assert_eq!(vec, slice); + let vec: Vec = slice.iter().cloned().collect(); + // PER: Auto deref did not work + assert_eq!(vec.as_slice(), slice); } #[test] #[should_panic] fn collect_from_iter_overfull() { let slice = &[1, 2, 3]; - let _vec = slice.iter().cloned().collect::>(); + let _vec = slice.iter().cloned().collect::>(); } #[test] fn iter_move() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); v.push(0).unwrap(); v.push(1).unwrap(); v.push(2).unwrap(); @@ -985,7 +947,7 @@ mod tests { droppable!(); { - let mut vec: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); vec.push(Droppable::new()).ok().unwrap(); vec.push(Droppable::new()).ok().unwrap(); let mut items = vec.into_iter(); @@ -997,7 +959,7 @@ mod tests { assert_eq!(unsafe { 
COUNT }, 0); { - let mut vec: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); vec.push(Droppable::new()).ok().unwrap(); vec.push(Droppable::new()).ok().unwrap(); let _items = vec.into_iter(); @@ -1007,7 +969,7 @@ mod tests { assert_eq!(unsafe { COUNT }, 0); { - let mut vec: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); vec.push(Droppable::new()).ok().unwrap(); vec.push(Droppable::new()).ok().unwrap(); let mut items = vec.into_iter(); @@ -1019,7 +981,7 @@ mod tests { #[test] fn push_and_pop() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); assert_eq!(v.len(), 0); assert_eq!(v.pop(), None); @@ -1037,7 +999,7 @@ mod tests { #[test] fn resize_size_limit() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); v.resize(0, 0).unwrap(); v.resize(4, 0).unwrap(); @@ -1046,7 +1008,7 @@ mod tests { #[test] fn resize_length_cases() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); assert_eq!(v.len(), 0); @@ -1073,7 +1035,7 @@ mod tests { #[test] fn resize_contents() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); // New entries take supplied value when growing v.resize(1, 17).unwrap(); @@ -1096,7 +1058,7 @@ mod tests { #[test] fn resize_default() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); // resize_default is implemented using resize, so just check the // correct value is being written. 
@@ -1106,14 +1068,14 @@ mod tests { #[test] fn write() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); write!(v, "{:x}", 1234).unwrap(); assert_eq!(&v[..], b"4d2"); } #[test] fn extend_from_slice() { - let mut v: Vec = Vec::new(); + let mut v: Vec = Vec::new(); assert_eq!(v.len(), 0); v.extend_from_slice(&[1, 2]).unwrap(); assert_eq!(v.len(), 2); @@ -1129,17 +1091,17 @@ mod tests { #[test] fn from_slice() { // Successful construction - let v: Vec = Vec::from_slice(&[1, 2, 3]).unwrap(); + let v: Vec = Vec::from_slice(&[1, 2, 3]).unwrap(); assert_eq!(v.len(), 3); assert_eq!(v.as_slice(), &[1, 2, 3]); // Slice too large - assert!(Vec::::from_slice(&[1, 2, 3]).is_err()); + assert!(Vec::::from_slice(&[1, 2, 3]).is_err()); } #[test] fn starts_with() { - let v: Vec<_, U8> = Vec::from_slice(b"ab").unwrap(); + let v: Vec<_, 8> = Vec::from_slice(b"ab").unwrap(); assert!(v.starts_with(&[])); assert!(v.starts_with(b"")); assert!(v.starts_with(b"a")); @@ -1151,7 +1113,7 @@ mod tests { #[test] fn ends_with() { - let v: Vec<_, U8> = Vec::from_slice(b"ab").unwrap(); + let v: Vec<_, 8> = Vec::from_slice(b"ab").unwrap(); assert!(v.ends_with(&[])); assert!(v.ends_with(b"")); assert!(v.ends_with(b"b")); diff --git a/tests/cpass.rs b/tests/cpass.rs index 22d2012272..95f0b7847b 100644 --- a/tests/cpass.rs +++ b/tests/cpass.rs @@ -1,7 +1,6 @@ //! 
Collections of `Send`-able things are `Send` use heapless::{ - consts, spsc::{Consumer, Producer, Queue}, HistoryBuffer, Vec, }; @@ -18,9 +17,9 @@ fn send() { { } - is_send::>(); - is_send::>(); - is_send::>(); - is_send::>(); - is_send::>(); + is_send::>(); + is_send::>(); + is_send::>(); + is_send::>(); + is_send::>(); } diff --git a/tests/tsan.rs b/tests/tsan.rs index cdef4df2e2..f29b9303f3 100644 --- a/tests/tsan.rs +++ b/tests/tsan.rs @@ -4,13 +4,12 @@ use std::{sync::mpsc, thread}; -use generic_array::typenum::Unsigned; -use heapless::{consts::*, mpmc::Q64, spsc}; +use heapless::{mpmc::Q64, spsc}; use scoped_threadpool::Pool; #[test] fn once() { - static mut RB: spsc::Queue = spsc::Queue(heapless::i::Queue::new()); + static mut RB: spsc::Queue = spsc::Queue::new(); let rb = unsafe { &mut RB }; @@ -31,7 +30,7 @@ fn once() { #[test] fn twice() { - static mut RB: spsc::Queue = spsc::Queue(heapless::i::Queue::new()); + static mut RB: spsc::Queue = spsc::Queue::new(); let rb = unsafe { &mut RB }; @@ -53,7 +52,7 @@ fn twice() { #[test] fn scoped() { - let mut rb: spsc::Queue = spsc::Queue::new(); + let mut rb: spsc::Queue = spsc::Queue::new(); rb.enqueue(0).unwrap(); @@ -76,9 +75,9 @@ fn scoped() { #[test] fn contention() { - type N = U1024; + const N: usize = 1024; - let mut rb: spsc::Queue = spsc::Queue::new(); + let mut rb: spsc::Queue = spsc::Queue::new(); { let (mut p, mut c) = rb.split(); @@ -87,8 +86,8 @@ fn contention() { scope.execute(move || { let mut sum: u32 = 0; - for i in 0..(2 * N::to_u32()) { - sum = sum.wrapping_add(i); + for i in 0..(2 * N) { + sum = sum.wrapping_add(i as u32); while let Err(_) = p.enqueue(i as u8) {} } @@ -98,7 +97,7 @@ fn contention() { scope.execute(move || { let mut sum: u32 = 0; - for _ in 0..(2 * N::to_u32()) { + for _ in 0..(2 * N) { loop { match c.dequeue() { Some(v) => { @@ -163,11 +162,11 @@ fn mpmc_contention() { #[test] fn unchecked() { - type N = U1024; + const N: usize = 1024; let mut rb: spsc::Queue = 
spsc::Queue::new(); - for _ in 0..N::to_usize() / 2 { + for _ in 0..N / 2 - 1 { rb.enqueue(1).unwrap(); } @@ -176,31 +175,29 @@ fn unchecked() { Pool::new(2).scoped(move |scope| { scope.execute(move || { - for _ in 0..N::to_usize() / 2 { - unsafe { - p.enqueue_unchecked(2); - } + for _ in 0..N / 2 - 1 { + p.enqueue(2).unwrap(); } }); scope.execute(move || { let mut sum: usize = 0; - for _ in 0..N::to_usize() / 2 { - sum = sum.wrapping_add(usize::from(unsafe { c.dequeue_unchecked() })); + for _ in 0..N / 2 - 1 { + sum = sum.wrapping_add(usize::from(c.dequeue().unwrap())); } - assert_eq!(sum, N::to_usize() / 2); + assert_eq!(sum, N / 2 - 1); }); }); } - assert_eq!(rb.len(), N::to_usize() / 2); + assert_eq!(rb.len(), N / 2 - 1); } #[test] fn len_properly_wraps() { - type N = U3; + const N: usize = 4; let mut rb: spsc::Queue = spsc::Queue::new(); rb.enqueue(1).unwrap(); @@ -217,7 +214,7 @@ fn len_properly_wraps() { #[test] fn iterator_properly_wraps() { - type N = U3; + const N: usize = 4; let mut rb: spsc::Queue = spsc::Queue::new(); rb.enqueue(1).unwrap(); @@ -233,6 +230,7 @@ fn iterator_properly_wraps() { assert_eq!(expected, actual) } +#[cfg(all(target_arch = "x86_64", feature = "x86-sync-pool"))] #[test] fn pool() { use heapless::pool::singleton::Pool as _;