This repository was archived by the owner on Nov 27, 2020. It is now read-only.

Make trait functions immutable #11

Closed
wants to merge 3 commits
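
For context: this PR changes the receivers of the allocator traits (`BuildAllocRef`, `DeallocRef`, `AllocRef`, `ReallocRef`) from `&mut self` to `&self` and drops the `mut` bindings that are no longer needed at the call sites in `boxed.rs`, `raw_vec.rs`, `string.rs`, and `vec.rs`. A minimal call-site sketch under the new signatures — the `alloc_wg::alloc` paths for `Global` and `AllocErr` follow the doc-tests in this diff, while the path for `NonZeroLayout` is an assumption:

```rust
use alloc_wg::alloc::{AllocErr, AllocRef, DeallocRef, Global, NonZeroLayout};

fn roundtrip() -> Result<(), AllocErr> {
    // No `let mut a = Global;` is needed any more: `alloc` and `dealloc`
    // now take `&self`.
    let a = Global;
    if let Ok(layout) = NonZeroLayout::new::<u64>() {
        let ptr = a.alloc(layout)?;
        unsafe { a.dealloc(ptr, layout) };
    }
    Ok(())
}
```

The same pattern repeats throughout the diff: `mut a: A` parameters and `let mut b = ...` bindings lose their `mut`, and `clippy::needless_pass_by_value` is allowed wherever the allocator is still taken by value.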
44 changes: 20 additions & 24 deletions src/alloc/mod.rs
@@ -148,33 +148,29 @@ pub trait BuildAllocRef: Sized {
/// * `layout` must *fit* that block of memory
/// * the alignment of the `layout` must match the alignment used to allocate that block of
/// memory
unsafe fn build_alloc_ref(
&mut self,
ptr: NonNull<u8>,
layout: Option<NonZeroLayout>,
) -> Self::Ref;
unsafe fn build_alloc_ref(&self, ptr: NonNull<u8>, layout: Option<NonZeroLayout>) -> Self::Ref;
}

pub trait DeallocRef: Sized {
type BuildAlloc: BuildAllocRef<Ref = Self>;

fn get_build_alloc(&mut self) -> Self::BuildAlloc;
fn get_build_alloc(&self) -> Self::BuildAlloc;

/// # Safety
///
/// * `ptr` must denote a block of memory currently allocated via this allocator
/// * `layout` must *fit* that block of memory
/// * the alignment of the `layout` must match the alignment used to allocate that block of
/// memory
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout);
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: NonZeroLayout);
}

pub trait AllocRef: DeallocRef {
type Error;

fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error>;
fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error>;

fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
fn alloc_zeroed(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
let size = layout.size();
let p = self.alloc(layout)?;
unsafe {
@@ -193,7 +189,7 @@ pub trait AllocRef: DeallocRef {
/// * `layout` must *fit* the `ptr` (see above); note the `new_size` argument need not fit it
/// * `new_size` must not be less than `layout.size()`
unsafe fn grow_in_place(
&mut self,
&self,
ptr: NonNull<u8>,
layout: NonZeroLayout,
new_size: NonZeroUsize,
@@ -212,7 +208,7 @@ pub trait AllocRef: DeallocRef {
/// * `layout` must *fit* the `ptr` (see above); note the `new_size` argument need not fit it
/// * `new_size` must not be greater than `layout.size()` (and must be greater than zero)
unsafe fn shrink_in_place(
&mut self,
&self,
ptr: NonNull<u8>,
layout: NonZeroLayout,
new_size: NonZeroUsize,
@@ -251,7 +247,7 @@ pub trait ReallocRef: AllocRef {
/// implement this trait atop an underlying native allocation
/// library that aborts on memory exhaustion.)
unsafe fn realloc(
&mut self,
&self,
ptr: NonNull<u8>,
old_layout: NonZeroLayout,
new_layout: NonZeroLayout,
@@ -297,7 +293,7 @@ macro_rules! impl_buildalloc_alloc_zst {
type Ref = Self;

unsafe fn build_alloc_ref(
&mut self,
&self,
_ptr: NonNull<u8>,
_layout: Option<NonZeroLayout>,
) -> Self::Ref {
@@ -314,11 +310,11 @@ impl_buildalloc_alloc_zst!(System);
impl DeallocRef for Global {
type BuildAlloc = Self;

fn get_build_alloc(&mut self) -> Self::BuildAlloc {
fn get_build_alloc(&self) -> Self::BuildAlloc {
Self
}

unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout) {
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: NonZeroLayout) {
#[allow(deprecated)]
dealloc(ptr.as_ptr(), layout.into())
}
@@ -327,14 +323,14 @@ impl DeallocRef for Global {
impl AllocRef for Global {
type Error = AllocErr;

fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
#[allow(deprecated)]
unsafe {
NonNull::new(alloc(layout.into())).ok_or(AllocErr)
}
}

fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
fn alloc_zeroed(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
#[allow(deprecated)]
unsafe {
NonNull::new(alloc_zeroed(layout.into())).ok_or(AllocErr)
@@ -345,7 +341,7 @@ impl AllocRef for Global {
impl ReallocRef for Global {
// FIXME: Remove `else` branch. This is needed, as std provides old method.
unsafe fn realloc(
&mut self,
&self,
ptr: NonNull<u8>,
old_layout: NonZeroLayout,
new_layout: NonZeroLayout,
@@ -369,11 +365,11 @@ impl ReallocRef for Global {
impl DeallocRef for System {
type BuildAlloc = Self;

fn get_build_alloc(&mut self) -> Self::BuildAlloc {
fn get_build_alloc(&self) -> Self::BuildAlloc {
Self
}

unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout) {
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: NonZeroLayout) {
GlobalAlloc::dealloc(self, ptr.as_ptr(), layout.into())
}
}
@@ -382,11 +378,11 @@ impl DeallocRef for System {
impl AllocRef for System {
type Error = AllocErr;

fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
unsafe { NonNull::new(GlobalAlloc::alloc(self, layout.into())).ok_or(AllocErr) }
}

fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
fn alloc_zeroed(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
unsafe { NonNull::new(GlobalAlloc::alloc_zeroed(self, layout.into())).ok_or(AllocErr) }
}
}
@@ -395,7 +391,7 @@ impl AllocRef for System {
impl ReallocRef for System {
// FIXME: Remove `else` branch. This is needed, as std provides old method.
unsafe fn realloc(
&mut self,
&self,
ptr: NonNull<u8>,
old_layout: NonZeroLayout,
new_layout: NonZeroLayout,
@@ -417,7 +413,7 @@ impl ReallocRef for System {

#[inline]
unsafe fn alloc_copy_dealloc<A: ReallocRef>(
alloc: &mut A,
alloc: &A,
ptr: NonNull<u8>,
old_layout: NonZeroLayout,
new_layout: NonZeroLayout,
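
Moving every receiver to `&self` means a stateful allocator has to rely on interior mutability. Below is a sketch, not part of this PR, of a hypothetical bump arena (`Bump` and `BumpHandle` are invented names) implementing the revised traits; the `alloc_wg::alloc` import paths are assumptions, and only the required `alloc` method is written out, since `alloc_zeroed` has the default shown above and the `Global`/`System` impls in this diff get by without overriding the in-place methods:

```rust
use core::cell::{Cell, UnsafeCell};
use core::ptr::NonNull;

use alloc_wg::alloc::{AllocErr, AllocRef, BuildAllocRef, DeallocRef, NonZeroLayout};

/// A fixed-capacity bump arena: allocating only advances a cursor, so a
/// `Cell` is enough to mutate it through `&self`.
pub struct Bump {
    storage: Box<[UnsafeCell<u8>]>,
    cursor: Cell<usize>,
}

impl Bump {
    pub fn with_capacity(n: usize) -> Self {
        Self {
            storage: (0..n).map(|_| UnsafeCell::new(0)).collect(),
            cursor: Cell::new(0),
        }
    }

    pub fn handle(&self) -> BumpHandle<'_> {
        BumpHandle(self)
    }
}

/// The handle that implements the allocator traits. It is `Copy`, so several
/// containers can share one arena.
#[derive(Clone, Copy)]
pub struct BumpHandle<'a>(&'a Bump);

impl<'a> BuildAllocRef for BumpHandle<'a> {
    type Ref = Self;

    unsafe fn build_alloc_ref(
        &self,
        _ptr: NonNull<u8>,
        _layout: Option<NonZeroLayout>,
    ) -> Self::Ref {
        *self
    }
}

impl<'a> DeallocRef for BumpHandle<'a> {
    type BuildAlloc = Self;

    fn get_build_alloc(&self) -> Self::BuildAlloc {
        *self
    }

    unsafe fn dealloc(&self, _ptr: NonNull<u8>, _layout: NonZeroLayout) {
        // A bump arena reclaims nothing until the arena itself is dropped.
    }
}

impl<'a> AllocRef for BumpHandle<'a> {
    type Error = AllocErr;

    fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
        // Layout alignments are powers of two, so round the cursor up with a mask.
        let align = layout.align().get();
        let start = self.0.cursor.get();
        let aligned = start.checked_add(align - 1).ok_or(AllocErr)? & !(align - 1);
        let end = aligned.checked_add(layout.size().get()).ok_or(AllocErr)?;
        if end > self.0.storage.len() {
            return Err(AllocErr);
        }
        // Interior mutability is what makes the `&self` receiver workable here.
        self.0.cursor.set(end);
        NonNull::new(self.0.storage[aligned].get()).ok_or(AllocErr)
    }
}
```

With something like this in place, `Box::new_in(5, arena.handle())` or `Vec::new_in(arena.handle())` can take a shared, `Copy` handle, which is exactly what the removed `mut` bindings in the container code below rely on.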
24 changes: 14 additions & 10 deletions src/boxed.rs
@@ -182,7 +182,7 @@ impl<T, A: AllocRef> Box<T, A> {
/// # #[allow(unused_variables)]
/// let five = Box::new_in(5, Global);
/// ```
#[allow(clippy::inline_always)]
#[allow(clippy::inline_always, clippy::needless_pass_by_value)]
#[inline(always)]
pub fn new_in(x: T, a: A) -> Self {
unsafe { Self::try_new_in(x, a).unwrap_unchecked() }
@@ -201,7 +201,8 @@ impl<T, A: AllocRef> Box<T, A> {
/// let five = Box::try_new_in(5, Global)?;
/// # Ok::<_, alloc_wg::alloc::AllocErr>(())
/// ```
pub fn try_new_in(x: T, mut a: A) -> Result<Self, A::Error> {
#[allow(clippy::needless_pass_by_value)]
pub fn try_new_in(x: T, a: A) -> Result<Self, A::Error> {
let ptr = if let Ok(layout) = NonZeroLayout::new::<T>() {
let ptr = a.alloc(layout)?.cast::<T>();
unsafe {
@@ -232,7 +233,7 @@ impl<T, A: AllocRef> Box<T, A> {
///
/// assert_eq!(*five, 5)
/// ```
#[allow(clippy::inline_always)]
#[allow(clippy::inline_always, clippy::needless_pass_by_value)]
#[inline(always)]
pub fn new_uninit_in(a: A) -> Box<mem::MaybeUninit<T>, A> {
unsafe { Self::try_new_uninit_in(a).unwrap_unchecked() }
@@ -257,7 +258,8 @@ impl<T, A: AllocRef> Box<T, A> {
/// assert_eq!(*five, 5);
/// # Ok::<_, alloc_wg::alloc::AllocErr>(())
/// ```
pub fn try_new_uninit_in(mut a: A) -> Result<Box<mem::MaybeUninit<T>, A>, A::Error> {
#[allow(clippy::needless_pass_by_value)]
pub fn try_new_uninit_in(a: A) -> Result<Box<mem::MaybeUninit<T>, A>, A::Error> {
let ptr = if let Ok(layout) = NonZeroLayout::new::<T>() {
let ptr: NonNull<mem::MaybeUninit<T>> = a.alloc(layout)?.cast();
ptr
@@ -269,14 +271,15 @@ impl<T, A: AllocRef> Box<T, A> {

/// Constructs a new `Pin<Box<T, A>>` with the specified allocator. If `T` does not implement
/// `Unpin`, then `x` will be pinned in memory and unable to be moved.
#[allow(clippy::inline_always)]
#[allow(clippy::inline_always, clippy::needless_pass_by_value)]
#[inline(always)]
pub fn pin_in(x: T, a: A) -> Pin<Self> {
unsafe { Self::try_pin_in(x, a).unwrap_unchecked() }
}

/// Constructs a new `Pin<Box<T, A>>` with the specified allocator. If `T` does not implement
/// `Unpin`, then `x` will be pinned in memory and unable to be moved.
#[allow(clippy::needless_pass_by_value)]
#[inline]
pub fn try_pin_in(x: T, a: A) -> Result<Pin<Self>, A::Error> {
Self::try_new_in(x, a).map(Pin::from)
@@ -335,7 +338,7 @@ impl<T, A: AllocRef> Box<[T], A> {
///
/// assert_eq!(*values, [1, 2, 3]);
/// ```
#[allow(clippy::inline_always)]
#[allow(clippy::inline_always, clippy::needless_pass_by_value)]
#[inline(always)]
pub fn new_uninit_slice_in(len: usize, a: A) -> Box<[mem::MaybeUninit<T>], A> {
unsafe { Self::try_new_uninit_slice_in(len, a).unwrap_unchecked() }
@@ -363,9 +366,10 @@ impl<T, A: AllocRef> Box<[T], A> {
/// assert_eq!(*values, [1, 2, 3]);
/// # Ok::<_, alloc_wg::collections::CollectionAllocErr<Global>>(())
/// ```
#[allow(clippy::needless_pass_by_value)]
pub fn try_new_uninit_slice_in(
len: usize,
mut a: A,
a: A,
) -> Result<Box<[mem::MaybeUninit<T>], A>, CollectionAllocErr<A>> {
let ptr = if mem::size_of::<T>() == 0 || len == 0 {
NonNull::dangling()
@@ -732,7 +736,7 @@ fn drop_box<T: ?Sized, A: DeallocRef>(boxed: &mut Box<T, A>) {
unsafe {
let ptr = boxed.ptr;
ptr::drop_in_place(ptr.as_ptr());
if let (mut alloc, Some(layout)) = boxed.alloc_ref() {
if let (alloc, Some(layout)) = boxed.alloc_ref() {
alloc.dealloc(ptr.cast().into(), layout)
}
}
@@ -807,7 +811,7 @@ where
/// ```
#[inline]
fn clone(&self) -> Self {
let mut b = self.build_alloc().clone();
let b = self.build_alloc().clone();
let old_ptr = self.ptr.cast();
let old_layout = NonZeroLayout::for_value(self.as_ref());

@@ -1276,7 +1280,7 @@ where
A::BuildAlloc: Clone,
{
fn clone(&self) -> Self {
let mut b = self.build_alloc().clone();
let b = self.build_alloc().clone();
let old_ptr = self.ptr.cast();
let old_layout = NonZeroLayout::for_value(self.as_ref());
let a = unsafe { b.build_alloc_ref(old_ptr.into(), old_layout) };
24 changes: 13 additions & 11 deletions src/raw_vec.rs
@@ -144,7 +144,8 @@ impl<T> RawVec<T> {

impl<T, A: DeallocRef> RawVec<T, A> {
/// Like `new` but parameterized over the choice of allocator for the returned `RawVec`.
pub fn new_in(mut a: A) -> Self {
#[allow(clippy::needless_pass_by_value)]
pub fn new_in(a: A) -> Self {
let capacity = if mem::size_of::<T>() == 0 { !0 } else { 0 };
Self {
ptr: Unique::empty(),
@@ -161,6 +162,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
///
/// * if the requested capacity exceeds `usize::MAX` bytes.
/// * on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
#[allow(clippy::needless_pass_by_value)]
pub fn with_capacity_in(capacity: usize, a: A) -> Self
where
A: AllocRef,
@@ -181,6 +183,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
/// * `CapacityOverflow` if the requested capacity exceeds `usize::MAX` bytes.
/// * `CapacityOverflow` on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
/// * `AllocError` on OOM
#[allow(clippy::needless_pass_by_value)]
pub fn try_with_capacity_in(capacity: usize, a: A) -> Result<Self, CollectionAllocErr<A>>
where
A: AllocRef,
@@ -196,6 +199,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
///
/// * if the requested capacity exceeds `usize::MAX` bytes.
/// * on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
#[allow(clippy::needless_pass_by_value)]
pub fn with_capacity_zeroed_in(capacity: usize, a: A) -> Self
where
A: AllocRef,
@@ -216,18 +220,16 @@ impl<T, A: DeallocRef> RawVec<T, A> {
/// * `CapacityOverflow` if the requested capacity exceeds `usize::MAX` bytes.
/// * `CapacityOverflow` on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
/// * `AllocError` on OOM
#[allow(clippy::needless_pass_by_value)]
pub fn try_with_capacity_zeroed_in(capacity: usize, a: A) -> Result<Self, CollectionAllocErr<A>>
where
A: AllocRef,
{
Self::allocate_in(capacity, true, a)
}

fn allocate_in(
capacity: usize,
zeroed: bool,
mut alloc: A,
) -> Result<Self, CollectionAllocErr<A>>
#[allow(clippy::needless_pass_by_value)]
fn allocate_in(capacity: usize, zeroed: bool, alloc: A) -> Result<Self, CollectionAllocErr<A>>
where
A: AllocRef,
{
@@ -443,7 +445,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
return Err(CollectionAllocErr::CapacityOverflow);
}

let (mut alloc, old_layout) = self.alloc_ref();
let (alloc, old_layout) = self.alloc_ref();
let (new_cap, ptr) = if let Some(old_layout) = old_layout {
// Since we guarantee that we never allocate more than
// `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as
@@ -524,7 +526,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
return Err(CapacityOverflow);
}

let (mut alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
let (alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
(alloc, layout)
} else {
return Ok(false); // nothing to double
@@ -701,7 +703,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
return Ok(false);
}

let (mut alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
let (alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
(alloc, layout)
} else {
return Ok(false); // nothing to double
@@ -846,7 +848,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {

let _ = alloc_guard(new_layout.size().get(), new_layout.align().get())?;

let (mut alloc, old_layout) = self.alloc_ref();
let (alloc, old_layout) = self.alloc_ref();
let result = if let Some(layout) = old_layout {
unsafe { alloc.realloc(self.ptr.cast().into(), layout, new_layout) }
} else {
@@ -888,7 +890,7 @@ enum ReserveStrategy {
impl<T, A: DeallocRef> RawVec<T, A> {
/// Frees the memory owned by the `RawVec` *without* trying to Drop its contents.
pub fn dealloc_buffer(&mut self) {
if let (mut alloc, Some(layout)) = self.alloc_ref() {
if let (alloc, Some(layout)) = self.alloc_ref() {
unsafe { alloc.dealloc(self.ptr.cast().into(), layout) }
}
}
8 changes: 8 additions & 0 deletions src/string.rs
@@ -566,6 +566,7 @@ impl String {

impl<A: DeallocRef> String<A> {
/// Like `new` but parameterized over the choice of allocator for the returned `String`.
#[allow(clippy::needless_pass_by_value)]
#[inline]
pub fn new_in(a: A) -> Self {
Self {
@@ -577,6 +578,7 @@ impl<A: DeallocRef> String<A> {
///
/// # Panics
/// Panics if the allocation fails.
#[allow(clippy::needless_pass_by_value)]
#[inline]
pub fn with_capacity_in(capacity: usize, a: A) -> Self
where
@@ -588,6 +590,7 @@ impl<A: DeallocRef> String<A> {
}

/// Like `with_capacity_in` but returns errors instead of panicking.
#[allow(clippy::needless_pass_by_value)]
#[inline]
pub fn try_with_capacity_in(capacity: usize, a: A) -> Result<Self, CollectionAllocErr<A>>
where
@@ -602,6 +605,7 @@ impl<A: DeallocRef> String<A> {
///
/// # Panics
/// Panics if the allocation fails.
#[allow(clippy::needless_pass_by_value)]
#[inline]
pub fn from_str_in(s: &str, a: A) -> Self
where
@@ -613,6 +617,7 @@ impl<A: DeallocRef> String<A> {
}

/// Like `from_str_in` but returns errors instead of panicking.
#[allow(clippy::needless_pass_by_value)]
#[inline]
pub fn try_from_str_in(s: &str, a: A) -> Result<Self, CollectionAllocErr<A>>
where
@@ -703,6 +708,7 @@ impl<A: DeallocRef> String<A> {
/// # Panics
///
/// Panics if allocation fails.
#[allow(clippy::needless_pass_by_value)]
pub fn from_utf8_lossy_in(v: &[u8], a: A) -> Self
where
A: ReallocRef,
@@ -715,6 +721,7 @@ impl<A: DeallocRef> String<A> {
}

/// Like `from_utf8_lossy_in` but returns errors instead of panicking.
#[allow(clippy::needless_pass_by_value)]
pub fn try_from_utf8_lossy_in(v: &[u8], a: A) -> Result<Self, CollectionAllocErr<A>>
where
A: ReallocRef,
@@ -751,6 +758,7 @@ impl<A: DeallocRef> String<A> {
}

/// Like `from_utf16` but parameterized over the choice of allocator for the returned `String`.
#[allow(clippy::needless_pass_by_value)]
pub fn from_utf16_in(v: &[u16], a: A) -> Result<Self, FromUtf16Error>
where
A: ReallocRef,
7 changes: 5 additions & 2 deletions src/vec.rs
@@ -472,6 +472,7 @@ impl<T> Vec<T> {

impl<T, A: DeallocRef> Vec<T, A> {
/// Like `new` but parameterized over the choice of allocator for the returned `Vec`.
#[allow(clippy::needless_pass_by_value)]
#[inline]
pub fn new_in(a: A) -> Self {
Self {
@@ -487,6 +488,7 @@ impl<T, A: DeallocRef> Vec<T, A> {
///
/// * if the requested capacity exceeds `usize::MAX` bytes.
/// * on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
#[allow(clippy::needless_pass_by_value)]
#[inline]
pub fn with_capacity_in(capacity: usize, a: A) -> Self
where
@@ -506,6 +508,7 @@ impl<T, A: DeallocRef> Vec<T, A> {
/// * `CapacityOverflow` if the requested capacity exceeds `usize::MAX` bytes.
/// * `CapacityOverflow` on 32-bit platforms if the requested capacity exceeds `isize::MAX` bytes.
/// * `AllocError` on OOM
#[allow(clippy::needless_pass_by_value)]
#[inline]
pub fn try_with_capacity_in(capacity: usize, a: A) -> Result<Self, CollectionAllocErr<A>>
where
@@ -2185,7 +2188,7 @@ where
#[must_use]
#[inline]
fn clone(&self) -> Self {
let mut b = self.buf.build_alloc().clone();
let b = self.buf.build_alloc().clone();
let old_layout = self.buf.current_layout();

unsafe {
@@ -2463,7 +2466,7 @@ where
}

impl<T, A: ReallocRef> SpecExtend<T, IntoIter<T, A>, A> for Vec<T, A> {
fn try_from_iter_in(iter: IntoIter<T, A>, mut a: A) -> Result<Self, CollectionAllocErr<A>> {
fn try_from_iter_in(iter: IntoIter<T, A>, a: A) -> Result<Self, CollectionAllocErr<A>> {
// A common case is passing a vector into a function which immediately
// re-collects into a vector. We can short circuit this if the IntoIter
// has not been advanced at all.
2 changes: 1 addition & 1 deletion tests/heap.rs
@@ -13,7 +13,7 @@ fn std_heap_overaligned_request() {
check_overalign_requests(Global)
}

fn check_overalign_requests<T: AllocRef>(mut allocator: T)
fn check_overalign_requests<T: AllocRef>(allocator: T)
where
T::Error: Debug,
{