@@ -1,4 +1,5 @@
 use core::alloc::Layout;
+use core::cmp::Ordering;
 use core::ffi::c_void;
 use core::mem;
 use core::ptr::{self, NonNull};
@@ -67,6 +68,81 @@ unsafe impl Allocator for Pool {
             ngx_pfree(self.0.as_ptr(), ptr.as_ptr().cast());
         }
     }
+
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        match new_layout.size().cmp(&old_layout.size()) {
+            Ordering::Less => Err(AllocError),
+            Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
+            Ordering::Greater => {
+                // If `ptr` is the last allocation in the pool and there is enough room,
+                // the pool's `last` pointer can be bumped forward without a real allocation.
+                if ptr.byte_add(old_layout.size()).as_ptr() == self.as_ref().d.last
+                    && ptr.byte_add(new_layout.size()).as_ptr() <= self.as_ref().d.end
+                    && ptr.align_offset(new_layout.align()) == 0
+                {
+                    let pool = self.0.as_ptr();
+                    (*pool).d.last = (*pool)
+                        .d
+                        .last
+                        .byte_add(new_layout.size() - old_layout.size());
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
+                } else {
+                    let new_ptr = self.allocate(new_layout)?;
+                    unsafe {
+                        ptr::copy_nonoverlapping(
+                            ptr.as_ptr(),
+                            new_ptr.as_ptr().cast(),
+                            old_layout.size(),
+                        );
+                        self.deallocate(ptr, old_layout);
+                    }
+                    Ok(new_ptr)
+                }
+            }
+        }
+    }
+
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        match old_layout.size().cmp(&new_layout.size()) {
+            Ordering::Less => Err(AllocError),
+            Ordering::Equal => Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size())),
+            Ordering::Greater => {
+                // If `ptr` is the last allocation in the pool, the pool's `last` pointer
+                // can be moved back without a real allocation.
+                if ptr.byte_add(old_layout.size()).as_ptr() == self.as_ref().d.last
+                    && ptr.align_offset(new_layout.align()) == 0
+                {
+                    let pool = self.0.as_ptr();
+                    (*pool).d.last = (*pool)
+                        .d
+                        .last
+                        .byte_sub(old_layout.size() - new_layout.size());
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
+                } else {
+                    let new_ptr = self.allocate(new_layout)?;
+                    unsafe {
+                        ptr::copy_nonoverlapping(
+                            ptr.as_ptr(),
+                            new_ptr.as_ptr().cast(),
+                            new_layout.size(),
+                        );
+                        self.deallocate(ptr, old_layout);
+                    }
+                    Ok(new_ptr)
+                }
+            }
+        }
+    }
 }

 impl AsRef<ngx_pool_t> for Pool {
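
Both in-place fast paths are the classic bump-allocator trick: a block can be resized without copying only when it is the most recent allocation, that is, when `ptr + old_size == d.last`, and (for `grow`) when the extra bytes still fit before `d.end`; otherwise the code falls back to allocate, copy, and free, mirroring the default `Allocator::grow`/`shrink` behavior. The standalone sketch below illustrates the same last-allocation check with plain offsets; `Arena`, `alloc`, and `grow_in_place` are hypothetical names for illustration, not part of ngx-rust or nginx.

```rust
use std::cell::Cell;

/// Toy bump arena tracking offsets only: `last` plays the role of
/// `ngx_pool_t.d.last`, and `cap` the role of `d.end`.
struct Arena {
    cap: usize,
    last: Cell<usize>, // offset of the first free byte
}

impl Arena {
    fn new(cap: usize) -> Self {
        Arena { cap, last: Cell::new(0) }
    }

    /// Bump-allocate `size` bytes, returning the block's start offset.
    fn alloc(&self, size: usize) -> Option<usize> {
        let start = self.last.get();
        if start + size > self.cap {
            return None;
        }
        self.last.set(start + size);
        Some(start)
    }

    /// In-place grow: succeeds only when the block ends exactly at `last`
    /// (it is the latest allocation) and the new size still fits, the same
    /// condition as `ptr + old == d.last && ptr + new <= d.end` above.
    fn grow_in_place(&self, start: usize, old: usize, new: usize) -> bool {
        if start + old == self.last.get() && start + new <= self.cap {
            self.last.set(start + new);
            true
        } else {
            false // caller must fall back to allocate + copy + free
        }
    }
}

fn main() {
    let arena = Arena::new(64);
    let a = arena.alloc(16).unwrap();

    // `a` is the last allocation, so growing it just bumps `last`.
    assert!(arena.grow_in_place(a, 16, 32));

    // After another allocation, `a` is no longer last: the fast path fails,
    // and a real allocator would allocate, copy, and free instead.
    let _b = arena.alloc(8).unwrap();
    assert!(!arena.grow_in_place(a, 32, 40));
}
```

The alignment check in the patch (`ptr.align_offset(new_layout.align()) == 0`) has no analogue in this offset-based sketch; in the pool it guards against a `new_layout` requesting stricter alignment than the existing block satisfies.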