@@ -1,79 +1,85 @@
 # The Final Code
 
 ```rust
-#![feature(ptr_internals)]
-#![feature(allocator_api)]
-#![feature(alloc_layout_extra)]
-
-use std::ptr::{Unique, NonNull, self};
+use std::alloc::{self, Layout};
+use std::marker::PhantomData;
 use std::mem;
 use std::ops::{Deref, DerefMut};
-use std::marker::PhantomData;
-use std::alloc::{
-    Allocator,
-    Global,
-    GlobalAlloc,
-    Layout,
-    handle_alloc_error
-};
+use std::ptr::{self, NonNull};
 
 struct RawVec<T> {
-    ptr: Unique<T>,
+    ptr: NonNull<T>,
     cap: usize,
+    _marker: PhantomData<T>,
 }
 
 unsafe impl<T: Send> Send for RawVec<T> {}
 unsafe impl<T: Sync> Sync for RawVec<T> {}
 
 impl<T> RawVec<T> {
     fn new() -> Self {
         // !0 is usize::MAX. This branch should be stripped at compile time.
         let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
 
-        // Unique::dangling() doubles as "unallocated" and "zero-sized allocation"
-        RawVec { ptr: Unique::dangling(), cap: cap }
+        // `NonNull::dangling()` doubles as "unallocated" and "zero-sized allocation"
+        RawVec {
+            ptr: NonNull::dangling(),
+            cap: cap,
+            _marker: PhantomData,
+        }
     }
 
     fn grow(&mut self) {
-        unsafe {
-            let elem_size = mem::size_of::<T>();
-
-            // since we set the capacity to usize::MAX when elem_size is
-            // 0, getting to here necessarily means the Vec is overfull.
-            assert!(elem_size != 0, "capacity overflow");
+        // since we set the capacity to usize::MAX when T has size 0,
+        // getting to here necessarily means the Vec is overfull.
+        assert!(mem::size_of::<T>() != 0, "capacity overflow");
 
-            let (new_cap, ptr) = if self.cap == 0 {
-                let ptr = Global.allocate(Layout::array::<T>(1).unwrap());
-                (1, ptr)
-            } else {
-                let new_cap = 2 * self.cap;
-                let c: NonNull<T> = self.ptr.into();
-                let ptr = Global.grow(c.cast(),
-                                      Layout::array::<T>(self.cap).unwrap(),
-                                      Layout::array::<T>(new_cap).unwrap());
-                (new_cap, ptr)
-            };
-
-            // If allocate or reallocate fail, oom
-            if ptr.is_err() {
-                handle_alloc_error(Layout::from_size_align_unchecked(
-                    new_cap * elem_size,
-                    mem::align_of::<T>(),
-                ))
-            }
-            let ptr = ptr.unwrap();
-
-            self.ptr = Unique::new_unchecked(ptr.as_ptr() as *mut _);
-            self.cap = new_cap;
-        }
+        let (new_cap, new_layout) = if self.cap == 0 {
+            (1, Layout::array::<T>(1).unwrap())
+        } else {
+            // This can't overflow because we ensure self.cap <= isize::MAX.
+            let new_cap = 2 * self.cap;
+
+            // `Layout::array` checks that the number of bytes is <= usize::MAX,
+            // but this is redundant since old_layout.size() <= isize::MAX,
+            // so the `unwrap` should never fail.
+            let new_layout = Layout::array::<T>(new_cap).unwrap();
+            (new_cap, new_layout)
+        };
+
+        // Ensure that the new allocation doesn't exceed `isize::MAX` bytes.
+        assert!(
+            new_layout.size() <= isize::MAX as usize,
+            "Allocation too large"
+        );
+
+        let new_ptr = if self.cap == 0 {
+            unsafe { alloc::alloc(new_layout) }
+        } else {
+            let old_layout = Layout::array::<T>(self.cap).unwrap();
+            let old_ptr = self.ptr.as_ptr() as *mut u8;
+            unsafe { alloc::realloc(old_ptr, old_layout, new_layout.size()) }
+        };
+
+        // If allocation fails, `new_ptr` will be null, in which case we abort.
+        self.ptr = match NonNull::new(new_ptr as *mut T) {
+            Some(p) => p,
+            None => alloc::handle_alloc_error(new_layout),
+        };
+        self.cap = new_cap;
     }
 }
 
 impl<T> Drop for RawVec<T> {
     fn drop(&mut self) {
         let elem_size = mem::size_of::<T>();
 
         if self.cap != 0 && elem_size != 0 {
             unsafe {
-                let c: NonNull<T> = self.ptr.into();
-                Global.deallocate(c.cast(),
-                                  Layout::array::<T>(self.cap).unwrap());
+                alloc::dealloc(
+                    self.ptr.as_ptr() as *mut u8,
+                    Layout::array::<T>(self.cap).unwrap(),
+                );
             }
         }
     }
@@ -85,21 +91,30 @@ pub struct Vec<T> {
 }
 
 impl<T> Vec<T> {
-    fn ptr(&self) -> *mut T { self.buf.ptr.as_ptr() }
+    fn ptr(&self) -> *mut T {
+        self.buf.ptr.as_ptr()
+    }
 
-    fn cap(&self) -> usize { self.buf.cap }
+    fn cap(&self) -> usize {
+        self.buf.cap
+    }
 
     pub fn new() -> Self {
-        Vec { buf: RawVec::new(), len: 0 }
+        Vec {
+            buf: RawVec::new(),
+            len: 0,
+        }
     }
 
     pub fn push(&mut self, elem: T) {
-        if self.len == self.cap() { self.buf.grow(); }
+        if self.len == self.cap() {
+            self.buf.grow();
+        }
 
         unsafe {
             ptr::write(self.ptr().offset(self.len as isize), elem);
         }
 
-        // Can't fail, we'll OOM first.
+        // Can't overflow, we'll OOM first.
         self.len += 1;
     }
@@ -108,22 +123,22 @@ impl<T> Vec<T> {
             None
         } else {
             self.len -= 1;
-            unsafe {
-                Some(ptr::read(self.ptr().offset(self.len as isize)))
-            }
+            unsafe { Some(ptr::read(self.ptr().offset(self.len as isize))) }
         }
     }
 
     pub fn insert(&mut self, index: usize, elem: T) {
         assert!(index <= self.len, "index out of bounds");
-        if self.cap() == self.len { self.buf.grow(); }
+        if self.cap() == self.len {
+            self.buf.grow();
+        }
 
         unsafe {
-            if index < self.len {
-                ptr::copy(self.ptr().offset(index as isize),
-                          self.ptr().offset(index as isize + 1),
-                          self.len - index);
-            }
+            ptr::copy(
+                self.ptr().offset(index as isize),
+                self.ptr().offset(index as isize + 1),
+                self.len - index,
+            );
             ptr::write(self.ptr().offset(index as isize), elem);
             self.len += 1;
         }
@@ -134,9 +149,11 @@ impl<T> Vec<T> {
         unsafe {
             self.len -= 1;
             let result = ptr::read(self.ptr().offset(index as isize));
-            ptr::copy(self.ptr().offset(index as isize + 1),
-                      self.ptr().offset(index as isize),
-                      self.len - index);
+            ptr::copy(
+                self.ptr().offset(index as isize + 1),
+                self.ptr().offset(index as isize),
+                self.len - index,
+            );
             result
         }
     }
@@ -181,24 +198,16 @@ impl<T> Drop for Vec<T> {
 impl<T> Deref for Vec<T> {
     type Target = [T];
     fn deref(&self) -> &[T] {
-        unsafe {
-            std::slice::from_raw_parts(self.ptr(), self.len)
-        }
+        unsafe { std::slice::from_raw_parts(self.ptr(), self.len) }
     }
 }
 
 impl<T> DerefMut for Vec<T> {
     fn deref_mut(&mut self) -> &mut [T] {
-        unsafe {
-            std::slice::from_raw_parts_mut(self.ptr(), self.len)
-        }
+        unsafe { std::slice::from_raw_parts_mut(self.ptr(), self.len) }
     }
 }
 
 struct RawValIter<T> {
     start: *const T,
     end: *const T,
@@ -239,8 +248,8 @@ impl<T> Iterator for RawValIter<T> {
 
     fn size_hint(&self) -> (usize, Option<usize>) {
         let elem_size = mem::size_of::<T>();
-        let len = (self.end as usize - self.start as usize)
-                  / if elem_size == 0 { 1 } else { elem_size };
+        let len = (self.end as usize - self.start as usize) /
+                  if elem_size == 0 { 1 } else { elem_size };
         (len, Some(len))
     }
 }
@@ -262,9 +271,6 @@ impl<T> DoubleEndedIterator for RawValIter<T> {
     }
 }
 
 pub struct IntoIter<T> {
     _buf: RawVec<T>, // we don't actually care about this. Just need it to live.
     iter: RawValIter<T>,
@@ -272,12 +278,18 @@ pub struct IntoIter<T> {
 
 impl<T> Iterator for IntoIter<T> {
     type Item = T;
-    fn next(&mut self) -> Option<T> { self.iter.next() }
-    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
+    fn next(&mut self) -> Option<T> {
+        self.iter.next()
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
 }
 
 impl<T> DoubleEndedIterator for IntoIter<T> {
-    fn next_back(&mut self) -> Option<T> { self.iter.next_back() }
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back()
+    }
 }
 
 impl<T> Drop for IntoIter<T> {
@@ -286,9 +298,6 @@ impl<T> Drop for IntoIter<T> {
     }
 }
 
 pub struct Drain<'a, T: 'a> {
     vec: PhantomData<&'a mut Vec<T>>,
     iter: RawValIter<T>,
@@ -296,12 +305,18 @@ pub struct Drain<'a, T: 'a> {
 
 impl<'a, T> Iterator for Drain<'a, T> {
     type Item = T;
-    fn next(&mut self) -> Option<T> { self.iter.next() }
-    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
+    fn next(&mut self) -> Option<T> {
+        self.iter.next()
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
 }
 
 impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
-    fn next_back(&mut self) -> Option<T> { self.iter.next_back() }
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back()
+    }
 }
 
 impl<'a, T> Drop for Drain<'a, T> {
@@ -321,6 +336,7 @@ impl<'a, T> Drop for Drain<'a, T> {
 #
 # mod tests {
 #     use super::*;
 #
 #     pub fn create_push_pop() {
 #         let mut v = Vec::new();
 #         v.push(1);