Merge pull request #16 from Gankro/master

use new Unique/heap APIs
Brian Anderson 8 years ago committed by GitHub
commit 1a933af954

@@ -40,8 +40,8 @@ struct Box<T>{ ptr: Unique<T> }
 impl<T> Drop for Box<T> {
     fn drop(&mut self) {
         unsafe {
-            drop_in_place(*self.ptr);
-            heap::deallocate((*self.ptr) as *mut u8,
+            drop_in_place(self.ptr.as_ptr());
+            heap::deallocate(self.ptr.as_ptr() as *mut u8,
                              mem::size_of::<T>(),
                              mem::align_of::<T>());
         }
@@ -71,8 +71,8 @@ struct Box<T>{ ptr: Unique<T> }
 impl<T> Drop for Box<T> {
     fn drop(&mut self) {
         unsafe {
-            drop_in_place(*self.ptr);
-            heap::deallocate((*self.ptr) as *mut u8,
+            drop_in_place(self.ptr.as_ptr());
+            heap::deallocate(self.ptr.as_ptr() as *mut u8,
                              mem::size_of::<T>(),
                              mem::align_of::<T>());
         }
@@ -86,7 +86,7 @@ impl<T> Drop for SuperBox<T> {
         unsafe {
             // Hyper-optimized: deallocate the box's contents for it
             // without `drop`ing the contents
-            heap::deallocate((*self.my_box.ptr) as *mut u8,
+            heap::deallocate(self.my_box.ptr.as_ptr() as *mut u8,
                              mem::size_of::<T>(),
                              mem::align_of::<T>());
         }
@@ -149,8 +149,8 @@ struct Box<T>{ ptr: Unique<T> }
 impl<T> Drop for Box<T> {
     fn drop(&mut self) {
         unsafe {
-            drop_in_place(*self.ptr);
-            heap::deallocate((*self.ptr) as *mut u8,
+            drop_in_place(self.ptr.as_ptr());
+            heap::deallocate(self.ptr.as_ptr() as *mut u8,
                              mem::size_of::<T>(),
                              mem::align_of::<T>());
         }
@@ -166,7 +166,7 @@ impl<T> Drop for SuperBox<T> {
             // without `drop`ing the contents. Need to set the `box`
             // field as `None` to prevent Rust from trying to Drop it.
             let my_box = self.my_box.take().unwrap();
-            heap::deallocate((*my_box.ptr) as *mut u8,
+            heap::deallocate(my_box.ptr.as_ptr() as *mut u8,
                              mem::size_of::<T>(),
                              mem::align_of::<T>());
             mem::forget(my_box);

@@ -7,16 +7,12 @@ can't allocate, but also can't put a null pointer in `ptr`, what do we do in
 This is perfectly fine because we already have `cap == 0` as our sentinel for no
 allocation. We don't even need to handle it specially in almost any code because
-we usually need to check if `cap > len` or `len > 0` anyway. The traditional
-Rust value to put here is `0x01`. The standard library actually exposes this
-as `alloc::heap::EMPTY`. There are quite a few places where we'll
-want to use `heap::EMPTY` because there's no real allocation to talk about but
+we usually need to check if `cap > len` or `len > 0` anyway. The recommended
+Rust value to put here is `mem::align_of::<T>()`. Unique provides a convenience
+for this: `Unique::empty()`. There are quite a few places where we'll
+want to use `empty` because there's no real allocation to talk about but
 `null` would make the compiler do bad things.
 
-All of the `heap` API is totally unstable under the `heap_api` feature, though.
-We could trivially define `heap::EMPTY` ourselves, but we'll want the rest of
-the `heap` API anyway, so let's just get that dependency over with.
-
 So:
 
 ```rust,ignore
@@ -24,16 +20,10 @@ So:
 use std::mem;
 
-use alloc::heap::EMPTY;
-
 impl<T> Vec<T> {
     fn new() -> Self {
         assert!(mem::size_of::<T>() != 0, "We're not ready to handle ZSTs");
-        unsafe {
-            // need to cast EMPTY to the actual ptr type we want, let
-            // inference handle it.
-            Vec { ptr: Unique::new(heap::EMPTY as *mut _), len: 0, cap: 0 }
-        }
+        Vec { ptr: Unique::empty(), len: 0, cap: 0 }
     }
 }
 ```
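
For readers who want to see the sentinel idea above in isolation: a pointer whose value is `mem::align_of::<T>()` is non-null and well-aligned, yet backed by no allocation. The sketch below (using a hypothetical `DanglingUnique` type, not the std `Unique`) shows roughly what an `empty()` constructor amounts to.

```rust
use std::marker::PhantomData;
use std::mem;

// Hypothetical stand-in for `Unique`, only to illustrate the sentinel idea.
struct DanglingUnique<T> {
    ptr: *const T,
    _marker: PhantomData<T>,
}

impl<T> DanglingUnique<T> {
    // Roughly what `Unique::empty()` provides: a non-null, well-aligned
    // pointer that doesn't point into any real allocation.
    fn empty() -> Self {
        DanglingUnique {
            ptr: mem::align_of::<T>() as *const T,
            _marker: PhantomData,
        }
    }

    fn as_ptr(&self) -> *mut T {
        self.ptr as *mut T
    }
}

fn main() {
    let p = DanglingUnique::<u64>::empty();
    // Non-null and properly aligned, so it works as a "no allocation yet" sentinel.
    assert!(!p.as_ptr().is_null());
    assert_eq!(p.as_ptr() as usize % mem::align_of::<u64>(), 0);
}
```
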
@@ -202,7 +192,7 @@ fn grow(&mut self) {
                     "capacity overflow");
 
             let new_num_bytes = old_num_bytes * 2;
-            let ptr = heap::reallocate(*self.ptr as *mut _,
+            let ptr = heap::reallocate(self.ptr.as_ptr() as *mut _,
                                        old_num_bytes,
                                        new_num_bytes,
                                        align);

@@ -21,7 +21,7 @@ impl<T> Drop for Vec<T> {
             let elem_size = mem::size_of::<T>();
             let num_bytes = elem_size * self.cap;
             unsafe {
-                heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+                heap::deallocate(self.ptr.as_ptr() as *mut _, num_bytes, align);
             }
         }
     }

@@ -18,7 +18,7 @@ impl<T> Deref for Vec<T> {
     type Target = [T];
     fn deref(&self) -> &[T] {
         unsafe {
-            ::std::slice::from_raw_parts(*self.ptr, self.len)
+            ::std::slice::from_raw_parts(self.ptr.as_ptr(), self.len)
         }
     }
 }
@@ -32,7 +32,7 @@ use std::ops::DerefMut;
 impl<T> DerefMut for Vec<T> {
     fn deref_mut(&mut self) -> &mut [T] {
         unsafe {
-            ::std::slice::from_raw_parts_mut(*self.ptr, self.len)
+            ::std::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len)
         }
     }
 }

@@ -20,13 +20,11 @@ struct RawVec<T> {
 impl<T> RawVec<T> {
     fn new() -> Self {
-        unsafe {
-            // !0 is usize::MAX. This branch should be stripped at compile time.
-            let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+        // !0 is usize::MAX. This branch should be stripped at compile time.
+        let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
 
-            // heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
-            RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap }
-        }
+        // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
+        RawVec { ptr: Unique::empty(), cap: cap }
     }
 
     fn grow(&mut self) {
@@ -44,7 +42,7 @@ impl<T> RawVec<T> {
                 (1, ptr)
             } else {
                 let new_cap = 2 * self.cap;
-                let ptr = heap::reallocate(*self.ptr as *mut _,
+                let ptr = heap::reallocate(self.ptr.as_ptr() as *mut _,
                                            self.cap * elem_size,
                                            new_cap * elem_size,
                                            align);
@@ -68,7 +66,7 @@ impl<T> Drop for RawVec<T> {
 
             let num_bytes = elem_size * self.cap;
             unsafe {
-                heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+                heap::deallocate(self.ptr.as_ptr() as *mut _, num_bytes, align);
             }
         }
     }
@@ -84,7 +82,7 @@ pub struct Vec<T> {
 }
 
 impl<T> Vec<T> {
-    fn ptr(&self) -> *mut T { *self.buf.ptr }
+    fn ptr(&self) -> *mut T { self.buf.ptr.as_ptr() }
 
     fn cap(&self) -> usize { self.buf.cap }

@@ -139,7 +139,7 @@ impl<T> Drop for IntoIter<T> {
             let elem_size = mem::size_of::<T>();
             let num_bytes = elem_size * self.cap;
             unsafe {
-                heap::deallocate(*self.buf as *mut _, num_bytes, align);
+                heap::deallocate(self.buf.as_ptr() as *mut _, num_bytes, align);
             }
         }
     }

@@ -32,7 +32,6 @@ As a recap, Unique is a wrapper around a raw pointer that declares that:
 * We are variant over `T`
 * We may own a value of type `T` (for drop check)
 * We are Send/Sync if `T` is Send/Sync
-* We deref to `*mut T` (so it largely acts like a `*mut` in our code)
 * Our pointer is never null (so `Option<Vec<T>>` is null-pointer-optimized)
 
 We can implement all of the above requirements except for the last
@@ -53,21 +52,16 @@ struct Unique<T> {
 unsafe impl<T: Send> Send for Unique<T> {}
 unsafe impl<T: Sync> Sync for Unique<T> {}
 
-impl<T> Unique<T> {
+impl<T: ?Sized> Unique<T> {
     pub fn new(ptr: *mut T) -> Self {
         Unique { ptr: ptr, _marker: PhantomData }
     }
-}
 
-impl<T> Deref for Unique<T> {
-    type Target = *mut T;
-    fn deref(&self) -> &*mut T {
-        // There's no way to cast the *const to a *mut
-        // while also taking a reference. So we just
-        // transmute it since it's all "just pointers".
-        unsafe { mem::transmute(&self.ptr) }
+    pub fn as_ptr(&self) -> *mut T {
+        self.ptr as *mut T
     }
 }
 
 # fn main() {}
 ```
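
A quick usage sketch of the stable `Unique` shown in this hunk; the type is repeated below so the snippet stands alone, and the example itself is illustrative rather than part of the diff.

```rust
use std::marker::PhantomData;

// The stable Unique from the hunk above, repeated so this compiles on its own.
pub struct Unique<T> {
    ptr: *const T,           // *const for variance
    _marker: PhantomData<T>, // for the drop checker
}

impl<T> Unique<T> {
    pub fn new(ptr: *mut T) -> Self {
        Unique { ptr: ptr, _marker: PhantomData }
    }

    pub fn as_ptr(&self) -> *mut T {
        self.ptr as *mut T
    }
}

fn main() {
    // Round-trip a heap pointer through `new` and `as_ptr`.
    let raw: *mut u32 = Box::into_raw(Box::new(42));
    let unique = Unique::new(raw);
    assert_eq!(unique.as_ptr(), raw);

    // Take ownership back so the allocation is freed and nothing leaks.
    unsafe { drop(Box::from_raw(unique.as_ptr())); }
}
```
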
@@ -92,7 +86,7 @@ pub struct Vec<T> {
 If you don't care about the null-pointer optimization, then you can use the
 stable code. However we will be designing the rest of the code around enabling
-the optimization. In particular, `Unique::new` is unsafe to call, because
+this optimization. It should be noted that `Unique::new` is unsafe to call, because
 putting `null` inside of it is Undefined Behavior. Our stable Unique doesn't
 need `new` to be unsafe because it doesn't make any interesting guarantees about
 its contents.
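
As an aside on the null-pointer optimization mentioned here: stable Rust's `NonNull` makes the same never-null promise, so the effect is easy to check directly. The snippet uses `NonNull` purely as a stand-in for `Unique`.

```rust
use std::mem::size_of;
use std::ptr::NonNull;

fn main() {
    // NonNull promises it is never null, so `None` can be encoded as the
    // all-zero bit pattern and no extra discriminant is needed.
    assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<*mut u8>());

    // A plain raw pointer may legally be null, so Option must carry a tag.
    assert!(size_of::<Option<*mut u8>>() > size_of::<*mut u8>());
}
```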

@@ -1,3 +1,4 @@
 # RawVec
 
 We've actually reached an interesting situation here: we've duplicated the logic
@@ -17,9 +18,7 @@ struct RawVec<T> {
 impl<T> RawVec<T> {
     fn new() -> Self {
         assert!(mem::size_of::<T>() != 0, "TODO: implement ZST support");
-        unsafe {
-            RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: 0 }
-        }
+        RawVec { ptr: Unique::empty(), cap: 0 }
     }
 
     // unchanged from Vec
@@ -33,7 +32,7 @@ impl<T> RawVec<T> {
                 (1, ptr)
             } else {
                 let new_cap = 2 * self.cap;
-                let ptr = heap::reallocate(*self.ptr as *mut _,
+                let ptr = heap::reallocate(self.ptr.as_ptr() as *mut _,
                                            self.cap * elem_size,
                                            new_cap * elem_size,
                                            align);
@@ -57,7 +56,7 @@ impl<T> Drop for RawVec<T> {
             let elem_size = mem::size_of::<T>();
             let num_bytes = elem_size * self.cap;
             unsafe {
-                heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+                heap::deallocate(self.ptr.as_ptr() as *mut _, num_bytes, align);
             }
         }
     }
@@ -73,7 +72,7 @@ pub struct Vec<T> {
 }
 
 impl<T> Vec<T> {
-    fn ptr(&self) -> *mut T { *self.buf.ptr }
+    fn ptr(&self) -> *mut T { self.buf.ptr.as_ptr() }
 
     fn cap(&self) -> usize { self.buf.cap }

@@ -19,7 +19,7 @@ RawValIter and RawVec respectively. How mysteriously convenient.
 ## Allocating Zero-Sized Types
 
 So if the allocator API doesn't support zero-sized allocations, what on earth
-do we store as our allocation? Why, `heap::EMPTY` of course! Almost every operation
+do we store as our allocation? `Unique::empty()` of course! Almost every operation
 with a ZST is a no-op since ZSTs have exactly one value, and therefore no state needs
 to be considered to store or load them. This actually extends to `ptr::read` and
 `ptr::write`: they won't actually look at the pointer at all. As such we never need
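
To see the `ptr::read`/`ptr::write` claim above concretely: zero-sized reads and writes never touch memory, so a dangling but aligned, non-null pointer is enough. The sketch uses stable `NonNull::dangling()` rather than the chapter's `Unique::empty()`.

```rust
use std::ptr::{self, NonNull};

// A zero-sized type: exactly one value, zero bytes of storage.
#[derive(Debug, PartialEq)]
struct Token;

fn main() {
    // Dangling but well-aligned and non-null; nothing is ever allocated.
    let p: *mut Token = NonNull::dangling().as_ptr();

    unsafe {
        // Zero-sized writes and reads are no-ops on memory, so this is fine
        // even though `p` points at no allocation.
        ptr::write(p, Token);
        let t = ptr::read(p);
        assert_eq!(t, Token);
    }
}
```
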
@@ -35,13 +35,11 @@ method of RawVec.
 ```rust,ignore
 impl<T> RawVec<T> {
     fn new() -> Self {
-        unsafe {
-            // !0 is usize::MAX. This branch should be stripped at compile time.
-            let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+        // !0 is usize::MAX. This branch should be stripped at compile time.
+        let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
 
-            // heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
-            RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap }
-        }
+        // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
+        RawVec { ptr: Unique::empty(), cap: cap }
     }
 
     fn grow(&mut self) {
@@ -59,7 +57,7 @@ impl<T> RawVec<T> {
                 (1, ptr)
             } else {
                 let new_cap = 2 * self.cap;
-                let ptr = heap::reallocate(*self.ptr as *mut _,
+                let ptr = heap::reallocate(self.ptr.as_ptr() as *mut _,
                                            self.cap * elem_size,
                                            new_cap * elem_size,
                                            align);
@@ -85,7 +83,7 @@ impl<T> Drop for RawVec<T> {
 
             let num_bytes = elem_size * self.cap;
             unsafe {
-                heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+                heap::deallocate(self.ptr.as_ptr() as *mut _, num_bytes, align);
             }
         }
     }
