Merge pull request #16 from Gankro/master

use new Unique/heap APIs
pull/18/head
Brian Anderson 7 years ago committed by GitHub
commit 1a933af954

@ -40,8 +40,8 @@ struct Box<T>{ ptr: Unique<T> }
impl<T> Drop for Box<T> { impl<T> Drop for Box<T> {
fn drop(&mut self) { fn drop(&mut self) {
unsafe { unsafe {
drop_in_place(*self.ptr); drop_in_place(self.ptr.as_ptr());
heap::deallocate((*self.ptr) as *mut u8, heap::deallocate(self.ptr.as_ptr() as *mut u8,
mem::size_of::<T>(), mem::size_of::<T>(),
mem::align_of::<T>()); mem::align_of::<T>());
} }
@ -71,8 +71,8 @@ struct Box<T>{ ptr: Unique<T> }
impl<T> Drop for Box<T> { impl<T> Drop for Box<T> {
fn drop(&mut self) { fn drop(&mut self) {
unsafe { unsafe {
drop_in_place(*self.ptr); drop_in_place(self.ptr.as_ptr());
heap::deallocate((*self.ptr) as *mut u8, heap::deallocate(self.ptr.as_ptr() as *mut u8,
mem::size_of::<T>(), mem::size_of::<T>(),
mem::align_of::<T>()); mem::align_of::<T>());
} }
@ -86,7 +86,7 @@ impl<T> Drop for SuperBox<T> {
unsafe { unsafe {
// Hyper-optimized: deallocate the box's contents for it // Hyper-optimized: deallocate the box's contents for it
// without `drop`ing the contents // without `drop`ing the contents
heap::deallocate((*self.my_box.ptr) as *mut u8, heap::deallocate(self.my_box.ptr.as_ptr() as *mut u8,
mem::size_of::<T>(), mem::size_of::<T>(),
mem::align_of::<T>()); mem::align_of::<T>());
} }
@ -149,8 +149,8 @@ struct Box<T>{ ptr: Unique<T> }
impl<T> Drop for Box<T> { impl<T> Drop for Box<T> {
fn drop(&mut self) { fn drop(&mut self) {
unsafe { unsafe {
drop_in_place(*self.ptr); drop_in_place(self.ptr.as_ptr());
heap::deallocate((*self.ptr) as *mut u8, heap::deallocate(self.ptr.as_ptr() as *mut u8,
mem::size_of::<T>(), mem::size_of::<T>(),
mem::align_of::<T>()); mem::align_of::<T>());
} }
@ -166,7 +166,7 @@ impl<T> Drop for SuperBox<T> {
// without `drop`ing the contents. Need to set the `box` // without `drop`ing the contents. Need to set the `box`
// field as `None` to prevent Rust from trying to Drop it. // field as `None` to prevent Rust from trying to Drop it.
let my_box = self.my_box.take().unwrap(); let my_box = self.my_box.take().unwrap();
heap::deallocate((*my_box.ptr) as *mut u8, heap::deallocate(my_box.ptr.as_ptr() as *mut u8,
mem::size_of::<T>(), mem::size_of::<T>(),
mem::align_of::<T>()); mem::align_of::<T>());
mem::forget(my_box); mem::forget(my_box);

@ -7,16 +7,12 @@ can't allocate, but also can't put a null pointer in `ptr`, what do we do in
This is perfectly fine because we already have `cap == 0` as our sentinel for no This is perfectly fine because we already have `cap == 0` as our sentinel for no
allocation. We don't even need to handle it specially in almost any code because allocation. We don't even need to handle it specially in almost any code because
we usually need to check if `cap > len` or `len > 0` anyway. The traditional we usually need to check if `cap > len` or `len > 0` anyway. The recommended
Rust value to put here is `0x01`. The standard library actually exposes this Rust value to put here is `mem::align_of::<T>()`. Unique provides a convenience
as `alloc::heap::EMPTY`. There are quite a few places where we'll for this: `Unique::empty()`. There are quite a few places where we'll
want to use `heap::EMPTY` because there's no real allocation to talk about but want to use `empty` because there's no real allocation to talk about but
`null` would make the compiler do bad things. `null` would make the compiler do bad things.
All of the `heap` API is totally unstable under the `heap_api` feature, though.
We could trivially define `heap::EMPTY` ourselves, but we'll want the rest of
the `heap` API anyway, so let's just get that dependency over with.
So: So:
```rust,ignore ```rust,ignore
@ -24,16 +20,10 @@ So:
use std::mem; use std::mem;
use alloc::heap::EMPTY;
impl<T> Vec<T> { impl<T> Vec<T> {
fn new() -> Self { fn new() -> Self {
assert!(mem::size_of::<T>() != 0, "We're not ready to handle ZSTs"); assert!(mem::size_of::<T>() != 0, "We're not ready to handle ZSTs");
unsafe { Vec { ptr: Unique::empty(), len: 0, cap: 0 }
// need to cast EMPTY to the actual ptr type we want, let
// inference handle it.
Vec { ptr: Unique::new(heap::EMPTY as *mut _), len: 0, cap: 0 }
}
} }
} }
``` ```
@ -202,7 +192,7 @@ fn grow(&mut self) {
"capacity overflow"); "capacity overflow");
let new_num_bytes = old_num_bytes * 2; let new_num_bytes = old_num_bytes * 2;
let ptr = heap::reallocate(*self.ptr as *mut _, let ptr = heap::reallocate(self.ptr.as_ptr() as *mut _,
old_num_bytes, old_num_bytes,
new_num_bytes, new_num_bytes,
align); align);

@ -21,7 +21,7 @@ impl<T> Drop for Vec<T> {
let elem_size = mem::size_of::<T>(); let elem_size = mem::size_of::<T>();
let num_bytes = elem_size * self.cap; let num_bytes = elem_size * self.cap;
unsafe { unsafe {
heap::deallocate(*self.ptr as *mut _, num_bytes, align); heap::deallocate(self.ptr.as_ptr() as *mut _, num_bytes, align);
} }
} }
} }

@ -18,7 +18,7 @@ impl<T> Deref for Vec<T> {
type Target = [T]; type Target = [T];
fn deref(&self) -> &[T] { fn deref(&self) -> &[T] {
unsafe { unsafe {
::std::slice::from_raw_parts(*self.ptr, self.len) ::std::slice::from_raw_parts(self.ptr.as_ptr(), self.len)
} }
} }
} }
@ -32,7 +32,7 @@ use std::ops::DerefMut;
impl<T> DerefMut for Vec<T> { impl<T> DerefMut for Vec<T> {
fn deref_mut(&mut self) -> &mut [T] { fn deref_mut(&mut self) -> &mut [T] {
unsafe { unsafe {
::std::slice::from_raw_parts_mut(*self.ptr, self.len) ::std::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len)
} }
} }
} }

@ -20,13 +20,11 @@ struct RawVec<T> {
impl<T> RawVec<T> { impl<T> RawVec<T> {
fn new() -> Self { fn new() -> Self {
unsafe { // !0 is usize::MAX. This branch should be stripped at compile time.
// !0 is usize::MAX. This branch should be stripped at compile time. let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
// heap::EMPTY doubles as "unallocated" and "zero-sized allocation" // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap } RawVec { ptr: Unique::empty(), cap: cap }
}
} }
fn grow(&mut self) { fn grow(&mut self) {
@ -44,7 +42,7 @@ impl<T> RawVec<T> {
(1, ptr) (1, ptr)
} else { } else {
let new_cap = 2 * self.cap; let new_cap = 2 * self.cap;
let ptr = heap::reallocate(*self.ptr as *mut _, let ptr = heap::reallocate(self.ptr.as_ptr() as *mut _,
self.cap * elem_size, self.cap * elem_size,
new_cap * elem_size, new_cap * elem_size,
align); align);
@ -68,7 +66,7 @@ impl<T> Drop for RawVec<T> {
let num_bytes = elem_size * self.cap; let num_bytes = elem_size * self.cap;
unsafe { unsafe {
heap::deallocate(*self.ptr as *mut _, num_bytes, align); heap::deallocate(self.ptr.as_ptr() as *mut _, num_bytes, align);
} }
} }
} }
@ -84,7 +82,7 @@ pub struct Vec<T> {
} }
impl<T> Vec<T> { impl<T> Vec<T> {
fn ptr(&self) -> *mut T { *self.buf.ptr } fn ptr(&self) -> *mut T { self.buf.ptr.as_ptr() }
fn cap(&self) -> usize { self.buf.cap } fn cap(&self) -> usize { self.buf.cap }

@ -139,7 +139,7 @@ impl<T> Drop for IntoIter<T> {
let elem_size = mem::size_of::<T>(); let elem_size = mem::size_of::<T>();
let num_bytes = elem_size * self.cap; let num_bytes = elem_size * self.cap;
unsafe { unsafe {
heap::deallocate(*self.buf as *mut _, num_bytes, align); heap::deallocate(self.buf.as_ptr() as *mut _, num_bytes, align);
} }
} }
} }

@ -32,7 +32,6 @@ As a recap, Unique is a wrapper around a raw pointer that declares that:
* We are variant over `T` * We are variant over `T`
* We may own a value of type `T` (for drop check) * We may own a value of type `T` (for drop check)
* We are Send/Sync if `T` is Send/Sync * We are Send/Sync if `T` is Send/Sync
* We deref to `*mut T` (so it largely acts like a `*mut` in our code)
* Our pointer is never null (so `Option<Vec<T>>` is null-pointer-optimized) * Our pointer is never null (so `Option<Vec<T>>` is null-pointer-optimized)
We can implement all of the above requirements except for the last We can implement all of the above requirements except for the last
@ -53,21 +52,16 @@ struct Unique<T> {
unsafe impl<T: Send> Send for Unique<T> {} unsafe impl<T: Send> Send for Unique<T> {}
unsafe impl<T: Sync> Sync for Unique<T> {} unsafe impl<T: Sync> Sync for Unique<T> {}
impl<T> Unique<T> { impl<T: ?Sized> Unique<T> {
pub fn new(ptr: *mut T) -> Self { pub fn new(ptr: *mut T) -> Self {
Unique { ptr: ptr, _marker: PhantomData } Unique { ptr: ptr, _marker: PhantomData }
} }
}
impl<T> Deref for Unique<T> { pub fn as_ptr(&self) -> *mut T {
type Target = *mut T; self.ptr as *mut T
fn deref(&self) -> &*mut T {
// There's no way to cast the *const to a *mut
// while also taking a reference. So we just
// transmute it since it's all "just pointers".
unsafe { mem::transmute(&self.ptr) }
} }
} }
# fn main() {} # fn main() {}
``` ```
@ -92,7 +86,7 @@ pub struct Vec<T> {
If you don't care about the null-pointer optimization, then you can use the If you don't care about the null-pointer optimization, then you can use the
stable code. However we will be designing the rest of the code around enabling stable code. However we will be designing the rest of the code around enabling
the optimization. In particular, `Unique::new` is unsafe to call, because this optimization. It should be noted that `Unique::new` is unsafe to call, because
putting `null` inside of it is Undefined Behavior. Our stable Unique doesn't putting `null` inside of it is Undefined Behavior. Our stable Unique doesn't
need `new` to be unsafe because it doesn't make any interesting guarantees about need `new` to be unsafe because it doesn't make any interesting guarantees about
its contents. its contents.

@ -1,3 +1,4 @@
# RawVec # RawVec
We've actually reached an interesting situation here: we've duplicated the logic We've actually reached an interesting situation here: we've duplicated the logic
@ -17,9 +18,7 @@ struct RawVec<T> {
impl<T> RawVec<T> { impl<T> RawVec<T> {
fn new() -> Self { fn new() -> Self {
assert!(mem::size_of::<T>() != 0, "TODO: implement ZST support"); assert!(mem::size_of::<T>() != 0, "TODO: implement ZST support");
unsafe { RawVec { ptr: Unique::empty(), cap: 0 }
RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: 0 }
}
} }
// unchanged from Vec // unchanged from Vec
@ -33,7 +32,7 @@ impl<T> RawVec<T> {
(1, ptr) (1, ptr)
} else { } else {
let new_cap = 2 * self.cap; let new_cap = 2 * self.cap;
let ptr = heap::reallocate(*self.ptr as *mut _, let ptr = heap::reallocate(self.ptr.as_ptr() as *mut _,
self.cap * elem_size, self.cap * elem_size,
new_cap * elem_size, new_cap * elem_size,
align); align);
@ -57,7 +56,7 @@ impl<T> Drop for RawVec<T> {
let elem_size = mem::size_of::<T>(); let elem_size = mem::size_of::<T>();
let num_bytes = elem_size * self.cap; let num_bytes = elem_size * self.cap;
unsafe { unsafe {
heap::deallocate(*self.ptr as *mut _, num_bytes, align); heap::deallocate(self.ptr.as_ptr() as *mut _, num_bytes, align);
} }
} }
} }
@ -73,7 +72,7 @@ pub struct Vec<T> {
} }
impl<T> Vec<T> { impl<T> Vec<T> {
fn ptr(&self) -> *mut T { *self.buf.ptr } fn ptr(&self) -> *mut T { self.buf.ptr.as_ptr() }
fn cap(&self) -> usize { self.buf.cap } fn cap(&self) -> usize { self.buf.cap }

@ -19,7 +19,7 @@ RawValIter and RawVec respectively. How mysteriously convenient.
## Allocating Zero-Sized Types ## Allocating Zero-Sized Types
So if the allocator API doesn't support zero-sized allocations, what on earth So if the allocator API doesn't support zero-sized allocations, what on earth
do we store as our allocation? Why, `heap::EMPTY` of course! Almost every operation do we store as our allocation? `Unique::empty()` of course! Almost every operation
with a ZST is a no-op since ZSTs have exactly one value, and therefore no state needs with a ZST is a no-op since ZSTs have exactly one value, and therefore no state needs
to be considered to store or load them. This actually extends to `ptr::read` and to be considered to store or load them. This actually extends to `ptr::read` and
`ptr::write`: they won't actually look at the pointer at all. As such we never need `ptr::write`: they won't actually look at the pointer at all. As such we never need
@ -35,13 +35,11 @@ method of RawVec.
```rust,ignore ```rust,ignore
impl<T> RawVec<T> { impl<T> RawVec<T> {
fn new() -> Self { fn new() -> Self {
unsafe { // !0 is usize::MAX. This branch should be stripped at compile time.
// !0 is usize::MAX. This branch should be stripped at compile time. let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
// heap::EMPTY doubles as "unallocated" and "zero-sized allocation" // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap } RawVec { ptr: Unique::empty(), cap: cap }
}
} }
fn grow(&mut self) { fn grow(&mut self) {
@ -59,7 +57,7 @@ impl<T> RawVec<T> {
(1, ptr) (1, ptr)
} else { } else {
let new_cap = 2 * self.cap; let new_cap = 2 * self.cap;
let ptr = heap::reallocate(*self.ptr as *mut _, let ptr = heap::reallocate(self.ptr.as_ptr() as *mut _,
self.cap * elem_size, self.cap * elem_size,
new_cap * elem_size, new_cap * elem_size,
align); align);
@ -85,7 +83,7 @@ impl<T> Drop for RawVec<T> {
let num_bytes = elem_size * self.cap; let num_bytes = elem_size * self.cap;
unsafe { unsafe {
heap::deallocate(*self.ptr as *mut _, num_bytes, align); heap::deallocate(self.ptr.as_ptr() as *mut _, num_bytes, align);
} }
} }
} }

Loading…
Cancel
Save