Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit e29f420

Browse files
committed Feb 11, 2015
Auto merge of #21972 - pnkfelix:new-dtor-semantics-6, r=nikomatsakis
This is a resurrection and heavy revision/expansion of a PR that pcwalton did to resolve #8861. The most relevant, user-visible semantic change is this: #[unsafe_destructor] is gone. Instead, if a type expression for some value has a destructor, then any lifetimes referenced within that type expression must strictly outlive the scope of the value. See discussion on rust-lang/rfcs#769
2 parents 446bc89 + 2c9d81b commit e29f420

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

56 files changed

+1863
-136
lines changed
 

‎src/libarena/lib.rs

Lines changed: 50 additions & 21 deletions
Original file line number · Diff line number · Diff line change
@@ -42,6 +42,7 @@ use std::cell::{Cell, RefCell};
4242
use std::cmp;
4343
use std::intrinsics::{TyDesc, get_tydesc};
4444
use std::intrinsics;
45+
use std::marker;
4546
use std::mem;
4647
use std::num::{Int, UnsignedInt};
4748
use std::ptr;
@@ -88,27 +89,29 @@ impl Chunk {
8889
/// than objects without destructors. This reduces overhead when initializing
8990
/// plain-old-data (`Copy` types) and means we don't need to waste time running
9091
/// their destructors.
91-
pub struct Arena {
92+
pub struct Arena<'longer_than_self> {
9293
// The head is separated out from the list as a unbenchmarked
9394
// microoptimization, to avoid needing to case on the list to access the
9495
// head.
9596
head: RefCell<Chunk>,
9697
copy_head: RefCell<Chunk>,
9798
chunks: RefCell<Vec<Chunk>>,
99+
_invariant: marker::InvariantLifetime<'longer_than_self>,
98100
}
99101

100-
impl Arena {
102+
impl<'a> Arena<'a> {
101103
/// Allocates a new Arena with 32 bytes preallocated.
102-
pub fn new() -> Arena {
104+
pub fn new() -> Arena<'a> {
103105
Arena::new_with_size(32)
104106
}
105107

106108
/// Allocates a new Arena with `initial_size` bytes preallocated.
107-
pub fn new_with_size(initial_size: usize) -> Arena {
109+
pub fn new_with_size(initial_size: usize) -> Arena<'a> {
108110
Arena {
109111
head: RefCell::new(chunk(initial_size, false)),
110112
copy_head: RefCell::new(chunk(initial_size, true)),
111113
chunks: RefCell::new(Vec::new()),
114+
_invariant: marker::InvariantLifetime,
112115
}
113116
}
114117
}
@@ -122,7 +125,7 @@ fn chunk(size: usize, is_copy: bool) -> Chunk {
122125
}
123126

124127
#[unsafe_destructor]
125-
impl Drop for Arena {
128+
impl<'longer_than_self> Drop for Arena<'longer_than_self> {
126129
fn drop(&mut self) {
127130
unsafe {
128131
destroy_chunk(&*self.head.borrow());
@@ -180,7 +183,7 @@ fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
180183
((p & !1) as *const TyDesc, p & 1 == 1)
181184
}
182185

183-
impl Arena {
186+
impl<'longer_than_self> Arena<'longer_than_self> {
184187
fn chunk_size(&self) -> usize {
185188
self.copy_head.borrow().capacity()
186189
}
@@ -293,7 +296,7 @@ impl Arena {
293296
/// Allocates a new item in the arena, using `op` to initialize the value,
294297
/// and returns a reference to it.
295298
#[inline]
296-
pub fn alloc<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
299+
pub fn alloc<T:'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
297300
unsafe {
298301
if intrinsics::needs_drop::<T>() {
299302
self.alloc_noncopy(op)
@@ -317,20 +320,6 @@ fn test_arena_destructors() {
317320
}
318321
}
319322

320-
#[test]
321-
fn test_arena_alloc_nested() {
322-
struct Inner { value: usize }
323-
struct Outer<'a> { inner: &'a Inner }
324-
325-
let arena = Arena::new();
326-
327-
let result = arena.alloc(|| Outer {
328-
inner: arena.alloc(|| Inner { value: 10 })
329-
});
330-
331-
assert_eq!(result.inner.value, 10);
332-
}
333-
334323
#[test]
335324
#[should_fail]
336325
fn test_arena_destructors_fail() {
@@ -365,6 +354,10 @@ pub struct TypedArena<T> {
365354

366355
/// A pointer to the first arena segment.
367356
first: RefCell<*mut TypedArenaChunk<T>>,
357+
358+
/// Marker indicating that dropping the arena causes its owned
359+
/// instances of `T` to be dropped.
360+
_own: marker::PhantomData<T>,
368361
}
369362

370363
struct TypedArenaChunk<T> {
@@ -460,6 +453,7 @@ impl<T> TypedArena<T> {
460453
ptr: Cell::new((*chunk).start() as *const T),
461454
end: Cell::new((*chunk).end() as *const T),
462455
first: RefCell::new(chunk),
456+
_own: marker::PhantomData,
463457
}
464458
}
465459
}
@@ -523,6 +517,41 @@ mod tests {
523517
z: i32,
524518
}
525519

520+
#[test]
521+
fn test_arena_alloc_nested() {
522+
struct Inner { value: u8 }
523+
struct Outer<'a> { inner: &'a Inner }
524+
enum EI<'e> { I(Inner), O(Outer<'e>) }
525+
526+
struct Wrap<'a>(TypedArena<EI<'a>>);
527+
528+
impl<'a> Wrap<'a> {
529+
fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
530+
let r: &EI = self.0.alloc(EI::I(f()));
531+
if let &EI::I(ref i) = r {
532+
i
533+
} else {
534+
panic!("mismatch");
535+
}
536+
}
537+
fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
538+
let r: &EI = self.0.alloc(EI::O(f()));
539+
if let &EI::O(ref o) = r {
540+
o
541+
} else {
542+
panic!("mismatch");
543+
}
544+
}
545+
}
546+
547+
let arena = Wrap(TypedArena::new());
548+
549+
let result = arena.alloc_outer(|| Outer {
550+
inner: arena.alloc_inner(|| Inner { value: 10 }) });
551+
552+
assert_eq!(result.inner.value, 10);
553+
}
554+
526555
#[test]
527556
pub fn test_copy() {
528557
let arena = TypedArena::new();

‎src/libcollections/btree/node.rs

Lines changed: 8 additions & 8 deletions
Original file line number · Diff line number · Diff line change
@@ -278,7 +278,7 @@ impl<T> Drop for RawItems<T> {
278278
#[unsafe_destructor]
279279
impl<K, V> Drop for Node<K, V> {
280280
fn drop(&mut self) {
281-
if self.keys.0.is_null() {
281+
if self.keys.ptr.is_null() {
282282
// We have already cleaned up this node.
283283
return;
284284
}
@@ -292,7 +292,7 @@ impl<K, V> Drop for Node<K, V> {
292292
self.destroy();
293293
}
294294

295-
self.keys.0 = ptr::null_mut();
295+
self.keys.ptr = ptr::null_mut();
296296
}
297297
}
298298

@@ -337,18 +337,18 @@ impl<K, V> Node<K, V> {
337337
unsafe fn destroy(&mut self) {
338338
let (alignment, size) =
339339
calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
340-
heap::deallocate(self.keys.0 as *mut u8, size, alignment);
340+
heap::deallocate(self.keys.ptr as *mut u8, size, alignment);
341341
}
342342

343343
#[inline]
344344
pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
345345
unsafe {(
346346
mem::transmute(raw::Slice {
347-
data: self.keys.0,
347+
data: self.keys.ptr,
348348
len: self.len()
349349
}),
350350
mem::transmute(raw::Slice {
351-
data: self.vals.0,
351+
data: self.vals.ptr,
352352
len: self.len()
353353
})
354354
)}
@@ -368,7 +368,7 @@ impl<K, V> Node<K, V> {
368368
} else {
369369
unsafe {
370370
mem::transmute(raw::Slice {
371-
data: self.edges.0,
371+
data: self.edges.ptr,
372372
len: self.len() + 1
373373
})
374374
}
@@ -586,7 +586,7 @@ impl <K, V> Node<K, V> {
586586

587587
/// If the node has any children
588588
pub fn is_leaf(&self) -> bool {
589-
self.edges.0.is_null()
589+
self.edges.ptr.is_null()
590590
}
591591

592592
/// if the node has too few elements
@@ -1064,7 +1064,7 @@ impl<K, V> Node<K, V> {
10641064
vals: RawItems::from_slice(self.vals()),
10651065
edges: RawItems::from_slice(self.edges()),
10661066

1067-
ptr: self.keys.0 as *mut u8,
1067+
ptr: self.keys.ptr as *mut u8,
10681068
capacity: self.capacity(),
10691069
is_leaf: self.is_leaf()
10701070
},

0 commit comments

Comments (0)
Please sign in to comment.