author    Alex Crichton <alex@alexcrichton.com>  2017-06-03 14:54:08 -0700
committer Alex Crichton <alex@alexcrichton.com>  2017-07-05 14:37:01 -0700
commit  695dee063bcd40f154bb27b7beafcb3d4dd775ac
tree    8ac64f40091434e679b4221343dce7447c4f1236
parent  4c225c4d1732537aff63dd97c2b7ac681fd3d188
rustc: Implement the #[global_allocator] attribute
This PR is an implementation of [RFC 1974], which specifies a new method of
defining a global allocator for a program. It obsoletes the old
`#![allocator]` attribute and removes support for it.

[RFC 1974]: https://github.com/rust-lang/rfcs/pull/1974
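
For illustration, a minimal sketch of the attribute in use, written against
today's stable `std::alloc` API (at the time of this commit the types still
lived in the unstable `std::heap` module):

```rust
// Minimal sketch: select the platform allocator as the program's global
// allocator. Uses the stable `std::alloc::System` type, which postdates
// this commit.
use std::alloc::System;

#[global_allocator]
static GLOBAL: System = System;

fn main() {
    // Every heap allocation in the program, including this Vec, is now
    // routed through the allocator registered above.
    let v = vec![1, 2, 3];
    println!("{:?}", v);
}
```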

The new `#[global_allocator]` attribute solves many issues encountered with the
`#![allocator]` attribute, such as composition and restrictions on the crate
graph itself. The compiler now has much more control over the ABI of the
allocator and how it is hooked into a program, allowing much more freedom in
how this feature is implemented.

cc #27389
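
The composition that the old `#![allocator]` attribute could not support can
be sketched with a wrapper allocator: any crate can define one and any binary
can opt into it, regardless of the crate graph. This hedged sketch uses the
stable `GlobalAlloc` trait (the eventual stabilized form of the machinery this
commit introduces); the `Counting` type and `ALLOCS` counter are hypothetical
names for illustration:

```rust
use std::alloc::{GlobalAlloc, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering};

// Hypothetical wrapper allocator: counts allocations, then delegates the
// real work to the system allocator.
struct Counting;

static ALLOCS: AtomicUsize = AtomicUsize::new(0);

unsafe impl GlobalAlloc for Counting {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        ALLOCS.fetch_add(1, Ordering::Relaxed);
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout)
    }
}

#[global_allocator]
static GLOBAL: Counting = Counting;

fn main() {
    let v = vec![1, 2, 3];
    println!("{:?} ({} allocations)", v, ALLOCS.load(Ordering::Relaxed));
}
```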
Diffstat (limited to 'src/liballoc/arc.rs')
-rw-r--r--  src/liballoc/arc.rs  |  10
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 7c51c4b161c..d9edf50b9c8 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -23,7 +23,6 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
-use core::mem::{align_of_val, size_of_val};
 use core::intrinsics::abort;
 use core::mem;
 use core::mem::uninitialized;
@@ -34,7 +33,8 @@ use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
 use core::{isize, usize};
 use core::convert::From;
-use heap::deallocate;
+
+use heap::{Heap, Alloc, Layout};
 
 /// A soft limit on the amount of references that may be made to an `Arc`.
 ///
@@ -503,7 +503,7 @@ impl<T: ?Sized> Arc<T> {
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
+            Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
         }
     }
 
@@ -1007,7 +1007,9 @@ impl<T: ?Sized> Drop for Weak<T> {
         // ref, which can only happen after the lock is released.
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            unsafe { deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr)) }
+            unsafe {
+                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            }
         }
     }
 }
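
The substantive change in this file is mechanical: the free-function call
`deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))` becomes
`Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))`, with `Layout`
bundling the size/alignment pair into one value. A standalone sketch of that
equivalence, using the stable `std::alloc::Layout` (the internal
`heap::Layout` imported above was its unstable precursor):

```rust
use std::alloc::Layout;
use std::mem::{align_of_val, size_of_val};

fn main() {
    let x: u64 = 42;

    // Layout::for_value packages the same two quantities the old
    // deallocate() call took as separate size and align arguments.
    let layout = Layout::for_value(&x);
    assert_eq!(layout.size(), size_of_val(&x));
    assert_eq!(layout.align(), align_of_val(&x));
}
```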