-rw-r--r--  src/liballoc/alloc.rs                 |  8
-rw-r--r--  src/liballoc/arc.rs                   | 13
-rw-r--r--  src/liballoc/btree/node.rs            | 13
-rw-r--r--  src/liballoc/raw_vec.rs               | 23
-rw-r--r--  src/liballoc/rc.rs                    | 13
-rw-r--r--  src/liballoc/tests/heap.rs            |  4
-rw-r--r--  src/libstd/alloc.rs                   |  3
-rw-r--r--  src/libstd/collections/hash/map.rs    |  4
-rw-r--r--  src/libstd/collections/hash/table.rs  | 12
9 files changed, 47 insertions(+), 46 deletions(-)
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 1bd95cfd08c..12ee7701903 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -81,8 +81,12 @@ pub struct Global;
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 #[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
-pub use self::Global as Heap;
+pub type Heap = Global;
 
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
+#[allow(non_upper_case_globals)]
+pub const Heap: Global = Global;
 
 unsafe impl Alloc for Global {
     #[inline]
@@ -268,7 +272,7 @@ mod tests {
     extern crate test;
     use self::test::Bencher;
     use boxed::Box;
-    use heap::{Global, Alloc, Layout};
+    use alloc::{Global, Alloc, Layout};
 
     #[test]
     fn allocate_zeroed() {
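
A note on the first hunk above: the type alias and the const can share the
name `Heap` because Rust resolves types and values in separate namespaces, so
downstream code that used `Heap` in either position keeps compiling (with a
deprecation warning). A minimal standalone sketch of this rename-with-compat
pattern, using hypothetical names on current stable Rust:

    // `OldName` keeps working in both type and value position after
    // the real definition moves to `NewName`.
    struct NewName;

    // Type namespace: `let x: OldName` still resolves.
    type OldName = NewName;

    // Value namespace: the expression `OldName` still resolves.
    #[allow(non_upper_case_globals)]
    const OldName: NewName = NewName;

    fn main() {
        let _x: OldName = OldName; // both aliases in one statement
    }
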
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index ccf2e2768d1..d63ed24aa4f 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -21,7 +21,6 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
-use core::heap::{Alloc, Layout};
 use core::intrinsics::abort;
 use core::mem::{self, align_of_val, size_of_val, uninitialized};
 use core::ops::Deref;
@@ -32,7 +31,7 @@ use core::hash::{Hash, Hasher};
 use core::{isize, usize};
 use core::convert::From;
 
-use heap::{Heap, box_free};
+use alloc::{Global, Alloc, Layout, box_free};
 use boxed::Box;
 use string::String;
 use vec::Vec;
@@ -521,7 +520,7 @@ impl<T: ?Sized> Arc<T> {
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
         }
     }
 
@@ -555,8 +554,8 @@ impl<T: ?Sized> Arc<T> {
 
         let layout = Layout::for_value(&*fake_ptr);
 
-        let mem = Heap.alloc(layout)
-            .unwrap_or_else(|e| Heap.oom(e));
+        let mem = Global.alloc(layout)
+            .unwrap_or_else(|e| Global.oom(e));
 
         // Initialize the real ArcInner
         let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
@@ -640,7 +639,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Heap.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem, self.layout.clone());
                 }
             }
         }
@@ -1161,7 +1160,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
             }
         }
     }
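
The `Layout::for_value(&*ptr)` calls above are there because `T: ?Sized`: the
layout of the allocation cannot come from `Layout::new::<T>()` and must be
computed from the live value. A small illustration with the stable `Layout`
API (not code from this patch):

    use std::alloc::Layout;

    fn main() {
        // For unsized values, size and alignment come from the value.
        let s: &str = "hello";
        let layout = Layout::for_value(s);
        assert_eq!(layout.size(), 5);
        assert_eq!(layout.align(), 1);

        // For sized values the two constructors agree.
        let n: &u64 = &42;
        assert_eq!(Layout::for_value(n), Layout::new::<u64>());
    }
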
diff --git a/src/liballoc/btree/node.rs b/src/liballoc/btree/node.rs
index 49109d522e9..8e23228bd28 100644
--- a/src/liballoc/btree/node.rs
+++ b/src/liballoc/btree/node.rs
@@ -41,14 +41,13 @@
 // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
 //   This implies that even an empty internal node has at least one edge.
 
-use core::heap::{Alloc, Layout};
 use core::marker::PhantomData;
 use core::mem;
 use core::ptr::{self, Unique, NonNull};
 use core::slice;
 
+use alloc::{Global, Alloc, Layout};
 use boxed::Box;
-use heap::Heap;
 
 const B: usize = 6;
 pub const MIN_LEN: usize = B - 1;
@@ -250,7 +249,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Heap.dealloc(top, Layout::new::<InternalNode<K, V>>());
+            Global.dealloc(top, Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -436,7 +435,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
     > {
         let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
+        Global.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -457,7 +456,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
     > {
         let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
+        Global.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1239,12 +1238,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                     ).correct_parent_link();
                 }
 
-                Heap.dealloc(
+                Global.dealloc(
                     right_node.node.as_ptr() as *mut u8,
                     Layout::new::<InternalNode<K, V>>(),
                 );
             } else {
-                Heap.dealloc(
+                Global.dealloc(
                     right_node.node.as_ptr() as *mut u8,
                     Layout::new::<LeafNode<K, V>>(),
                 );
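
Unlike arc.rs, the node types here are statically sized, so these hunks can
use `Layout::new::<...>()` directly and the layout passed to `dealloc` is
guaranteed to match the one used at allocation time. A quick check of that
equivalence with a stand-in `Node` type (illustrative, stable Rust):

    use std::alloc::Layout;

    struct Node {
        keys: [u32; 5],
        edges: [usize; 6],
    }

    fn main() {
        let node = Node { keys: [0; 5], edges: [0; 6] };
        // For sized types, the static and value-derived layouts agree.
        assert_eq!(Layout::new::<Node>(), Layout::for_value(&node));
        assert_eq!(node.keys.len() + node.edges.len(), 11);
    }
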
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 3edce8aebdf..51f39dc6cc7 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -8,13 +8,12 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use alloc::{Alloc, Layout, Global};
 use core::cmp;
-use core::heap::{Alloc, Layout};
 use core::mem;
 use core::ops::Drop;
 use core::ptr::{self, Unique};
 use core::slice;
-use heap::Heap;
 use super::boxed::Box;
 use super::allocator::CollectionAllocErr;
 use super::allocator::CollectionAllocErr::*;
@@ -47,7 +46,7 @@ use super::allocator::CollectionAllocErr::*;
 /// field. This allows zero-sized types to not be special-cased by consumers of
 /// this type.
 #[allow(missing_debug_implementations)]
-pub struct RawVec<T, A: Alloc = Heap> {
+pub struct RawVec<T, A: Alloc = Global> {
     ptr: Unique<T>,
     cap: usize,
     a: A,
@@ -114,14 +113,14 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }
 
-impl<T> RawVec<T, Heap> {
+impl<T> RawVec<T, Global> {
     /// Creates the biggest possible RawVec (on the system heap)
     /// without allocating. If T has positive size, then this makes a
     /// RawVec with capacity 0. If T has 0 size, then it makes a
     /// RawVec with capacity `usize::MAX`. Useful for implementing
     /// delayed allocation.
     pub fn new() -> Self {
-        Self::new_in(Heap)
+        Self::new_in(Global)
     }
 
     /// Creates a RawVec (on the system heap) with exactly the
@@ -141,13 +140,13 @@ impl<T> RawVec<T, Heap> {
     /// Aborts on OOM
     #[inline]
     pub fn with_capacity(cap: usize) -> Self {
-        RawVec::allocate_in(cap, false, Heap)
+        RawVec::allocate_in(cap, false, Global)
     }
 
     /// Like `with_capacity` but guarantees the buffer is zeroed.
     #[inline]
     pub fn with_capacity_zeroed(cap: usize) -> Self {
-        RawVec::allocate_in(cap, true, Heap)
+        RawVec::allocate_in(cap, true, Global)
     }
 }
 
@@ -168,7 +167,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }
 
-impl<T> RawVec<T, Heap> {
+impl<T> RawVec<T, Global> {
     /// Reconstitutes a RawVec from a pointer, capacity.
     ///
     /// # Undefined Behavior
@@ -180,7 +179,7 @@ impl<T> RawVec<T, Heap> {
         RawVec {
             ptr: Unique::new_unchecked(ptr),
             cap,
-            a: Heap,
+            a: Global,
         }
     }
 
@@ -678,7 +677,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }
 
-impl<T> RawVec<T, Heap> {
+impl<T> RawVec<T, Global> {
     /// Converts the entire buffer into `Box<[T]>`.
     ///
     /// While it is not *strictly* Undefined Behavior to call
@@ -763,13 +762,13 @@ mod tests {
                 if size > self.fuel {
                     return Err(AllocErr::Unsupported { details: "fuel exhausted" });
                 }
-                match Heap.alloc(layout) {
+                match Global.alloc(layout) {
                     ok @ Ok(_) => { self.fuel -= size; ok }
                     err @ Err(_) => err,
                 }
             }
             unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-                Heap.dealloc(ptr, layout)
+                Global.dealloc(ptr, layout)
             }
         }
 
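The test above delegates to the global allocator while imposing a fuel budget,
via the then-unstable `Alloc` trait. The same wrap-and-forward pattern can be
sketched on today's stable `GlobalAlloc` trait; `CountingAlloc` and
`OUTSTANDING` below are illustrative names, not part of this patch:

    use std::alloc::{GlobalAlloc, Layout, System};
    use std::sync::atomic::{AtomicUsize, Ordering};

    // Forwards every request to the system allocator while tracking
    // how many bytes are currently outstanding.
    struct CountingAlloc;

    static OUTSTANDING: AtomicUsize = AtomicUsize::new(0);

    unsafe impl GlobalAlloc for CountingAlloc {
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            OUTSTANDING.fetch_add(layout.size(), Ordering::Relaxed);
            System.alloc(layout)
        }
        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            OUTSTANDING.fetch_sub(layout.size(), Ordering::Relaxed);
            System.dealloc(ptr, layout)
        }
    }

    #[global_allocator]
    static ALLOC: CountingAlloc = CountingAlloc;

    fn main() {
        let v = vec![0u8; 4096];
        drop(v);
        println!("outstanding: {} bytes", OUTSTANDING.load(Ordering::Relaxed));
    }
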
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 8bdc57f96a6..c134b181158 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -250,7 +250,6 @@ use core::cell::Cell;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
-use core::heap::{Alloc, Layout};
 use core::intrinsics::abort;
 use core::marker;
 use core::marker::{Unsize, PhantomData};
@@ -260,7 +259,7 @@ use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
 use core::convert::From;
 
-use heap::{Heap, box_free};
+use alloc::{Global, Alloc, Layout, box_free};
 use string::String;
 use vec::Vec;
 
@@ -668,8 +667,8 @@ impl<T: ?Sized> Rc<T> {
 
         let layout = Layout::for_value(&*fake_ptr);
 
-        let mem = Heap.alloc(layout)
-            .unwrap_or_else(|e| Heap.oom(e));
+        let mem = Global.alloc(layout)
+            .unwrap_or_else(|e| Global.oom(e));
 
         // Initialize the real RcBox
         let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>;
@@ -752,7 +751,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Heap.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem, self.layout.clone());
                 }
             }
         }
@@ -847,7 +846,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();
 
                 if self.weak() == 0 {
-                    Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                    Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
                 }
             }
         }
@@ -1273,7 +1272,7 @@ impl<T: ?Sized> Drop for Weak<T> {
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
             }
         }
     }
diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs
index d3ce12056bb..328131e2fef 100644
--- a/src/liballoc/tests/heap.rs
+++ b/src/liballoc/tests/heap.rs
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use alloc_system::System;
-use std::heap::{Heap, Alloc, Layout};
+use std::alloc::{Global, Alloc, Layout};
 
 /// https://github.com/rust-lang/rust/issues/45955
 ///
@@ -22,7 +22,7 @@ fn alloc_system_overaligned_request() {
 
 #[test]
 fn std_heap_overaligned_request() {
-    check_overalign_requests(Heap)
+    check_overalign_requests(Global)
 }
 
 fn check_overalign_requests<T: Alloc>(mut allocator: T) {
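
`check_overalign_requests` asks the allocator for alignments beyond what
malloc guarantees by default; that request shape looks like this with the
stable free functions (a sketch, not the test itself):

    use std::alloc::{alloc, dealloc, Layout};

    fn main() {
        // 8 bytes at 256-byte alignment: over-aligned for most mallocs.
        let layout = Layout::from_size_align(8, 256).unwrap();
        unsafe {
            let p = alloc(layout);
            assert!(!p.is_null());
            assert_eq!(p as usize % 256, 0); // alignment was honored
            dealloc(p, layout);
        }
    }
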
diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs
index 77be3e52d76..eb0c960732d 100644
--- a/src/libstd/alloc.rs
+++ b/src/libstd/alloc.rs
@@ -12,7 +12,8 @@
 
 #![unstable(issue = "32838", feature = "allocator_api")]
 
-#[doc(inline)] pub use alloc_crate::alloc::Heap;
+#[doc(inline)] #[allow(deprecated)] pub use alloc_crate::alloc::Heap;
+#[doc(inline)] pub use alloc_crate::alloc::Global;
 #[doc(inline)] pub use alloc_system::System;
 #[doc(inline)] pub use core::alloc::*;
 
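The `#[allow(deprecated)]` added above is required because naming a deprecated
item in a `use` declaration fires the lint on the re-export itself. A
self-contained sketch of the facade pattern (the `old` module is
hypothetical):

    mod old {
        pub struct Global;

        #[deprecated(note = "type renamed to `Global`")]
        pub type Heap = Global;
    }

    // Without the allow, this `use` would warn about `Heap`.
    #[allow(deprecated)]
    pub use old::Heap;
    pub use old::Global;

    fn main() {}
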
diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs
index 73a5df8dc28..c4ef9e62577 100644
--- a/src/libstd/collections/hash/map.rs
+++ b/src/libstd/collections/hash/map.rs
@@ -11,13 +11,13 @@
 use self::Entry::*;
 use self::VacantEntryState::*;
 
+use alloc::{Global, Alloc, CollectionAllocErr};
 use cell::Cell;
 use borrow::Borrow;
 use cmp::max;
 use fmt::{self, Debug};
 #[allow(deprecated)]
 use hash::{Hash, Hasher, BuildHasher, SipHasher13};
-use heap::{Heap, Alloc, CollectionAllocErr};
 use iter::{FromIterator, FusedIterator};
 use mem::{self, replace};
 use ops::{Deref, Index};
@@ -784,7 +784,7 @@ impl<K, V, S> HashMap<K, V, S>
     pub fn reserve(&mut self, additional: usize) {
         match self.try_reserve(additional) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
+            Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
             Ok(()) => { /* yay */ }
          }
     }
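
The match above turns capacity overflow into a panic and routes real
allocation failure to `oom`. The later-stabilized `try_reserve` exposes the
same distinction to callers as a `Result`; for illustration on current stable
Rust:

    use std::collections::HashMap;

    fn main() {
        let mut map = HashMap::new();
        // Both overflow and allocator failure surface as TryReserveError.
        match map.try_reserve(1024) {
            Ok(()) => { map.insert("k", 1); }
            Err(e) => eprintln!("could not reserve: {e}"),
        }
    }
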
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index 878cd82a258..10bab5df8b5 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -8,9 +8,9 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use alloc::{Global, Alloc, Layout, CollectionAllocErr};
 use cmp;
 use hash::{BuildHasher, Hash, Hasher};
-use heap::{Heap, Alloc, Layout, CollectionAllocErr};
 use marker;
 use mem::{align_of, size_of, needs_drop};
 use mem;
@@ -754,7 +754,7 @@ impl<K, V> RawTable<K, V> {
             return Err(CollectionAllocErr::CapacityOverflow);
         }
 
-        let buffer = Heap.alloc(Layout::from_size_align(size, alignment)
+        let buffer = Global.alloc(Layout::from_size_align(size, alignment)
             .ok_or(CollectionAllocErr::CapacityOverflow)?)?;
 
         let hashes = buffer as *mut HashUint;
@@ -772,7 +772,7 @@ impl<K, V> RawTable<K, V> {
     unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {
         match Self::try_new_uninitialized(capacity) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
+            Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
             Ok(table) => { table }
         }
     }
@@ -811,7 +811,7 @@ impl<K, V> RawTable<K, V> {
     pub fn new(capacity: usize) -> RawTable<K, V> {
         match Self::try_new(capacity) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
+            Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
             Ok(table) => { table }
         }
     }
@@ -1185,8 +1185,8 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
         debug_assert!(!oflo, "should be impossible");
 
         unsafe {
-            Heap.dealloc(self.hashes.ptr() as *mut u8,
-                         Layout::from_size_align(size, align).unwrap());
+            Global.dealloc(self.hashes.ptr() as *mut u8,
+                           Layout::from_size_align(size, align).unwrap());
             // Remember how everything was allocated out of one buffer
             // during initialization? We only need one call to free here.
         }
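
The single `dealloc` in this Drop impl works because the hashes and the
key/value pairs were carved out of one buffer at allocation time (see the
retained comment). `Layout::extend` computes such a combined layout plus the
offset of the second array, roughly what the table code does by hand (a
sketch on stable Rust):

    use std::alloc::Layout;

    fn main() {
        let hashes = Layout::array::<usize>(16).unwrap();
        let pairs = Layout::array::<(u64, u64)>(16).unwrap();
        // Combined layout for one buffer, plus where the pairs start.
        let (combined, pairs_offset) = hashes.extend(pairs).unwrap();
        println!("size={} align={} pairs at byte {}",
                 combined.size(), combined.align(), pairs_offset);
    }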