diff options
| author | Alex Crichton <alex@alexcrichton.com> | 2014-06-13 23:23:31 -0700 |
|---|---|---|
| committer | Alex Crichton <alex@alexcrichton.com> | 2014-06-16 18:15:48 -0700 |
| commit | 4cd932f94e76046500e180bc941e36a2a17cade8 (patch) | |
| tree | 2599677dd8c573cbebf1a3e4d124b38e1d8fc77a /src/liballoc | |
| parent | 7613c9dd59aa771bf02a00c77af0ba4266392373 (diff) | |
| download | rust-4cd932f94e76046500e180bc941e36a2a17cade8.tar.gz rust-4cd932f94e76046500e180bc941e36a2a17cade8.zip | |
alloc: Allow disabling jemalloc
Diffstat (limited to 'src/liballoc')
| -rw-r--r-- | src/liballoc/heap.rs | 247 |
1 file changed, 199 insertions, 48 deletions
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs index 34a6dfca69b..0e7445e737c 100644 --- a/src/liballoc/heap.rs +++ b/src/liballoc/heap.rs @@ -12,36 +12,9 @@ // FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias` // and `nonnull` -use core::intrinsics::{abort, cttz32}; -use core::option::{None, Option}; -use core::ptr::{RawPtr, mut_null, null}; -use libc::{c_char, c_int, c_void, size_t}; - #[cfg(not(test))] use core::raw; #[cfg(not(test))] use util; -#[link(name = "jemalloc", kind = "static")] -extern { - fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void; - fn je_rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void; - fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, - flags: c_int) -> size_t; - fn je_dallocx(ptr: *mut c_void, flags: c_int); - fn je_nallocx(size: size_t, flags: c_int) -> size_t; - fn je_malloc_stats_print(write_cb: Option<extern "C" fn(cbopaque: *mut c_void, *c_char)>, - cbopaque: *mut c_void, - opts: *c_char); -} - -// -lpthread needs to occur after -ljemalloc, the earlier argument isn't enough -#[cfg(not(windows), not(target_os = "android"))] -#[link(name = "pthread")] -extern {} - -// MALLOCX_ALIGN(a) macro -#[inline(always)] -fn mallocx_align(a: uint) -> c_int { unsafe { cttz32(a as u32) as c_int } } - /// Return a pointer to `size` bytes of memory. /// /// Behavior is undefined if the requested size is 0 or the alignment is not a @@ -49,11 +22,7 @@ fn mallocx_align(a: uint) -> c_int { unsafe { cttz32(a as u32) as c_int } } /// size on the platform. #[inline] pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { - let ptr = je_mallocx(size as size_t, mallocx_align(align)) as *mut u8; - if ptr.is_null() { - abort() - } - ptr + imp::allocate(size, align) } /// Extend or shrink the allocation referenced by `ptr` to `size` bytes of @@ -67,15 +36,9 @@ pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { /// create the allocation referenced by `ptr`. 
The `old_size` parameter may also /// be the value returned by `usable_size` for the requested size. #[inline] -#[allow(unused_variable)] // for the parameter names in the documentation pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, old_size: uint) -> *mut u8 { - let ptr = je_rallocx(ptr as *mut c_void, size as size_t, - mallocx_align(align)) as *mut u8; - if ptr.is_null() { - abort() - } - ptr + imp::reallocate(ptr, size, align, old_size) } /// Extend or shrink the allocation referenced by `ptr` to `size` bytes of @@ -92,11 +55,9 @@ pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, /// create the allocation referenced by `ptr`. The `old_size` parameter may be /// any value in range_inclusive(requested_size, usable_size). #[inline] -#[allow(unused_variable)] // for the parameter names in the documentation pub unsafe fn reallocate_inplace(ptr: *mut u8, size: uint, align: uint, old_size: uint) -> bool { - je_xallocx(ptr as *mut c_void, size as size_t, 0, - mallocx_align(align)) == size as size_t + imp::reallocate_inplace(ptr, size, align, old_size) } /// Deallocate the memory referenced by `ptr`. @@ -107,16 +68,15 @@ pub unsafe fn reallocate_inplace(ptr: *mut u8, size: uint, align: uint, /// create the allocation referenced by `ptr`. The `size` parameter may also be /// the value returned by `usable_size` for the requested size. #[inline] -#[allow(unused_variable)] // for the parameter names in the documentation pub unsafe fn deallocate(ptr: *mut u8, size: uint, align: uint) { - je_dallocx(ptr as *mut c_void, mallocx_align(align)) + imp::deallocate(ptr, size, align) } /// Return the usable size of an allocation created with the specified the /// `size` and `align`. #[inline] pub fn usable_size(size: uint, align: uint) -> uint { - unsafe { je_nallocx(size as size_t, mallocx_align(align)) as uint } + imp::usable_size(size, align) } /// Print implementation-defined allocator statistics. 
@@ -125,9 +85,7 @@ pub fn usable_size(size: uint, align: uint) -> uint { /// during the call. #[unstable] pub fn stats_print() { - unsafe { - je_malloc_stats_print(None, mut_null(), null()) - } + imp::stats_print(); } // The compiler never calls `exchange_free` on ~ZeroSizeType, so zero-size @@ -170,6 +128,199 @@ unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint, alloc as *mut u8 } +#[cfg(jemalloc)] +mod imp { + use core::intrinsics::abort; + use core::option::{None, Option}; + use core::ptr::{RawPtr, mut_null, null}; + use core::num::Bitwise; + use libc::{c_char, c_int, c_void, size_t}; + + #[link(name = "jemalloc", kind = "static")] + extern { + fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void; + fn je_rallocx(ptr: *mut c_void, size: size_t, + flags: c_int) -> *mut c_void; + fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, + flags: c_int) -> size_t; + fn je_dallocx(ptr: *mut c_void, flags: c_int); + fn je_nallocx(size: size_t, flags: c_int) -> size_t; + fn je_malloc_stats_print(write_cb: Option<extern "C" fn(cbopaque: *mut c_void, *c_char)>, + cbopaque: *mut c_void, + opts: *c_char); + } + + // -lpthread needs to occur after -ljemalloc, the earlier argument isn't enough + #[cfg(not(windows), not(target_os = "android"))] + #[link(name = "pthread")] + extern {} + + // MALLOCX_ALIGN(a) macro + #[inline(always)] + fn mallocx_align(a: uint) -> c_int { a.trailing_zeros() as c_int } + + #[inline] + pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { + let ptr = je_mallocx(size as size_t, mallocx_align(align)) as *mut u8; + if ptr.is_null() { + abort() + } + ptr + } + + #[inline] + pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, + _old_size: uint) -> *mut u8 { + let ptr = je_rallocx(ptr as *mut c_void, size as size_t, + mallocx_align(align)) as *mut u8; + if ptr.is_null() { + abort() + } + ptr + } + + #[inline] + pub unsafe fn reallocate_inplace(ptr: *mut u8, size: uint, align: uint, + _old_size: uint) -> 
bool { + je_xallocx(ptr as *mut c_void, size as size_t, 0, + mallocx_align(align)) == size as size_t + } + + #[inline] + pub unsafe fn deallocate(ptr: *mut u8, _size: uint, align: uint) { + je_dallocx(ptr as *mut c_void, mallocx_align(align)) + } + + #[inline] + pub fn usable_size(size: uint, align: uint) -> uint { + unsafe { je_nallocx(size as size_t, mallocx_align(align)) as uint } + } + + pub fn stats_print() { + unsafe { + je_malloc_stats_print(None, mut_null(), null()) + } + } +} + +#[cfg(not(jemalloc), unix)] +mod imp { + use core::mem; + use core::ptr; + use libc; + use libc_heap; + + extern { + fn posix_memalign(memptr: *mut *mut libc::c_void, + align: libc::size_t, + size: libc::size_t) -> libc::c_int; + } + + #[inline] + pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { + // The posix_memalign manpage states + // + // alignment [...] must be a power of two and a multiple of + // sizeof(void *) + // + // The `align` parameter to this function is the *minimum* alignment for + // a block of memory, so we special case everything under `*uint` to + // just pass it to malloc, which is guaranteed to align to at least the + // size of `*uint`. 
+ if align < mem::size_of::<*uint>() { + libc_heap::malloc_raw(size) + } else { + let mut out = 0 as *mut libc::c_void; + let ret = posix_memalign(&mut out, + align as libc::size_t, + size as libc::size_t); + if ret != 0 { + ::oom(); + } + out as *mut u8 + } + } + + #[inline] + pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, + old_size: uint) -> *mut u8 { + let new_ptr = allocate(size, align); + ptr::copy_memory(new_ptr, ptr as *u8, old_size); + deallocate(ptr, old_size, align); + return new_ptr; + } + + #[inline] + pub unsafe fn reallocate_inplace(_ptr: *mut u8, _size: uint, _align: uint, + _old_size: uint) -> bool { + false + } + + #[inline] + pub unsafe fn deallocate(ptr: *mut u8, _size: uint, _align: uint) { + libc::free(ptr as *mut libc::c_void) + } + + #[inline] + pub fn usable_size(size: uint, _align: uint) -> uint { + size + } + + pub fn stats_print() { + } +} + +#[cfg(not(jemalloc), windows)] +mod imp { + use libc::{c_void, size_t}; + use core::ptr::RawPtr; + + extern { + fn _aligned_malloc(size: size_t, align: size_t) -> *mut c_void; + fn _aligned_realloc(block: *mut c_void, size: size_t, + align: size_t) -> *mut c_void; + fn _aligned_free(ptr: *mut c_void); + } + + #[inline] + pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { + let ptr = _aligned_malloc(size as size_t, align as size_t); + if ptr.is_null() { + ::oom(); + } + ptr as *mut u8 + } + + #[inline] + pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, + _old_size: uint) -> *mut u8 { + let ptr = _aligned_realloc(ptr as *mut c_void, size as size_t, + align as size_t); + if ptr.is_null() { + ::oom(); + } + ptr as *mut u8 + } + + #[inline] + pub unsafe fn reallocate_inplace(_ptr: *mut u8, _size: uint, _align: uint, + _old_size: uint) -> bool { + false + } + + #[inline] + pub unsafe fn deallocate(ptr: *mut u8, _size: uint, _align: uint) { + _aligned_free(ptr as *mut c_void) + } + + #[inline] + pub fn usable_size(size: uint, _align: uint) -> uint { + size + } + + 
pub fn stats_print() {} +} + #[cfg(test)] mod bench { extern crate test; |
