//! Regression test for <https://github.com/rust-lang/miri/issues/3341>:
//! If `Box` has a local allocator, then it can't be `noalias` as the allocator
//! may want to access allocator state based on the data pointer.

//@revisions: stack tree
//@[tree]compile-flags: -Zmiri-tree-borrows
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Layout};
use std::cell::{Cell, UnsafeCell};
use std::mem;
use std::ptr::{self, NonNull, addr_of};
use std::thread::{self, ThreadId};

const BIN_SIZE: usize = 8;

// A bin represents a collection of blocks of a specific layout.
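// Its 128-byte alignment lets `deallocate` recover a block's bin header from the block
// pointer alone by masking off the low address bits (see the size assertion in `main`).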
#[repr(align(128))]
struct MyBin {
    top: Cell<usize>,
    thread_id: ThreadId,
    memory: UnsafeCell<[usize; BIN_SIZE]>,
}

impl MyBin {
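    // Hands out the next free block in this bin, bump-allocator style,
    // or `None` once the bin is full.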
    fn pop(&self) -> Option<NonNull<u8>> {
        let top = self.top.get();
        if top == BIN_SIZE {
            return None;
        }
        // Cast the *entire* bin to a raw pointer, so the provenance of the resulting
        // pointer is not restricted to a single field.
        let bin = self as *const MyBin;
        let base_ptr = UnsafeCell::raw_get(unsafe { addr_of!((*bin).memory) }).cast::<usize>();
        let ptr = unsafe { NonNull::new_unchecked(base_ptr.add(top)) };
        self.top.set(top + 1);
        Some(ptr.cast())
    }

    // Pretends not to be just a throwaway deallocation method. A more realistic
    // implementation would use intrusive linked lists, which also requires access
    // to the metadata of this bin.
    unsafe fn push(&self, ptr: NonNull<u8>) {
        // For now just check that this really is in this bin.
        let start = self.memory.get().addr();
        let end = start + BIN_SIZE * mem::size_of::<usize>();
        let addr = ptr.addr().get();
        assert!((start..end).contains(&addr));
    }
}

// A collection of bins.
struct MyAllocator {
    thread_id: ThreadId,
    // Pretends to be some complex collection of bins, such as an array of linked lists.
    bins: Box<[MyBin; 1]>,
}

impl MyAllocator {
    fn new() -> Self {
        let thread_id = thread::current().id();
        MyAllocator {
            thread_id,
            bins: Box::new(
                [MyBin { top: Cell::new(0), thread_id, memory: UnsafeCell::default() }; 1],
            ),
        }
    }

    // Pretends that finding a suitable bin for the layout is expensive.
    fn find_bin(&self, layout: Layout) -> Option<&MyBin> {
        if layout == Layout::new::<usize>() { Some(&self.bins[0]) } else { None }
    }
}

unsafe impl Allocator for MyAllocator {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Expensive bin search.
        let bin = self.find_bin(layout).ok_or(AllocError)?;
        let ptr = bin.pop().ok_or(AllocError)?;
        Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, _layout: Layout) {
        // Make sure accesses via `self` don't disturb anything.
        let _val = self.bins[0].top.get();
        // Since manually finding the corresponding bin of `ptr` is very expensive,
        // we prefer doing pointer arithmetic instead.
        // But this means we access `top` via `ptr` rather than `self`!
        // That is fundamentally the source of the aliasing trouble in this example.
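        // Round the block address down to the bin's 128-byte alignment to recover its header.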
        let their_bin = ptr.as_ptr().map_addr(|addr| addr & !127).cast::<MyBin>();
        let thread_id = ptr::read(ptr::addr_of!((*their_bin).thread_id));
        if self.thread_id == thread_id {
            unsafe { (*their_bin).push(ptr) };
        } else {
            todo!("Deallocating from another thread");
        }
        // Make sure we can also still access this via `self` after the rest is done.
        let _val = self.bins[0].top.get();
    }
}

// Make sure to involve `Box` in allocating these,
// as that's where `noalias` may come from.
fn v1<T, A: Allocator>(t: T, a: A) -> Vec<T, A> {
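    // Unsize `Box<[T; 1], A>` to `Box<[T], A>` so that `into_vec`
    // can hand the allocation over to `Vec` without copying.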
    (Box::new_in([t], a) as Box<[T], A>).into_vec()
}
fn v2<T, A: Allocator>(t: T, a: A) -> Vec<T, A> {
    let v = v1(t, a);
    // There was a bug in `into_boxed_slice` that caused aliasing issues,
    // so round-trip through that as well.
    v.into_boxed_slice().into_vec()
}

fn main() {
    assert!(mem::size_of::<MyBin>() <= 128); // if it grows bigger, the trick to access the "header" no longer works
    let my_alloc = MyAllocator::new();
    let a = v1(1usize, &my_alloc);
    let b = v2(2usize, &my_alloc);
    assert_eq!(a[0] + 1, b[0]);
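    // The two elements ended up in adjacent blocks of the same bin,
    // so the custom allocator was really used.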
    assert_eq!(addr_of!(a[0]).wrapping_add(1), addr_of!(b[0]));
    drop((a, b));
}