Diffstat (limited to 'library/std/tests')
| -rw-r--r-- | library/std/tests/env.rs         |   2 |
| -rw-r--r-- | library/std/tests/env_modify.rs  |   9 |
| -rw-r--r-- | library/std/tests/path.rs        |   8 |
| -rw-r--r-- | library/std/tests/sync/lib.rs    |   1 |
| -rw-r--r-- | library/std/tests/sync/rwlock.rs | 946 |
| -rw-r--r-- | library/std/tests/thread.rs      |   1 |
6 files changed, 539 insertions, 428 deletions
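Editor's note: the bulk of this patch refactors the rwlock tests so that each test body runs against both `std::sync::poison::RwLock` and the nightly-only `std::sync::nonpoison::RwLock` (gated by `#![feature(nonpoison_rwlock)]`, enabled in sync/lib.rs below). The `nonpoison_and_poison_unwrap_test!` macro is defined elsewhere in this test crate, not in this diff; judging from its call sites, it binds the flavor under test to a `locks` alias and supplies a `maybe_unwrap` helper that unwraps the `LockResult` in the poison case and passes the guard through in the nonpoison case. A minimal sketch of what one invocation could expand to — the module layout and helper signatures here are assumptions for illustration, not the macro's actual output:

```rust
// Assumed expansion of `nonpoison_and_poison_unwrap_test!(name: smoke, ...)`.
// Illustration only: the real macro lives elsewhere in this test crate.
mod smoke {
    #[test]
    fn poison() {
        use std::sync::poison as locks;
        // Poisoning lock methods return a LockResult; unwrap it.
        fn maybe_unwrap<T>(r: std::sync::LockResult<T>) -> T {
            r.unwrap()
        }

        let l = locks::RwLock::new(());
        drop(maybe_unwrap(l.read()));
        drop(maybe_unwrap(l.write()));
    }

    #[test]
    fn nonpoison() {
        use std::sync::nonpoison as locks;
        // Non-poisoning lock methods return the guard directly; pass through.
        fn maybe_unwrap<T>(guard: T) -> T {
            guard
        }

        let l = locks::RwLock::new(());
        drop(maybe_unwrap(l.read()));
        drop(maybe_unwrap(l.write()));
    }
}
```

Running both flavors from one shared body leaves the poison-specific assertions (the `test_rw_arc_poison_*` and `*_poison` families in the diff) as the only hand-written duplicates.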
diff --git a/library/std/tests/env.rs b/library/std/tests/env.rs index e754cf8263b..b53fd69b707 100644 --- a/library/std/tests/env.rs +++ b/library/std/tests/env.rs @@ -16,7 +16,7 @@ fn test_self_exe_path() { #[test] fn test() { - assert!((!Path::new("test-path").is_absolute())); + assert!(!Path::new("test-path").is_absolute()); #[cfg(not(target_env = "sgx"))] current_dir().unwrap(); diff --git a/library/std/tests/env_modify.rs b/library/std/tests/env_modify.rs index ba84978b35f..fe0ae68806e 100644 --- a/library/std/tests/env_modify.rs +++ b/library/std/tests/env_modify.rs @@ -1,5 +1,6 @@ // These tests are in a separate integration test as they modify the environment, // and would otherwise cause some other tests to fail. +#![feature(cfg_select)] use std::env::*; use std::ffi::{OsStr, OsString}; @@ -110,8 +111,8 @@ fn env_home_dir() { } } - cfg_if::cfg_if! { - if #[cfg(unix)] { + cfg_select! { + unix => { let oldhome = var_to_os_string(var("HOME")); unsafe { @@ -130,7 +131,8 @@ fn env_home_dir() { } if let Some(oldhome) = oldhome { unsafe { set_var("HOME", oldhome); } } - } else if #[cfg(windows)] { + } + windows => { let oldhome = var_to_os_string(var("HOME")); let olduserprofile = var_to_os_string(var("USERPROFILE")); @@ -159,6 +161,7 @@ fn env_home_dir() { if let Some(olduserprofile) = olduserprofile { set_var("USERPROFILE", olduserprofile); } } } + _ => {} } } diff --git a/library/std/tests/path.rs b/library/std/tests/path.rs index 901d2770f20..e1576a0d423 100644 --- a/library/std/tests/path.rs +++ b/library/std/tests/path.rs @@ -1,10 +1,4 @@ -#![feature( - clone_to_uninit, - path_add_extension, - path_file_prefix, - maybe_uninit_slice, - normalize_lexically -)] +#![feature(clone_to_uninit, path_add_extension, maybe_uninit_slice, normalize_lexically)] use std::clone::CloneToUninit; use std::ffi::OsStr; diff --git a/library/std/tests/sync/lib.rs b/library/std/tests/sync/lib.rs index 94f1fe96b6a..f874c2ba389 100644 --- a/library/std/tests/sync/lib.rs +++ b/library/std/tests/sync/lib.rs @@ -8,6 +8,7 @@ #![feature(std_internals)] #![feature(sync_nonpoison)] #![feature(nonpoison_mutex)] +#![feature(nonpoison_rwlock)] #![allow(internal_features)] #![feature(macro_metavar_expr_concat)] // For concatenating identifiers in macros. diff --git a/library/std/tests/sync/rwlock.rs b/library/std/tests/sync/rwlock.rs index 1d55a176948..eca15d2a4ad 100644 --- a/library/std/tests/sync/rwlock.rs +++ b/library/std/tests/sync/rwlock.rs @@ -29,239 +29,457 @@ fn test_needs_drop() { assert!(mem::needs_drop::<NonCopyNeedsDrop>()); } -#[derive(Clone, Eq, PartialEq, Debug)] -struct Cloneable(i32); - -#[test] -fn smoke() { - let l = RwLock::new(()); - drop(l.read().unwrap()); - drop(l.write().unwrap()); - drop((l.read().unwrap(), l.read().unwrap())); - drop(l.write().unwrap()); -} +//////////////////////////////////////////////////////////////////////////////////////////////////// +// Non-poison & Poison Tests +//////////////////////////////////////////////////////////////////////////////////////////////////// +use super::nonpoison_and_poison_unwrap_test; + +nonpoison_and_poison_unwrap_test!( + name: smoke, + test_body: { + use locks::RwLock; + + let l = RwLock::new(()); + drop(maybe_unwrap(l.read())); + drop(maybe_unwrap(l.write())); + drop((maybe_unwrap(l.read()), maybe_unwrap(l.read()))); + drop(maybe_unwrap(l.write())); + } +); -#[test] // FIXME: On macOS we use a provenance-incorrect implementation and Miri // catches that issue with a chance of around 1/1000. 
// See <https://github.com/rust-lang/rust/issues/121950> for details. #[cfg_attr(all(miri, target_os = "macos"), ignore)] -fn frob() { - const N: u32 = 10; - const M: usize = if cfg!(miri) { 100 } else { 1000 }; +nonpoison_and_poison_unwrap_test!( + name: frob, + test_body: { + use locks::RwLock; - let r = Arc::new(RwLock::new(())); + const N: u32 = 10; + const M: usize = if cfg!(miri) { 100 } else { 1000 }; - let (tx, rx) = channel::<()>(); - for _ in 0..N { - let tx = tx.clone(); - let r = r.clone(); - thread::spawn(move || { - let mut rng = crate::common::test_rng(); - for _ in 0..M { - if rng.random_bool(1.0 / (N as f64)) { - drop(r.write().unwrap()); - } else { - drop(r.read().unwrap()); + let r = Arc::new(RwLock::new(())); + + let (tx, rx) = channel::<()>(); + for _ in 0..N { + let tx = tx.clone(); + let r = r.clone(); + thread::spawn(move || { + let mut rng = crate::common::test_rng(); + for _ in 0..M { + if rng.random_bool(1.0 / (N as f64)) { + drop(maybe_unwrap(r.write())); + } else { + drop(maybe_unwrap(r.read())); + } } + drop(tx); + }); + } + drop(tx); + let _ = rx.recv(); + } +); + +nonpoison_and_poison_unwrap_test!( + name: test_rw_arc, + test_body: { + use locks::RwLock; + + let arc = Arc::new(RwLock::new(0)); + let arc2 = arc.clone(); + let (tx, rx) = channel(); + + thread::spawn(move || { + let mut lock = maybe_unwrap(arc2.write()); + for _ in 0..10 { + let tmp = *lock; + *lock = -1; + thread::yield_now(); + *lock = tmp + 1; } - drop(tx); + tx.send(()).unwrap(); }); + + // Readers try to catch the writer in the act + let mut children = Vec::new(); + for _ in 0..5 { + let arc3 = arc.clone(); + children.push(thread::spawn(move || { + let lock = maybe_unwrap(arc3.read()); + assert!(*lock >= 0); + })); + } + + // Wait for children to pass their asserts + for r in children { + assert!(r.join().is_ok()); + } + + // Wait for writer to finish + rx.recv().unwrap(); + let lock = maybe_unwrap(arc.read()); + assert_eq!(*lock, 10); } - drop(tx); - let _ = rx.recv(); -} +); -#[test] #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] -fn test_rw_arc_poison_wr() { - let arc = Arc::new(RwLock::new(1)); - let arc2 = arc.clone(); - let _: Result<(), _> = thread::spawn(move || { - let _lock = arc2.write().unwrap(); - panic!(); - }) - .join(); - assert!(arc.read().is_err()); -} +nonpoison_and_poison_unwrap_test!( + name: test_rw_arc_access_in_unwind, + test_body: { + use locks::RwLock; + + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _ = thread::spawn(move || -> () { + struct Unwinder { + i: Arc<RwLock<isize>>, + } + impl Drop for Unwinder { + fn drop(&mut self) { + let mut lock = maybe_unwrap(self.i.write()); + *lock += 1; + } + } + let _u = Unwinder { i: arc2 }; + panic!(); + }) + .join(); + let lock = maybe_unwrap(arc.read()); + assert_eq!(*lock, 2); + } +); + +nonpoison_and_poison_unwrap_test!( + name: test_rwlock_unsized, + test_body: { + use locks::RwLock; + + let rw: &RwLock<[i32]> = &RwLock::new([1, 2, 3]); + { + let b = &mut *maybe_unwrap(rw.write()); + b[0] = 4; + b[2] = 5; + } + let comp: &[i32] = &[4, 2, 5]; + assert_eq!(&*maybe_unwrap(rw.read()), comp); + } +); -#[test] -#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] -fn test_rw_arc_poison_mapped_w_r() { - let arc = Arc::new(RwLock::new(1)); - let arc2 = arc.clone(); - let _: Result<(), _> = thread::spawn(move || { - let lock = arc2.write().unwrap(); - let _lock = RwLockWriteGuard::map(lock, |val| val); - panic!(); - }) - .join(); - 
assert!(arc.read().is_err()); -} +nonpoison_and_poison_unwrap_test!( + name: test_into_inner, + test_body: { + use locks::RwLock; -#[test] -#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] -fn test_rw_arc_poison_ww() { - let arc = Arc::new(RwLock::new(1)); - assert!(!arc.is_poisoned()); - let arc2 = arc.clone(); - let _: Result<(), _> = thread::spawn(move || { - let _lock = arc2.write().unwrap(); - panic!(); - }) - .join(); - assert!(arc.write().is_err()); - assert!(arc.is_poisoned()); -} + let m = RwLock::new(NonCopy(10)); + assert_eq!(maybe_unwrap(m.into_inner()), NonCopy(10)); + } +); -#[test] -#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] -fn test_rw_arc_poison_mapped_w_w() { - let arc = Arc::new(RwLock::new(1)); - let arc2 = arc.clone(); - let _: Result<(), _> = thread::spawn(move || { - let lock = arc2.write().unwrap(); - let _lock = RwLockWriteGuard::map(lock, |val| val); - panic!(); - }) - .join(); - assert!(arc.write().is_err()); - assert!(arc.is_poisoned()); -} +nonpoison_and_poison_unwrap_test!( + name: test_into_inner_drop, + test_body: { + use locks::RwLock; -#[test] -#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] -fn test_rw_arc_no_poison_rr() { - let arc = Arc::new(RwLock::new(1)); - let arc2 = arc.clone(); - let _: Result<(), _> = thread::spawn(move || { - let _lock = arc2.read().unwrap(); - panic!(); - }) - .join(); - let lock = arc.read().unwrap(); - assert_eq!(*lock, 1); -} + struct Foo(Arc<AtomicUsize>); + impl Drop for Foo { + fn drop(&mut self) { + self.0.fetch_add(1, Ordering::SeqCst); + } + } -#[test] -#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] -fn test_rw_arc_no_poison_mapped_r_r() { - let arc = Arc::new(RwLock::new(1)); - let arc2 = arc.clone(); - let _: Result<(), _> = thread::spawn(move || { - let lock = arc2.read().unwrap(); - let _lock = RwLockReadGuard::map(lock, |val| val); - panic!(); - }) - .join(); - let lock = arc.read().unwrap(); - assert_eq!(*lock, 1); -} + let num_drops = Arc::new(AtomicUsize::new(0)); + let m = RwLock::new(Foo(num_drops.clone())); + assert_eq!(num_drops.load(Ordering::SeqCst), 0); + { + let _inner = maybe_unwrap(m.into_inner()); + assert_eq!(num_drops.load(Ordering::SeqCst), 0); + } + assert_eq!(num_drops.load(Ordering::SeqCst), 1); + } +); -#[test] -#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] -fn test_rw_arc_no_poison_rw() { - let arc = Arc::new(RwLock::new(1)); - let arc2 = arc.clone(); - let _: Result<(), _> = thread::spawn(move || { - let _lock = arc2.read().unwrap(); - panic!() - }) - .join(); - let lock = arc.write().unwrap(); - assert_eq!(*lock, 1); -} +nonpoison_and_poison_unwrap_test!( + name: test_get_cloned, + test_body: { + use locks::RwLock; -#[test] -#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] -fn test_rw_arc_no_poison_mapped_r_w() { - let arc = Arc::new(RwLock::new(1)); - let arc2 = arc.clone(); - let _: Result<(), _> = thread::spawn(move || { - let lock = arc2.read().unwrap(); - let _lock = RwLockReadGuard::map(lock, |val| val); - panic!(); - }) - .join(); - let lock = arc.write().unwrap(); - assert_eq!(*lock, 1); -} + #[derive(Clone, Eq, PartialEq, Debug)] + struct Cloneable(i32); -#[test] -fn test_rw_arc() { - let arc = Arc::new(RwLock::new(0)); - let arc2 = arc.clone(); - let (tx, rx) = channel(); - - thread::spawn(move || { - let mut lock = arc2.write().unwrap(); - for _ in 0..10 { - let tmp = *lock; - *lock = -1; - 
thread::yield_now(); - *lock = tmp + 1; + let m = RwLock::new(Cloneable(10)); + + assert_eq!(maybe_unwrap(m.get_cloned()), Cloneable(10)); + } +); + +nonpoison_and_poison_unwrap_test!( + name: test_get_mut, + test_body: { + use locks::RwLock; + + let mut m = RwLock::new(NonCopy(10)); + *maybe_unwrap(m.get_mut()) = NonCopy(20); + assert_eq!(maybe_unwrap(m.into_inner()), NonCopy(20)); + } +); + +nonpoison_and_poison_unwrap_test!( + name: test_set, + test_body: { + use locks::RwLock; + + fn inner<T>(mut init: impl FnMut() -> T, mut value: impl FnMut() -> T) + where + T: Debug + Eq, + { + let m = RwLock::new(init()); + + assert_eq!(*maybe_unwrap(m.read()), init()); + maybe_unwrap(m.set(value())); + assert_eq!(*maybe_unwrap(m.read()), value()); + } + + inner(|| NonCopy(10), || NonCopy(20)); + inner(|| NonCopyNeedsDrop(10), || NonCopyNeedsDrop(20)); + } +); + +nonpoison_and_poison_unwrap_test!( + name: test_replace, + test_body: { + use locks::RwLock; + + fn inner<T>(mut init: impl FnMut() -> T, mut value: impl FnMut() -> T) + where + T: Debug + Eq, + { + let m = RwLock::new(init()); + + assert_eq!(*maybe_unwrap(m.read()), init()); + assert_eq!(maybe_unwrap(m.replace(value())), init()); + assert_eq!(*maybe_unwrap(m.read()), value()); } - tx.send(()).unwrap(); - }); - // Readers try to catch the writer in the act - let mut children = Vec::new(); - for _ in 0..5 { - let arc3 = arc.clone(); - children.push(thread::spawn(move || { - let lock = arc3.read().unwrap(); - assert!(*lock >= 0); - })); + inner(|| NonCopy(10), || NonCopy(20)); + inner(|| NonCopyNeedsDrop(10), || NonCopyNeedsDrop(20)); + } +); + +nonpoison_and_poison_unwrap_test!( + name: test_read_guard_covariance, + test_body: { + use locks::{RwLock, RwLockReadGuard}; + + fn do_stuff<'a>(_: RwLockReadGuard<'_, &'a i32>, _: &'a i32) {} + let j: i32 = 5; + let lock = RwLock::new(&j); + { + let i = 6; + do_stuff(maybe_unwrap(lock.read()), &i); + } + drop(lock); } +); + +nonpoison_and_poison_unwrap_test!( + name: test_mapped_read_guard_covariance, + test_body: { + use locks::{RwLock, RwLockReadGuard, MappedRwLockReadGuard}; + + fn do_stuff<'a>(_: MappedRwLockReadGuard<'_, &'a i32>, _: &'a i32) {} + let j: i32 = 5; + let lock = RwLock::new((&j, &j)); + { + let i = 6; + let guard = maybe_unwrap(lock.read()); + let guard = RwLockReadGuard::map(guard, |(val, _val)| val); + do_stuff(guard, &i); + } + drop(lock); + } +); + +nonpoison_and_poison_unwrap_test!( + name: test_downgrade_basic, + test_body: { + use locks::{RwLock, RwLockWriteGuard}; + + let r = RwLock::new(()); - // Wait for children to pass their asserts - for r in children { - assert!(r.join().is_ok()); + let write_guard = maybe_unwrap(r.write()); + let _read_guard = RwLockWriteGuard::downgrade(write_guard); } +); - // Wait for writer to finish - rx.recv().unwrap(); - let lock = arc.read().unwrap(); - assert_eq!(*lock, 10); -} +// FIXME: On macOS we use a provenance-incorrect implementation and Miri catches that issue. +// See <https://github.com/rust-lang/rust/issues/121950> for details. 
+#[cfg_attr(all(miri, target_os = "macos"), ignore)] +nonpoison_and_poison_unwrap_test!( + name: test_downgrade_observe, + test_body: { + use locks::{RwLock, RwLockWriteGuard}; + + // Inspired by the test `test_rwlock_downgrade` from: + // https://github.com/Amanieu/parking_lot/blob/master/src/rwlock.rs + + const W: usize = 20; + const N: usize = if cfg!(miri) { 40 } else { 100 }; + + // This test spawns `W` writer threads, where each will increment a counter `N` times, + // ensuring that the value they wrote has not changed after downgrading. + + let rw = Arc::new(RwLock::new(0)); + + // Spawn the writers that will do `W * N` operations and checks. + let handles: Vec<_> = (0..W) + .map(|_| { + let rw = rw.clone(); + thread::spawn(move || { + for _ in 0..N { + // Increment the counter. + let mut write_guard = maybe_unwrap(rw.write()); + *write_guard += 1; + let cur_val = *write_guard; + + // Downgrade the lock to read mode, where the value protected cannot be + // modified. + let read_guard = RwLockWriteGuard::downgrade(write_guard); + assert_eq!(cur_val, *read_guard); + } + }) + }) + .collect(); -#[test] -#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] -fn test_rw_arc_access_in_unwind() { - let arc = Arc::new(RwLock::new(1)); - let arc2 = arc.clone(); - let _ = thread::spawn(move || -> () { - struct Unwinder { - i: Arc<RwLock<isize>>, + for handle in handles { + handle.join().unwrap(); } - impl Drop for Unwinder { - fn drop(&mut self) { - let mut lock = self.i.write().unwrap(); - *lock += 1; - } + + assert_eq!(*maybe_unwrap(rw.read()), W * N); + } +); + +// FIXME: On macOS we use a provenance-incorrect implementation and Miri catches that issue. +// See <https://github.com/rust-lang/rust/issues/121950> for details. +#[cfg_attr(all(miri, target_os = "macos"), ignore)] +nonpoison_and_poison_unwrap_test!( + name: test_downgrade_atomic, + test_body: { + use locks::{RwLock, RwLockWriteGuard}; + + const NEW_VALUE: i32 = -1; + + // This test checks that `downgrade` is atomic, meaning as soon as a write lock has been + // downgraded, the lock must be in read mode and no other threads can take the write lock to + // modify the protected value. + + // `W` is the number of evil writer threads. + const W: usize = 20; + let rwlock = Arc::new(RwLock::new(0)); + + // Spawns many evil writer threads that will try and write to the locked value before the + // initial writer (who has the exclusive lock) can read after it downgrades. + // If the `RwLock` behaves correctly, then the initial writer should read the value it wrote + // itself as no other thread should be able to mutate the protected value. + + // Put the lock in write mode, causing all future threads trying to access this go to sleep. + let mut main_write_guard = maybe_unwrap(rwlock.write()); + + // Spawn all of the evil writer threads. They will each increment the protected value by 1. + let handles: Vec<_> = (0..W) + .map(|_| { + let rwlock = rwlock.clone(); + thread::spawn(move || { + // Will go to sleep since the main thread initially has the write lock. + let mut evil_guard = maybe_unwrap(rwlock.write()); + *evil_guard += 1; + }) + }) + .collect(); + + // Wait for a good amount of time so that evil threads go to sleep. + // Note: this is not strictly necessary... + let eternity = std::time::Duration::from_millis(42); + thread::sleep(eternity); + + // Once everyone is asleep, set the value to `NEW_VALUE`. + *main_write_guard = NEW_VALUE; + + // Atomically downgrade the write guard into a read guard. 
+ let main_read_guard = RwLockWriteGuard::downgrade(main_write_guard); + + // If the above is not atomic, then it would be possible for an evil thread to get in front + // of this read and change the value to be non-negative. + assert_eq!(*main_read_guard, NEW_VALUE, "`downgrade` was not atomic"); + + // Drop the main read guard and allow the evil writer threads to start incrementing. + drop(main_read_guard); + + for handle in handles { + handle.join().unwrap(); } - let _u = Unwinder { i: arc2 }; - panic!(); - }) - .join(); - let lock = arc.read().unwrap(); - assert_eq!(*lock, 2); -} + + let final_check = maybe_unwrap(rwlock.read()); + assert_eq!(*final_check, W as i32 + NEW_VALUE); + } +); + +nonpoison_and_poison_unwrap_test!( + name: test_mapping_mapped_guard, + test_body: { + use locks::{ + RwLock, RwLockReadGuard, RwLockWriteGuard, MappedRwLockReadGuard, MappedRwLockWriteGuard + }; + + let arr = [0; 4]; + let mut lock = RwLock::new(arr); + let guard = maybe_unwrap(lock.write()); + let guard = RwLockWriteGuard::map(guard, |arr| &mut arr[..2]); + let mut guard = MappedRwLockWriteGuard::map(guard, |slice| &mut slice[1..]); + assert_eq!(guard.len(), 1); + guard[0] = 42; + drop(guard); + assert_eq!(*maybe_unwrap(lock.get_mut()), [0, 42, 0, 0]); + + let guard = maybe_unwrap(lock.read()); + let guard = RwLockReadGuard::map(guard, |arr| &arr[..2]); + let guard = MappedRwLockReadGuard::map(guard, |slice| &slice[1..]); + assert_eq!(*guard, [42]); + drop(guard); + assert_eq!(*maybe_unwrap(lock.get_mut()), [0, 42, 0, 0]); + } +); #[test] -fn test_rwlock_unsized() { - let rw: &RwLock<[i32]> = &RwLock::new([1, 2, 3]); - { - let b = &mut *rw.write().unwrap(); - b[0] = 4; - b[2] = 5; +fn nonpoison_test_rwlock_try_write() { + use std::sync::nonpoison::{RwLock, RwLockReadGuard, WouldBlock}; + + let lock = RwLock::new(0isize); + let read_guard = lock.read(); + + let write_result = lock.try_write(); + match write_result { + Err(WouldBlock) => (), + Ok(_) => assert!(false, "try_write should not succeed while read_guard is in scope"), + } + + drop(read_guard); + let mapped_read_guard = RwLockReadGuard::map(lock.read(), |_| &()); + + let write_result = lock.try_write(); + match write_result { + Err(WouldBlock) => (), + Ok(_) => assert!(false, "try_write should not succeed while mapped_read_guard is in scope"), } - let comp: &[i32] = &[4, 2, 5]; - assert_eq!(&*rw.read().unwrap(), comp); + + drop(mapped_read_guard); } #[test] -fn test_rwlock_try_write() { +fn poison_test_rwlock_try_write() { + use std::sync::poison::{RwLock, RwLockReadGuard, TryLockError}; + let lock = RwLock::new(0isize); let read_guard = lock.read().unwrap(); @@ -285,6 +503,11 @@ fn test_rwlock_try_write() { drop(mapped_read_guard); } +//////////////////////////////////////////////////////////////////////////////////////////////////// +// Poison Tests +//////////////////////////////////////////////////////////////////////////////////////////////////// + +/// Creates a rwlock that is immediately poisoned. 
fn new_poisoned_rwlock<T>(value: T) -> RwLock<T> { let lock = RwLock::new(value); @@ -301,30 +524,6 @@ fn new_poisoned_rwlock<T>(value: T) -> RwLock<T> { } #[test] -fn test_into_inner() { - let m = RwLock::new(NonCopy(10)); - assert_eq!(m.into_inner().unwrap(), NonCopy(10)); -} - -#[test] -fn test_into_inner_drop() { - struct Foo(Arc<AtomicUsize>); - impl Drop for Foo { - fn drop(&mut self) { - self.0.fetch_add(1, Ordering::SeqCst); - } - } - let num_drops = Arc::new(AtomicUsize::new(0)); - let m = RwLock::new(Foo(num_drops.clone())); - assert_eq!(num_drops.load(Ordering::SeqCst), 0); - { - let _inner = m.into_inner().unwrap(); - assert_eq!(num_drops.load(Ordering::SeqCst), 0); - } - assert_eq!(num_drops.load(Ordering::SeqCst), 1); -} - -#[test] #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] fn test_into_inner_poison() { let m = new_poisoned_rwlock(NonCopy(10)); @@ -336,15 +535,11 @@ fn test_into_inner_poison() { } #[test] -fn test_get_cloned() { - let m = RwLock::new(Cloneable(10)); - - assert_eq!(m.get_cloned().unwrap(), Cloneable(10)); -} - -#[test] #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] fn test_get_cloned_poison() { + #[derive(Clone, Eq, PartialEq, Debug)] + struct Cloneable(i32); + let m = new_poisoned_rwlock(Cloneable(10)); match m.get_cloned() { @@ -354,13 +549,6 @@ fn test_get_cloned_poison() { } #[test] -fn test_get_mut() { - let mut m = RwLock::new(NonCopy(10)); - *m.get_mut().unwrap() = NonCopy(20); - assert_eq!(m.into_inner().unwrap(), NonCopy(20)); -} - -#[test] #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] fn test_get_mut_poison() { let mut m = new_poisoned_rwlock(NonCopy(10)); @@ -372,23 +560,6 @@ fn test_get_mut_poison() { } #[test] -fn test_set() { - fn inner<T>(mut init: impl FnMut() -> T, mut value: impl FnMut() -> T) - where - T: Debug + Eq, - { - let m = RwLock::new(init()); - - assert_eq!(*m.read().unwrap(), init()); - m.set(value()).unwrap(); - assert_eq!(*m.read().unwrap(), value()); - } - - inner(|| NonCopy(10), || NonCopy(20)); - inner(|| NonCopyNeedsDrop(10), || NonCopyNeedsDrop(20)); -} - -#[test] #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] fn test_set_poison() { fn inner<T>(mut init: impl FnMut() -> T, mut value: impl FnMut() -> T) @@ -411,23 +582,6 @@ fn test_set_poison() { } #[test] -fn test_replace() { - fn inner<T>(mut init: impl FnMut() -> T, mut value: impl FnMut() -> T) - where - T: Debug + Eq, - { - let m = RwLock::new(init()); - - assert_eq!(*m.read().unwrap(), init()); - assert_eq!(m.replace(value()).unwrap(), init()); - assert_eq!(*m.read().unwrap(), value()); - } - - inner(|| NonCopy(10), || NonCopy(20)); - inner(|| NonCopyNeedsDrop(10), || NonCopyNeedsDrop(20)); -} - -#[test] #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] fn test_replace_poison() { fn inner<T>(mut init: impl FnMut() -> T, mut value: impl FnMut() -> T) @@ -450,49 +604,118 @@ fn test_replace_poison() { } #[test] -fn test_read_guard_covariance() { - fn do_stuff<'a>(_: RwLockReadGuard<'_, &'a i32>, _: &'a i32) {} - let j: i32 = 5; - let lock = RwLock::new(&j); - { - let i = 6; - do_stuff(lock.read().unwrap(), &i); - } - drop(lock); +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_rw_arc_poison_wr() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let _lock = arc2.write().unwrap(); + panic!(); + }) + .join(); + 
assert!(arc.read().is_err()); } #[test] -fn test_mapped_read_guard_covariance() { - fn do_stuff<'a>(_: MappedRwLockReadGuard<'_, &'a i32>, _: &'a i32) {} - let j: i32 = 5; - let lock = RwLock::new((&j, &j)); - { - let i = 6; - let guard = lock.read().unwrap(); - let guard = RwLockReadGuard::map(guard, |(val, _val)| val); - do_stuff(guard, &i); - } - drop(lock); +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_rw_arc_poison_mapped_w_r() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let lock = arc2.write().unwrap(); + let _lock = RwLockWriteGuard::map(lock, |val| val); + panic!(); + }) + .join(); + assert!(arc.read().is_err()); } #[test] -fn test_mapping_mapped_guard() { - let arr = [0; 4]; - let mut lock = RwLock::new(arr); - let guard = lock.write().unwrap(); - let guard = RwLockWriteGuard::map(guard, |arr| &mut arr[..2]); - let mut guard = MappedRwLockWriteGuard::map(guard, |slice| &mut slice[1..]); - assert_eq!(guard.len(), 1); - guard[0] = 42; - drop(guard); - assert_eq!(*lock.get_mut().unwrap(), [0, 42, 0, 0]); - - let guard = lock.read().unwrap(); - let guard = RwLockReadGuard::map(guard, |arr| &arr[..2]); - let guard = MappedRwLockReadGuard::map(guard, |slice| &slice[1..]); - assert_eq!(*guard, [42]); - drop(guard); - assert_eq!(*lock.get_mut().unwrap(), [0, 42, 0, 0]); +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_rw_arc_poison_ww() { + let arc = Arc::new(RwLock::new(1)); + assert!(!arc.is_poisoned()); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let _lock = arc2.write().unwrap(); + panic!(); + }) + .join(); + assert!(arc.write().is_err()); + assert!(arc.is_poisoned()); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_rw_arc_poison_mapped_w_w() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let lock = arc2.write().unwrap(); + let _lock = RwLockWriteGuard::map(lock, |val| val); + panic!(); + }) + .join(); + assert!(arc.write().is_err()); + assert!(arc.is_poisoned()); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_rw_arc_no_poison_rr() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let _lock = arc2.read().unwrap(); + panic!(); + }) + .join(); + let lock = arc.read().unwrap(); + assert_eq!(*lock, 1); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_rw_arc_no_poison_mapped_r_r() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let lock = arc2.read().unwrap(); + let _lock = RwLockReadGuard::map(lock, |val| val); + panic!(); + }) + .join(); + let lock = arc.read().unwrap(); + assert_eq!(*lock, 1); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_rw_arc_no_poison_rw() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let _lock = arc2.read().unwrap(); + panic!() + }) + .join(); + let lock = arc.write().unwrap(); + assert_eq!(*lock, 1); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_rw_arc_no_poison_mapped_r_w() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = 
arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let lock = arc2.read().unwrap(); + let _lock = RwLockReadGuard::map(lock, |val| val); + panic!(); + }) + .join(); + let lock = arc.write().unwrap(); + assert_eq!(*lock, 1); } #[test] @@ -638,114 +861,3 @@ fn panic_while_mapping_write_unlocked_poison() { drop(lock); } - -#[test] -fn test_downgrade_basic() { - let r = RwLock::new(()); - - let write_guard = r.write().unwrap(); - let _read_guard = RwLockWriteGuard::downgrade(write_guard); -} - -#[test] -// FIXME: On macOS we use a provenance-incorrect implementation and Miri catches that issue. -// See <https://github.com/rust-lang/rust/issues/121950> for details. -#[cfg_attr(all(miri, target_os = "macos"), ignore)] -fn test_downgrade_observe() { - // Taken from the test `test_rwlock_downgrade` from: - // https://github.com/Amanieu/parking_lot/blob/master/src/rwlock.rs - - const W: usize = 20; - const N: usize = if cfg!(miri) { 40 } else { 100 }; - - // This test spawns `W` writer threads, where each will increment a counter `N` times, ensuring - // that the value they wrote has not changed after downgrading. - - let rw = Arc::new(RwLock::new(0)); - - // Spawn the writers that will do `W * N` operations and checks. - let handles: Vec<_> = (0..W) - .map(|_| { - let rw = rw.clone(); - thread::spawn(move || { - for _ in 0..N { - // Increment the counter. - let mut write_guard = rw.write().unwrap(); - *write_guard += 1; - let cur_val = *write_guard; - - // Downgrade the lock to read mode, where the value protected cannot be modified. - let read_guard = RwLockWriteGuard::downgrade(write_guard); - assert_eq!(cur_val, *read_guard); - } - }) - }) - .collect(); - - for handle in handles { - handle.join().unwrap(); - } - - assert_eq!(*rw.read().unwrap(), W * N); -} - -#[test] -// FIXME: On macOS we use a provenance-incorrect implementation and Miri catches that issue. -// See <https://github.com/rust-lang/rust/issues/121950> for details. -#[cfg_attr(all(miri, target_os = "macos"), ignore)] -fn test_downgrade_atomic() { - const NEW_VALUE: i32 = -1; - - // This test checks that `downgrade` is atomic, meaning as soon as a write lock has been - // downgraded, the lock must be in read mode and no other threads can take the write lock to - // modify the protected value. - - // `W` is the number of evil writer threads. - const W: usize = 20; - let rwlock = Arc::new(RwLock::new(0)); - - // Spawns many evil writer threads that will try and write to the locked value before the - // initial writer (who has the exclusive lock) can read after it downgrades. - // If the `RwLock` behaves correctly, then the initial writer should read the value it wrote - // itself as no other thread should be able to mutate the protected value. - - // Put the lock in write mode, causing all future threads trying to access this go to sleep. - let mut main_write_guard = rwlock.write().unwrap(); - - // Spawn all of the evil writer threads. They will each increment the protected value by 1. - let handles: Vec<_> = (0..W) - .map(|_| { - let rwlock = rwlock.clone(); - thread::spawn(move || { - // Will go to sleep since the main thread initially has the write lock. - let mut evil_guard = rwlock.write().unwrap(); - *evil_guard += 1; - }) - }) - .collect(); - - // Wait for a good amount of time so that evil threads go to sleep. - // Note: this is not strictly necessary... - let eternity = std::time::Duration::from_millis(42); - thread::sleep(eternity); - - // Once everyone is asleep, set the value to `NEW_VALUE`. 
- *main_write_guard = NEW_VALUE; - - // Atomically downgrade the write guard into a read guard. - let main_read_guard = RwLockWriteGuard::downgrade(main_write_guard); - - // If the above is not atomic, then it would be possible for an evil thread to get in front of - // this read and change the value to be non-negative. - assert_eq!(*main_read_guard, NEW_VALUE, "`downgrade` was not atomic"); - - // Drop the main read guard and allow the evil writer threads to start incrementing. - drop(main_read_guard); - - for handle in handles { - handle.join().unwrap(); - } - - let final_check = rwlock.read().unwrap(); - assert_eq!(*final_check, W as i32 + NEW_VALUE); -} diff --git a/library/std/tests/thread.rs b/library/std/tests/thread.rs index 32561dd6ab6..29f220d8a70 100644 --- a/library/std/tests/thread.rs +++ b/library/std/tests/thread.rs @@ -19,6 +19,7 @@ fn sleep_very_long() { } #[test] +#[cfg_attr(target_env = "sgx", ignore = "Time within SGX enclave cannot be trusted")] fn sleep_until() { let now = Instant::now(); let period = Duration::from_millis(100); |
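Editor's note: the env_modify.rs hunk above also migrates from the external `cfg_if` crate to the unstable built-in `cfg_select!` macro (no import needed, but it requires `#![feature(cfg_select)]`, which the hunk adds at the top of that file). A standalone sketch of the same arm syntax, assuming a nightly toolchain:

```rust
#![feature(cfg_select)] // nightly-only builtin macro

fn main() {
    // Same shape as the diff: arms are tried top to bottom, the first
    // matching cfg predicate wins, and `_` is the fallback arm (the test
    // uses `_ => {}` to compile to nothing on other targets).
    cfg_select! {
        unix => {
            println!("unix: the home directory is read from $HOME");
        }
        windows => {
            println!("windows: it is read from %USERPROFILE%");
        }
        _ => {}
    }
}
```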
