about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Yuki Okushi <huyuumi.dev@gmail.com>    2021-01-27 04:43:18 +0900
committer GitHub <noreply@github.com>    2021-01-27 04:43:18 +0900
commit    8299105821327f2a316d0ffa925e55e770b65ad2 (patch)
tree      612cf5dd644286bc1862fc430b4e4ef0c92fbb5a
parent    c2c90bf5489a50c6cfc808fba5d587009eea4bff (diff)
parent    495f7cca85b2ca3762ad2bc812238621e4f28c60 (diff)
download  rust-8299105821327f2a316d0ffa925e55e770b65ad2.tar.gz
          rust-8299105821327f2a316d0ffa925e55e770b65ad2.zip
Rollup merge of #81191 - ssomers:btree_more_order_chaos, r=Mark-Simulacrum
BTreeMap: test all borrowing interfaces and test more chaotic order behavior

Inspired by #81169, test what happens if you mess up order of the type with which you search (as opposed to the key type).

r? `@Mark-Simulacrum`
-rw-r--r--    library/alloc/src/collections/btree/map/tests.rs    88
1 file changed, 87 insertions, 1 deletion
diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs
index f92aed8ce15..ba5a4442f56 100644
--- a/library/alloc/src/collections/btree/map/tests.rs
+++ b/library/alloc/src/collections/btree/map/tests.rs
@@ -777,7 +777,7 @@ fn test_range_backwards_4() {
 
 #[test]
 #[should_panic]
-fn test_range_backwards_5() {
+fn test_range_finding_ill_order_in_map() {
     let mut map = BTreeMap::new();
     map.insert(Cyclic3::B, ());
     // Lacking static_assert, call `range` conditionally, to emphasise that
@@ -789,6 +789,47 @@ fn test_range_backwards_5() {
 }
 
 #[test]
+#[should_panic]
+fn test_range_finding_ill_order_in_range_ord() {
+    // Has proper order the first time asked, then flips around.
+    struct EvilTwin(i32);
+
+    impl PartialOrd for EvilTwin {
+        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+            Some(self.cmp(other))
+        }
+    }
+
+    static COMPARES: AtomicUsize = AtomicUsize::new(0);
+    impl Ord for EvilTwin {
+        fn cmp(&self, other: &Self) -> Ordering {
+            let ord = self.0.cmp(&other.0);
+            if COMPARES.fetch_add(1, SeqCst) > 0 { ord.reverse() } else { ord }
+        }
+    }
+
+    impl PartialEq for EvilTwin {
+        fn eq(&self, other: &Self) -> bool {
+            self.0.eq(&other.0)
+        }
+    }
+
+    impl Eq for EvilTwin {}
+
+    #[derive(PartialEq, Eq, PartialOrd, Ord)]
+    struct CompositeKey(i32, EvilTwin);
+
+    impl Borrow<EvilTwin> for CompositeKey {
+        fn borrow(&self) -> &EvilTwin {
+            &self.1
+        }
+    }
+
+    let map = (0..12).map(|i| (CompositeKey(i, EvilTwin(i)), ())).collect::<BTreeMap<_, _>>();
+    map.range(EvilTwin(5)..=EvilTwin(7));
+}
+
+#[test]
 fn test_range_1000() {
     // Miri is too slow
     let size = if cfg!(miri) { MIN_INSERTS_HEIGHT_2 as u32 } else { 1000 };
@@ -1222,6 +1263,51 @@ fn test_borrow() {
         map.insert(Rc::new(0), 1);
         assert_eq!(map[&0], 1);
     }
+
+    #[allow(dead_code)]
+    fn get<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) {
+        v.get(t);
+    }
+
+    #[allow(dead_code)]
+    fn get_mut<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+        v.get_mut(t);
+    }
+
+    #[allow(dead_code)]
+    fn get_key_value<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) {
+        v.get_key_value(t);
+    }
+
+    #[allow(dead_code)]
+    fn contains_key<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) {
+        v.contains_key(t);
+    }
+
+    #[allow(dead_code)]
+    fn range<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: T) {
+        v.range(t..);
+    }
+
+    #[allow(dead_code)]
+    fn range_mut<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: T) {
+        v.range_mut(t..);
+    }
+
+    #[allow(dead_code)]
+    fn remove<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+        v.remove(t);
+    }
+
+    #[allow(dead_code)]
+    fn remove_entry<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+        v.remove_entry(t);
+    }
+
+    #[allow(dead_code)]
+    fn split_off<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+        v.split_off(t);
+    }
 }
 
 #[test]