Diffstat (limited to 'library/core/src')
24 files changed, 519 insertions, 306 deletions
diff --git a/library/core/src/any.rs b/library/core/src/any.rs index e7d9763d46e..76ea2d18a82 100644 --- a/library/core/src/any.rs +++ b/library/core/src/any.rs @@ -705,7 +705,8 @@ impl dyn Any + Send + Sync { /// std::mem::forget(fake_one_ring); /// } /// ``` -#[derive(Clone, Copy, Eq, PartialOrd, Ord)] +#[derive(Copy, PartialOrd, Ord)] +#[derive_const(Clone, Eq)] #[stable(feature = "rust1", since = "1.0.0")] #[lang = "type_id"] pub struct TypeId { diff --git a/library/core/src/array/equality.rs b/library/core/src/array/equality.rs index 1ad2cca64a3..c2c7ccf0daa 100644 --- a/library/core/src/array/equality.rs +++ b/library/core/src/array/equality.rs @@ -1,9 +1,10 @@ use crate::cmp::BytewiseEq; #[stable(feature = "rust1", since = "1.0.0")] -impl<T, U, const N: usize> PartialEq<[U; N]> for [T; N] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T, U, const N: usize> const PartialEq<[U; N]> for [T; N] where - T: PartialEq<U>, + T: [const] PartialEq<U>, { #[inline] fn eq(&self, other: &[U; N]) -> bool { @@ -16,9 +17,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<T, U, const N: usize> PartialEq<[U]> for [T; N] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T, U, const N: usize> const PartialEq<[U]> for [T; N] where - T: PartialEq<U>, + T: [const] PartialEq<U>, { #[inline] fn eq(&self, other: &[U]) -> bool { @@ -37,9 +39,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<T, U, const N: usize> PartialEq<[U; N]> for [T] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T, U, const N: usize> const PartialEq<[U; N]> for [T] where - T: PartialEq<U>, + T: [const] PartialEq<U>, { #[inline] fn eq(&self, other: &[U; N]) -> bool { @@ -58,9 +61,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<T, U, const N: usize> PartialEq<&[U]> for [T; N] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T, U, const N: usize> const PartialEq<&[U]> for [T; N] where - T: PartialEq<U>, + T: [const] PartialEq<U>, { #[inline] fn eq(&self, other: &&[U]) -> bool { @@ -73,9 +77,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<T, U, const N: usize> PartialEq<[U; N]> for &[T] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T, U, const N: usize> const PartialEq<[U; N]> for &[T] where - T: PartialEq<U>, + T: [const] PartialEq<U>, { #[inline] fn eq(&self, other: &[U; N]) -> bool { @@ -88,9 +93,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<T, U, const N: usize> PartialEq<&mut [U]> for [T; N] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T, U, const N: usize> const PartialEq<&mut [U]> for [T; N] where - T: PartialEq<U>, + T: [const] PartialEq<U>, { #[inline] fn eq(&self, other: &&mut [U]) -> bool { @@ -103,9 +109,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<T, U, const N: usize> PartialEq<[U; N]> for &mut [T] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T, U, const N: usize> const PartialEq<[U; N]> for &mut [T] where - T: PartialEq<U>, + T: [const] PartialEq<U>, { #[inline] fn eq(&self, other: &[U; N]) -> bool { @@ -122,14 +129,18 @@ where // __impl_slice_eq2! 
{ [A; $N], &'b mut [B; $N] } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: Eq, const N: usize> Eq for [T; N] {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T: [const] Eq, const N: usize> const Eq for [T; N] {} +#[const_trait] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] trait SpecArrayEq<Other, const N: usize>: Sized { fn spec_eq(a: &[Self; N], b: &[Other; N]) -> bool; fn spec_ne(a: &[Self; N], b: &[Other; N]) -> bool; } -impl<T: PartialEq<Other>, Other, const N: usize> SpecArrayEq<Other, N> for T { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T: [const] PartialEq<Other>, Other, const N: usize> const SpecArrayEq<Other, N> for T { default fn spec_eq(a: &[Self; N], b: &[Other; N]) -> bool { a[..] == b[..] } @@ -138,7 +149,8 @@ impl<T: PartialEq<Other>, Other, const N: usize> SpecArrayEq<Other, N> for T { } } -impl<T: BytewiseEq<U>, U, const N: usize> SpecArrayEq<U, N> for T { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T: [const] BytewiseEq<U>, U, const N: usize> const SpecArrayEq<U, N> for T { fn spec_eq(a: &[T; N], b: &[U; N]) -> bool { // SAFETY: Arrays are compared element-wise, and don't add any padding // between elements, so when the elements are `BytewiseEq`, we can diff --git a/library/core/src/ascii/ascii_char.rs b/library/core/src/ascii/ascii_char.rs index 49d540314a2..178af2c0e3b 100644 --- a/library/core/src/ascii/ascii_char.rs +++ b/library/core/src/ascii/ascii_char.rs @@ -54,7 +54,8 @@ use crate::{assert_unsafe_precondition, fmt}; /// [chart]: https://www.unicode.org/charts/PDF/U0000.pdf /// [NIST FIPS 1-2]: https://nvlpubs.nist.gov/nistpubs/Legacy/FIPS/fipspub1-2-1977.pdf /// [NamesList]: https://www.unicode.org/Public/15.0.0/ucd/NamesList.txt -#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive(Copy, Hash)] +#[derive_const(Clone, Eq, PartialEq, Ord, PartialOrd)] #[unstable(feature = "ascii_char", issue = "110998")] #[repr(u8)] pub enum AsciiChar { diff --git a/library/core/src/cmp.rs b/library/core/src/cmp.rs index 0812322f3fb..94536f25b41 100644 --- a/library/core/src/cmp.rs +++ b/library/core/src/cmp.rs @@ -29,7 +29,7 @@ mod bytewise; pub(crate) use bytewise::BytewiseEq; use self::Ordering::*; -use crate::marker::PointeeSized; +use crate::marker::{Destruct, PointeeSized}; use crate::ops::ControlFlow; /// Trait for comparisons using the equality operator. @@ -334,7 +334,9 @@ pub macro PartialEq($item:item) { #[doc(alias = "!=")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "Eq"] -pub trait Eq: PartialEq<Self> + PointeeSized { +#[const_trait] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub trait Eq: [const] PartialEq<Self> + PointeeSized { // this method is used solely by `impl Eq or #[derive(Eq)]` to assert that every component of a // type implements `Eq` itself. The current deriving infrastructure means doing this assertion // without using a method on this trait is nearly impossible. @@ -380,8 +382,8 @@ pub struct AssertParamIsEq<T: Eq + PointeeSized> { /// /// assert_eq!(2.cmp(&1), Ordering::Greater); /// ``` -#[derive(Clone, Copy, Eq, PartialOrd, Ord, Debug, Hash)] -#[derive_const(PartialEq)] +#[derive(Copy, Debug, Hash)] +#[derive_const(Clone, Eq, PartialOrd, Ord, PartialEq)] #[stable(feature = "rust1", since = "1.0.0")] // This is a lang item only so that `BinOp::Cmp` in MIR can return it. 
// It has no special behavior, but does require that the three variants @@ -635,7 +637,11 @@ impl Ordering { #[inline] #[must_use] #[stable(feature = "ordering_chaining", since = "1.17.0")] - pub fn then_with<F: FnOnce() -> Ordering>(self, f: F) -> Ordering { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + pub const fn then_with<F>(self, f: F) -> Ordering + where + F: [const] FnOnce() -> Ordering + [const] Destruct, + { match self { Equal => f(), _ => self, @@ -659,13 +665,15 @@ impl Ordering { /// v.sort_by_key(|&num| (num > 3, Reverse(num))); /// assert_eq!(v, vec![3, 2, 1, 6, 5, 4]); /// ``` -#[derive(PartialEq, Eq, Debug, Copy, Default, Hash)] +#[derive(Copy, Debug, Hash)] +#[derive_const(PartialEq, Eq, Default)] #[stable(feature = "reverse_cmp_key", since = "1.19.0")] #[repr(transparent)] pub struct Reverse<T>(#[stable(feature = "reverse_cmp_key", since = "1.19.0")] pub T); #[stable(feature = "reverse_cmp_key", since = "1.19.0")] -impl<T: PartialOrd> PartialOrd for Reverse<T> { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T: [const] PartialOrd> const PartialOrd for Reverse<T> { #[inline] fn partial_cmp(&self, other: &Reverse<T>) -> Option<Ordering> { other.0.partial_cmp(&self.0) @@ -690,7 +698,8 @@ impl<T: PartialOrd> PartialOrd for Reverse<T> { } #[stable(feature = "reverse_cmp_key", since = "1.19.0")] -impl<T: Ord> Ord for Reverse<T> { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T: [const] Ord> const Ord for Reverse<T> { #[inline] fn cmp(&self, other: &Reverse<T>) -> Ordering { other.0.cmp(&self.0) @@ -957,7 +966,9 @@ impl<T: Clone> Clone for Reverse<T> { #[doc(alias = ">=")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "Ord"] -pub trait Ord: Eq + PartialOrd<Self> + PointeeSized { +#[const_trait] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub trait Ord: [const] Eq + [const] PartialOrd<Self> + PointeeSized { /// This method returns an [`Ordering`] between `self` and `other`. /// /// By convention, `self.cmp(&other)` returns the ordering matching the expression @@ -1011,7 +1022,7 @@ pub trait Ord: Eq + PartialOrd<Self> + PointeeSized { #[rustc_diagnostic_item = "cmp_ord_max"] fn max(self, other: Self) -> Self where - Self: Sized, + Self: Sized + [const] Destruct, { if other < self { self } else { other } } @@ -1050,7 +1061,7 @@ pub trait Ord: Eq + PartialOrd<Self> + PointeeSized { #[rustc_diagnostic_item = "cmp_ord_min"] fn min(self, other: Self) -> Self where - Self: Sized, + Self: Sized + [const] Destruct, { if other < self { other } else { self } } @@ -1076,7 +1087,7 @@ pub trait Ord: Eq + PartialOrd<Self> + PointeeSized { #[stable(feature = "clamp", since = "1.50.0")] fn clamp(self, min: Self, max: Self) -> Self where - Self: Sized, + Self: Sized + [const] Destruct, { assert!(min <= max); if self < min { @@ -1341,6 +1352,8 @@ pub macro Ord($item:item) { )] #[rustc_diagnostic_item = "PartialOrd"] #[allow(multiple_supertrait_upcastable)] // FIXME(sized_hierarchy): remove this +#[const_trait] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] pub trait PartialOrd<Rhs: PointeeSized = Self>: PartialEq<Rhs> + PointeeSized { /// This method returns an ordering between `self` and `other` values if one exists. 
/// @@ -1481,13 +1494,14 @@ pub trait PartialOrd<Rhs: PointeeSized = Self>: PartialEq<Rhs> + PointeeSized { } } -fn default_chaining_impl<T, U>( +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +const fn default_chaining_impl<T, U>( lhs: &T, rhs: &U, - p: impl FnOnce(Ordering) -> bool, + p: impl [const] FnOnce(Ordering) -> bool + [const] Destruct, ) -> ControlFlow<bool> where - T: PartialOrd<U> + PointeeSized, + T: [const] PartialOrd<U> + PointeeSized, U: PointeeSized, { // It's important that this only call `partial_cmp` once, not call `eq` then @@ -1545,7 +1559,8 @@ pub macro PartialOrd($item:item) { #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "cmp_min"] -pub fn min<T: Ord>(v1: T, v2: T) -> T { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub const fn min<T: [const] Ord + [const] Destruct>(v1: T, v2: T) -> T { v1.min(v2) } @@ -1575,7 +1590,12 @@ pub fn min<T: Ord>(v1: T, v2: T) -> T { #[inline] #[must_use] #[stable(feature = "cmp_min_max_by", since = "1.53.0")] -pub fn min_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub const fn min_by<T: [const] Destruct, F: [const] FnOnce(&T, &T) -> Ordering>( + v1: T, + v2: T, + compare: F, +) -> T { if compare(&v1, &v2).is_le() { v1 } else { v2 } } @@ -1600,7 +1620,13 @@ pub fn min_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T { #[inline] #[must_use] #[stable(feature = "cmp_min_max_by", since = "1.53.0")] -pub fn min_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub const fn min_by_key<T, F, K>(v1: T, v2: T, mut f: F) -> T +where + T: [const] Destruct, + F: [const] FnMut(&T) -> K + [const] Destruct, + K: [const] Ord + [const] Destruct, +{ if f(&v2) < f(&v1) { v2 } else { v1 } } @@ -1640,7 +1666,8 @@ pub fn min_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T { #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "cmp_max"] -pub fn max<T: Ord>(v1: T, v2: T) -> T { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub const fn max<T: [const] Ord + [const] Destruct>(v1: T, v2: T) -> T { v1.max(v2) } @@ -1670,7 +1697,12 @@ pub fn max<T: Ord>(v1: T, v2: T) -> T { #[inline] #[must_use] #[stable(feature = "cmp_min_max_by", since = "1.53.0")] -pub fn max_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub const fn max_by<T: [const] Destruct, F: [const] FnOnce(&T, &T) -> Ordering>( + v1: T, + v2: T, + compare: F, +) -> T { if compare(&v1, &v2).is_gt() { v1 } else { v2 } } @@ -1695,7 +1727,13 @@ pub fn max_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T { #[inline] #[must_use] #[stable(feature = "cmp_min_max_by", since = "1.53.0")] -pub fn max_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub const fn max_by_key<T, F, K>(v1: T, v2: T, mut f: F) -> T +where + T: [const] Destruct, + F: [const] FnMut(&T) -> K + [const] Destruct, + K: [const] Ord + [const] Destruct, +{ if f(&v2) < f(&v1) { v1 } else { v2 } } @@ -1739,9 +1777,10 @@ pub fn max_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T { #[inline] #[must_use] #[unstable(feature = "cmp_minmax", issue = "115939")] -pub fn minmax<T>(v1: T, v2: T) -> [T; 2] 
+#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub const fn minmax<T>(v1: T, v2: T) -> [T; 2] where - T: Ord, + T: [const] Ord, { if v2 < v1 { [v2, v1] } else { [v1, v2] } } @@ -1773,9 +1812,10 @@ where #[inline] #[must_use] #[unstable(feature = "cmp_minmax", issue = "115939")] -pub fn minmax_by<T, F>(v1: T, v2: T, compare: F) -> [T; 2] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub const fn minmax_by<T, F>(v1: T, v2: T, compare: F) -> [T; 2] where - F: FnOnce(&T, &T) -> Ordering, + F: [const] FnOnce(&T, &T) -> Ordering, { if compare(&v1, &v2).is_le() { [v1, v2] } else { [v2, v1] } } @@ -1801,10 +1841,11 @@ where #[inline] #[must_use] #[unstable(feature = "cmp_minmax", issue = "115939")] -pub fn minmax_by_key<T, F, K>(v1: T, v2: T, mut f: F) -> [T; 2] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +pub const fn minmax_by_key<T, F, K>(v1: T, v2: T, mut f: F) -> [T; 2] where - F: FnMut(&T) -> K, - K: Ord, + F: [const] FnMut(&T) -> K + [const] Destruct, + K: [const] Ord + [const] Destruct, { if f(&v2) < f(&v1) { [v2, v1] } else { [v1, v2] } } @@ -1830,7 +1871,8 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl PartialEq for () { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const PartialEq for () { #[inline] fn eq(&self, _other: &()) -> bool { true @@ -1848,7 +1890,8 @@ mod impls { macro_rules! eq_impl { ($($t:ty)*) => ($( #[stable(feature = "rust1", since = "1.0.0")] - impl Eq for $t {} + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const Eq for $t {} )*) } @@ -1896,7 +1939,8 @@ mod impls { macro_rules! partial_ord_impl { ($($t:ty)*) => ($( #[stable(feature = "rust1", since = "1.0.0")] - impl PartialOrd for $t { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const PartialOrd for $t { #[inline] fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match (*self <= *other, *self >= *other) { @@ -1913,7 +1957,8 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl PartialOrd for () { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const PartialOrd for () { #[inline] fn partial_cmp(&self, _: &()) -> Option<Ordering> { Some(Equal) @@ -1921,7 +1966,8 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl PartialOrd for bool { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const PartialOrd for bool { #[inline] fn partial_cmp(&self, other: &bool) -> Option<Ordering> { Some(self.cmp(other)) @@ -1935,7 +1981,8 @@ mod impls { macro_rules! 
ord_impl { ($($t:ty)*) => ($( #[stable(feature = "rust1", since = "1.0.0")] - impl PartialOrd for $t { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const PartialOrd for $t { #[inline] fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(crate::intrinsics::three_way_compare(*self, *other)) @@ -1945,7 +1992,8 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl Ord for $t { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const Ord for $t { #[inline] fn cmp(&self, other: &Self) -> Ordering { crate::intrinsics::three_way_compare(*self, *other) @@ -1955,7 +2003,8 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl Ord for () { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const Ord for () { #[inline] fn cmp(&self, _other: &()) -> Ordering { Equal @@ -1963,7 +2012,8 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl Ord for bool { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const Ord for bool { #[inline] fn cmp(&self, other: &bool) -> Ordering { // Casting to i8's and converting the difference to an Ordering generates @@ -1998,7 +2048,8 @@ mod impls { ord_impl! { char usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } #[unstable(feature = "never_type", issue = "35121")] - impl PartialEq for ! { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const PartialEq for ! { #[inline] fn eq(&self, _: &!) -> bool { *self @@ -2006,10 +2057,12 @@ mod impls { } #[unstable(feature = "never_type", issue = "35121")] - impl Eq for ! {} + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const Eq for ! {} #[unstable(feature = "never_type", issue = "35121")] - impl PartialOrd for ! { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const PartialOrd for ! { #[inline] fn partial_cmp(&self, _: &!) -> Option<Ordering> { *self @@ -2017,7 +2070,8 @@ mod impls { } #[unstable(feature = "never_type", issue = "35121")] - impl Ord for ! { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl const Ord for ! { #[inline] fn cmp(&self, _: &!) 
-> Ordering { *self @@ -2042,9 +2096,10 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl<A: PointeeSized, B: PointeeSized> PartialOrd<&B> for &A + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<A: PointeeSized, B: PointeeSized> const PartialOrd<&B> for &A where - A: PartialOrd<B>, + A: [const] PartialOrd<B>, { #[inline] fn partial_cmp(&self, other: &&B) -> Option<Ordering> { @@ -2084,9 +2139,10 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl<A: PointeeSized> Ord for &A + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<A: PointeeSized> const Ord for &A where - A: Ord, + A: [const] Ord, { #[inline] fn cmp(&self, other: &Self) -> Ordering { @@ -2094,7 +2150,8 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl<A: PointeeSized> Eq for &A where A: Eq {} + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<A: PointeeSized> const Eq for &A where A: [const] Eq {} // &mut pointers @@ -2114,9 +2171,10 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl<A: PointeeSized, B: PointeeSized> PartialOrd<&mut B> for &mut A + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<A: PointeeSized, B: PointeeSized> const PartialOrd<&mut B> for &mut A where - A: PartialOrd<B>, + A: [const] PartialOrd<B>, { #[inline] fn partial_cmp(&self, other: &&mut B) -> Option<Ordering> { @@ -2156,9 +2214,10 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl<A: PointeeSized> Ord for &mut A + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<A: PointeeSized> const Ord for &mut A where - A: Ord, + A: [const] Ord, { #[inline] fn cmp(&self, other: &Self) -> Ordering { @@ -2166,7 +2225,8 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl<A: PointeeSized> Eq for &mut A where A: Eq {} + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<A: PointeeSized> const Eq for &mut A where A: [const] Eq {} #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] diff --git a/library/core/src/convert/mod.rs b/library/core/src/convert/mod.rs index a3cfd85974a..89cda30c030 100644 --- a/library/core/src/convert/mod.rs +++ b/library/core/src/convert/mod.rs @@ -963,17 +963,20 @@ impl const PartialEq for Infallible { } #[stable(feature = "convert_infallible", since = "1.34.0")] -impl Eq for Infallible {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl const Eq for Infallible {} #[stable(feature = "convert_infallible", since = "1.34.0")] -impl PartialOrd for Infallible { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl const PartialOrd for Infallible { fn partial_cmp(&self, _other: &Self) -> Option<crate::cmp::Ordering> { match *self {} } } #[stable(feature = "convert_infallible", since = "1.34.0")] -impl Ord for Infallible { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl const Ord for Infallible { fn cmp(&self, _other: &Self) -> crate::cmp::Ordering { match *self {} } diff --git a/library/core/src/ffi/va_list.rs b/library/core/src/ffi/va_list.rs index 88ad1197777..643bd95df84 100644 --- a/library/core/src/ffi/va_list.rs +++ b/library/core/src/ffi/va_list.rs @@ -202,18 +202,23 @@ mod sealed { impl<T> Sealed for *const T {} } -/// Trait which permits the allowed types to be used with [`VaListImpl::arg`]. +/// Types that are valid to read using [`VaListImpl::arg`]. 
/// /// # Safety /// -/// This trait must only be implemented for types that C passes as varargs without implicit promotion. +/// The standard library implements this trait for primitive types that are +/// expected to have a variable argument application-binary interface (ABI) on all +/// platforms. /// -/// In C varargs, integers smaller than [`c_int`] and floats smaller than [`c_double`] -/// are implicitly promoted to [`c_int`] and [`c_double`] respectively. Implementing this trait for -/// types that are subject to this promotion rule is invalid. +/// When C passes variable arguments, integers smaller than [`c_int`] and floats smaller +/// than [`c_double`] are implicitly promoted to [`c_int`] and [`c_double`] respectively. +/// Implementing this trait for types that are subject to this promotion rule is invalid. /// /// [`c_int`]: core::ffi::c_int /// [`c_double`]: core::ffi::c_double +// We may unseal this trait in the future, but currently our `va_arg` implementations don't support +// types with an alignment larger than 8, or with a non-scalar layout. Inline assembly can be used +// to accept unsupported types in the meantime. pub unsafe trait VaArgSafe: sealed::Sealed {} // i8 and i16 are implicitly promoted to c_int in C, and cannot implement `VaArgSafe`. @@ -233,7 +238,19 @@ unsafe impl<T> VaArgSafe for *mut T {} unsafe impl<T> VaArgSafe for *const T {} impl<'f> VaListImpl<'f> { - /// Advance to the next arg. + /// Advance to and read the next variable argument. + /// + /// # Safety + /// + /// This function is only sound to call when the next variable argument: + /// + /// - has a type that is ABI-compatible with the type `T` + /// - has a value that is a properly initialized value of type `T` + /// + /// Calling this function with an incompatible type, an invalid value, or when there + /// are no more variable arguments, is unsound. + /// + /// [valid]: https://doc.rust-lang.org/nightly/nomicon/what-unsafe-does.html #[inline] pub unsafe fn arg<T: VaArgSafe>(&mut self) -> T { // SAFETY: the caller must uphold the safety contract for `va_arg`. diff --git a/library/core/src/num/bignum.rs b/library/core/src/num/bignum.rs index e33f58197bb..f21fe0b4438 100644 --- a/library/core/src/num/bignum.rs +++ b/library/core/src/num/bignum.rs @@ -335,43 +335,6 @@ macro_rules! define_bignum { } (self, borrow) } - - /// Divide self by another bignum, overwriting `q` with the quotient and `r` with the - /// remainder. - pub fn div_rem(&self, d: &$name, q: &mut $name, r: &mut $name) { - // Stupid slow base-2 long division taken from - // https://en.wikipedia.org/wiki/Division_algorithm - // FIXME use a greater base ($ty) for the long division. - assert!(!d.is_zero()); - let digitbits = <$ty>::BITS as usize; - for digit in &mut q.base[..] { - *digit = 0; - } - for digit in &mut r.base[..] { - *digit = 0; - } - r.size = d.size; - q.size = 1; - let mut q_is_zero = true; - let end = self.bit_length(); - for i in (0..end).rev() { - r.mul_pow2(1); - r.base[0] |= self.get_bit(i) as $ty; - if &*r >= d { - r.sub(d); - // Set bit `i` of q to 1. 
- let digit_idx = i / digitbits; - let bit_idx = i % digitbits; - if q_is_zero { - q.size = digit_idx + 1; - q_is_zero = false; - } - q.base[digit_idx] |= 1 << bit_idx; - } - } - debug_assert!(q.base[q.size..].iter().all(|&d| d == 0)); - debug_assert!(r.base[r.size..].iter().all(|&d| d == 0)); - } } impl crate::cmp::PartialEq for $name { diff --git a/library/core/src/num/nonzero.rs b/library/core/src/num/nonzero.rs index 0f38081c355..1b7c28bb95a 100644 --- a/library/core/src/num/nonzero.rs +++ b/library/core/src/num/nonzero.rs @@ -4,7 +4,7 @@ use super::{IntErrorKind, ParseIntError}; use crate::clone::UseCloned; use crate::cmp::Ordering; use crate::hash::{Hash, Hasher}; -use crate::marker::{Freeze, StructuralPartialEq}; +use crate::marker::{Destruct, Freeze, StructuralPartialEq}; use crate::ops::{BitOr, BitOrAssign, Div, DivAssign, Neg, Rem, RemAssign}; use crate::panic::{RefUnwindSafe, UnwindSafe}; use crate::str::FromStr; @@ -220,12 +220,14 @@ where impl<T> StructuralPartialEq for NonZero<T> where T: ZeroablePrimitive + StructuralPartialEq {} #[stable(feature = "nonzero", since = "1.28.0")] -impl<T> Eq for NonZero<T> where T: ZeroablePrimitive + Eq {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T> const Eq for NonZero<T> where T: ZeroablePrimitive + [const] Eq {} #[stable(feature = "nonzero", since = "1.28.0")] -impl<T> PartialOrd for NonZero<T> +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T> const PartialOrd for NonZero<T> where - T: ZeroablePrimitive + PartialOrd, + T: ZeroablePrimitive + [const] PartialOrd, { #[inline] fn partial_cmp(&self, other: &Self) -> Option<Ordering> { @@ -254,9 +256,12 @@ where } #[stable(feature = "nonzero", since = "1.28.0")] -impl<T> Ord for NonZero<T> +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T> const Ord for NonZero<T> where - T: ZeroablePrimitive + Ord, + // FIXME(const_hack): the T: ~const Destruct should be inferred from the Self: ~const Destruct. + // See https://github.com/rust-lang/rust/issues/144207 + T: ZeroablePrimitive + [const] Ord + [const] Destruct, { #[inline] fn cmp(&self, other: &Self) -> Ordering { diff --git a/library/core/src/ops/control_flow.rs b/library/core/src/ops/control_flow.rs index 73b74d53323..b760a7c4e21 100644 --- a/library/core/src/ops/control_flow.rs +++ b/library/core/src/ops/control_flow.rs @@ -83,7 +83,8 @@ use crate::{convert, ops}; #[must_use] // ControlFlow should not implement PartialOrd or Ord, per RFC 3058: // https://rust-lang.github.io/rfcs/3058-try-trait-v2.html#traits-for-controlflow -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Copy, Debug, Hash)] +#[derive_const(Clone, PartialEq, Eq)] pub enum ControlFlow<B, C = ()> { /// Move on to the next phase of the operation as normal. #[stable(feature = "control_flow_enum_type", since = "1.55.0")] diff --git a/library/core/src/option.rs b/library/core/src/option.rs index 198636c67d0..886d581b0a6 100644 --- a/library/core/src/option.rs +++ b/library/core/src/option.rs @@ -585,7 +585,8 @@ use crate::{cmp, convert, hint, mem, slice}; /// The `Option` type. See [the module level documentation](self) for more. #[doc(search_unbox)] -#[derive(Copy, Eq, Debug, Hash)] +#[derive(Copy, Debug, Hash)] +#[derive_const(Eq)] #[rustc_diagnostic_item = "Option"] #[lang = "Option"] #[stable(feature = "rust1", since = "1.0.0")] @@ -2363,7 +2364,8 @@ impl<T: [const] PartialEq> const PartialEq for Option<T> { // https://github.com/rust-lang/rust/issues/49892, although still // not optimal. 
#[stable(feature = "rust1", since = "1.0.0")] -impl<T: PartialOrd> PartialOrd for Option<T> { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T: [const] PartialOrd> const PartialOrd for Option<T> { #[inline] fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> { match (self, other) { @@ -2376,7 +2378,8 @@ impl<T: PartialOrd> PartialOrd for Option<T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: Ord> Ord for Option<T> { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T: [const] Ord> const Ord for Option<T> { #[inline] fn cmp(&self, other: &Self) -> cmp::Ordering { match (self, other) { diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index c5f0cb8016e..45109270944 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -146,28 +146,7 @@ impl<T: PointeeSized> *const T { self as _ } - /// Gets the "address" portion of the pointer. - /// - /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of - /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that - /// casting the returned address back to a pointer yields a [pointer without - /// provenance][without_provenance], which is undefined behavior to dereference. To properly - /// restore the lost information and obtain a dereferenceable pointer, use - /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr]. - /// - /// If using those APIs is not possible because there is no way to preserve a pointer with the - /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts - /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance] - /// instead. However, note that this makes your code less portable and less amenable to tools - /// that check for compliance with the Rust memory model. - /// - /// On most platforms this will produce a value with the same bytes as the original - /// pointer, because all the bytes are dedicated to describing the address. - /// Platforms which need to store additional information in the pointer may - /// perform a change of representation to produce a value containing only the address - /// portion of the pointer. What that means is up to the platform to define. - /// - /// This is a [Strict Provenance][crate::ptr#strict-provenance] API. + #[doc = include_str!("./docs/addr.md")] #[must_use] #[inline(always)] #[stable(feature = "strict_provenance", since = "1.84.0")] @@ -254,23 +233,16 @@ impl<T: PointeeSized> *const T { (self.cast(), metadata(self)) } - /// Returns `None` if the pointer is null, or else returns a shared reference to - /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`] - /// must be used instead. - /// - /// [`as_uninit_ref`]: #method.as_uninit_ref - /// - /// # Safety - /// - /// When calling this method, you have to ensure that *either* the pointer is null *or* - /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). + #[doc = include_str!("./docs/as_ref.md")] /// - /// # Panics during const evaluation - /// - /// This method will panic during const evaluation if the pointer cannot be - /// determined to be null or not. See [`is_null`] for more information. 
+ /// ``` + /// let ptr: *const u8 = &10u8 as *const u8; /// - /// [`is_null`]: #method.is_null + /// unsafe { + /// let val_back = &*ptr; + /// assert_eq!(val_back, &10); + /// } + /// ``` /// /// # Examples /// @@ -284,20 +256,9 @@ impl<T: PointeeSized> *const T { /// } /// ``` /// - /// # Null-unchecked version /// - /// If you are sure the pointer can never be null and are looking for some kind of - /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can - /// dereference the pointer directly. - /// - /// ``` - /// let ptr: *const u8 = &10u8 as *const u8; - /// - /// unsafe { - /// let val_back = &*ptr; - /// assert_eq!(val_back, &10); - /// } - /// ``` + /// [`is_null`]: #method.is_null + /// [`as_uninit_ref`]: #method.as_uninit_ref #[stable(feature = "ptr_as_ref", since = "1.9.0")] #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")] #[inline] @@ -338,23 +299,10 @@ impl<T: PointeeSized> *const T { unsafe { &*self } } - /// Returns `None` if the pointer is null, or else returns a shared reference to - /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require - /// that the value has to be initialized. - /// - /// [`as_ref`]: #method.as_ref - /// - /// # Safety - /// - /// When calling this method, you have to ensure that *either* the pointer is null *or* - /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). - /// - /// # Panics during const evaluation - /// - /// This method will panic during const evaluation if the pointer cannot be - /// determined to be null or not. See [`is_null`] for more information. + #[doc = include_str!("./docs/as_uninit_ref.md")] /// /// [`is_null`]: #method.is_null + /// [`as_ref`]: #method.as_ref /// /// # Examples /// diff --git a/library/core/src/ptr/docs/INFO.md b/library/core/src/ptr/docs/INFO.md new file mode 100644 index 00000000000..28a0da4926a --- /dev/null +++ b/library/core/src/ptr/docs/INFO.md @@ -0,0 +1,21 @@ +This directory holds method documentation that otherwise +would be duplicated across mutable and immutable pointers. + +Note that most of the docs here are not the complete docs +for their corresponding method. This is for a few reasons: + +1. Examples need to be different for mutable/immutable + pointers, in order to actually call the correct method. +2. Link reference definitions are frequently different + between mutable/immutable pointers, in order to link to + the correct method. + For example, `<*const T>::as_ref` links to + `<*const T>::is_null`, while `<*mut T>::as_ref` links to + `<*mut T>::is_null`. +3. Many methods on mutable pointers link to an alternate + version that returns a mutable reference instead of + a shared reference. + +Always review the rendered docs manually when making +changes to these files to make sure you're not accidentally +splitting up a section. diff --git a/library/core/src/ptr/docs/addr.md b/library/core/src/ptr/docs/addr.md new file mode 100644 index 00000000000..785b88a9987 --- /dev/null +++ b/library/core/src/ptr/docs/addr.md @@ -0,0 +1,22 @@ +Gets the "address" portion of the pointer. + +This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of +the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that +casting the returned address back to a pointer yields a [pointer without +provenance][without_provenance], which is undefined behavior to dereference. 
To properly +restore the lost information and obtain a dereferenceable pointer, use +[`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr]. + +If using those APIs is not possible because there is no way to preserve a pointer with the +required provenance, then Strict Provenance might not be for you. Use pointer-integer casts +or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance] +instead. However, note that this makes your code less portable and less amenable to tools +that check for compliance with the Rust memory model. + +On most platforms this will produce a value with the same bytes as the original +pointer, because all the bytes are dedicated to describing the address. +Platforms which need to store additional information in the pointer may +perform a change of representation to produce a value containing only the address +portion of the pointer. What that means is up to the platform to define. + +This is a [Strict Provenance][crate::ptr#strict-provenance] API. diff --git a/library/core/src/ptr/docs/as_ref.md b/library/core/src/ptr/docs/as_ref.md new file mode 100644 index 00000000000..0c0d2768c74 --- /dev/null +++ b/library/core/src/ptr/docs/as_ref.md @@ -0,0 +1,19 @@ +Returns `None` if the pointer is null, or else returns a shared reference to +the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`] +must be used instead. + +# Safety + +When calling this method, you have to ensure that *either* the pointer is null *or* +the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). + +# Panics during const evaluation + +This method will panic during const evaluation if the pointer cannot be +determined to be null or not. See [`is_null`] for more information. + +# Null-unchecked version + +If you are sure the pointer can never be null and are looking for some kind of +`as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can +dereference the pointer directly. diff --git a/library/core/src/ptr/docs/as_uninit_ref.md b/library/core/src/ptr/docs/as_uninit_ref.md new file mode 100644 index 00000000000..5b9a1ecb85b --- /dev/null +++ b/library/core/src/ptr/docs/as_uninit_ref.md @@ -0,0 +1,15 @@ +Returns `None` if the pointer is null, or else returns a shared reference to +the value wrapped in `Some`. In contrast to [`as_ref`], this does not require +that the value has to be initialized. + +# Safety + +When calling this method, you have to ensure that *either* the pointer is null *or* +the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). +Note that because the created reference is to `MaybeUninit<T>`, the +source pointer can point to uninitialized memory. + +# Panics during const evaluation + +This method will panic during const evaluation if the pointer cannot be +determined to be null or not. See [`is_null`] for more information. diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index ce6eee4f911..ba78afc7ea1 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -135,28 +135,9 @@ impl<T: PointeeSized> *mut T { self as _ } - /// Gets the "address" portion of the pointer. - /// - /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of - /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. 
This means that - /// casting the returned address back to a pointer yields a [pointer without - /// provenance][without_provenance_mut], which is undefined behavior to dereference. To properly - /// restore the lost information and obtain a dereferenceable pointer, use - /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr]. - /// - /// If using those APIs is not possible because there is no way to preserve a pointer with the - /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts - /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance] - /// instead. However, note that this makes your code less portable and less amenable to tools - /// that check for compliance with the Rust memory model. - /// - /// On most platforms this will produce a value with the same bytes as the original - /// pointer, because all the bytes are dedicated to describing the address. - /// Platforms which need to store additional information in the pointer may - /// perform a change of representation to produce a value containing only the address - /// portion of the pointer. What that means is up to the platform to define. + #[doc = include_str!("./docs/addr.md")] /// - /// This is a [Strict Provenance][crate::ptr#strict-provenance] API. + /// [without_provenance]: without_provenance_mut #[must_use] #[inline(always)] #[stable(feature = "strict_provenance", since = "1.84.0")] @@ -243,26 +224,16 @@ impl<T: PointeeSized> *mut T { (self.cast(), super::metadata(self)) } - /// Returns `None` if the pointer is null, or else returns a shared reference to - /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`] - /// must be used instead. - /// - /// For the mutable counterpart see [`as_mut`]. + #[doc = include_str!("./docs/as_ref.md")] /// - /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1 - /// [`as_mut`]: #method.as_mut - /// - /// # Safety - /// - /// When calling this method, you have to ensure that *either* the pointer is null *or* - /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). - /// - /// # Panics during const evaluation - /// - /// This method will panic during const evaluation if the pointer cannot be - /// determined to be null or not. See [`is_null`] for more information. + /// ``` + /// let ptr: *mut u8 = &mut 10u8 as *mut u8; /// - /// [`is_null`]: #method.is_null-1 + /// unsafe { + /// let val_back = &*ptr; + /// println!("We got back the value: {val_back}!"); + /// } + /// ``` /// /// # Examples /// @@ -276,20 +247,14 @@ impl<T: PointeeSized> *mut T { /// } /// ``` /// - /// # Null-unchecked version - /// - /// If you are sure the pointer can never be null and are looking for some kind of - /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can - /// dereference the pointer directly. + /// # See Also /// - /// ``` - /// let ptr: *mut u8 = &mut 10u8 as *mut u8; + /// For the mutable counterpart see [`as_mut`]. 
/// - /// unsafe { - /// let val_back = &*ptr; - /// println!("We got back the value: {val_back}!"); - /// } - /// ``` + /// [`is_null`]: #method.is_null-1 + /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1 + /// [`as_mut`]: #method.as_mut + #[stable(feature = "ptr_as_ref", since = "1.9.0")] #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")] #[inline] @@ -332,28 +297,15 @@ impl<T: PointeeSized> *mut T { unsafe { &*self } } - /// Returns `None` if the pointer is null, or else returns a shared reference to - /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require - /// that the value has to be initialized. - /// - /// For the mutable counterpart see [`as_uninit_mut`]. + #[doc = include_str!("./docs/as_uninit_ref.md")] /// + /// [`is_null`]: #method.is_null-1 /// [`as_ref`]: pointer#method.as_ref-1 - /// [`as_uninit_mut`]: #method.as_uninit_mut - /// - /// # Safety - /// - /// When calling this method, you have to ensure that *either* the pointer is null *or* - /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). - /// Note that because the created reference is to `MaybeUninit<T>`, the - /// source pointer can point to uninitialized memory. - /// - /// # Panics during const evaluation /// - /// This method will panic during const evaluation if the pointer cannot be - /// determined to be null or not. See [`is_null`] for more information. + /// # See Also + /// For the mutable counterpart see [`as_uninit_mut`]. /// - /// [`is_null`]: #method.is_null-1 + /// [`as_uninit_mut`]: #method.as_uninit_mut /// /// # Examples /// diff --git a/library/core/src/range.rs b/library/core/src/range.rs index 332ae51d848..a096a8ceafc 100644 --- a/library/core/src/range.rs +++ b/library/core/src/range.rs @@ -31,9 +31,7 @@ pub use iter::{IterRange, IterRangeFrom, IterRangeInclusive}; #[doc(inline)] pub use crate::iter::Step; #[doc(inline)] -pub use crate::ops::{ - Bound, IntoBounds, OneSidedRange, RangeBounds, RangeFull, RangeTo, RangeToInclusive, -}; +pub use crate::ops::{Bound, IntoBounds, OneSidedRange, RangeBounds, RangeFull, RangeTo}; /// A (half-open) range bounded inclusively below and exclusively above /// (`start..end` in a future edition). @@ -51,7 +49,8 @@ pub use crate::ops::{ /// assert_eq!(3 + 4 + 5, Range::from(3..6).into_iter().sum()); /// ``` #[lang = "RangeCopy"] -#[derive(Clone, Copy, Default, PartialEq, Eq, Hash)] +#[derive(Copy, Hash)] +#[derive_const(Clone, Default, PartialEq, Eq)] #[unstable(feature = "new_range_api", issue = "125687")] pub struct Range<Idx> { /// The lower bound of the range (inclusive). @@ -209,20 +208,20 @@ impl<T> const From<legacy::Range<T>> for Range<T> { } } -/// A range bounded inclusively below and above (`start..=end`). +/// A range bounded inclusively below and above (`start..=last`). /// -/// The `RangeInclusive` `start..=end` contains all values with `x >= start` -/// and `x <= end`. It is empty unless `start <= end`. +/// The `RangeInclusive` `start..=last` contains all values with `x >= start` +/// and `x <= last`. It is empty unless `start <= last`. 
/// /// # Examples /// -/// The `start..=end` syntax is a `RangeInclusive`: +/// The `start..=last` syntax is a `RangeInclusive`: /// /// ``` /// #![feature(new_range_api)] /// use core::range::RangeInclusive; /// -/// assert_eq!(RangeInclusive::from(3..=5), RangeInclusive { start: 3, end: 5 }); +/// assert_eq!(RangeInclusive::from(3..=5), RangeInclusive { start: 3, last: 5 }); /// assert_eq!(3 + 4 + 5, RangeInclusive::from(3..=5).into_iter().sum()); /// ``` #[lang = "RangeInclusiveCopy"] @@ -234,7 +233,7 @@ pub struct RangeInclusive<Idx> { pub start: Idx, /// The upper bound of the range (inclusive). #[unstable(feature = "new_range_api", issue = "125687")] - pub end: Idx, + pub last: Idx, } #[unstable(feature = "new_range_api", issue = "125687")] @@ -242,7 +241,7 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeInclusive<Idx> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { self.start.fmt(fmt)?; write!(fmt, "..=")?; - self.end.fmt(fmt)?; + self.last.fmt(fmt)?; Ok(()) } } @@ -306,7 +305,7 @@ impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> { #[unstable(feature = "new_range_api", issue = "125687")] #[inline] pub fn is_empty(&self) -> bool { - !(self.start <= self.end) + !(self.start <= self.last) } } @@ -335,10 +334,10 @@ impl<Idx: Step> RangeInclusive<Idx> { impl RangeInclusive<usize> { /// Converts to an exclusive `Range` for `SliceIndex` implementations. - /// The caller is responsible for dealing with `end == usize::MAX`. + /// The caller is responsible for dealing with `last == usize::MAX`. #[inline] pub(crate) const fn into_slice_range(self) -> Range<usize> { - Range { start: self.start, end: self.end + 1 } + Range { start: self.start, end: self.last + 1 } } } @@ -348,7 +347,7 @@ impl<T> RangeBounds<T> for RangeInclusive<T> { Included(&self.start) } fn end_bound(&self) -> Bound<&T> { - Included(&self.end) + Included(&self.last) } } @@ -364,7 +363,7 @@ impl<T> RangeBounds<T> for RangeInclusive<&T> { Included(self.start) } fn end_bound(&self) -> Bound<&T> { - Included(self.end) + Included(self.last) } } @@ -372,7 +371,7 @@ impl<T> RangeBounds<T> for RangeInclusive<&T> { #[unstable(feature = "new_range_api", issue = "125687")] impl<T> IntoBounds<T> for RangeInclusive<T> { fn into_bounds(self) -> (Bound<T>, Bound<T>) { - (Included(self.start), Included(self.end)) + (Included(self.start), Included(self.last)) } } @@ -381,7 +380,7 @@ impl<T> IntoBounds<T> for RangeInclusive<T> { impl<T> const From<RangeInclusive<T>> for legacy::RangeInclusive<T> { #[inline] fn from(value: RangeInclusive<T>) -> Self { - Self::new(value.start, value.end) + Self::new(value.start, value.last) } } #[unstable(feature = "new_range_api", issue = "125687")] @@ -394,8 +393,8 @@ impl<T> const From<legacy::RangeInclusive<T>> for RangeInclusive<T> { "attempted to convert from an exhausted `legacy::RangeInclusive` (unspecified behavior)" ); - let (start, end) = value.into_inner(); - RangeInclusive { start, end } + let (start, last) = value.into_inner(); + RangeInclusive { start, last } } } @@ -426,7 +425,8 @@ impl<T> const From<legacy::RangeInclusive<T>> for RangeInclusive<T> { /// assert_eq!(2 + 3 + 4, RangeFrom::from(2..).into_iter().take(3).sum()); /// ``` #[lang = "RangeFromCopy"] -#[derive(Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Copy, Hash)] +#[derive_const(Clone, PartialEq, Eq)] #[unstable(feature = "new_range_api", issue = "125687")] pub struct RangeFrom<Idx> { /// The lower bound of the range (inclusive). 
@@ -544,3 +544,107 @@ impl<T> const From<legacy::RangeFrom<T>> for RangeFrom<T> { Self { start: value.start } } } + +/// A range only bounded inclusively above (`..=last`). +/// +/// The `RangeToInclusive` `..=last` contains all values with `x <= last`. +/// It cannot serve as an [`Iterator`] because it doesn't have a starting point. +/// +/// # Examples +/// +/// The `..=last` syntax is a `RangeToInclusive`: +/// +/// ``` +/// #![feature(new_range_api)] +/// #![feature(new_range)] +/// assert_eq!((..=5), std::range::RangeToInclusive{ last: 5 }); +/// ``` +/// +/// It does not have an [`IntoIterator`] implementation, so you can't use it in a +/// `for` loop directly. This won't compile: +/// +/// ```compile_fail,E0277 +/// // error[E0277]: the trait bound `std::range::RangeToInclusive<{integer}>: +/// // std::iter::Iterator` is not satisfied +/// for i in ..=5 { +/// // ... +/// } +/// ``` +/// +/// When used as a [slicing index], `RangeToInclusive` produces a slice of all +/// array elements up to and including the index indicated by `last`. +/// +/// ``` +/// let arr = [0, 1, 2, 3, 4]; +/// assert_eq!(arr[ .. ], [0, 1, 2, 3, 4]); +/// assert_eq!(arr[ .. 3], [0, 1, 2 ]); +/// assert_eq!(arr[ ..=3], [0, 1, 2, 3 ]); // This is a `RangeToInclusive` +/// assert_eq!(arr[1.. ], [ 1, 2, 3, 4]); +/// assert_eq!(arr[1.. 3], [ 1, 2 ]); +/// assert_eq!(arr[1..=3], [ 1, 2, 3 ]); +/// ``` +/// +/// [slicing index]: crate::slice::SliceIndex +#[lang = "RangeToInclusiveCopy"] +#[doc(alias = "..=")] +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[unstable(feature = "new_range_api", issue = "125687")] +pub struct RangeToInclusive<Idx> { + /// The upper bound of the range (inclusive) + #[unstable(feature = "new_range_api", issue = "125687")] + pub last: Idx, +} + +#[unstable(feature = "new_range_api", issue = "125687")] +impl<Idx: fmt::Debug> fmt::Debug for RangeToInclusive<Idx> { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(fmt, "..=")?; + self.last.fmt(fmt)?; + Ok(()) + } +} + +impl<Idx: PartialOrd<Idx>> RangeToInclusive<Idx> { + /// Returns `true` if `item` is contained in the range. 
+ /// + /// # Examples + /// + /// ``` + /// assert!( (..=5).contains(&-1_000_000_000)); + /// assert!( (..=5).contains(&5)); + /// assert!(!(..=5).contains(&6)); + /// + /// assert!( (..=1.0).contains(&1.0)); + /// assert!(!(..=1.0).contains(&f32::NAN)); + /// assert!(!(..=f32::NAN).contains(&0.5)); + /// ``` + #[inline] + #[unstable(feature = "new_range_api", issue = "125687")] + pub fn contains<U>(&self, item: &U) -> bool + where + Idx: PartialOrd<U>, + U: ?Sized + PartialOrd<Idx>, + { + <Self as RangeBounds<Idx>>::contains(self, item) + } +} + +// RangeToInclusive<Idx> cannot impl From<RangeTo<Idx>> +// because underflow would be possible with (..0).into() + +#[unstable(feature = "new_range_api", issue = "125687")] +impl<T> RangeBounds<T> for RangeToInclusive<T> { + fn start_bound(&self) -> Bound<&T> { + Unbounded + } + fn end_bound(&self) -> Bound<&T> { + Included(&self.last) + } +} + +#[unstable(feature = "range_into_bounds", issue = "136903")] +impl<T> IntoBounds<T> for RangeToInclusive<T> { + fn into_bounds(self) -> (Bound<T>, Bound<T>) { + (Unbounded, Included(self.last)) + } +} diff --git a/library/core/src/range/iter.rs b/library/core/src/range/iter.rs index 1e261d8c1d9..24efd4a204a 100644 --- a/library/core/src/range/iter.rs +++ b/library/core/src/range/iter.rs @@ -164,7 +164,7 @@ impl<A: Step> IterRangeInclusive<A> { return None; } - Some(RangeInclusive { start: self.0.start, end: self.0.end }) + Some(RangeInclusive { start: self.0.start, last: self.0.end }) } } diff --git a/library/core/src/range/legacy.rs b/library/core/src/range/legacy.rs index 6723c4903f7..aa11331382d 100644 --- a/library/core/src/range/legacy.rs +++ b/library/core/src/range/legacy.rs @@ -1,10 +1,10 @@ //! # Legacy range types //! //! The types within this module will be replaced by the types -//! [`Range`], [`RangeInclusive`], and [`RangeFrom`] in the parent +//! [`Range`], [`RangeInclusive`], [`RangeToInclusive`], and [`RangeFrom`] in the parent //! module, [`core::range`]. //! //! The types here are equivalent to those in [`core::ops`]. #[doc(inline)] -pub use crate::ops::{Range, RangeFrom, RangeInclusive}; +pub use crate::ops::{Range, RangeFrom, RangeInclusive, RangeToInclusive}; diff --git a/library/core/src/result.rs b/library/core/src/result.rs index 7dffab9b316..5c1f64bfe14 100644 --- a/library/core/src/result.rs +++ b/library/core/src/result.rs @@ -542,7 +542,8 @@ use crate::{convert, fmt, hint}; /// /// See the [module documentation](self) for details. #[doc(search_unbox)] -#[derive(Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] +#[derive(Copy, Debug, Hash)] +#[derive_const(PartialEq, PartialOrd, Eq, Ord)] #[must_use = "this `Result` may be an `Err` variant, which should be handled"] #[rustc_diagnostic_item = "Result"] #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/core/src/slice/cmp.rs b/library/core/src/slice/cmp.rs index 0d801306984..103630aba0f 100644 --- a/library/core/src/slice/cmp.rs +++ b/library/core/src/slice/cmp.rs @@ -23,7 +23,8 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: Eq> Eq for [T] {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<T: [const] Eq> const Eq for [T] {} /// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison). 
#[stable(feature = "rust1", since = "1.0.0")] @@ -34,7 +35,7 @@ impl<T: Ord> Ord for [T] { } #[inline] -fn as_underlying(x: ControlFlow<bool>) -> u8 { +const fn as_underlying(x: ControlFlow<bool>) -> u8 { // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same // size (which isn't guaranteed but this is libcore). Because they have the same // size, it's a niched implementation, which in one byte means there can't be @@ -154,12 +155,16 @@ where } #[doc(hidden)] +#[const_trait] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] // intermediate trait for specialization of slice's PartialOrd trait SlicePartialOrd: Sized { fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>; } #[doc(hidden)] +#[const_trait] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] // intermediate trait for specialization of slice's PartialOrd chaining methods trait SliceChain: Sized { fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>; @@ -231,14 +236,17 @@ where } */ -impl<A: AlwaysApplicableOrd> SlicePartialOrd for A { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<A: [const] AlwaysApplicableOrd> const SlicePartialOrd for A { fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> { Some(SliceOrd::compare(left, right)) } } #[rustc_specialization_trait] -trait AlwaysApplicableOrd: SliceOrd + Ord {} +#[const_trait] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +trait AlwaysApplicableOrd: [const] SliceOrd + [const] Ord {} macro_rules! always_applicable_ord { ($([$($p:tt)*] $t:ty,)*) => { @@ -257,6 +265,8 @@ always_applicable_ord! { } #[doc(hidden)] +#[const_trait] +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] // intermediate trait for specialization of slice's Ord trait SliceOrd: Sized { fn compare(left: &[Self], right: &[Self]) -> Ordering; @@ -282,17 +292,24 @@ impl<A: Ord> SliceOrd for A { /// * For every `x` and `y` of this type, `Ord(x, y)` must return the same /// value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`. #[rustc_specialization_trait] -unsafe trait UnsignedBytewiseOrd: Ord {} +#[const_trait] +unsafe trait UnsignedBytewiseOrd: [const] Ord {} -unsafe impl UnsignedBytewiseOrd for bool {} -unsafe impl UnsignedBytewiseOrd for u8 {} -unsafe impl UnsignedBytewiseOrd for NonZero<u8> {} -unsafe impl UnsignedBytewiseOrd for Option<NonZero<u8>> {} -unsafe impl UnsignedBytewiseOrd for ascii::Char {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +unsafe impl const UnsignedBytewiseOrd for bool {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +unsafe impl const UnsignedBytewiseOrd for u8 {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +unsafe impl const UnsignedBytewiseOrd for NonZero<u8> {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +unsafe impl const UnsignedBytewiseOrd for Option<NonZero<u8>> {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +unsafe impl const UnsignedBytewiseOrd for ascii::Char {} // `compare_bytes` compares a sequence of unsigned bytes lexicographically, so // use it if the requirements for `UnsignedBytewiseOrd` are fulfilled. 
-impl<A: Ord + UnsignedBytewiseOrd> SliceOrd for A { +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<A: [const] Ord + [const] UnsignedBytewiseOrd> const SliceOrd for A { #[inline] fn compare(left: &[Self], right: &[Self]) -> Ordering { // Since the length of a slice is always less than or equal to @@ -317,7 +334,9 @@ impl<A: Ord + UnsignedBytewiseOrd> SliceOrd for A { } // Don't generate our own chaining loops for `memcmp`-able things either. -impl<A: PartialOrd + UnsignedBytewiseOrd> SliceChain for A { + +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl<A: [const] PartialOrd + [const] UnsignedBytewiseOrd> const SliceChain for A { #[inline] fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> { match SliceOrd::compare(left, right) { diff --git a/library/core/src/slice/index.rs b/library/core/src/slice/index.rs index 8e1bc0bae70..a8147d745f3 100644 --- a/library/core/src/slice/index.rs +++ b/library/core/src/slice/index.rs @@ -129,6 +129,8 @@ mod private_slice_index { #[unstable(feature = "new_range_api", issue = "125687")] impl Sealed for range::RangeInclusive<usize> {} #[unstable(feature = "new_range_api", issue = "125687")] + impl Sealed for range::RangeToInclusive<usize> {} + #[unstable(feature = "new_range_api", issue = "125687")] impl Sealed for range::RangeFrom<usize> {} impl Sealed for ops::IndexRange {} @@ -788,6 +790,45 @@ unsafe impl<T> const SliceIndex<[T]> for ops::RangeToInclusive<usize> { } } +/// The methods `index` and `index_mut` panic if the end of the range is out of bounds. +#[stable(feature = "inclusive_range", since = "1.26.0")] +#[rustc_const_unstable(feature = "const_index", issue = "143775")] +unsafe impl<T> const SliceIndex<[T]> for range::RangeToInclusive<usize> { + type Output = [T]; + + #[inline] + fn get(self, slice: &[T]) -> Option<&[T]> { + (0..=self.last).get(slice) + } + + #[inline] + fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { + (0..=self.last).get_mut(slice) + } + + #[inline] + unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] { + // SAFETY: the caller has to uphold the safety contract for `get_unchecked`. + unsafe { (0..=self.last).get_unchecked(slice) } + } + + #[inline] + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { + // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`. + unsafe { (0..=self.last).get_unchecked_mut(slice) } + } + + #[inline] + fn index(self, slice: &[T]) -> &[T] { + (0..=self.last).index(slice) + } + + #[inline] + fn index_mut(self, slice: &mut [T]) -> &mut [T] { + (0..=self.last).index_mut(slice) + } +} + /// Performs bounds checking of a range. /// /// This method is similar to [`Index::index`] for slices, but it returns a diff --git a/library/core/src/str/traits.rs b/library/core/src/str/traits.rs index 4f228edf78e..a7cc943994c 100644 --- a/library/core/src/str/traits.rs +++ b/library/core/src/str/traits.rs @@ -32,7 +32,8 @@ impl const PartialEq for str { } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for str {} +#[rustc_const_unstable(feature = "const_cmp", issue = "143800")] +impl const Eq for str {} /// Implements comparison operations on strings. 
/// @@ -677,11 +678,11 @@ unsafe impl const SliceIndex<str> for range::RangeInclusive<usize> { type Output = str; #[inline] fn get(self, slice: &str) -> Option<&Self::Output> { - if self.end == usize::MAX { None } else { self.into_slice_range().get(slice) } + if self.last == usize::MAX { None } else { self.into_slice_range().get(slice) } } #[inline] fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> { - if self.end == usize::MAX { None } else { self.into_slice_range().get_mut(slice) } + if self.last == usize::MAX { None } else { self.into_slice_range().get_mut(slice) } } #[inline] unsafe fn get_unchecked(self, slice: *const str) -> *const Self::Output { @@ -695,14 +696,14 @@ unsafe impl const SliceIndex<str> for range::RangeInclusive<usize> { } #[inline] fn index(self, slice: &str) -> &Self::Output { - if self.end == usize::MAX { + if self.last == usize::MAX { str_index_overflow_fail(); } self.into_slice_range().index(slice) } #[inline] fn index_mut(self, slice: &mut str) -> &mut Self::Output { - if self.end == usize::MAX { + if self.last == usize::MAX { str_index_overflow_fail(); } self.into_slice_range().index_mut(slice) diff --git a/library/core/src/tuple.rs b/library/core/src/tuple.rs index 2cdee1803a9..3892f831076 100644 --- a/library/core/src/tuple.rs +++ b/library/core/src/tuple.rs @@ -23,7 +23,8 @@ macro_rules! tuple_impls { maybe_tuple_doc! { $($T)+ @ #[stable(feature = "rust1", since = "1.0.0")] - impl<$($T: PartialEq),+> PartialEq for ($($T,)+) { + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<$($T: [const] PartialEq),+> const PartialEq for ($($T,)+) { #[inline] fn eq(&self, other: &($($T,)+)) -> bool { $( ${ignore($T)} self.${index()} == other.${index()} )&&+ @@ -38,7 +39,8 @@ macro_rules! tuple_impls { maybe_tuple_doc! { $($T)+ @ #[stable(feature = "rust1", since = "1.0.0")] - impl<$($T: Eq),+> Eq for ($($T,)+) + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<$($T: [const] Eq),+> const Eq for ($($T,)+) {} } @@ -66,7 +68,8 @@ macro_rules! tuple_impls { maybe_tuple_doc! { $($T)+ @ #[stable(feature = "rust1", since = "1.0.0")] - impl<$($T: PartialOrd),+> PartialOrd for ($($T,)+) + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<$($T: [const] PartialOrd),+> const PartialOrd for ($($T,)+) { #[inline] fn partial_cmp(&self, other: &($($T,)+)) -> Option<Ordering> { @@ -110,7 +113,8 @@ macro_rules! tuple_impls { maybe_tuple_doc! { $($T)+ @ #[stable(feature = "rust1", since = "1.0.0")] - impl<$($T: Ord),+> Ord for ($($T,)+) + #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] + impl<$($T: [const] Ord),+> const Ord for ($($T,)+) { #[inline] fn cmp(&self, other: &($($T,)+)) -> Ordering { |

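The bulk of this change constifies the comparison traits (`PartialEq`, `Eq`, `PartialOrd`, `Ord`) and the `cmp::min`/`cmp::max` helpers behind the unstable `const_cmp` feature, and adds a `RangeToInclusive` type (with a `last` field) to the unstable `core::range` module. Below is a minimal, illustrative sketch of what that enables, assuming a nightly toolchain that already contains these changes; the feature gates and the `[const]` bound syntax are unstable and may change, and the item names (`clamp_len`, `CLAMPED`, `ORDER`, `to_three`) are made up for the example.

// Illustrative only: assumes a nightly compiler that includes this change set.
#![feature(const_cmp)]
#![feature(const_trait_impl)]
#![feature(new_range_api)]

use core::cmp::Ordering;
use core::range::RangeToInclusive;

// With the `Ord`/`PartialEq` impls constified in this diff, comparisons and the
// `cmp::min`/`cmp::max` helpers can run in `const fn` and `const` items.
const fn clamp_len(len: usize, limit: usize) -> usize {
    core::cmp::min(len, limit)
}

const CLAMPED: usize = clamp_len(10, 4);
const ORDER: Ordering = 3i32.cmp(&5);

fn main() {
    assert_eq!(CLAMPED, 4);
    assert_eq!(ORDER, Ordering::Less);

    // The new `core::range::RangeToInclusive` keeps its inclusive upper bound in a
    // field named `last`, and this diff also gives it a `SliceIndex` impl.
    let to_three = RangeToInclusive { last: 3 };
    assert!(to_three.contains(&0) && !to_three.contains(&4));

    let arr = [0, 1, 2, 3, 4];
    assert_eq!(&arr[to_three], &[0, 1, 2, 3]);
}

Note that the new `range::RangeInclusive` and `RangeToInclusive` types name their inclusive upper bound `last` rather than `end`, which is why the `str` and slice indexing impls in this diff switch from `self.end` to `self.last`.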