| field | value | date |
|---|---|---|
| author | Mark Rousskov <mark.simulacrum@gmail.com> | 2019-12-24 17:38:22 -0500 |
| committer | Mark Rousskov <mark.simulacrum@gmail.com> | 2019-12-24 17:38:22 -0500 |
| commit | 6891388e661c2bbf965a330ff73bf8c08a7dbf7d (patch) | |
| tree | ff2f518c1dfdb558a162c73b3ee3f2a0b44d5adc | |
| parent | 48291a9dda821a81957b44034572c0ec62728f35 (diff) | |
| download | rust-6891388e661c2bbf965a330ff73bf8c08a7dbf7d.tar.gz, rust-6891388e661c2bbf965a330ff73bf8c08a7dbf7d.zip | |
x.py fmt after previous deignore
42 files changed, 9637 insertions(+), 9795 deletions(-)
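The diff below is mechanical: rustfmt (run via `x.py fmt`, per the commit message) mostly reflows `where` clauses onto their own lines, expands one-line function bodies into blocks, reorders and merges `use` lists, and normalizes spacing such as `Try<Ok=B>` to `Try<Ok = B>` and `key="value"` to `key = "value"` in the `#[rustc_on_unimplemented]` attribute. A minimal sketch of the before/after shape, using a hypothetical `Wrapper` adapter rather than the actual libcore types:

```rust
// Sketch of the style this commit applies (hypothetical `Wrapper` type, not the
// libcore code). Pre-format, the impl header and short bodies were on single lines:
//
//     impl<I> Iterator for Wrapper<I> where I: DoubleEndedIterator {
//         fn next(&mut self) -> Option<I::Item> { self.iter.next_back() }
//     }
//
// Post-format, rustfmt puts each `where` bound on its own line and expands the body:
struct Wrapper<I> {
    iter: I,
}

impl<I> Iterator for Wrapper<I>
where
    I: DoubleEndedIterator,
{
    type Item = I::Item;

    #[inline]
    fn next(&mut self) -> Option<I::Item> {
        self.iter.next_back()
    }
}

fn main() {
    let range = 1..=3;
    let w = Wrapper { iter: range };
    // Behaves like `Rev`: yields the underlying range back to front.
    assert_eq!(w.collect::<Vec<_>>(), vec![3, 2, 1]);
}
```

Despite the large diffstat, no runtime behavior changes: every hunk is the same reformatting applied to adapter impls in `src/libcore/iter/adapters/mod.rs` and to trait method signatures and attributes in `src/libcore/iter/traits/iterator.rs`.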
diff --git a/src/libcore/iter/adapters/mod.rs b/src/libcore/iter/adapters/mod.rs index 019a3290f01..6eb837ed0fe 100644 --- a/src/libcore/iter/adapters/mod.rs +++ b/src/libcore/iter/adapters/mod.rs @@ -1,11 +1,11 @@ use crate::cmp; use crate::fmt; +use crate::intrinsics; use crate::ops::{Add, AddAssign, Try}; use crate::usize; -use crate::intrinsics; -use super::{Iterator, DoubleEndedIterator, ExactSizeIterator, FusedIterator, TrustedLen}; -use super::{LoopState, from_fn}; +use super::{from_fn, LoopState}; +use super::{DoubleEndedIterator, ExactSizeIterator, FusedIterator, Iterator, TrustedLen}; mod chain; mod flatten; @@ -14,8 +14,8 @@ mod zip; pub use self::chain::Chain; #[stable(feature = "rust1", since = "1.0.0")] pub use self::flatten::{FlatMap, Flatten}; -pub use self::zip::Zip; pub(crate) use self::zip::TrustedRandomAccess; +pub use self::zip::Zip; /// A double-ended iterator with the direction inverted. /// @@ -28,7 +28,7 @@ pub(crate) use self::zip::TrustedRandomAccess; #[must_use = "iterators are lazy and do nothing unless consumed"] #[stable(feature = "rust1", since = "1.0.0")] pub struct Rev<T> { - iter: T + iter: T, } impl<T> Rev<T> { pub(super) fn new(iter: T) -> Rev<T> { @@ -37,59 +37,85 @@ impl<T> Rev<T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<I> Iterator for Rev<I> where I: DoubleEndedIterator { +impl<I> Iterator for Rev<I> +where + I: DoubleEndedIterator, +{ type Item = <I as Iterator>::Item; #[inline] - fn next(&mut self) -> Option<<I as Iterator>::Item> { self.iter.next_back() } + fn next(&mut self) -> Option<<I as Iterator>::Item> { + self.iter.next_back() + } #[inline] - fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() } + fn size_hint(&self) -> (usize, Option<usize>) { + self.iter.size_hint() + } #[inline] - fn nth(&mut self, n: usize) -> Option<<I as Iterator>::Item> { self.iter.nth_back(n) } + fn nth(&mut self, n: usize) -> Option<<I as Iterator>::Item> { + self.iter.nth_back(n) + } - fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B> + fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Ok = B>, { self.iter.try_rfold(init, f) } fn fold<Acc, F>(self, init: Acc, f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.iter.rfold(init, f) } #[inline] fn find<P>(&mut self, predicate: P) -> Option<Self::Item> - where P: FnMut(&Self::Item) -> bool + where + P: FnMut(&Self::Item) -> bool, { self.iter.rfind(predicate) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<I> DoubleEndedIterator for Rev<I> where I: DoubleEndedIterator { +impl<I> DoubleEndedIterator for Rev<I> +where + I: DoubleEndedIterator, +{ #[inline] - fn next_back(&mut self) -> Option<<I as Iterator>::Item> { self.iter.next() } + fn next_back(&mut self) -> Option<<I as Iterator>::Item> { + self.iter.next() + } #[inline] - fn nth_back(&mut self, n: usize) -> Option<<I as Iterator>::Item> { self.iter.nth(n) } + fn nth_back(&mut self, n: usize) -> Option<<I as Iterator>::Item> { + self.iter.nth(n) + } - fn try_rfold<B, F, R>(&mut self, init: B, f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B> + fn try_rfold<B, F, R>(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Ok = B>, { self.iter.try_fold(init, f) } fn rfold<Acc, F>(self, init: Acc, f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, 
Self::Item) -> Acc, { self.iter.fold(init, f) } fn rfind<P>(&mut self, predicate: P) -> Option<Self::Item> - where P: FnMut(&Self::Item) -> bool + where + P: FnMut(&Self::Item) -> bool, { self.iter.find(predicate) } @@ -97,7 +123,8 @@ impl<I> DoubleEndedIterator for Rev<I> where I: DoubleEndedIterator { #[stable(feature = "rust1", since = "1.0.0")] impl<I> ExactSizeIterator for Rev<I> - where I: ExactSizeIterator + DoubleEndedIterator +where + I: ExactSizeIterator + DoubleEndedIterator, { fn len(&self) -> usize { self.iter.len() @@ -109,12 +136,10 @@ impl<I> ExactSizeIterator for Rev<I> } #[stable(feature = "fused", since = "1.26.0")] -impl<I> FusedIterator for Rev<I> - where I: FusedIterator + DoubleEndedIterator {} +impl<I> FusedIterator for Rev<I> where I: FusedIterator + DoubleEndedIterator {} #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl<I> TrustedLen for Rev<I> - where I: TrustedLen + DoubleEndedIterator {} +unsafe impl<I> TrustedLen for Rev<I> where I: TrustedLen + DoubleEndedIterator {} /// An iterator that copies the elements of an underlying iterator. /// @@ -136,21 +161,19 @@ impl<I> Copied<I> { } } -fn copy_fold<T: Copy, Acc>( - mut f: impl FnMut(Acc, T) -> Acc, -) -> impl FnMut(Acc, &T) -> Acc { +fn copy_fold<T: Copy, Acc>(mut f: impl FnMut(Acc, T) -> Acc) -> impl FnMut(Acc, &T) -> Acc { move |acc, &elt| f(acc, elt) } -fn copy_try_fold<T: Copy, Acc, R>( - mut f: impl FnMut(Acc, T) -> R, -) -> impl FnMut(Acc, &T) -> R { +fn copy_try_fold<T: Copy, Acc, R>(mut f: impl FnMut(Acc, T) -> R) -> impl FnMut(Acc, &T) -> R { move |acc, &elt| f(acc, elt) } #[stable(feature = "iter_copied", since = "1.36.0")] impl<'a, I, T: 'a> Iterator for Copied<I> - where I: Iterator<Item=&'a T>, T: Copy +where + I: Iterator<Item = &'a T>, + T: Copy, { type Item = T; @@ -162,14 +185,18 @@ impl<'a, I, T: 'a> Iterator for Copied<I> self.it.size_hint() } - fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B> + fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Ok = B>, { self.it.try_fold(init, copy_try_fold(f)) } fn fold<Acc, F>(self, init: Acc, f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.it.fold(init, copy_fold(f)) } @@ -177,20 +204,26 @@ impl<'a, I, T: 'a> Iterator for Copied<I> #[stable(feature = "iter_copied", since = "1.36.0")] impl<'a, I, T: 'a> DoubleEndedIterator for Copied<I> - where I: DoubleEndedIterator<Item=&'a T>, T: Copy +where + I: DoubleEndedIterator<Item = &'a T>, + T: Copy, { fn next_back(&mut self) -> Option<T> { self.it.next_back().copied() } - fn try_rfold<B, F, R>(&mut self, init: B, f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B> + fn try_rfold<B, F, R>(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Ok = B>, { self.it.try_rfold(init, copy_try_fold(f)) } fn rfold<Acc, F>(self, init: Acc, f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.it.rfold(init, copy_fold(f)) } @@ -198,7 +231,9 @@ impl<'a, I, T: 'a> DoubleEndedIterator for Copied<I> #[stable(feature = "iter_copied", since = "1.36.0")] impl<'a, I, T: 'a> ExactSizeIterator for Copied<I> - where I: ExactSizeIterator<Item=&'a T>, T: Copy +where + I: ExactSizeIterator<Item = &'a T>, + T: Copy, { fn len(&self) -> usize { self.it.len() @@ -211,12 +246,17 @@ impl<'a, I, T: 'a> ExactSizeIterator for 
Copied<I> #[stable(feature = "iter_copied", since = "1.36.0")] impl<'a, I, T: 'a> FusedIterator for Copied<I> - where I: FusedIterator<Item=&'a T>, T: Copy -{} +where + I: FusedIterator<Item = &'a T>, + T: Copy, +{ +} #[doc(hidden)] unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Copied<I> - where I: TrustedRandomAccess<Item=&'a T>, T: Copy +where + I: TrustedRandomAccess<Item = &'a T>, + T: Copy, { unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item { *self.it.get_unchecked(i) @@ -230,9 +270,11 @@ unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Copied<I> #[stable(feature = "iter_copied", since = "1.36.0")] unsafe impl<'a, I, T: 'a> TrustedLen for Copied<I> - where I: TrustedLen<Item=&'a T>, - T: Copy -{} +where + I: TrustedLen<Item = &'a T>, + T: Copy, +{ +} /// An iterator that clones the elements of an underlying iterator. /// @@ -253,15 +295,15 @@ impl<I> Cloned<I> { } } -fn clone_try_fold<T: Clone, Acc, R>( - mut f: impl FnMut(Acc, T) -> R, -) -> impl FnMut(Acc, &T) -> R { +fn clone_try_fold<T: Clone, Acc, R>(mut f: impl FnMut(Acc, T) -> R) -> impl FnMut(Acc, &T) -> R { move |acc, elt| f(acc, elt.clone()) } #[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> Iterator for Cloned<I> - where I: Iterator<Item=&'a T>, T: Clone +where + I: Iterator<Item = &'a T>, + T: Clone, { type Item = T; @@ -273,14 +315,18 @@ impl<'a, I, T: 'a> Iterator for Cloned<I> self.it.size_hint() } - fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B> + fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Ok = B>, { self.it.try_fold(init, clone_try_fold(f)) } fn fold<Acc, F>(self, init: Acc, f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.it.map(T::clone).fold(init, f) } @@ -288,20 +334,26 @@ impl<'a, I, T: 'a> Iterator for Cloned<I> #[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> DoubleEndedIterator for Cloned<I> - where I: DoubleEndedIterator<Item=&'a T>, T: Clone +where + I: DoubleEndedIterator<Item = &'a T>, + T: Clone, { fn next_back(&mut self) -> Option<T> { self.it.next_back().cloned() } - fn try_rfold<B, F, R>(&mut self, init: B, f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B> + fn try_rfold<B, F, R>(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Ok = B>, { self.it.try_rfold(init, clone_try_fold(f)) } fn rfold<Acc, F>(self, init: Acc, f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.it.map(T::clone).rfold(init, f) } @@ -309,7 +361,9 @@ impl<'a, I, T: 'a> DoubleEndedIterator for Cloned<I> #[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> ExactSizeIterator for Cloned<I> - where I: ExactSizeIterator<Item=&'a T>, T: Clone +where + I: ExactSizeIterator<Item = &'a T>, + T: Clone, { fn len(&self) -> usize { self.it.len() @@ -322,24 +376,33 @@ impl<'a, I, T: 'a> ExactSizeIterator for Cloned<I> #[stable(feature = "fused", since = "1.26.0")] impl<'a, I, T: 'a> FusedIterator for Cloned<I> - where I: FusedIterator<Item=&'a T>, T: Clone -{} +where + I: FusedIterator<Item = &'a T>, + T: Clone, +{ +} #[doc(hidden)] unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Cloned<I> - where I: TrustedRandomAccess<Item=&'a T>, T: Clone +where + I: TrustedRandomAccess<Item = &'a T>, + T: Clone, { default unsafe fn get_unchecked(&mut self, i: 
usize) -> Self::Item { self.it.get_unchecked(i).clone() } #[inline] - default fn may_have_side_effect() -> bool { true } + default fn may_have_side_effect() -> bool { + true + } } #[doc(hidden)] unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Cloned<I> - where I: TrustedRandomAccess<Item=&'a T>, T: Copy +where + I: TrustedRandomAccess<Item = &'a T>, + T: Copy, { unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item { *self.it.get_unchecked(i) @@ -353,9 +416,11 @@ unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Cloned<I> #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl<'a, I, T: 'a> TrustedLen for Cloned<I> - where I: TrustedLen<Item=&'a T>, - T: Clone -{} +where + I: TrustedLen<Item = &'a T>, + T: Clone, +{ +} /// An iterator that repeats endlessly. /// @@ -378,14 +443,20 @@ impl<I: Clone> Cycle<I> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<I> Iterator for Cycle<I> where I: Clone + Iterator { +impl<I> Iterator for Cycle<I> +where + I: Clone + Iterator, +{ type Item = <I as Iterator>::Item; #[inline] fn next(&mut self) -> Option<<I as Iterator>::Item> { match self.iter.next() { - None => { self.iter = self.orig.clone(); self.iter.next() } - y => y + None => { + self.iter = self.orig.clone(); + self.iter.next() + } + y => y, } } @@ -395,7 +466,7 @@ impl<I> Iterator for Cycle<I> where I: Clone + Iterator { match self.orig.size_hint() { sz @ (0, Some(0)) => sz, (0, _) => (0, None), - _ => (usize::MAX, None) + _ => (usize::MAX, None), } } @@ -456,7 +527,10 @@ impl<I> StepBy<I> { } #[stable(feature = "iterator_step_by", since = "1.28.0")] -impl<I> Iterator for StepBy<I> where I: Iterator { +impl<I> Iterator for StepBy<I> +where + I: Iterator, +{ type Item = I::Item; #[inline] @@ -558,7 +632,10 @@ impl<I> Iterator for StepBy<I> where I: Iterator { } } -impl<I> StepBy<I> where I: ExactSizeIterator { +impl<I> StepBy<I> +where + I: ExactSizeIterator, +{ // The zero-based index starting from the end of the iterator of the // last element. Used in the `DoubleEndedIterator` implementation. 
fn next_back_index(&self) -> usize { @@ -572,7 +649,10 @@ impl<I> StepBy<I> where I: ExactSizeIterator { } #[stable(feature = "double_ended_step_by_iterator", since = "1.38.0")] -impl<I> DoubleEndedIterator for StepBy<I> where I: DoubleEndedIterator + ExactSizeIterator { +impl<I> DoubleEndedIterator for StepBy<I> +where + I: DoubleEndedIterator + ExactSizeIterator, +{ #[inline] fn next_back(&mut self) -> Option<Self::Item> { self.iter.nth_back(self.next_back_index()) @@ -584,9 +664,7 @@ impl<I> DoubleEndedIterator for StepBy<I> where I: DoubleEndedIterator + ExactSi // is out of bounds because the length of `self.iter` does not exceed // `usize::MAX` (because `I: ExactSizeIterator`) and `nth_back` is // zero-indexed - let n = n - .saturating_mul(self.step + 1) - .saturating_add(self.next_back_index()); + let n = n.saturating_mul(self.step + 1).saturating_add(self.next_back_index()); self.iter.nth_back(n) } @@ -683,9 +761,7 @@ impl<I, F> Map<I, F> { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl<I: fmt::Debug, F> fmt::Debug for Map<I, F> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Map") - .field("iter", &self.iter) - .finish() + f.debug_struct("Map").field("iter", &self.iter).finish() } } @@ -704,7 +780,10 @@ fn map_try_fold<'a, T, B, Acc, R>( } #[stable(feature = "rust1", since = "1.0.0")] -impl<B, I: Iterator, F> Iterator for Map<I, F> where F: FnMut(I::Item) -> B { +impl<B, I: Iterator, F> Iterator for Map<I, F> +where + F: FnMut(I::Item) -> B, +{ type Item = B; #[inline] @@ -717,21 +796,26 @@ impl<B, I: Iterator, F> Iterator for Map<I, F> where F: FnMut(I::Item) -> B { self.iter.size_hint() } - fn try_fold<Acc, G, R>(&mut self, init: Acc, g: G) -> R where - Self: Sized, G: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, G, R>(&mut self, init: Acc, g: G) -> R + where + Self: Sized, + G: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_fold(init, map_try_fold(&mut self.f, g)) } fn fold<Acc, G>(self, init: Acc, g: G) -> Acc - where G: FnMut(Acc, Self::Item) -> Acc, + where + G: FnMut(Acc, Self::Item) -> Acc, { self.iter.fold(init, map_fold(self.f, g)) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for Map<I, F> where +impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for Map<I, F> +where F: FnMut(I::Item) -> B, { #[inline] @@ -739,14 +823,18 @@ impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for Map<I, F> where self.iter.next_back().map(&mut self.f) } - fn try_rfold<Acc, G, R>(&mut self, init: Acc, g: G) -> R where - Self: Sized, G: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_rfold<Acc, G, R>(&mut self, init: Acc, g: G) -> R + where + Self: Sized, + G: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_rfold(init, map_try_fold(&mut self.f, g)) } fn rfold<Acc, G>(self, init: Acc, g: G) -> Acc - where G: FnMut(Acc, Self::Item) -> Acc, + where + G: FnMut(Acc, Self::Item) -> Acc, { self.iter.rfold(init, map_fold(self.f, g)) } @@ -754,7 +842,8 @@ impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for Map<I, F> where #[stable(feature = "rust1", since = "1.0.0")] impl<B, I: ExactSizeIterator, F> ExactSizeIterator for Map<I, F> - where F: FnMut(I::Item) -> B +where + F: FnMut(I::Item) -> B, { fn len(&self) -> usize { self.iter.len() @@ -766,24 +855,29 @@ impl<B, I: ExactSizeIterator, F> ExactSizeIterator for Map<I, F> } #[stable(feature = "fused", since = "1.26.0")] -impl<B, I: FusedIterator, F> FusedIterator for Map<I, F> 
- where F: FnMut(I::Item) -> B {} +impl<B, I: FusedIterator, F> FusedIterator for Map<I, F> where F: FnMut(I::Item) -> B {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl<B, I, F> TrustedLen for Map<I, F> - where I: TrustedLen, - F: FnMut(I::Item) -> B {} +where + I: TrustedLen, + F: FnMut(I::Item) -> B, +{ +} #[doc(hidden)] unsafe impl<B, I, F> TrustedRandomAccess for Map<I, F> - where I: TrustedRandomAccess, - F: FnMut(I::Item) -> B, +where + I: TrustedRandomAccess, + F: FnMut(I::Item) -> B, { unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item { (self.f)(self.iter.get_unchecked(i)) } #[inline] - fn may_have_side_effect() -> bool { true } + fn may_have_side_effect() -> bool { + true + } } /// An iterator that filters the elements of `iter` with `predicate`. @@ -809,9 +903,7 @@ impl<I, P> Filter<I, P> { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl<I: fmt::Debug, P> fmt::Debug for Filter<I, P> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Filter") - .field("iter", &self.iter) - .finish() + f.debug_struct("Filter").field("iter", &self.iter).finish() } } @@ -830,7 +922,10 @@ fn filter_try_fold<'a, T, Acc, R: Try<Ok = Acc>>( } #[stable(feature = "rust1", since = "1.0.0")] -impl<I: Iterator, P> Iterator for Filter<I, P> where P: FnMut(&I::Item) -> bool { +impl<I: Iterator, P> Iterator for Filter<I, P> +where + P: FnMut(&I::Item) -> bool, +{ type Item = I::Item; #[inline] @@ -866,15 +961,19 @@ impl<I: Iterator, P> Iterator for Filter<I, P> where P: FnMut(&I::Item) -> bool } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_fold(init, filter_try_fold(&mut self.predicate, fold)) } #[inline] fn fold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.fold(init, filter_fold(self.predicate, fold)) } @@ -882,7 +981,8 @@ impl<I: Iterator, P> Iterator for Filter<I, P> where P: FnMut(&I::Item) -> bool #[stable(feature = "rust1", since = "1.0.0")] impl<I: DoubleEndedIterator, P> DoubleEndedIterator for Filter<I, P> - where P: FnMut(&I::Item) -> bool, +where + P: FnMut(&I::Item) -> bool, { #[inline] fn next_back(&mut self) -> Option<I::Item> { @@ -890,23 +990,26 @@ impl<I: DoubleEndedIterator, P> DoubleEndedIterator for Filter<I, P> } #[inline] - fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_rfold(init, filter_try_fold(&mut self.predicate, fold)) } #[inline] fn rfold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.rfold(init, filter_fold(self.predicate, fold)) } } #[stable(feature = "fused", since = "1.26.0")] -impl<I: FusedIterator, P> FusedIterator for Filter<I, P> - where P: FnMut(&I::Item) -> bool {} +impl<I: FusedIterator, P> FusedIterator for Filter<I, P> where P: FnMut(&I::Item) -> bool {} /// An iterator that uses `f` to both filter and map elements from `iter`. 
/// @@ -931,9 +1034,7 @@ impl<I, F> FilterMap<I, F> { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl<I: fmt::Debug, F> fmt::Debug for FilterMap<I, F> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("FilterMap") - .field("iter", &self.iter) - .finish() + f.debug_struct("FilterMap").field("iter", &self.iter).finish() } } @@ -959,7 +1060,8 @@ fn filter_map_try_fold<'a, T, B, Acc, R: Try<Ok = Acc>>( #[stable(feature = "rust1", since = "1.0.0")] impl<B, I: Iterator, F> Iterator for FilterMap<I, F> - where F: FnMut(I::Item) -> Option<B>, +where + F: FnMut(I::Item) -> Option<B>, { type Item = B; @@ -975,15 +1077,19 @@ impl<B, I: Iterator, F> Iterator for FilterMap<I, F> } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_fold(init, filter_map_try_fold(&mut self.f, fold)) } #[inline] fn fold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.fold(init, filter_map_fold(self.f, fold)) } @@ -991,13 +1097,14 @@ impl<B, I: Iterator, F> Iterator for FilterMap<I, F> #[stable(feature = "rust1", since = "1.0.0")] impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for FilterMap<I, F> - where F: FnMut(I::Item) -> Option<B>, +where + F: FnMut(I::Item) -> Option<B>, { #[inline] fn next_back(&mut self) -> Option<B> { #[inline] fn find<T, B>( - f: &mut impl FnMut(T) -> Option<B> + f: &mut impl FnMut(T) -> Option<B>, ) -> impl FnMut((), T) -> LoopState<(), B> + '_ { move |(), x| match f(x) { Some(x) => LoopState::Break(x), @@ -1009,23 +1116,26 @@ impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for FilterMap<I, F> } #[inline] - fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_rfold(init, filter_map_try_fold(&mut self.f, fold)) } #[inline] fn rfold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.rfold(init, filter_map_fold(self.f, fold)) } } #[stable(feature = "fused", since = "1.26.0")] -impl<B, I: FusedIterator, F> FusedIterator for FilterMap<I, F> - where F: FnMut(I::Item) -> Option<B> {} +impl<B, I: FusedIterator, F> FusedIterator for FilterMap<I, F> where F: FnMut(I::Item) -> Option<B> {} /// An iterator that yields the current count and the element during iteration. 
/// @@ -1048,7 +1158,10 @@ impl<I> Enumerate<I> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<I> Iterator for Enumerate<I> where I: Iterator { +impl<I> Iterator for Enumerate<I> +where + I: Iterator, +{ type Item = (usize, <I as Iterator>::Item); /// # Overflow Behavior @@ -1089,8 +1202,11 @@ impl<I> Iterator for Enumerate<I> where I: Iterator { } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { #[inline] fn enumerate<'a, T, Acc, R>( @@ -1110,7 +1226,8 @@ impl<I> Iterator for Enumerate<I> where I: Iterator { #[inline] fn fold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { #[inline] fn enumerate<T, Acc>( @@ -1130,8 +1247,9 @@ impl<I> Iterator for Enumerate<I> where I: Iterator { } #[stable(feature = "rust1", since = "1.0.0")] -impl<I> DoubleEndedIterator for Enumerate<I> where - I: ExactSizeIterator + DoubleEndedIterator +impl<I> DoubleEndedIterator for Enumerate<I> +where + I: ExactSizeIterator + DoubleEndedIterator, { #[inline] fn next_back(&mut self) -> Option<(usize, <I as Iterator>::Item)> { @@ -1152,8 +1270,11 @@ impl<I> DoubleEndedIterator for Enumerate<I> where } #[inline] - fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { // Can safely add and subtract the count, as `ExactSizeIterator` promises // that the number of elements fits into a `usize`. @@ -1173,7 +1294,8 @@ impl<I> DoubleEndedIterator for Enumerate<I> where #[inline] fn rfold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { // Can safely add and subtract the count, as `ExactSizeIterator` promises // that the number of elements fits into a `usize`. @@ -1193,7 +1315,10 @@ impl<I> DoubleEndedIterator for Enumerate<I> where } #[stable(feature = "rust1", since = "1.0.0")] -impl<I> ExactSizeIterator for Enumerate<I> where I: ExactSizeIterator { +impl<I> ExactSizeIterator for Enumerate<I> +where + I: ExactSizeIterator, +{ fn len(&self) -> usize { self.iter.len() } @@ -1205,7 +1330,8 @@ impl<I> ExactSizeIterator for Enumerate<I> where I: ExactSizeIterator { #[doc(hidden)] unsafe impl<I> TrustedRandomAccess for Enumerate<I> - where I: TrustedRandomAccess +where + I: TrustedRandomAccess, { unsafe fn get_unchecked(&mut self, i: usize) -> (usize, I::Item) { (self.count + i, self.iter.get_unchecked(i)) @@ -1220,10 +1346,7 @@ unsafe impl<I> TrustedRandomAccess for Enumerate<I> impl<I> FusedIterator for Enumerate<I> where I: FusedIterator {} #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl<I> TrustedLen for Enumerate<I> - where I: TrustedLen, -{} - +unsafe impl<I> TrustedLen for Enumerate<I> where I: TrustedLen {} /// An iterator with a `peek()` that returns an optional reference to the next /// element. 
@@ -1310,8 +1433,11 @@ impl<I: Iterator> Iterator for Peekable<I> { } #[inline] - fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B> + fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Ok = B>, { let acc = match self.peeked.take() { Some(None) => return Try::from_ok(init), @@ -1323,7 +1449,8 @@ impl<I: Iterator> Iterator for Peekable<I> { #[inline] fn fold<Acc, Fold>(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { let acc = match self.peeked { Some(None) => return init, @@ -1335,15 +1462,21 @@ impl<I: Iterator> Iterator for Peekable<I> { } #[stable(feature = "double_ended_peek_iterator", since = "1.38.0")] -impl<I> DoubleEndedIterator for Peekable<I> where I: DoubleEndedIterator { +impl<I> DoubleEndedIterator for Peekable<I> +where + I: DoubleEndedIterator, +{ #[inline] fn next_back(&mut self) -> Option<Self::Item> { self.iter.next_back().or_else(|| self.peeked.take().and_then(|x| x)) } #[inline] - fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B> + fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Ok = B>, { match self.peeked.take() { Some(None) => Try::from_ok(init), @@ -1360,7 +1493,8 @@ impl<I> DoubleEndedIterator for Peekable<I> where I: DoubleEndedIterator { #[inline] fn rfold<Acc, Fold>(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { match self.peeked { Some(None) => init, @@ -1449,16 +1583,14 @@ impl<I, P> SkipWhile<I, P> { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl<I: fmt::Debug, P> fmt::Debug for SkipWhile<I, P> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SkipWhile") - .field("iter", &self.iter) - .field("flag", &self.flag) - .finish() + f.debug_struct("SkipWhile").field("iter", &self.iter).field("flag", &self.flag).finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl<I: Iterator, P> Iterator for SkipWhile<I, P> - where P: FnMut(&I::Item) -> bool +where + P: FnMut(&I::Item) -> bool, { type Item = I::Item; @@ -1490,8 +1622,11 @@ impl<I: Iterator, P> Iterator for SkipWhile<I, P> } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, mut init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, mut init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { if !self.flag { match self.next() { @@ -1504,7 +1639,8 @@ impl<I: Iterator, P> Iterator for SkipWhile<I, P> #[inline] fn fold<Acc, Fold>(mut self, mut init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { if !self.flag { match self.next() { @@ -1518,7 +1654,11 @@ impl<I: Iterator, P> Iterator for SkipWhile<I, P> #[stable(feature = "fused", since = "1.26.0")] impl<I, P> FusedIterator for SkipWhile<I, P> - where I: FusedIterator, P: FnMut(&I::Item) -> bool {} +where + I: FusedIterator, + P: FnMut(&I::Item) -> bool, +{ +} /// An iterator that only accepts elements while `predicate` returns `true`. 
/// @@ -1544,16 +1684,14 @@ impl<I, P> TakeWhile<I, P> { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl<I: fmt::Debug, P> fmt::Debug for TakeWhile<I, P> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("TakeWhile") - .field("iter", &self.iter) - .field("flag", &self.flag) - .finish() + f.debug_struct("TakeWhile").field("iter", &self.iter).field("flag", &self.flag).finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl<I: Iterator, P> Iterator for TakeWhile<I, P> - where P: FnMut(&I::Item) -> bool +where + P: FnMut(&I::Item) -> bool, { type Item = I::Item; @@ -1583,8 +1721,11 @@ impl<I: Iterator, P> Iterator for TakeWhile<I, P> } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { fn check<'a, T, Acc, R: Try<Ok = Acc>>( flag: &'a mut bool, @@ -1613,7 +1754,11 @@ impl<I: Iterator, P> Iterator for TakeWhile<I, P> #[stable(feature = "fused", since = "1.26.0")] impl<I, P> FusedIterator for TakeWhile<I, P> - where I: FusedIterator, P: FnMut(&I::Item) -> bool {} +where + I: FusedIterator, + P: FnMut(&I::Item) -> bool, +{ +} /// An iterator that skips over `n` elements of `iter`. /// @@ -1627,7 +1772,7 @@ impl<I, P> FusedIterator for TakeWhile<I, P> #[stable(feature = "rust1", since = "1.0.0")] pub struct Skip<I> { iter: I, - n: usize + n: usize, } impl<I> Skip<I> { pub(super) fn new(iter: I, n: usize) -> Skip<I> { @@ -1636,7 +1781,10 @@ impl<I> Skip<I> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<I> Iterator for Skip<I> where I: Iterator { +impl<I> Iterator for Skip<I> +where + I: Iterator, +{ type Item = <I as Iterator>::Item; #[inline] @@ -1659,7 +1807,7 @@ impl<I> Iterator for Skip<I> where I: Iterator { let to_skip = self.n; self.n = 0; // nth(n) skips n+1 - if self.iter.nth(to_skip-1).is_none() { + if self.iter.nth(to_skip - 1).is_none() { return None; } self.iter.nth(n) @@ -1700,8 +1848,11 @@ impl<I> Iterator for Skip<I> where I: Iterator { } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { let n = self.n; self.n = 0; @@ -1716,7 +1867,8 @@ impl<I> Iterator for Skip<I> where I: Iterator { #[inline] fn fold<Acc, Fold>(mut self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { if self.n > 0 { // nth(n) skips n+1 @@ -1732,13 +1884,12 @@ impl<I> Iterator for Skip<I> where I: Iterator { impl<I> ExactSizeIterator for Skip<I> where I: ExactSizeIterator {} #[stable(feature = "double_ended_skip_iterator", since = "1.9.0")] -impl<I> DoubleEndedIterator for Skip<I> where I: DoubleEndedIterator + ExactSizeIterator { +impl<I> DoubleEndedIterator for Skip<I> +where + I: DoubleEndedIterator + ExactSizeIterator, +{ fn next_back(&mut self) -> Option<Self::Item> { - if self.len() > 0 { - self.iter.next_back() - } else { - None - } + if self.len() > 0 { self.iter.next_back() } else { None } } #[inline] @@ -1749,14 +1900,17 @@ impl<I> DoubleEndedIterator for Skip<I> where I: DoubleEndedIterator + ExactSize } else { if len > 0 { // consume the original iterator - self.iter.nth_back(len-1); + 
self.iter.nth_back(len - 1); } None } } - fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { fn check<T, Acc, R: Try<Ok = Acc>>( mut n: usize, @@ -1765,8 +1919,7 @@ impl<I> DoubleEndedIterator for Skip<I> where I: DoubleEndedIterator + ExactSize move |acc, x| { n -= 1; let r = fold(acc, x); - if n == 0 { LoopState::Break(r) } - else { LoopState::from_try(r) } + if n == 0 { LoopState::Break(r) } else { LoopState::from_try(r) } } } @@ -1794,7 +1947,7 @@ impl<I> FusedIterator for Skip<I> where I: FusedIterator {} #[stable(feature = "rust1", since = "1.0.0")] pub struct Take<I> { pub(super) iter: I, - pub(super) n: usize + pub(super) n: usize, } impl<I> Take<I> { pub(super) fn new(iter: I, n: usize) -> Take<I> { @@ -1803,7 +1956,10 @@ impl<I> Take<I> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<I> Iterator for Take<I> where I: Iterator{ +impl<I> Iterator for Take<I> +where + I: Iterator, +{ type Item = <I as Iterator>::Item; #[inline] @@ -1842,15 +1998,18 @@ impl<I> Iterator for Take<I> where I: Iterator{ let upper = match upper { Some(x) if x < self.n => Some(x), - _ => Some(self.n) + _ => Some(self.n), }; (lower, upper) } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { fn check<'a, T, Acc, R: Try<Ok = Acc>>( n: &'a mut usize, @@ -1859,8 +2018,7 @@ impl<I> Iterator for Take<I> where I: Iterator{ move |acc, x| { *n -= 1; let r = fold(acc, x); - if *n == 0 { LoopState::Break(r) } - else { LoopState::from_try(r) } + if *n == 0 { LoopState::Break(r) } else { LoopState::from_try(r) } } } @@ -1874,7 +2032,10 @@ impl<I> Iterator for Take<I> where I: Iterator{ } #[stable(feature = "double_ended_take_iterator", since = "1.38.0")] -impl<I> DoubleEndedIterator for Take<I> where I: DoubleEndedIterator + ExactSizeIterator { +impl<I> DoubleEndedIterator for Take<I> +where + I: DoubleEndedIterator + ExactSizeIterator, +{ #[inline] fn next_back(&mut self) -> Option<Self::Item> { if self.n == 0 { @@ -1902,8 +2063,11 @@ impl<I> DoubleEndedIterator for Take<I> where I: DoubleEndedIterator + ExactSize } #[inline] - fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok = Acc> + fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { if self.n == 0 { Try::from_ok(init) @@ -1951,15 +2115,13 @@ impl<I, St, F> Scan<I, St, F> { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl<I: fmt::Debug, St: fmt::Debug, F> fmt::Debug for Scan<I, St, F> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Scan") - .field("iter", &self.iter) - .field("state", &self.state) - .finish() + f.debug_struct("Scan").field("iter", &self.iter).field("state", &self.state).finish() } } #[stable(feature = "rust1", since = "1.0.0")] -impl<B, I, St, F> Iterator for Scan<I, St, F> where +impl<B, I, St, F> Iterator for Scan<I, St, F> +where I: Iterator, F: FnMut(&mut St, I::Item) -> Option<B>, { @@ -1978,19 +2140,20 @@ impl<B, I, St, F> Iterator for Scan<I, 
St, F> where } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { fn scan<'a, T, St, B, Acc, R: Try<Ok = Acc>>( state: &'a mut St, f: &'a mut impl FnMut(&mut St, T) -> Option<B>, mut fold: impl FnMut(Acc, B) -> R + 'a, ) -> impl FnMut(Acc, T) -> LoopState<Acc, R> + 'a { - move |acc, x| { - match f(state, x) { - None => LoopState::Break(Try::from_ok(acc)), - Some(x) => LoopState::from_try(fold(acc, x)), - } + move |acc, x| match f(state, x) { + None => LoopState::Break(Try::from_ok(acc)), + Some(x) => LoopState::from_try(fold(acc, x)), } } @@ -2013,7 +2176,7 @@ impl<B, I, St, F> Iterator for Scan<I, St, F> where #[stable(feature = "rust1", since = "1.0.0")] pub struct Fuse<I> { iter: I, - done: bool + done: bool, } impl<I> Fuse<I> { pub(super) fn new(iter: I) -> Fuse<I> { @@ -2025,7 +2188,10 @@ impl<I> Fuse<I> { impl<I> FusedIterator for Fuse<I> where I: Iterator {} #[stable(feature = "rust1", since = "1.0.0")] -impl<I> Iterator for Fuse<I> where I: Iterator { +impl<I> Iterator for Fuse<I> +where + I: Iterator, +{ type Item = <I as Iterator>::Item; #[inline] @@ -2052,34 +2218,25 @@ impl<I> Iterator for Fuse<I> where I: Iterator { #[inline] default fn last(self) -> Option<I::Item> { - if self.done { - None - } else { - self.iter.last() - } + if self.done { None } else { self.iter.last() } } #[inline] default fn count(self) -> usize { - if self.done { - 0 - } else { - self.iter.count() - } + if self.done { 0 } else { self.iter.count() } } #[inline] default fn size_hint(&self) -> (usize, Option<usize>) { - if self.done { - (0, Some(0)) - } else { - self.iter.size_hint() - } + if self.done { (0, Some(0)) } else { self.iter.size_hint() } } #[inline] - default fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + default fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { if self.done { Try::from_ok(init) @@ -2092,18 +2249,18 @@ impl<I> Iterator for Fuse<I> where I: Iterator { #[inline] default fn fold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { - if self.done { - init - } else { - self.iter.fold(init, fold) - } + if self.done { init } else { self.iter.fold(init, fold) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl<I> DoubleEndedIterator for Fuse<I> where I: DoubleEndedIterator { +impl<I> DoubleEndedIterator for Fuse<I> +where + I: DoubleEndedIterator, +{ #[inline] default fn next_back(&mut self) -> Option<<I as Iterator>::Item> { if self.done { @@ -2127,8 +2284,11 @@ impl<I> DoubleEndedIterator for Fuse<I> where I: DoubleEndedIterator { } #[inline] - default fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + default fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { if self.done { Try::from_ok(init) @@ -2141,18 +2301,16 @@ impl<I> DoubleEndedIterator for Fuse<I> where I: DoubleEndedIterator { #[inline] default fn rfold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, 
Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { - if self.done { - init - } else { - self.iter.rfold(init, fold) - } + if self.done { init } else { self.iter.rfold(init, fold) } } } unsafe impl<I> TrustedRandomAccess for Fuse<I> - where I: TrustedRandomAccess, +where + I: TrustedRandomAccess, { unsafe fn get_unchecked(&mut self, i: usize) -> I::Item { self.iter.get_unchecked(i) @@ -2164,7 +2322,10 @@ unsafe impl<I> TrustedRandomAccess for Fuse<I> } #[stable(feature = "fused", since = "1.26.0")] -impl<I> Iterator for Fuse<I> where I: FusedIterator { +impl<I> Iterator for Fuse<I> +where + I: FusedIterator, +{ #[inline] fn next(&mut self) -> Option<<I as Iterator>::Item> { self.iter.next() @@ -2191,15 +2352,19 @@ impl<I> Iterator for Fuse<I> where I: FusedIterator { } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_fold(init, fold) } #[inline] fn fold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.fold(init, fold) } @@ -2207,7 +2372,8 @@ impl<I> Iterator for Fuse<I> where I: FusedIterator { #[stable(feature = "fused", since = "1.26.0")] impl<I> DoubleEndedIterator for Fuse<I> - where I: DoubleEndedIterator + FusedIterator +where + I: DoubleEndedIterator + FusedIterator, { #[inline] fn next_back(&mut self) -> Option<<I as Iterator>::Item> { @@ -2220,23 +2386,29 @@ impl<I> DoubleEndedIterator for Fuse<I> } #[inline] - fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_rfold(init, fold) } #[inline] fn rfold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.rfold(init, fold) } } - #[stable(feature = "rust1", since = "1.0.0")] -impl<I> ExactSizeIterator for Fuse<I> where I: ExactSizeIterator { +impl<I> ExactSizeIterator for Fuse<I> +where + I: ExactSizeIterator, +{ fn len(&self) -> usize { self.iter.len() } @@ -2270,13 +2442,14 @@ impl<I, F> Inspect<I, F> { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl<I: fmt::Debug, F> fmt::Debug for Inspect<I, F> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Inspect") - .field("iter", &self.iter) - .finish() + f.debug_struct("Inspect").field("iter", &self.iter).finish() } } -impl<I: Iterator, F> Inspect<I, F> where F: FnMut(&I::Item) { +impl<I: Iterator, F> Inspect<I, F> +where + F: FnMut(&I::Item), +{ #[inline] fn do_inspect(&mut self, elt: Option<I::Item>) -> Option<I::Item> { if let Some(ref a) = elt { @@ -2291,18 +2464,27 @@ fn inspect_fold<T, Acc>( mut f: impl FnMut(&T), mut fold: impl FnMut(Acc, T) -> Acc, ) -> impl FnMut(Acc, T) -> Acc { - move |acc, item| { f(&item); fold(acc, item) } + move |acc, item| { + f(&item); + fold(acc, item) + } } fn inspect_try_fold<'a, T, Acc, R>( f: &'a mut impl FnMut(&T), mut fold: impl FnMut(Acc, T) -> R + 'a, ) -> impl FnMut(Acc, T) -> R + 'a { - move |acc, item| { f(&item); fold(acc, item) } + move |acc, item| { + f(&item); + fold(acc, item) + } } 
#[stable(feature = "rust1", since = "1.0.0")] -impl<I: Iterator, F> Iterator for Inspect<I, F> where F: FnMut(&I::Item) { +impl<I: Iterator, F> Iterator for Inspect<I, F> +where + F: FnMut(&I::Item), +{ type Item = I::Item; #[inline] @@ -2317,15 +2499,19 @@ impl<I: Iterator, F> Iterator for Inspect<I, F> where F: FnMut(&I::Item) { } #[inline] - fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_fold(init, inspect_try_fold(&mut self.f, fold)) } #[inline] fn fold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.fold(init, inspect_fold(self.f, fold)) } @@ -2333,7 +2519,8 @@ impl<I: Iterator, F> Iterator for Inspect<I, F> where F: FnMut(&I::Item) { #[stable(feature = "rust1", since = "1.0.0")] impl<I: DoubleEndedIterator, F> DoubleEndedIterator for Inspect<I, F> - where F: FnMut(&I::Item), +where + F: FnMut(&I::Item), { #[inline] fn next_back(&mut self) -> Option<I::Item> { @@ -2342,15 +2529,19 @@ impl<I: DoubleEndedIterator, F> DoubleEndedIterator for Inspect<I, F> } #[inline] - fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc> + fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try<Ok = Acc>, { self.iter.try_rfold(init, inspect_try_fold(&mut self.f, fold)) } #[inline] fn rfold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.rfold(init, inspect_fold(self.f, fold)) } @@ -2358,7 +2549,8 @@ impl<I: DoubleEndedIterator, F> DoubleEndedIterator for Inspect<I, F> #[stable(feature = "rust1", since = "1.0.0")] impl<I: ExactSizeIterator, F> ExactSizeIterator for Inspect<I, F> - where F: FnMut(&I::Item) +where + F: FnMut(&I::Item), { fn len(&self) -> usize { self.iter.len() @@ -2370,8 +2562,7 @@ impl<I: ExactSizeIterator, F> ExactSizeIterator for Inspect<I, F> } #[stable(feature = "fused", since = "1.26.0")] -impl<I: FusedIterator, F> FusedIterator for Inspect<I, F> - where F: FnMut(&I::Item) {} +impl<I: FusedIterator, F> FusedIterator for Inspect<I, F> where F: FnMut(&I::Item) {} /// An iterator adapter that produces output as long as the underlying /// iterator produces `Result::Ok` values. 
@@ -2392,16 +2583,14 @@ where for<'a> F: FnMut(ResultShunt<'a, I, E>) -> U, { let mut error = Ok(()); - let shunt = ResultShunt { - iter, - error: &mut error, - }; + let shunt = ResultShunt { iter, error: &mut error }; let value = f(shunt); error.map(|()| value) } impl<I, T, E> Iterator for ResultShunt<'_, I, E> - where I: Iterator<Item = Result<T, E>> +where + I: Iterator<Item = Result<T, E>>, { type Item = T; diff --git a/src/libcore/iter/traits/iterator.rs b/src/libcore/iter/traits/iterator.rs index 61e8b07511a..25be26491e3 100644 --- a/src/libcore/iter/traits/iterator.rs +++ b/src/libcore/iter/traits/iterator.rs @@ -1,13 +1,15 @@ +// ignore-tidy-filelength + use crate::cmp::{self, Ordering}; use crate::ops::{Add, Try}; use super::super::LoopState; -use super::super::{Chain, Cycle, Copied, Cloned, Enumerate, Filter, FilterMap, Fuse}; -use super::super::{Flatten, FlatMap}; -use super::super::{Inspect, Map, Peekable, Scan, Skip, SkipWhile, StepBy, Take, TakeWhile, Rev}; -use super::super::{Zip, Sum, Product, FromIterator}; +use super::super::{Chain, Cloned, Copied, Cycle, Enumerate, Filter, FilterMap, Fuse}; +use super::super::{FlatMap, Flatten}; +use super::super::{FromIterator, Product, Sum, Zip}; +use super::super::{Inspect, Map, Peekable, Rev, Scan, Skip, SkipWhile, StepBy, Take, TakeWhile}; -fn _assert_is_object_safe(_: &dyn Iterator<Item=()>) {} +fn _assert_is_object_safe(_: &dyn Iterator<Item = ()>) {} /// An interface for dealing with iterators. /// @@ -20,71 +22,71 @@ fn _assert_is_object_safe(_: &dyn Iterator<Item=()>) {} #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( on( - _Self="[std::ops::Range<Idx>; 1]", - label="if you meant to iterate between two values, remove the square brackets", - note="`[start..end]` is an array of one `Range`; you might have meant to have a `Range` \ - without the brackets: `start..end`" + _Self = "[std::ops::Range<Idx>; 1]", + label = "if you meant to iterate between two values, remove the square brackets", + note = "`[start..end]` is an array of one `Range`; you might have meant to have a `Range` \ + without the brackets: `start..end`" ), on( - _Self="[std::ops::RangeFrom<Idx>; 1]", - label="if you meant to iterate from a value onwards, remove the square brackets", - note="`[start..]` is an array of one `RangeFrom`; you might have meant to have a \ + _Self = "[std::ops::RangeFrom<Idx>; 1]", + label = "if you meant to iterate from a value onwards, remove the square brackets", + note = "`[start..]` is an array of one `RangeFrom`; you might have meant to have a \ `RangeFrom` without the brackets: `start..`, keeping in mind that iterating over an \ unbounded iterator will run forever unless you `break` or `return` from within the \ loop" ), on( - _Self="[std::ops::RangeTo<Idx>; 1]", - label="if you meant to iterate until a value, remove the square brackets and add a \ - starting value", - note="`[..end]` is an array of one `RangeTo`; you might have meant to have a bounded \ - `Range` without the brackets: `0..end`" + _Self = "[std::ops::RangeTo<Idx>; 1]", + label = "if you meant to iterate until a value, remove the square brackets and add a \ + starting value", + note = "`[..end]` is an array of one `RangeTo`; you might have meant to have a bounded \ + `Range` without the brackets: `0..end`" ), on( - _Self="[std::ops::RangeInclusive<Idx>; 1]", - label="if you meant to iterate between two values, remove the square brackets", - note="`[start..=end]` is an array of one `RangeInclusive`; you might have meant to have a \ + _Self = 
"[std::ops::RangeInclusive<Idx>; 1]", + label = "if you meant to iterate between two values, remove the square brackets", + note = "`[start..=end]` is an array of one `RangeInclusive`; you might have meant to have a \ `RangeInclusive` without the brackets: `start..=end`" ), on( - _Self="[std::ops::RangeToInclusive<Idx>; 1]", - label="if you meant to iterate until a value (including it), remove the square brackets \ - and add a starting value", - note="`[..=end]` is an array of one `RangeToInclusive`; you might have meant to have a \ - bounded `RangeInclusive` without the brackets: `0..=end`" + _Self = "[std::ops::RangeToInclusive<Idx>; 1]", + label = "if you meant to iterate until a value (including it), remove the square brackets \ + and add a starting value", + note = "`[..=end]` is an array of one `RangeToInclusive`; you might have meant to have a \ + bounded `RangeInclusive` without the brackets: `0..=end`" ), on( - _Self="std::ops::RangeTo<Idx>", - label="if you meant to iterate until a value, add a starting value", - note="`..end` is a `RangeTo`, which cannot be iterated on; you might have meant to have a \ + _Self = "std::ops::RangeTo<Idx>", + label = "if you meant to iterate until a value, add a starting value", + note = "`..end` is a `RangeTo`, which cannot be iterated on; you might have meant to have a \ bounded `Range`: `0..end`" ), on( - _Self="std::ops::RangeToInclusive<Idx>", - label="if you meant to iterate until a value (including it), add a starting value", - note="`..=end` is a `RangeToInclusive`, which cannot be iterated on; you might have meant \ + _Self = "std::ops::RangeToInclusive<Idx>", + label = "if you meant to iterate until a value (including it), add a starting value", + note = "`..=end` is a `RangeToInclusive`, which cannot be iterated on; you might have meant \ to have a bounded `RangeInclusive`: `0..=end`" ), on( - _Self="&str", - label="`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" + _Self = "&str", + label = "`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" ), on( - _Self="std::string::String", - label="`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" + _Self = "std::string::String", + label = "`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" ), on( - _Self="[]", - label="borrow the array with `&` or call `.iter()` on it to iterate over it", - note="arrays are not iterators, but slices like the following are: `&[1, 2, 3]`" + _Self = "[]", + label = "borrow the array with `&` or call `.iter()` on it to iterate over it", + note = "arrays are not iterators, but slices like the following are: `&[1, 2, 3]`" ), on( - _Self="{integral}", - note="if you want to iterate between `start` until a value `end`, use the exclusive range \ + _Self = "{integral}", + note = "if you want to iterate between `start` until a value `end`, use the exclusive range \ syntax `start..end` or the inclusive range syntax `start..=end`" ), - label="`{Self}` is not an iterator", - message="`{Self}` is not an iterator" + label = "`{Self}` is not an iterator", + message = "`{Self}` is not an iterator" )] #[doc(spotlight)] #[must_use = "iterators are lazy and do nothing unless consumed"] @@ -197,7 +199,9 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn size_hint(&self) -> (usize, Option<usize>) { (0, None) } + fn size_hint(&self) -> (usize, Option<usize>) { + (0, None) + } /// Consumes the iterator, counting the number of iterations and returning it. 
/// @@ -236,7 +240,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn count(self) -> usize where Self: Sized { + fn count(self) -> usize + where + Self: Sized, + { #[inline] fn add1<T>(count: usize, _: T) -> usize { // Might overflow. @@ -267,7 +274,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn last(self) -> Option<Self::Item> where Self: Sized { + fn last(self) -> Option<Self::Item> + where + Self: Sized, + { #[inline] fn some<T>(_: Option<T>, x: T) -> Option<T> { Some(x) @@ -321,7 +331,9 @@ pub trait Iterator { #[stable(feature = "rust1", since = "1.0.0")] fn nth(&mut self, mut n: usize) -> Option<Self::Item> { for x in self { - if n == 0 { return Some(x) } + if n == 0 { + return Some(x); + } n -= 1; } None @@ -373,7 +385,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_step_by", since = "1.28.0")] - fn step_by(self, step: usize) -> StepBy<Self> where Self: Sized { + fn step_by(self, step: usize) -> StepBy<Self> + where + Self: Sized, + { StepBy::new(self, step) } @@ -443,8 +458,10 @@ pub trait Iterator { /// [`OsStr`]: ../../std/ffi/struct.OsStr.html #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn chain<U>(self, other: U) -> Chain<Self, U::IntoIter> where - Self: Sized, U: IntoIterator<Item=Self::Item>, + fn chain<U>(self, other: U) -> Chain<Self, U::IntoIter> + where + Self: Sized, + U: IntoIterator<Item = Self::Item>, { Chain::new(self, other.into_iter()) } @@ -521,8 +538,10 @@ pub trait Iterator { /// [`None`]: ../../std/option/enum.Option.html#variant.None #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn zip<U>(self, other: U) -> Zip<Self, U::IntoIter> where - Self: Sized, U: IntoIterator + fn zip<U>(self, other: U) -> Zip<Self, U::IntoIter> + where + Self: Sized, + U: IntoIterator, { Zip::new(self, other.into_iter()) } @@ -578,8 +597,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn map<B, F>(self, f: F) -> Map<Self, F> where - Self: Sized, F: FnMut(Self::Item) -> B, + fn map<B, F>(self, f: F) -> Map<Self, F> + where + Self: Sized, + F: FnMut(Self::Item) -> B, { Map::new(self, f) } @@ -621,8 +642,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_for_each", since = "1.21.0")] - fn for_each<F>(self, f: F) where - Self: Sized, F: FnMut(Self::Item), + fn for_each<F>(self, f: F) + where + Self: Sized, + F: FnMut(Self::Item), { #[inline] fn call<T>(mut f: impl FnMut(T)) -> impl FnMut((), T) { @@ -694,8 +717,10 @@ pub trait Iterator { /// of these layers. 
#[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn filter<P>(self, predicate: P) -> Filter<Self, P> where - Self: Sized, P: FnMut(&Self::Item) -> bool, + fn filter<P>(self, predicate: P) -> Filter<Self, P> + where + Self: Sized, + P: FnMut(&Self::Item) -> bool, { Filter::new(self, predicate) } @@ -751,8 +776,10 @@ pub trait Iterator { /// [`None`]: ../../std/option/enum.Option.html#variant.None #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F> where - Self: Sized, F: FnMut(Self::Item) -> Option<B>, + fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F> + where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>, { FilterMap::new(self, f) } @@ -797,7 +824,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn enumerate(self) -> Enumerate<Self> where Self: Sized { + fn enumerate(self) -> Enumerate<Self> + where + Self: Sized, + { Enumerate::new(self) } @@ -843,7 +873,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn peekable(self) -> Peekable<Self> where Self: Sized { + fn peekable(self) -> Peekable<Self> + where + Self: Sized, + { Peekable::new(self) } @@ -904,8 +937,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P> where - Self: Sized, P: FnMut(&Self::Item) -> bool, + fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P> + where + Self: Sized, + P: FnMut(&Self::Item) -> bool, { SkipWhile::new(self, predicate) } @@ -983,8 +1018,10 @@ pub trait Iterator { /// the iteration should stop, but wasn't placed back into the iterator. #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P> where - Self: Sized, P: FnMut(&Self::Item) -> bool, + fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P> + where + Self: Sized, + P: FnMut(&Self::Item) -> bool, { TakeWhile::new(self, predicate) } @@ -1008,7 +1045,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn skip(self, n: usize) -> Skip<Self> where Self: Sized { + fn skip(self, n: usize) -> Skip<Self> + where + Self: Sized, + { Skip::new(self, n) } @@ -1040,7 +1080,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn take(self, n: usize) -> Take<Self> where Self: Sized, { + fn take(self, n: usize) -> Take<Self> + where + Self: Sized, + { Take::new(self, n) } @@ -1084,7 +1127,9 @@ pub trait Iterator { #[inline] #[stable(feature = "rust1", since = "1.0.0")] fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F> - where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option<B>, + where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>, { Scan::new(self, initial_state, f) } @@ -1122,7 +1167,10 @@ pub trait Iterator { #[inline] #[stable(feature = "rust1", since = "1.0.0")] fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F> - where Self: Sized, U: IntoIterator, F: FnMut(Self::Item) -> U, + where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U, { FlatMap::new(self, f) } @@ -1191,7 +1239,10 @@ pub trait Iterator { #[inline] #[stable(feature = "iterator_flatten", since = "1.29.0")] fn flatten(self) -> Flatten<Self> - where Self: Sized, Self::Item: IntoIterator { + where + Self: Sized, + Self::Item: IntoIterator, + { Flatten::new(self) } @@ -1251,7 +1302,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature 
= "rust1", since = "1.0.0")] - fn fuse(self) -> Fuse<Self> where Self: Sized { + fn fuse(self) -> Fuse<Self> + where + Self: Sized, + { Fuse::new(self) } @@ -1332,8 +1386,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn inspect<F>(self, f: F) -> Inspect<Self, F> where - Self: Sized, F: FnMut(&Self::Item), + fn inspect<F>(self, f: F) -> Inspect<Self, F> + where + Self: Sized, + F: FnMut(&Self::Item), { Inspect::new(self, f) } @@ -1375,7 +1431,12 @@ pub trait Iterator { /// assert_eq!(iter.next(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - fn by_ref(&mut self) -> &mut Self where Self: Sized { self } + fn by_ref(&mut self) -> &mut Self + where + Self: Sized, + { + self + } /// Transforms an iterator into a collection. /// @@ -1490,7 +1551,10 @@ pub trait Iterator { #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[must_use = "if you really need to exhaust the iterator, consider `.for_each(drop)` instead"] - fn collect<B: FromIterator<Self::Item>>(self) -> B where Self: Sized { + fn collect<B: FromIterator<Self::Item>>(self) -> B + where + Self: Sized, + { FromIterator::from_iter(self) } @@ -1520,10 +1584,11 @@ pub trait Iterator { /// assert_eq!(odd, vec![1, 3]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - fn partition<B, F>(self, f: F) -> (B, B) where + fn partition<B, F>(self, f: F) -> (B, B) + where Self: Sized, B: Default + Extend<Self::Item>, - F: FnMut(&Self::Item) -> bool + F: FnMut(&Self::Item) -> bool, { #[inline] fn extend<'a, T, B: Extend<T>>( @@ -1597,9 +1662,7 @@ pub trait Iterator { } #[inline] - fn is_true<T>( - predicate: &mut impl FnMut(&T) -> bool - ) -> impl FnMut(&&mut T) -> bool + '_ { + fn is_true<T>(predicate: &mut impl FnMut(&T) -> bool) -> impl FnMut(&&mut T) -> bool + '_ { move |x| predicate(&**x) } @@ -1702,8 +1765,11 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_try_fold", since = "1.27.0")] - fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B> + fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Ok = B>, { let mut accum = init; while let Some(x) = self.next() { @@ -1741,8 +1807,11 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_try_fold", since = "1.27.0")] - fn try_for_each<F, R>(&mut self, f: F) -> R where - Self: Sized, F: FnMut(Self::Item) -> R, R: Try<Ok=()> + fn try_for_each<F, R>(&mut self, f: F) -> R + where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Ok = ()>, { #[inline] fn call<T, R>(mut f: impl FnMut(T) -> R) -> impl FnMut((), T) -> R { @@ -1821,8 +1890,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn fold<B, F>(mut self, init: B, f: F) -> B where - Self: Sized, F: FnMut(B, Self::Item) -> B, + fn fold<B, F>(mut self, init: B, f: F) -> B + where + Self: Sized, + F: FnMut(B, Self::Item) -> B, { #[inline] fn ok<B, T>(mut f: impl FnMut(B, T) -> B) -> impl FnMut(B, T) -> Result<B, !> { @@ -1871,14 +1942,15 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn all<F>(&mut self, f: F) -> bool where - Self: Sized, F: FnMut(Self::Item) -> bool + fn all<F>(&mut self, f: F) -> bool + where + Self: Sized, + F: FnMut(Self::Item) -> bool, { #[inline] fn check<T>(mut f: impl FnMut(T) -> bool) -> impl FnMut((), T) -> LoopState<(), ()> { move |(), x| { - if f(x) { LoopState::Continue(()) } - else { 
LoopState::Break(()) } + if f(x) { LoopState::Continue(()) } else { LoopState::Break(()) } } } self.try_fold((), check(f)) == LoopState::Continue(()) @@ -1923,15 +1995,15 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn any<F>(&mut self, f: F) -> bool where + fn any<F>(&mut self, f: F) -> bool + where Self: Sized, - F: FnMut(Self::Item) -> bool + F: FnMut(Self::Item) -> bool, { #[inline] fn check<T>(mut f: impl FnMut(T) -> bool) -> impl FnMut((), T) -> LoopState<(), ()> { move |(), x| { - if f(x) { LoopState::Break(()) } - else { LoopState::Continue(()) } + if f(x) { LoopState::Break(()) } else { LoopState::Continue(()) } } } @@ -1982,17 +2054,17 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn find<P>(&mut self, predicate: P) -> Option<Self::Item> where + fn find<P>(&mut self, predicate: P) -> Option<Self::Item> + where Self: Sized, P: FnMut(&Self::Item) -> bool, { #[inline] fn check<T>( - mut predicate: impl FnMut(&T) -> bool + mut predicate: impl FnMut(&T) -> bool, ) -> impl FnMut((), T) -> LoopState<(), T> { move |(), x| { - if predicate(&x) { LoopState::Break(x) } - else { LoopState::Continue(()) } + if predicate(&x) { LoopState::Break(x) } else { LoopState::Continue(()) } } } @@ -2016,7 +2088,8 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_find_map", since = "1.30.0")] - fn find_map<B, F>(&mut self, f: F) -> Option<B> where + fn find_map<B, F>(&mut self, f: F) -> Option<B> + where Self: Sized, F: FnMut(Self::Item) -> Option<B>, { @@ -2087,7 +2160,8 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn position<P>(&mut self, predicate: P) -> Option<usize> where + fn position<P>(&mut self, predicate: P) -> Option<usize> + where Self: Sized, P: FnMut(Self::Item) -> bool, { @@ -2097,8 +2171,7 @@ pub trait Iterator { ) -> impl FnMut(usize, T) -> LoopState<usize, usize> { // The addition might panic on overflow move |i, x| { - if predicate(x) { LoopState::Break(i) } - else { LoopState::Continue(Add::add(i, 1)) } + if predicate(x) { LoopState::Break(i) } else { LoopState::Continue(Add::add(i, 1)) } } } @@ -2145,9 +2218,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn rposition<P>(&mut self, predicate: P) -> Option<usize> where + fn rposition<P>(&mut self, predicate: P) -> Option<usize> + where P: FnMut(Self::Item) -> bool, - Self: Sized + ExactSizeIterator + DoubleEndedIterator + Self: Sized + ExactSizeIterator + DoubleEndedIterator, { // No need for an overflow check here, because `ExactSizeIterator` // implies that the number of elements fits into a `usize`. 
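The hunks above and below reformat libcore's short-circuiting combinators: `all`, `any`, `find`, and `position` each drive `try_fold` through a small `check` closure that returns the crate-private `LoopState` enum. A minimal sketch of that pattern in user code, with `Result` standing in for `LoopState` and hypothetical names `my_find`/`my_any`, not libcore's actual implementation:

fn my_find<I, P>(iter: &mut I, mut predicate: P) -> Option<I::Item>
where
    I: Iterator,
    P: FnMut(&I::Item) -> bool,
{
    // `Err(x)` aborts the fold early and carries the found item back out.
    iter.try_fold((), |(), x| if predicate(&x) { Err(x) } else { Ok(()) }).err()
}

fn my_any<I, P>(iter: &mut I, mut predicate: P) -> bool
where
    I: Iterator,
    P: FnMut(I::Item) -> bool,
{
    // An early `Err(())` means "some element matched".
    iter.try_fold((), |(), x| if predicate(x) { Err(()) } else { Ok(()) }).is_err()
}

fn main() {
    let xs = [1, 2, 3, 4];
    let mut it = xs.iter();
    assert_eq!(my_find(&mut it, |&&x| x > 2), Some(&3));
    // `try_fold` only takes `&mut self`, so the iterator is still usable
    // after an early break, which is what `find`/`position` rely on.
    assert_eq!(it.next(), Some(&4));
    assert!(my_any(&mut xs.iter(), |&x| x == 2));
}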
@@ -2157,8 +2231,7 @@ pub trait Iterator { ) -> impl FnMut(usize, T) -> LoopState<usize, usize> { move |i, x| { let i = i - 1; - if predicate(x) { LoopState::Break(i) } - else { LoopState::Continue(i) } + if predicate(x) { LoopState::Break(i) } else { LoopState::Continue(i) } } } @@ -2186,7 +2259,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn max(self) -> Option<Self::Item> where Self: Sized, Self::Item: Ord + fn max(self) -> Option<Self::Item> + where + Self: Sized, + Self::Item: Ord, { self.max_by(Ord::cmp) } @@ -2211,7 +2287,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn min(self) -> Option<Self::Item> where Self: Sized, Self::Item: Ord + fn min(self) -> Option<Self::Item> + where + Self: Sized, + Self::Item: Ord, { self.min_by(Ord::cmp) } @@ -2233,7 +2312,9 @@ pub trait Iterator { #[inline] #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] fn max_by_key<B: Ord, F>(self, f: F) -> Option<Self::Item> - where Self: Sized, F: FnMut(&Self::Item) -> B, + where + Self: Sized, + F: FnMut(&Self::Item) -> B, { #[inline] fn key<T, B>(mut f: impl FnMut(&T) -> B) -> impl FnMut(T) -> (B, T) { @@ -2266,7 +2347,9 @@ pub trait Iterator { #[inline] #[stable(feature = "iter_max_by", since = "1.15.0")] fn max_by<F>(self, compare: F) -> Option<Self::Item> - where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering, + where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering, { #[inline] fn fold<T>(mut compare: impl FnMut(&T, &T) -> Ordering) -> impl FnMut(T, T) -> T { @@ -2293,7 +2376,9 @@ pub trait Iterator { #[inline] #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] fn min_by_key<B: Ord, F>(self, f: F) -> Option<Self::Item> - where Self: Sized, F: FnMut(&Self::Item) -> B, + where + Self: Sized, + F: FnMut(&Self::Item) -> B, { #[inline] fn key<T, B>(mut f: impl FnMut(&T) -> B) -> impl FnMut(T) -> (B, T) { @@ -2326,7 +2411,9 @@ pub trait Iterator { #[inline] #[stable(feature = "iter_min_by", since = "1.15.0")] fn min_by<F>(self, compare: F) -> Option<Self::Item> - where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering, + where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering, { #[inline] fn fold<T>(mut compare: impl FnMut(&T, &T) -> Ordering) -> impl FnMut(T, T) -> T { @@ -2336,7 +2423,6 @@ pub trait Iterator { fold1(self, fold(compare)) } - /// Reverses an iterator's direction. /// /// Usually, iterators iterate from left to right. 
After using `rev()`, @@ -2362,7 +2448,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn rev(self) -> Rev<Self> where Self: Sized + DoubleEndedIterator { + fn rev(self) -> Rev<Self> + where + Self: Sized + DoubleEndedIterator, + { Rev::new(self) } @@ -2389,10 +2478,11 @@ pub trait Iterator { /// assert_eq!(right, [2, 4]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB) where + fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB) + where FromA: Default + Extend<A>, FromB: Default + Extend<B>, - Self: Sized + Iterator<Item=(A, B)>, + Self: Sized + Iterator<Item = (A, B)>, { fn extend<'a, A, B>( ts: &'a mut impl Extend<A>, @@ -2434,7 +2524,9 @@ pub trait Iterator { /// ``` #[stable(feature = "iter_copied", since = "1.36.0")] fn copied<'a, T: 'a>(self) -> Copied<Self> - where Self: Sized + Iterator<Item=&'a T>, T: Copy + where + Self: Sized + Iterator<Item = &'a T>, + T: Copy, { Copied::new(self) } @@ -2463,7 +2555,9 @@ pub trait Iterator { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn cloned<'a, T: 'a>(self) -> Cloned<Self> - where Self: Sized + Iterator<Item=&'a T>, T: Clone + where + Self: Sized + Iterator<Item = &'a T>, + T: Clone, { Cloned::new(self) } @@ -2495,7 +2589,10 @@ pub trait Iterator { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - fn cycle(self) -> Cycle<Self> where Self: Sized + Clone { + fn cycle(self) -> Cycle<Self> + where + Self: Sized + Clone, + { Cycle::new(self) } @@ -2523,8 +2620,9 @@ pub trait Iterator { /// ``` #[stable(feature = "iter_arith", since = "1.11.0")] fn sum<S>(self) -> S - where Self: Sized, - S: Sum<Self::Item>, + where + Self: Sized, + S: Sum<Self::Item>, { Sum::sum(self) } @@ -2551,8 +2649,9 @@ pub trait Iterator { /// ``` #[stable(feature = "iter_arith", since = "1.11.0")] fn product<P>(self) -> P - where Self: Sized, - P: Product<Self::Item>, + where + Self: Sized, + P: Product<Self::Item>, { Product::product(self) } @@ -2609,11 +2708,13 @@ pub trait Iterator { loop { let x = match self.next() { - None => if other.next().is_none() { - return Ordering::Equal - } else { - return Ordering::Less - }, + None => { + if other.next().is_none() { + return Ordering::Equal; + } else { + return Ordering::Less; + } + } Some(val) => val, }; @@ -2692,11 +2793,13 @@ pub trait Iterator { loop { let x = match self.next() { - None => if other.next().is_none() { - return Some(Ordering::Equal) - } else { - return Some(Ordering::Less) - }, + None => { + if other.next().is_none() { + return Some(Ordering::Equal); + } else { + return Some(Ordering::Less); + } + } Some(val) => val, }; @@ -2782,7 +2885,8 @@ pub trait Iterator { /// assert_eq!([1].iter().ne([1, 2].iter()), true); /// ``` #[stable(feature = "iter_order", since = "1.5.0")] - fn ne<I>(self, other: I) -> bool where + fn ne<I>(self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialEq<I::Item>, Self: Sized, @@ -2801,7 +2905,8 @@ pub trait Iterator { /// assert_eq!([1, 2].iter().lt([1].iter()), false); /// ``` #[stable(feature = "iter_order", since = "1.5.0")] - fn lt<I>(self, other: I) -> bool where + fn lt<I>(self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialOrd<I::Item>, Self: Sized, @@ -2820,7 +2925,8 @@ pub trait Iterator { /// assert_eq!([1, 2].iter().le([1].iter()), false); /// ``` #[stable(feature = "iter_order", since = "1.5.0")] - fn le<I>(self, other: I) -> bool where + fn le<I>(self, other: I) -> bool + where I: IntoIterator, Self::Item: 
PartialOrd<I::Item>, Self: Sized, @@ -2842,7 +2948,8 @@ pub trait Iterator { /// assert_eq!([1, 2].iter().gt([1].iter()), true); /// ``` #[stable(feature = "iter_order", since = "1.5.0")] - fn gt<I>(self, other: I) -> bool where + fn gt<I>(self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialOrd<I::Item>, Self: Sized, @@ -2861,7 +2968,8 @@ pub trait Iterator { /// assert_eq!([1, 2].iter().ge([1].iter()), true); /// ``` #[stable(feature = "iter_order", since = "1.5.0")] - fn ge<I>(self, other: I) -> bool where + fn ge<I>(self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialOrd<I::Item>, Self: Sized, @@ -2925,7 +3033,7 @@ pub trait Iterator { fn is_sorted_by<F>(mut self, mut compare: F) -> bool where Self: Sized, - F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering> + F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>, { let mut last = match self.next() { Some(e) => e, @@ -2965,7 +3073,7 @@ pub trait Iterator { where Self: Sized, F: FnMut(Self::Item) -> K, - K: PartialOrd + K: PartialOrd, { self.map(f).is_sorted() } @@ -2974,9 +3082,9 @@ pub trait Iterator { /// Fold an iterator without having to provide an initial value. #[inline] fn fold1<I, F>(mut it: I, f: F) -> Option<I::Item> - where - I: Iterator, - F: FnMut(I::Item, I::Item) -> I::Item, +where + I: Iterator, + F: FnMut(I::Item, I::Item) -> I::Item, { // start with the first element as our selection. This avoids // having to use `Option`s inside the loop, translating to a @@ -2988,8 +3096,12 @@ fn fold1<I, F>(mut it: I, f: F) -> Option<I::Item> #[stable(feature = "rust1", since = "1.0.0")] impl<I: Iterator + ?Sized> Iterator for &mut I { type Item = I::Item; - fn next(&mut self) -> Option<I::Item> { (**self).next() } - fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() } + fn next(&mut self) -> Option<I::Item> { + (**self).next() + } + fn size_hint(&self) -> (usize, Option<usize>) { + (**self).size_hint() + } fn nth(&mut self, n: usize) -> Option<Self::Item> { (**self).nth(n) } diff --git a/src/libcore/tests/pattern.rs b/src/libcore/tests/pattern.rs index 6ec61cc97c9..d4bec996d89 100644 --- a/src/libcore/tests/pattern.rs +++ b/src/libcore/tests/pattern.rs @@ -42,8 +42,6 @@ impl From<Option<(usize, usize)>> for Step { } } -// ignore-tidy-linelength - // FIXME(Manishearth) these tests focus on single-character searching (CharSearcher) // and on next()/next_match(), not next_reject(). 
This is because // the memchr changes make next_match() for single chars complex, but next_reject() diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index a702eb83984..f6db451d57e 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -37,39 +37,39 @@ mod item; use crate::arena::Arena; use crate::dep_graph::DepGraph; -use crate::hir::{self, ParamName}; -use crate::hir::HirVec; -use crate::hir::map::{DefKey, DefPathData, Definitions}; +use crate::hir::def::{DefKind, Namespace, PartialRes, PerNS, Res}; use crate::hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; -use crate::hir::def::{Namespace, Res, DefKind, PartialRes, PerNS}; -use crate::hir::{GenericArg, ConstArg}; +use crate::hir::map::{DefKey, DefPathData, Definitions}; use crate::hir::ptr::P; +use crate::hir::HirVec; +use crate::hir::{self, ParamName}; +use crate::hir::{ConstArg, GenericArg}; use crate::lint; use crate::lint::builtin::{self, ELIDED_LIFETIMES_IN_PATHS}; use crate::middle::cstore::CrateStore; -use crate::session::Session; use crate::session::config::nightly_options; +use crate::session::Session; use crate::util::common::FN_OUTPUT_NAME; use crate::util::nodemap::{DefIdMap, NodeMap}; use errors::Applicability; use rustc_data_structures::fx::FxHashSet; -use rustc_index::vec::IndexVec; use rustc_data_structures::sync::Lrc; +use rustc_index::vec::IndexVec; +use smallvec::SmallVec; use std::collections::BTreeMap; use std::mem; -use smallvec::SmallVec; -use syntax::attr; use syntax::ast; -use syntax::ptr::P as AstP; use syntax::ast::*; +use syntax::attr; use syntax::errors; use syntax::print::pprust; -use syntax::token::{self, Nonterminal, Token}; -use syntax::tokenstream::{TokenStream, TokenTree}; +use syntax::ptr::P as AstP; use syntax::sess::ParseSess; -use syntax::source_map::{respan, ExpnData, ExpnKind, DesugaringKind, Spanned}; +use syntax::source_map::{respan, DesugaringKind, ExpnData, ExpnKind, Spanned}; use syntax::symbol::{kw, sym, Symbol}; +use syntax::token::{self, Nonterminal, Token}; +use syntax::tokenstream::{TokenStream, TokenTree}; use syntax::visit::{self, Visitor}; use syntax_pos::hygiene::ExpnId; use syntax_pos::Span; @@ -291,7 +291,8 @@ pub fn lower_crate<'a, 'hir>( in_scope_lifetimes: Vec::new(), allow_try_trait: Some([sym::try_trait][..].into()), allow_gen_future: Some([sym::gen_future][..].into()), - }.lower_crate(krate) + } + .lower_crate(krate) } #[derive(Copy, Clone, PartialEq)] @@ -359,26 +360,22 @@ enum AnonymousLifetimeMode { PassThrough, } -struct ImplTraitTypeIdVisitor<'a> { ids: &'a mut SmallVec<[NodeId; 1]> } +struct ImplTraitTypeIdVisitor<'a> { + ids: &'a mut SmallVec<[NodeId; 1]>, +} impl<'a, 'b> Visitor<'a> for ImplTraitTypeIdVisitor<'b> { fn visit_ty(&mut self, ty: &'a Ty) { match ty.kind { - | TyKind::Typeof(_) - | TyKind::BareFn(_) - => return, + TyKind::Typeof(_) | TyKind::BareFn(_) => return, TyKind::ImplTrait(id, _) => self.ids.push(id), - _ => {}, + _ => {} } visit::walk_ty(self, ty); } - fn visit_path_segment( - &mut self, - path_span: Span, - path_segment: &'v PathSegment, - ) { + fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) { if let Some(ref p) = path_segment.args { if let GenericArgs::Parenthesized(_) = **p { return; @@ -401,11 +398,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } impl MiscCollector<'_, '_, '_> { - fn allocate_use_tree_hir_id_counters( - &mut self, - tree: &UseTree, - owner: DefIndex, - ) { + fn allocate_use_tree_hir_id_counters(&mut self, tree: &UseTree, owner: DefIndex) { match 
tree.kind { UseTreeKind::Simple(_, id1, id2) => { for &id in &[id1, id2] { @@ -488,13 +481,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { match item.kind { AssocItemKind::Fn(_, None) => { // Ignore patterns in trait methods without bodies - self.with_hir_id_owner(None, |this| { - visit::walk_trait_item(this, item) - }); + self.with_hir_id_owner(None, |this| visit::walk_trait_item(this, item)); } _ => self.with_hir_id_owner(Some(item.id), |this| { visit::walk_trait_item(this, item); - }) + }), } } @@ -507,20 +498,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { fn visit_foreign_item(&mut self, i: &'tcx ForeignItem) { // Ignore patterns in foreign items - self.with_hir_id_owner(None, |this| { - visit::walk_foreign_item(this, i) - }); + self.with_hir_id_owner(None, |this| visit::walk_foreign_item(this, i)); } fn visit_ty(&mut self, t: &'tcx Ty) { match t.kind { // Mirrors the case in visit::walk_ty TyKind::BareFn(ref f) => { - walk_list!( - self, - visit_generic_param, - &f.generic_params - ); + walk_list!(self, visit_generic_param, &f.generic_params); // Mirrors visit::walk_fn_decl for parameter in &f.decl.inputs { // We don't lower the ids of argument patterns @@ -546,9 +531,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let attrs = self.arena.alloc_from_iter(self.lower_attrs(&c.attrs).into_iter()); let body_ids = body_ids(&self.bodies); - self.resolver - .definitions() - .init_node_id_to_hir_id_mapping(self.node_id_to_hir_id); + self.resolver.definitions().init_node_id_to_hir_id_mapping(self.node_id_to_hir_id); hir::Crate { module, @@ -614,7 +597,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { where F: FnOnce(&mut Self) -> T, { - let counter = self.item_local_id_counters + let counter = self + .item_local_id_counters .insert(owner, HIR_ID_COUNTER_LOCKED) .unwrap_or_else(|| panic!("no `item_local_id_counters` entry for {:?}", owner)); let def_index = self.resolver.definitions().opt_def_index(owner).unwrap(); @@ -625,9 +609,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { debug_assert!(def_index == new_def_index); debug_assert!(new_counter >= counter); - let prev = self.item_local_id_counters - .insert(owner, new_counter) - .unwrap(); + let prev = self.item_local_id_counters.insert(owner, new_counter).unwrap(); debug_assert!(prev == HIR_ID_COUNTER_LOCKED); ret } @@ -644,10 +626,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { this.current_hir_id_owner.last_mut().unwrap(); let local_id = *local_id_counter; *local_id_counter += 1; - hir::HirId { - owner: def_index, - local_id: hir::ItemLocalId::from_u32(local_id), - } + hir::HirId { owner: def_index, local_id: hir::ItemLocalId::from_u32(local_id) } }) } @@ -665,17 +644,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { debug_assert!(local_id != HIR_ID_COUNTER_LOCKED); *local_id_counter += 1; - let def_index = this - .resolver - .definitions() - .opt_def_index(owner) - .expect("you forgot to call `create_def_with_parent` or are lowering node-IDs \ - that do not belong to the current owner"); - - hir::HirId { - owner: def_index, - local_id: hir::ItemLocalId::from_u32(local_id), - } + let def_index = this.resolver.definitions().opt_def_index(owner).expect( + "you forgot to call `create_def_with_parent` or are lowering node-IDs \ + that do not belong to the current owner", + ); + + hir::HirId { owner: def_index, local_id: hir::ItemLocalId::from_u32(local_id) } }) } @@ -736,8 +710,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.anonymous_lifetime_mode = anonymous_lifetime_mode; let result = op(self); self.anonymous_lifetime_mode = 
old_anonymous_lifetime_mode; - debug!("with_anonymous_lifetime_mode: restoring anonymous_lifetime_mode={:?}", - old_anonymous_lifetime_mode); + debug!( + "with_anonymous_lifetime_mode: restoring anonymous_lifetime_mode={:?}", + old_anonymous_lifetime_mode + ); result } @@ -774,9 +750,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let params = lifetimes_to_define .into_iter() - .map(|(span, hir_name)| self.lifetime_to_generic_param( - span, hir_name, parent_id.index, - )) + .map(|(span, hir_name)| self.lifetime_to_generic_param(span, hir_name, parent_id.index)) .chain(in_band_ty_params.into_iter()) .collect(); @@ -796,18 +770,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // that collisions are ok here and this shouldn't // really show up for end-user. let (str_name, kind) = match hir_name { - ParamName::Plain(ident) => ( - ident.name, - hir::LifetimeParamKind::InBand, - ), - ParamName::Fresh(_) => ( - kw::UnderscoreLifetime, - hir::LifetimeParamKind::Elided, - ), - ParamName::Error => ( - kw::UnderscoreLifetime, - hir::LifetimeParamKind::Error, - ), + ParamName::Plain(ident) => (ident.name, hir::LifetimeParamKind::InBand), + ParamName::Fresh(_) => (kw::UnderscoreLifetime, hir::LifetimeParamKind::Elided), + ParamName::Error => (kw::UnderscoreLifetime, hir::LifetimeParamKind::Error), }; // Add a definition for the in-band lifetime def. @@ -826,7 +791,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { bounds: hir_vec![], span, pure_wrt_drop: false, - kind: hir::GenericParamKind::Lifetime { kind } + kind: hir::GenericParamKind::Lifetime { kind }, } } @@ -849,8 +814,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let hir_name = ParamName::Plain(ident); - if self.lifetimes_to_define.iter() - .any(|(_, lt_name)| lt_name.modern() == hir_name.modern()) { + if self.lifetimes_to_define.iter().any(|(_, lt_name)| lt_name.modern() == hir_name.modern()) + { return; } @@ -904,9 +869,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { where F: FnOnce(&mut LoweringContext<'_, '_>, &mut Vec<hir::GenericParam>) -> T, { - let (in_band_defs, (mut lowered_generics, res)) = self.with_in_scope_lifetime_defs( - &generics.params, - |this| { + let (in_band_defs, (mut lowered_generics, res)) = + self.with_in_scope_lifetime_defs(&generics.params, |this| { this.collect_in_band_defs(parent_id, anonymous_lifetime_mode, |this| { let mut params = Vec::new(); // Note: it is necessary to lower generics *before* calling `f`. @@ -916,32 +880,24 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // `lifetimes_to_define`. If we swapped the order of these two, // in-band-lifetimes introduced by generics or where-clauses // wouldn't have been added yet. - let generics = this.lower_generics( - generics, - ImplTraitContext::Universal(&mut params), - ); + let generics = + this.lower_generics(generics, ImplTraitContext::Universal(&mut params)); let res = f(this, &mut params); (params, (generics, res)) }) - }, - ); + }); - let mut lowered_params: Vec<_> = lowered_generics - .params - .into_iter() - .chain(in_band_defs) - .collect(); + let mut lowered_params: Vec<_> = + lowered_generics.params.into_iter().chain(in_band_defs).collect(); // FIXME(const_generics): the compiler doesn't always cope with // unsorted generic parameters at the moment, so we make sure // that they're ordered correctly here for now. (When we chain // the `in_band_defs`, we might make the order unsorted.) - lowered_params.sort_by_key(|param| { - match param.kind { - hir::GenericParamKind::Lifetime { .. } => ParamKindOrd::Lifetime, - hir::GenericParamKind::Type { .. 
} => ParamKindOrd::Type, - hir::GenericParamKind::Const { .. } => ParamKindOrd::Const, - } + lowered_params.sort_by_key(|param| match param.kind { + hir::GenericParamKind::Lifetime { .. } => ParamKindOrd::Lifetime, + hir::GenericParamKind::Type { .. } => ParamKindOrd::Type, + hir::GenericParamKind::Const { .. } => ParamKindOrd::Const, }); lowered_generics.params = lowered_params.into(); @@ -990,9 +946,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } fn lower_attrs_arena(&mut self, attrs: &[Attribute]) -> &'hir [Attribute] { - self.arena.alloc_from_iter( - attrs.iter().map(|a| self.lower_attr(a)) - ) + self.arena.alloc_from_iter(attrs.iter().map(|a| self.lower_attr(a))) } fn lower_attrs(&mut self, attrs: &[Attribute]) -> hir::HirVec<Attribute> { @@ -1004,48 +958,38 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // lower attributes (we use the AST version) there is nowhere to keep // the `HirId`s. We don't actually need HIR version of attributes anyway. let kind = match attr.kind { - AttrKind::Normal(ref item) => { - AttrKind::Normal(AttrItem { - path: item.path.clone(), - args: self.lower_mac_args(&item.args), - }) - } - AttrKind::DocComment(comment) => AttrKind::DocComment(comment) + AttrKind::Normal(ref item) => AttrKind::Normal(AttrItem { + path: item.path.clone(), + args: self.lower_mac_args(&item.args), + }), + AttrKind::DocComment(comment) => AttrKind::DocComment(comment), }; - Attribute { - kind, - id: attr.id, - style: attr.style, - span: attr.span, - } + Attribute { kind, id: attr.id, style: attr.style, span: attr.span } } fn lower_mac_args(&mut self, args: &MacArgs) -> MacArgs { match *args { MacArgs::Empty => MacArgs::Empty, - MacArgs::Delimited(dspan, delim, ref tokens) => - MacArgs::Delimited(dspan, delim, self.lower_token_stream(tokens.clone())), - MacArgs::Eq(eq_span, ref tokens) => - MacArgs::Eq(eq_span, self.lower_token_stream(tokens.clone())), + MacArgs::Delimited(dspan, delim, ref tokens) => { + MacArgs::Delimited(dspan, delim, self.lower_token_stream(tokens.clone())) + } + MacArgs::Eq(eq_span, ref tokens) => { + MacArgs::Eq(eq_span, self.lower_token_stream(tokens.clone())) + } } } fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream { - tokens - .into_trees() - .flat_map(|tree| self.lower_token_tree(tree).into_trees()) - .collect() + tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect() } fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream { match tree { TokenTree::Token(token) => self.lower_token(token), - TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited( - span, - delim, - self.lower_token_stream(tts), - ).into(), + TokenTree::Delimited(span, delim, tts) => { + TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into() + } } } @@ -1077,9 +1021,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { debug!("lower_assoc_ty_constraint(constraint={:?}, itctx={:?})", constraint, itctx); let kind = match constraint.kind { - AssocTyConstraintKind::Equality { ref ty } => hir::TypeBindingKind::Equality { - ty: self.lower_ty(ty, itctx) - }, + AssocTyConstraintKind::Equality { ref ty } => { + hir::TypeBindingKind::Equality { ty: self.lower_ty(ty, itctx) } + } AssocTyConstraintKind::Bound { ref bounds } => { // Piggy-back on the `impl Trait` context to figure out the correct behavior. let (desugar_to_impl_trait, itctx) = match itctx { @@ -1107,8 +1051,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // then to an opaque type). 
// // FIXME: this is only needed until `impl Trait` is allowed in type aliases. - ImplTraitContext::Disallowed(_) if self.is_in_dyn_type => - (true, ImplTraitContext::OpaqueTy(None)), + ImplTraitContext::Disallowed(_) if self.is_in_dyn_type => { + (true, ImplTraitContext::OpaqueTy(None)) + } // We are in the parameter position, but not within a dyn type: // @@ -1145,18 +1090,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { itctx, ); - hir::TypeBindingKind::Equality { - ty - } + hir::TypeBindingKind::Equality { ty } }) } else { // Desugar `AssocTy: Bounds` into a type binding where the // later desugars into a trait predicate. let bounds = self.lower_param_bounds(bounds, itctx); - hir::TypeBindingKind::Constraint { - bounds - } + hir::TypeBindingKind::Constraint { bounds } } } }; @@ -1172,7 +1113,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { fn lower_generic_arg( &mut self, arg: &ast::GenericArg, - itctx: ImplTraitContext<'_> + itctx: ImplTraitContext<'_>, ) -> hir::GenericArg { match arg { ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(&lt)), @@ -1192,8 +1133,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // Construct a AnonConst where the expr is the "ty"'s path. - let parent_def_index = - self.current_hir_id_owner.last().unwrap().0; + let parent_def_index = self.current_hir_id_owner.last().unwrap().0; let node_id = self.resolver.next_node_id(); // Add a definition for the in-band const def. @@ -1212,27 +1152,20 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { attrs: AttrVec::new(), }; - let ct = self.with_new_scopes(|this| { - hir::AnonConst { - hir_id: this.lower_node_id(node_id), - body: this.lower_const_body(path_expr.span, Some(&path_expr)), - } - }); - return GenericArg::Const(ConstArg { - value: ct, - span: ty.span, + let ct = self.with_new_scopes(|this| hir::AnonConst { + hir_id: this.lower_node_id(node_id), + body: this.lower_const_body(path_expr.span, Some(&path_expr)), }); + return GenericArg::Const(ConstArg { value: ct, span: ty.span }); } } } GenericArg::Type(self.lower_ty_direct(&ty, itctx)) } - ast::GenericArg::Const(ct) => { - GenericArg::Const(ConstArg { - value: self.lower_anon_const(&ct), - span: ct.value.span, - }) - } + ast::GenericArg::Const(ct) => GenericArg::Const(ConstArg { + value: self.lower_anon_const(&ct), + span: ct.value.span, + }), } } @@ -1246,7 +1179,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { qself: &Option<QSelf>, path: &Path, param_mode: ParamMode, - itctx: ImplTraitContext<'_> + itctx: ImplTraitContext<'_>, ) -> hir::Ty { let id = self.lower_node_id(t.id); let qpath = self.lower_qpath(t.id, qself, path, param_mode, itctx); @@ -1279,33 +1212,25 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { }; hir::TyKind::Rptr(lifetime, self.lower_mt(mt, itctx)) } - TyKind::BareFn(ref f) => self.with_in_scope_lifetime_defs( - &f.generic_params, - |this| { - this.with_anonymous_lifetime_mode( - AnonymousLifetimeMode::PassThrough, - |this| { - hir::TyKind::BareFn(P(hir::BareFnTy { - generic_params: this.lower_generic_params( - &f.generic_params, - &NodeMap::default(), - ImplTraitContext::disallowed(), - ), - unsafety: f.unsafety, - abi: this.lower_extern(f.ext), - decl: this.lower_fn_decl(&f.decl, None, false, None), - param_names: this.lower_fn_params_to_names(&f.decl), - })) - }, - ) - }, - ), + TyKind::BareFn(ref f) => self.with_in_scope_lifetime_defs(&f.generic_params, |this| { + this.with_anonymous_lifetime_mode(AnonymousLifetimeMode::PassThrough, |this| { + hir::TyKind::BareFn(P(hir::BareFnTy { + generic_params:
this.lower_generic_params( + &f.generic_params, + &NodeMap::default(), + ImplTraitContext::disallowed(), + ), + unsafety: f.unsafety, + abi: this.lower_extern(f.ext), + decl: this.lower_fn_decl(&f.decl, None, false, None), + param_names: this.lower_fn_params_to_names(&f.decl), + })) + }) + }), TyKind::Never => hir::TyKind::Never, - TyKind::Tup(ref tys) => { - hir::TyKind::Tup(tys.iter().map(|ty| { - self.lower_ty_direct(ty, itctx.reborrow()) - }).collect()) - } + TyKind::Tup(ref tys) => hir::TyKind::Tup( + tys.iter().map(|ty| self.lower_ty_direct(ty, itctx.reborrow())).collect(), + ), TyKind::Paren(ref ty) => { return self.lower_ty_direct(ty, itctx); } @@ -1319,19 +1244,17 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { None, P(hir::Path { res, - segments: hir_vec![hir::PathSegment::from_ident( - Ident::with_dummy_span(kw::SelfUpper) - )], + segments: hir_vec![hir::PathSegment::from_ident(Ident::with_dummy_span( + kw::SelfUpper + ))], span: t.span, }), )) - }, + } TyKind::Array(ref ty, ref length) => { hir::TyKind::Array(self.lower_ty(ty, itctx), self.lower_anon_const(length)) } - TyKind::Typeof(ref expr) => { - hir::TyKind::Typeof(self.lower_anon_const(expr)) - } + TyKind::Typeof(ref expr) => hir::TyKind::Typeof(self.lower_anon_const(expr)), TyKind::TraitObject(ref bounds, kind) => { let mut lifetime_bound = None; let (bounds, lifetime_bound) = self.with_dyn_type_scope(true, |this| { @@ -1363,18 +1286,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let span = t.span; match itctx { ImplTraitContext::OpaqueTy(fn_def_id) => { - self.lower_opaque_impl_trait( - span, fn_def_id, def_node_id, - |this| this.lower_param_bounds(bounds, itctx), - ) + self.lower_opaque_impl_trait(span, fn_def_id, def_node_id, |this| { + this.lower_param_bounds(bounds, itctx) + }) } ImplTraitContext::Universal(in_band_ty_params) => { // Add a definition for the in-band `Param`. 
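As an illustration of what the `ImplTraitContext::Universal` arm above produces: argument-position `impl Trait` is lowered to a fresh, synthetic generic type parameter. A user-level sketch (function names are invented here), not compiler code:

use std::fmt::Display;

// Argument-position `impl Trait`...
fn takes_impl(x: impl Display) -> String {
    x.to_string()
}

// ...is treated roughly like an in-band generic parameter with the same bound
// (marked `synthetic` in the HIR, as the `SyntheticTyParamKind::ImplTrait` above shows).
fn takes_generic<T: Display>(x: T) -> String {
    x.to_string()
}

fn main() {
    assert_eq!(takes_impl(42), takes_generic(42));
}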
- let def_index = self - .resolver - .definitions() - .opt_def_index(def_node_id) - .unwrap(); + let def_index = + self.resolver.definitions().opt_def_index(def_node_id).unwrap(); let hir_bounds = self.lower_param_bounds( bounds, @@ -1392,7 +1311,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { kind: hir::GenericParamKind::Type { default: None, synthetic: Some(hir::SyntheticTyParamKind::ImplTrait), - } + }, }); hir::TyKind::Path(hir::QPath::Resolved( @@ -1405,8 +1324,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { )) } ImplTraitContext::Disallowed(pos) => { - let allowed_in = if self.sess.features_untracked() - .impl_trait_in_bindings { + let allowed_in = if self.sess.features_untracked().impl_trait_in_bindings { "bindings or function and inherent method return types" } else { "function and inherent method return types" @@ -1418,11 +1336,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { "`impl Trait` not allowed outside of {}", allowed_in, ); - if pos == ImplTraitPosition::Binding && - nightly_options::is_nightly_build() { - help!(err, - "add `#![feature(impl_trait_in_bindings)]` to the crate \ - attributes to enable"); + if pos == ImplTraitPosition::Binding && nightly_options::is_nightly_build() + { + help!( + err, + "add `#![feature(impl_trait_in_bindings)]` to the crate \ + attributes to enable" + ); } err.emit(); hir::TyKind::Err @@ -1439,11 +1359,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } }; - hir::Ty { - kind, - span: t.span, - hir_id: self.lower_node_id(t.id), - } + hir::Ty { kind, span: t.span, hir_id: self.lower_node_id(t.id) } } fn lower_opaque_impl_trait( @@ -1455,9 +1371,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ) -> hir::TyKind { debug!( "lower_opaque_impl_trait(fn_def_id={:?}, opaque_ty_node_id={:?}, span={:?})", - fn_def_id, - opaque_ty_node_id, - span, + fn_def_id, opaque_ty_node_id, span, ); // Make sure we know that some funky desugaring has been going on here. @@ -1465,17 +1379,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // desugaring that explicitly states that we don't want to track that. // Not tracking it makes lints in rustc and clippy very fragile, as // frequently opened issues show. 
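For orientation, a user-level sketch of what `lower_opaque_impl_trait` is doing (names invented, not compiler code): a return-position `impl Trait` becomes a generated opaque item that the signature then references as `Foo<'a, 'b, ..>`:

fn evens(up_to: u32) -> impl Iterator<Item = u32> {
    (0..up_to).filter(|n| n % 2 == 0)
}

// Conceptually the compiler generates something like the nightly
// `type_alias_impl_trait` form below and rewrites the signature to name it,
// with any in-scope lifetimes turned into generic parameters of `Evens`:
//
//     type Evens = impl Iterator<Item = u32>;
//     fn evens(up_to: u32) -> Evens { ... }

fn main() {
    assert_eq!(evens(7).collect::<Vec<_>>(), vec![0, 2, 4, 6]);
}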
- let opaque_ty_span = self.mark_span_with_reason( - DesugaringKind::OpaqueTy, - span, - None, - ); + let opaque_ty_span = self.mark_span_with_reason(DesugaringKind::OpaqueTy, span, None); - let opaque_ty_def_index = self - .resolver - .definitions() - .opt_def_index(opaque_ty_node_id) - .unwrap(); + let opaque_ty_def_index = + self.resolver.definitions().opt_def_index(opaque_ty_node_id).unwrap(); self.allocate_hir_id_counter(opaque_ty_node_id); @@ -1487,22 +1394,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { &hir_bounds, ); - debug!( - "lower_opaque_impl_trait: lifetimes={:#?}", lifetimes, - ); + debug!("lower_opaque_impl_trait: lifetimes={:#?}", lifetimes,); - debug!( - "lower_opaque_impl_trait: lifetime_defs={:#?}", lifetime_defs, - ); + debug!("lower_opaque_impl_trait: lifetime_defs={:#?}", lifetime_defs,); self.with_hir_id_owner(opaque_ty_node_id, |lctx| { let opaque_ty_item = hir::OpaqueTy { generics: hir::Generics { params: lifetime_defs, - where_clause: hir::WhereClause { - predicates: hir_vec![], - span, - }, + where_clause: hir::WhereClause { predicates: hir_vec![], span }, span, }, bounds: hir_bounds, @@ -1511,12 +1411,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { }; trace!("lower_opaque_impl_trait: {:#?}", opaque_ty_def_index); - let opaque_ty_id = lctx.generate_opaque_type( - opaque_ty_node_id, - opaque_ty_item, - span, - opaque_ty_span, - ); + let opaque_ty_id = + lctx.generate_opaque_type(opaque_ty_node_id, opaque_ty_item, span, opaque_ty_span); // `impl Trait` now just becomes `Foo<'a, 'b, ..>`. hir::TyKind::Def(hir::ItemId { id: opaque_ty_id }, lifetimes) @@ -1579,9 +1475,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { output_lifetime_params: Vec<hir::GenericParam>, } - impl<'r, 'a, 'v, 'hir> hir::intravisit::Visitor<'v> - for ImplTraitLifetimeCollector<'r, 'a, 'hir> - { + impl<'r, 'a, 'v, 'hir> hir::intravisit::Visitor<'v> for ImplTraitLifetimeCollector<'r, 'a, 'hir> { fn nested_visit_map<'this>( &'this mut self, ) -> hir::intravisit::NestedVisitorMap<'this, 'v> { @@ -1663,7 +1557,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { }; if !self.currently_bound_lifetimes.contains(&name) - && !self.already_defined_lifetimes.contains(&name) { + && !self.already_defined_lifetimes.contains(&name) + { self.already_defined_lifetimes.insert(name); self.output_lifetimes.push(hir::GenericArg::Lifetime(hir::Lifetime { @@ -1680,17 +1575,17 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { def_node_id, DefPathData::LifetimeNs(name.ident().name), ExpnId::root(), - lifetime.span); + lifetime.span, + ); let (name, kind) = match name { hir::LifetimeName::Underscore => ( hir::ParamName::Plain(Ident::with_dummy_span(kw::UnderscoreLifetime)), hir::LifetimeParamKind::Elided, ), - hir::LifetimeName::Param(param_name) => ( - param_name, - hir::LifetimeParamKind::Explicit, - ), + hir::LifetimeName::Param(param_name) => { + (param_name, hir::LifetimeParamKind::Explicit) + } _ => bug!("expected `LifetimeName::Param` or `ParamName::Plain`"), }; @@ -1701,7 +1596,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { pure_wrt_drop: false, attrs: hir_vec![], bounds: hir_vec![], - kind: hir::GenericParamKind::Lifetime { kind } + kind: hir::GenericParamKind::Lifetime { kind }, }); } } @@ -1739,9 +1634,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let qself_position = qself.as_ref().map(|q| q.position); let qself = qself.as_ref().map(|q| self.lower_ty(&q.ty, itctx.reborrow())); - let partial_res = self.resolver - .get_partial_res(id) - .unwrap_or_else(|| PartialRes::new(Res::Err)); + let partial_res = + 
self.resolver.get_partial_res(id).unwrap_or_else(|| PartialRes::new(Res::Err)); let proj_start = p.segments.len() - partial_res.unresolved_segments(); let path = P(hir::Path { @@ -1777,7 +1671,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { | Res::Def(DefKind::Union, def_id) | Res::Def(DefKind::Enum, def_id) | Res::Def(DefKind::TyAlias, def_id) - | Res::Def(DefKind::Trait, def_id) if i + 1 == proj_start => + | Res::Def(DefKind::Trait, def_id) + if i + 1 == proj_start => { Some(def_id) } @@ -1789,9 +1684,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ParenthesizedGenericArgs::Ok } // `a::b::Trait(Args)::TraitItem` - Res::Def(DefKind::Method, _) | - Res::Def(DefKind::AssocConst, _) | - Res::Def(DefKind::AssocTy, _) if i + 2 == proj_start => { + Res::Def(DefKind::Method, _) + | Res::Def(DefKind::AssocConst, _) + | Res::Def(DefKind::AssocTy, _) + if i + 2 == proj_start => + { ParenthesizedGenericArgs::Ok } // Avoid duplicated errors. @@ -1805,7 +1702,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { return n; } assert!(!def_id.is_local()); - let item_generics = self.resolver.cstore() + let item_generics = self + .resolver + .cstore() .item_generics_cloned_untracked(def_id, self.sess); let n = item_generics.own_counts().lifetimes; self.type_def_lifetime_params.insert(def_id, n); @@ -1894,7 +1793,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ) -> hir::Path { hir::Path { res, - segments: p.segments + segments: p + .segments .iter() .map(|segment| { self.lower_path_segment( @@ -1944,7 +1844,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { if data.inputs.len() > 0 { if let Some(split) = snippet.find('(') { let trait_name = &snippet[0..split]; - let args = &snippet[split + 1 .. snippet.len() - 1]; + let args = &snippet[split + 1..snippet.len() - 1]; err.span_suggestion( data.span, "use angle brackets instead", @@ -1959,8 +1859,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.lower_angle_bracketed_parameter_data( &data.as_angle_bracketed_args(), param_mode, - itctx - ).0, + itctx, + ) + .0, false, ) } @@ -1974,14 +1875,18 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { GenericArg::Lifetime(_) => true, _ => false, }); - let first_generic_span = generic_args.args.iter().map(|a| a.span()) - .chain(generic_args.bindings.iter().map(|b| b.span)).next(); + let first_generic_span = generic_args + .args + .iter() + .map(|a| a.span()) + .chain(generic_args.bindings.iter().map(|b| b.span)) + .next(); if !generic_args.parenthesized && !has_lifetimes { - generic_args.args = - self.elided_path_lifetimes(path_span, expected_lifetimes) - .into_iter() - .map(|lt| GenericArg::Lifetime(lt)) - .chain(generic_args.args.into_iter()) + generic_args.args = self + .elided_path_lifetimes(path_span, expected_lifetimes) + .into_iter() + .map(|lt| GenericArg::Lifetime(lt)) + .chain(generic_args.args.into_iter()) .collect(); if expected_lifetimes > 0 && param_mode == ParamMode::Explicit { let anon_lt_suggestion = vec!["'_"; expected_lifetimes].join(", "); @@ -2023,8 +1928,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ); err.emit(); } - AnonymousLifetimeMode::PassThrough | - AnonymousLifetimeMode::ReportError => { + AnonymousLifetimeMode::PassThrough | AnonymousLifetimeMode::ReportError => { self.resolver.lint_buffer().buffer_lint_with_diagnostic( ELIDED_LIFETIMES_IN_PATHS, CRATE_NODE_ID, @@ -2036,7 +1940,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { incl_angl_brckt, insertion_sp, suggestion, - ) + ), ); } } @@ -2078,12 +1982,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ( hir::GenericArgs { args: args.iter().map(|a| 
self.lower_generic_arg(a, itctx.reborrow())).collect(), - bindings: constraints.iter() + bindings: constraints + .iter() .map(|b| self.lower_assoc_ty_constraint(b, itctx.reborrow())) .collect(), parenthesized: false, }, - !has_non_lt_args && param_mode == ParamMode::Optional + !has_non_lt_args && param_mode == ParamMode::Optional, ) } @@ -2096,31 +2001,25 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // a hidden lifetime parameter. This is needed for backwards // compatibility, even in contexts like an impl header where // we generally don't permit such things (see #51008). - self.with_anonymous_lifetime_mode( - AnonymousLifetimeMode::PassThrough, - |this| { - let &ParenthesizedArgs { ref inputs, ref output, span } = data; - let inputs = inputs - .iter() - .map(|ty| this.lower_ty_direct(ty, ImplTraitContext::disallowed())) - .collect(); - let output_ty = match output { - FunctionRetTy::Ty(ty) => this.lower_ty(&ty, ImplTraitContext::disallowed()), - FunctionRetTy::Default(_) => P(this.ty_tup(span, hir::HirVec::new())), - }; - let args = hir_vec![GenericArg::Type(this.ty_tup(span, inputs))]; - let binding = hir::TypeBinding { - hir_id: this.next_id(), - ident: Ident::with_dummy_span(FN_OUTPUT_NAME), - span: output_ty.span, - kind: hir::TypeBindingKind::Equality { ty: output_ty }, - }; - ( - hir::GenericArgs { args, bindings: hir_vec![binding], parenthesized: true }, - false, - ) - } - ) + self.with_anonymous_lifetime_mode(AnonymousLifetimeMode::PassThrough, |this| { + let &ParenthesizedArgs { ref inputs, ref output, span } = data; + let inputs = inputs + .iter() + .map(|ty| this.lower_ty_direct(ty, ImplTraitContext::disallowed())) + .collect(); + let output_ty = match output { + FunctionRetTy::Ty(ty) => this.lower_ty(&ty, ImplTraitContext::disallowed()), + FunctionRetTy::Default(_) => P(this.ty_tup(span, hir::HirVec::new())), + }; + let args = hir_vec![GenericArg::Type(this.ty_tup(span, inputs))]; + let binding = hir::TypeBinding { + hir_id: this.next_id(), + ident: Ident::with_dummy_span(FN_OUTPUT_NAME), + span: output_ty.span, + kind: hir::TypeBindingKind::Equality { ty: output_ty }, + }; + (hir::GenericArgs { args, bindings: hir_vec![binding], parenthesized: true }, false) + }) } fn lower_local(&mut self, l: &Local) -> (hir::Local, SmallVec<[NodeId; 1]>) { @@ -2132,23 +2031,27 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } } let parent_def_id = DefId::local(self.current_hir_id_owner.last().unwrap().0); - (hir::Local { - hir_id: self.lower_node_id(l.id), - ty: l.ty - .as_ref() - .map(|t| self.lower_ty(t, - if self.sess.features_untracked().impl_trait_in_bindings { - ImplTraitContext::OpaqueTy(Some(parent_def_id)) - } else { - ImplTraitContext::Disallowed(ImplTraitPosition::Binding) - } - )), - pat: self.lower_pat(&l.pat), - init: l.init.as_ref().map(|e| P(self.lower_expr(e))), - span: l.span, - attrs: l.attrs.clone(), - source: hir::LocalSource::Normal, - }, ids) + ( + hir::Local { + hir_id: self.lower_node_id(l.id), + ty: l.ty.as_ref().map(|t| { + self.lower_ty( + t, + if self.sess.features_untracked().impl_trait_in_bindings { + ImplTraitContext::OpaqueTy(Some(parent_def_id)) + } else { + ImplTraitContext::Disallowed(ImplTraitPosition::Binding) + }, + ) + }), + pat: self.lower_pat(&l.pat), + init: l.init.as_ref().map(|e| P(self.lower_expr(e))), + span: l.span, + attrs: l.attrs.clone(), + source: hir::LocalSource::Normal, + }, + ids, + ) } fn lower_fn_params_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Ident> { @@ -2187,15 +2090,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { 
impl_trait_return_allow: bool, make_ret_async: Option<NodeId>, ) -> P<hir::FnDecl> { - debug!("lower_fn_decl(\ + debug!( + "lower_fn_decl(\ fn_decl: {:?}, \ in_band_ty_params: {:?}, \ impl_trait_return_allow: {}, \ make_ret_async: {:?})", - decl, - in_band_ty_params, - impl_trait_return_allow, - make_ret_async, + decl, in_band_ty_params, impl_trait_return_allow, make_ret_async, ); let lt_mode = if make_ret_async.is_some() { // In `async fn`, argument-position elided lifetimes @@ -2242,9 +2143,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { Some((def_id, _)) if impl_trait_return_allow => { hir::Return(self.lower_ty(ty, ImplTraitContext::OpaqueTy(Some(def_id)))) } - _ => { - hir::Return(self.lower_ty(ty, ImplTraitContext::disallowed())) - } + _ => hir::Return(self.lower_ty(ty, ImplTraitContext::disallowed())), }, FunctionRetTy::Default(span) => hir::DefaultReturn(span), } @@ -2254,31 +2153,30 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { inputs, output, c_variadic, - implicit_self: decl.inputs.get(0).map_or( - hir::ImplicitSelfKind::None, - |arg| { - let is_mutable_pat = match arg.pat.kind { - PatKind::Ident(BindingMode::ByValue(mt), _, _) | - PatKind::Ident(BindingMode::ByRef(mt), _, _) => - mt == Mutability::Mut, - _ => false, - }; + implicit_self: decl.inputs.get(0).map_or(hir::ImplicitSelfKind::None, |arg| { + let is_mutable_pat = match arg.pat.kind { + PatKind::Ident(BindingMode::ByValue(mt), _, _) + | PatKind::Ident(BindingMode::ByRef(mt), _, _) => mt == Mutability::Mut, + _ => false, + }; - match arg.ty.kind { - TyKind::ImplicitSelf if is_mutable_pat => hir::ImplicitSelfKind::Mut, - TyKind::ImplicitSelf => hir::ImplicitSelfKind::Imm, - // Given we are only considering `ImplicitSelf` types, we needn't consider - // the case where we have a mutable pattern to a reference as that would - // no longer be an `ImplicitSelf`. - TyKind::Rptr(_, ref mt) if mt.ty.kind.is_implicit_self() && - mt.mutbl == ast::Mutability::Mut => - hir::ImplicitSelfKind::MutRef, - TyKind::Rptr(_, ref mt) if mt.ty.kind.is_implicit_self() => - hir::ImplicitSelfKind::ImmRef, - _ => hir::ImplicitSelfKind::None, + match arg.ty.kind { + TyKind::ImplicitSelf if is_mutable_pat => hir::ImplicitSelfKind::Mut, + TyKind::ImplicitSelf => hir::ImplicitSelfKind::Imm, + // Given we are only considering `ImplicitSelf` types, we needn't consider + // the case where we have a mutable pattern to a reference as that would + // no longer be an `ImplicitSelf`. + TyKind::Rptr(_, ref mt) + if mt.ty.kind.is_implicit_self() && mt.mutbl == ast::Mutability::Mut => + { + hir::ImplicitSelfKind::MutRef } - }, - ), + TyKind::Rptr(_, ref mt) if mt.ty.kind.is_implicit_self() => { + hir::ImplicitSelfKind::ImmRef + } + _ => hir::ImplicitSelfKind::None, + } + }), }) } @@ -2308,17 +2206,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let span = output.span(); - let opaque_ty_span = self.mark_span_with_reason( - DesugaringKind::Async, - span, - None, - ); + let opaque_ty_span = self.mark_span_with_reason(DesugaringKind::Async, span, None); - let opaque_ty_def_index = self - .resolver - .definitions() - .opt_def_index(opaque_ty_node_id) - .unwrap(); + let opaque_ty_def_index = + self.resolver.definitions().opt_def_index(opaque_ty_node_id).unwrap(); self.allocate_hir_id_counter(opaque_ty_node_id); @@ -2379,14 +2270,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // // Then, we will create `fn foo(..) -> Foo<'_, '_>`, and // hence the elision takes place at the fn site. 
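The comment above describes how the `async fn` return type is rewritten to reference a generated opaque `Foo<'_, '_>` that captures the argument lifetimes, so elision happens at the fn site. A simplified surface-level sketch (hypothetical names, one shared lifetime for brevity), not the compiler's own code:

use std::future::Future;

async fn add(a: &u32, b: &u32) -> u32 {
    *a + *b
}

// Roughly what the signature above desugars to: the returned future borrows
// the inputs, so the elided lifetimes are resolved in the fn signature itself.
fn add_desugared<'a>(a: &'a u32, b: &'a u32) -> impl Future<Output = u32> + 'a {
    async move { *a + *b }
}

fn main() {
    let (x, y) = (1u32, 2u32);
    // Futures are inert until polled; constructing both is enough to show
    // that the signatures line up.
    let _fut_a = add(&x, &y);
    let _fut_b = add_desugared(&x, &y);
}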
- let future_bound = this.with_anonymous_lifetime_mode( - AnonymousLifetimeMode::CreateParameter, - |this| this.lower_async_fn_output_type_to_future_bound( - output, - fn_def_id, - span, - ), - ); + let future_bound = this + .with_anonymous_lifetime_mode(AnonymousLifetimeMode::CreateParameter, |this| { + this.lower_async_fn_output_type_to_future_bound(output, fn_def_id, span) + }); debug!("lower_async_fn_ret_ty: future_bound={:#?}", future_bound); @@ -2396,32 +2283,30 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // // Note: this must be done after lowering the output type, // as the output type may introduce new in-band lifetimes. - let lifetime_params: Vec<(Span, ParamName)> = - this.in_scope_lifetimes - .iter().cloned() - .map(|name| (name.ident().span, name)) - .chain(this.lifetimes_to_define.iter().cloned()) - .collect(); + let lifetime_params: Vec<(Span, ParamName)> = this + .in_scope_lifetimes + .iter() + .cloned() + .map(|name| (name.ident().span, name)) + .chain(this.lifetimes_to_define.iter().cloned()) + .collect(); debug!("lower_async_fn_ret_ty: in_scope_lifetimes={:#?}", this.in_scope_lifetimes); debug!("lower_async_fn_ret_ty: lifetimes_to_define={:#?}", this.lifetimes_to_define); debug!("lower_async_fn_ret_ty: lifetime_params={:#?}", lifetime_params); - let generic_params = - lifetime_params - .iter().cloned() - .map(|(span, hir_name)| { - this.lifetime_to_generic_param(span, hir_name, opaque_ty_def_index) - }) - .collect(); + let generic_params = lifetime_params + .iter() + .cloned() + .map(|(span, hir_name)| { + this.lifetime_to_generic_param(span, hir_name, opaque_ty_def_index) + }) + .collect(); let opaque_ty_item = hir::OpaqueTy { generics: hir::Generics { params: generic_params, - where_clause: hir::WhereClause { - predicates: hir_vec![], - span, - }, + where_clause: hir::WhereClause { predicates: hir_vec![], span }, span, }, bounds: hir_vec![future_bound], @@ -2430,12 +2315,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { }; trace!("exist ty from async fn def index: {:#?}", opaque_ty_def_index); - let opaque_ty_id = this.generate_opaque_type( - opaque_ty_node_id, - opaque_ty_item, - span, - opaque_ty_span, - ); + let opaque_ty_id = + this.generate_opaque_type(opaque_ty_node_id, opaque_ty_item, span, opaque_ty_span); (opaque_ty_id, lifetime_params) }); @@ -2456,8 +2337,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // // For the "output" lifetime parameters, we just want to // generate `'_`. - let mut generic_args: Vec<_> = - lifetime_params[..input_lifetimes_count] + let mut generic_args: Vec<_> = lifetime_params[..input_lifetimes_count] .iter() .map(|&(span, hir_name)| { // Input lifetime like `'a` or `'1`: @@ -2468,18 +2348,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { }) }) .collect(); - generic_args.extend( - lifetime_params[input_lifetimes_count..] - .iter() - .map(|&(span, _)| { - // Output lifetime like `'_`. - GenericArg::Lifetime(hir::Lifetime { - hir_id: self.next_id(), - span, - name: hir::LifetimeName::Implicit, - }) + generic_args.extend(lifetime_params[input_lifetimes_count..].iter().map(|&(span, _)| { + // Output lifetime like `'_`. + GenericArg::Lifetime(hir::Lifetime { + hir_id: self.next_id(), + span, + name: hir::LifetimeName::Implicit, }) - ); + })); // Create the `Foo<...>` reference itself. 
Note that the `type // Foo = impl Trait` is, internally, created as a child of the @@ -2508,9 +2384,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { args: hir_vec![], bindings: hir_vec![hir::TypeBinding { ident: Ident::with_dummy_span(FN_OUTPUT_NAME), - kind: hir::TypeBindingKind::Equality { - ty: output_ty, - }, + kind: hir::TypeBindingKind::Equality { ty: output_ty }, hir_id: self.next_id(), span, }], @@ -2523,10 +2397,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { hir::GenericBound::Trait( hir::PolyTraitRef { - trait_ref: hir::TraitRef { - path: future_path, - hir_ref_id: self.next_id(), - }, + trait_ref: hir::TraitRef { path: future_path, hir_ref_id: self.next_id() }, bound_generic_params: hir_vec![], span, }, @@ -2540,12 +2411,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { itctx: ImplTraitContext<'_>, ) -> hir::GenericBound { match *tpb { - GenericBound::Trait(ref ty, modifier) => { - hir::GenericBound::Trait( - self.lower_poly_trait_ref(ty, itctx), - self.lower_trait_bound_modifier(modifier), - ) - } + GenericBound::Trait(ref ty, modifier) => hir::GenericBound::Trait( + self.lower_poly_trait_ref(ty, itctx), + self.lower_trait_bound_modifier(modifier), + ), GenericBound::Outlives(ref lifetime) => { hir::GenericBound::Outlives(self.lower_lifetime(lifetime)) } @@ -2555,21 +2424,21 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime { let span = l.ident.span; match l.ident { - ident if ident.name == kw::StaticLifetime => - self.new_named_lifetime(l.id, span, hir::LifetimeName::Static), - ident if ident.name == kw::UnderscoreLifetime => - match self.anonymous_lifetime_mode { - AnonymousLifetimeMode::CreateParameter => { - let fresh_name = self.collect_fresh_in_band_lifetime(span); - self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(fresh_name)) - } + ident if ident.name == kw::StaticLifetime => { + self.new_named_lifetime(l.id, span, hir::LifetimeName::Static) + } + ident if ident.name == kw::UnderscoreLifetime => match self.anonymous_lifetime_mode { + AnonymousLifetimeMode::CreateParameter => { + let fresh_name = self.collect_fresh_in_band_lifetime(span); + self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(fresh_name)) + } - AnonymousLifetimeMode::PassThrough => { - self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore) - } + AnonymousLifetimeMode::PassThrough => { + self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore) + } - AnonymousLifetimeMode::ReportError => self.new_error_lifetime(Some(l.id), span), - }, + AnonymousLifetimeMode::ReportError => self.new_error_lifetime(Some(l.id), span), + }, ident => { self.maybe_collect_in_band_lifetime(ident); let param_name = ParamName::Plain(ident); @@ -2584,11 +2453,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { span: Span, name: hir::LifetimeName, ) -> hir::Lifetime { - hir::Lifetime { - hir_id: self.lower_node_id(id), - span, - name, - } + hir::Lifetime { hir_id: self.lower_node_id(id), span, name } } fn lower_generic_params( @@ -2597,35 +2462,37 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { add_bounds: &NodeMap<Vec<GenericBound>>, mut itctx: ImplTraitContext<'_>, ) -> hir::HirVec<hir::GenericParam> { - params.iter().map(|param| { - self.lower_generic_param(param, add_bounds, itctx.reborrow()) - }).collect() - } - - fn lower_generic_param(&mut self, - param: &GenericParam, - add_bounds: &NodeMap<Vec<GenericBound>>, - mut itctx: ImplTraitContext<'_>) - -> hir::GenericParam { - let mut bounds = self.with_anonymous_lifetime_mode( - 
AnonymousLifetimeMode::ReportError, - |this| this.lower_param_bounds(¶m.bounds, itctx.reborrow()), - ); + params + .iter() + .map(|param| self.lower_generic_param(param, add_bounds, itctx.reborrow())) + .collect() + } + + fn lower_generic_param( + &mut self, + param: &GenericParam, + add_bounds: &NodeMap<Vec<GenericBound>>, + mut itctx: ImplTraitContext<'_>, + ) -> hir::GenericParam { + let mut bounds = self + .with_anonymous_lifetime_mode(AnonymousLifetimeMode::ReportError, |this| { + this.lower_param_bounds(¶m.bounds, itctx.reborrow()) + }); let (name, kind) = match param.kind { GenericParamKind::Lifetime => { let was_collecting_in_band = self.is_collecting_in_band_lifetimes; self.is_collecting_in_band_lifetimes = false; - let lt = self.with_anonymous_lifetime_mode( - AnonymousLifetimeMode::ReportError, - |this| this.lower_lifetime(&Lifetime { id: param.id, ident: param.ident }), - ); + let lt = self + .with_anonymous_lifetime_mode(AnonymousLifetimeMode::ReportError, |this| { + this.lower_lifetime(&Lifetime { id: param.id, ident: param.ident }) + }); let param_name = match lt.name { hir::LifetimeName::Param(param_name) => param_name, hir::LifetimeName::Implicit - | hir::LifetimeName::Underscore - | hir::LifetimeName::Static => hir::ParamName::Plain(lt.name.ident()), + | hir::LifetimeName::Underscore + | hir::LifetimeName::Static => hir::ParamName::Plain(lt.name.ident()), hir::LifetimeName::ImplicitObjectLifetimeDefault => { span_bug!( param.ident.span, @@ -2635,9 +2502,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { hir::LifetimeName::Error => ParamName::Error, }; - let kind = hir::GenericParamKind::Lifetime { - kind: hir::LifetimeParamKind::Explicit - }; + let kind = + hir::GenericParamKind::Lifetime { kind: hir::LifetimeParamKind::Explicit }; self.is_collecting_in_band_lifetimes = was_collecting_in_band; @@ -2647,28 +2513,29 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let add_bounds = add_bounds.get(¶m.id).map_or(&[][..], |x| &x); if !add_bounds.is_empty() { let params = self.lower_param_bounds(add_bounds, itctx.reborrow()).into_iter(); - bounds = bounds.into_iter() - .chain(params) - .collect(); + bounds = bounds.into_iter().chain(params).collect(); } let kind = hir::GenericParamKind::Type { - default: default.as_ref().map(|x| { - self.lower_ty(x, ImplTraitContext::OpaqueTy(None)) - }), - synthetic: param.attrs.iter() - .filter(|attr| attr.check_name(sym::rustc_synthetic)) - .map(|_| hir::SyntheticTyParamKind::ImplTrait) - .next(), + default: default + .as_ref() + .map(|x| self.lower_ty(x, ImplTraitContext::OpaqueTy(None))), + synthetic: param + .attrs + .iter() + .filter(|attr| attr.check_name(sym::rustc_synthetic)) + .map(|_| hir::SyntheticTyParamKind::ImplTrait) + .next(), }; (hir::ParamName::Plain(param.ident), kind) } - GenericParamKind::Const { ref ty } => { - (hir::ParamName::Plain(param.ident), hir::GenericParamKind::Const { + GenericParamKind::Const { ref ty } => ( + hir::ParamName::Plain(param.ident), + hir::GenericParamKind::Const { ty: self.lower_ty(&ty, ImplTraitContext::disallowed()), - }) - } + }, + ), }; hir::GenericParam { @@ -2687,10 +2554,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { hir::QPath::Resolved(None, path) => path, qpath => bug!("lower_trait_ref: unexpected QPath `{:?}`", qpath), }; - hir::TraitRef { - path, - hir_ref_id: self.lower_node_id(p.ref_id), - } + hir::TraitRef { path, hir_ref_id: self.lower_node_id(p.ref_id) } } fn lower_poly_trait_ref( @@ -2703,27 +2567,22 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { &NodeMap::default(), itctx.reborrow(), 
); - let trait_ref = self.with_in_scope_lifetime_defs( - &p.bound_generic_params, - |this| this.lower_trait_ref(&p.trait_ref, itctx), - ); + let trait_ref = self.with_in_scope_lifetime_defs(&p.bound_generic_params, |this| { + this.lower_trait_ref(&p.trait_ref, itctx) + }); - hir::PolyTraitRef { - bound_generic_params, - trait_ref, - span: p.span, - } + hir::PolyTraitRef { bound_generic_params, trait_ref, span: p.span } } fn lower_mt(&mut self, mt: &MutTy, itctx: ImplTraitContext<'_>) -> hir::MutTy { - hir::MutTy { - ty: self.lower_ty(&mt.ty, itctx), - mutbl: mt.mutbl, - } + hir::MutTy { ty: self.lower_ty(&mt.ty, itctx), mutbl: mt.mutbl } } - fn lower_param_bounds(&mut self, bounds: &[GenericBound], mut itctx: ImplTraitContext<'_>) - -> hir::GenericBounds { + fn lower_param_bounds( + &mut self, + bounds: &[GenericBound], + mut itctx: ImplTraitContext<'_>, + ) -> hir::GenericBounds { bounds.iter().map(|bound| self.lower_param_bound(bound, itctx.reborrow())).collect() } @@ -2819,9 +2678,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { hir::PatKind::Tuple(pats, ddpos) } PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)), - PatKind::Ref(ref inner, mutbl) => { - hir::PatKind::Ref(self.lower_pat(inner), mutbl) - } + PatKind::Ref(ref inner, mutbl) => hir::PatKind::Ref(self.lower_pat(inner), mutbl), PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => hir::PatKind::Range( P(self.lower_expr(e1)), P(self.lower_expr(e2)), @@ -2895,7 +2752,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { prev_rest_span = Some(pat.span); slice = Some(self.pat_wild_with_node_id_of(pat)); break; - }, + } // Found a sub-slice pattern `$binding_mode $ident @ ..`. // Record, lower it to `$binding_mode $ident @ _`, and stop here. PatKind::Ident(ref bm, ident, Some(ref sub)) if sub.is_rest() => { @@ -2904,7 +2761,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let node = self.lower_pat_ident(pat, bm, ident, lower_sub); slice = Some(self.pat_with_node_id_of(pat, node)); break; - }, + } // It was not a subslice pattern so lower it normally. _ => before.push(self.lower_pat(pat)), } @@ -2919,7 +2776,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // The `HirValidator` is merciless; add a `_` pattern to avoid ICEs. after.push(self.pat_wild_with_node_id_of(pat)); Some(sub.span) - }, + } _ => None, }; if let Some(rest_span) = rest_span { @@ -2973,11 +2830,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { /// Construct a `Pat` with the `HirId` of `p.id` lowered. fn pat_with_node_id_of(&mut self, p: &Pat, kind: hir::PatKind) -> P<hir::Pat> { - P(hir::Pat { - hir_id: self.lower_node_id(p.id), - kind, - span: p.span, - }) + P(hir::Pat { hir_id: self.lower_node_id(p.id), kind, span: p.span }) } /// Emit a friendly error for extra `..` patterns in a tuple/tuple struct/slice pattern. @@ -3010,11 +2863,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } fn lower_anon_const(&mut self, c: &AnonConst) -> hir::AnonConst { - self.with_new_scopes(|this| { - hir::AnonConst { - hir_id: this.lower_node_id(c.id), - body: this.lower_const_body(c.value.span, Some(&c.value)), - } + self.with_new_scopes(|this| hir::AnonConst { + hir_id: this.lower_node_id(c.id), + body: this.lower_const_body(c.value.span, Some(&c.value)), }) } @@ -3037,22 +2888,20 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } }); return ids; - }, + } StmtKind::Item(ref it) => { // Can only use the ID once. 
let mut id = Some(s.id); - return self.lower_item_id(it) + return self + .lower_item_id(it) .into_iter() .map(|item_id| { - let hir_id = id.take() - .map(|id| self.lower_node_id(id)) - .unwrap_or_else(|| self.next_id()); - - hir::Stmt { - hir_id, - kind: hir::StmtKind::Item(item_id), - span: s.span, - } + let hir_id = id + .take() + .map(|id| self.lower_node_id(id)) + .unwrap_or_else(|| self.next_id()); + + hir::Stmt { hir_id, kind: hir::StmtKind::Item(item_id), span: s.span } }) .collect(); } @@ -3060,11 +2909,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { StmtKind::Semi(ref e) => hir::StmtKind::Semi(P(self.lower_expr(e))), StmtKind::Mac(..) => panic!("shouldn't exist here"), }; - smallvec![hir::Stmt { - hir_id: self.lower_node_id(s.id), - kind, - span: s.span, - }] + smallvec![hir::Stmt { hir_id: self.lower_node_id(s.id), kind, span: s.span }] } fn lower_block_check_mode(&mut self, b: &BlockCheckMode) -> hir::BlockCheckMode { @@ -3115,15 +2960,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { pat: P<hir::Pat>, source: hir::LocalSource, ) -> hir::Stmt { - let local = hir::Local { - attrs, - hir_id: self.next_id(), - init, - pat, - source, - span, - ty: None, - }; + let local = hir::Local { attrs, hir_id: self.next_id(), init, pat, source, span, ty: None }; self.stmt(span, hir::StmtKind::Local(P(local))) } @@ -3203,7 +3040,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { kind: hir::PatKind::Binding(bm, hir_id, ident.with_span_pos(span), None), span, }), - hir_id + hir_id, ) } @@ -3212,11 +3049,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } fn pat(&mut self, span: Span, kind: hir::PatKind) -> P<hir::Pat> { - P(hir::Pat { - hir_id: self.next_id(), - kind, - span, - }) + P(hir::Pat { hir_id: self.next_id(), kind, span }) } /// Given a suffix `["b", "c", "d"]`, returns path `::std::b::c::d` when @@ -3232,16 +3065,20 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let ns = if is_value { Namespace::ValueNS } else { Namespace::TypeNS }; let (path, res) = self.resolver.resolve_str_path(span, self.crate_root, components, ns); - let mut segments: Vec<_> = path.segments.iter().map(|segment| { - let res = self.expect_full_res(segment.id); - hir::PathSegment { - ident: segment.ident, - hir_id: Some(self.lower_node_id(segment.id)), - res: Some(self.lower_res(res)), - infer_args: true, - args: None, - } - }).collect(); + let mut segments: Vec<_> = path + .segments + .iter() + .map(|segment| { + let res = self.expect_full_res(segment.id); + hir::PathSegment { + ident: segment.ident, + hir_id: Some(self.lower_node_id(segment.id)), + res: Some(self.lower_res(res)), + infer_args: true, + args: None, + } + }) + .collect(); segments.last_mut().unwrap().args = params; hir::Path { @@ -3259,10 +3096,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { Res::Def(DefKind::Trait, _) | Res::Def(DefKind::TraitAlias, _) => { let principal = hir::PolyTraitRef { bound_generic_params: hir::HirVec::new(), - trait_ref: hir::TraitRef { - path, - hir_ref_id: hir_id, - }, + trait_ref: hir::TraitRef { path, hir_ref_id: hir_id }, span, }; @@ -3277,11 +3111,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { _ => hir::TyKind::Path(qpath), }; - hir::Ty { - hir_id, - kind, - span, - } + hir::Ty { hir_id, kind, span } } /// Invoked to create the lifetime argument for a type `&T` @@ -3320,13 +3150,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ), }; - let mut err = struct_span_err!( - self.sess, - span, - E0637, - "{}", - msg, - ); + let mut err = struct_span_err!(self.sess, span, E0637, "{}", msg,); err.span_label(span, label); err.emit(); @@ 
-3338,19 +3162,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { /// sorts of cases are deprecated. This may therefore report a warning or an /// error, depending on the mode. fn elided_path_lifetimes(&mut self, span: Span, count: usize) -> P<[hir::Lifetime]> { - (0..count) - .map(|_| self.elided_path_lifetime(span)) - .collect() + (0..count).map(|_| self.elided_path_lifetime(span)).collect() } fn elided_path_lifetime(&mut self, span: Span) -> hir::Lifetime { match self.anonymous_lifetime_mode { AnonymousLifetimeMode::CreateParameter => { // We should have emitted E0726 when processing this path above - self.sess.delay_span_bug( - span, - "expected 'implicit elided lifetime not allowed' error", - ); + self.sess + .delay_span_bug(span, "expected 'implicit elided lifetime not allowed' error"); let id = self.resolver.next_node_id(); self.new_named_lifetime(id, span, hir::LifetimeName::Error) } @@ -3360,8 +3180,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // `PathSegment`, for which there is no associated `'_` or `&T` with no explicit // lifetime. Instead, we simply create an implicit lifetime, which will be checked // later, at which point a suitable error will be emitted. - | AnonymousLifetimeMode::PassThrough - | AnonymousLifetimeMode::ReportError => self.new_implicit_lifetime(span), + AnonymousLifetimeMode::PassThrough | AnonymousLifetimeMode::ReportError => { + self.new_implicit_lifetime(span) + } } } @@ -3404,17 +3225,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } fn new_implicit_lifetime(&mut self, span: Span) -> hir::Lifetime { - hir::Lifetime { - hir_id: self.next_id(), - span, - name: hir::LifetimeName::Implicit, - } + hir::Lifetime { hir_id: self.next_id(), span, name: hir::LifetimeName::Implicit } } fn maybe_lint_bare_trait(&mut self, span: Span, id: NodeId, is_global: bool) { // FIXME(davidtwco): This is a hack to detect macros which produce spans of the // call site which do not have a macro backtrace. See #61963. - let is_macro_callsite = self.sess.source_map() + let is_macro_callsite = self + .sess + .source_map() .span_to_snippet(span) .map(|snippet| snippet.starts_with("#[")) .unwrap_or(true); diff --git a/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs b/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs index 9231e4f779e..8914ff8add8 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs @@ -1,13 +1,13 @@ //! Error Reporting for Anonymous Region Lifetime Errors //! where both the regions are anonymous. +use crate::hir::Node; +use crate::hir::{Expr, ExprKind::Closure}; use crate::infer::error_reporting::nice_region_error::NiceRegionError; +use crate::infer::lexical_region_resolve::RegionResolutionError::SubSupConflict; use crate::infer::SubregionOrigin; use crate::ty::RegionKind; -use crate::hir::{Expr, ExprKind::Closure}; -use crate::hir::Node; use crate::util::common::ErrorReported; -use crate::infer::lexical_region_resolve::RegionResolutionError::SubSupConflict; impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { /// Print the error message for lifetime errors when binding escapes a closure. 
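// Illustrative sketch, not part of the patch: a minimal standalone program that
// triggers the "borrowed data cannot be stored outside of its closure" error
// whose reporting is reformatted in the hunks below. The helper `with_int` is
// made up for the example and mirrors the case shown in the code's own comments.
fn with_int<F: FnOnce(&i32)>(f: F) {
    let local = 5;
    f(&local);
}

fn main() {
    let mut x = None;
    // Rejected by the borrow checker: `y` borrows data local to `with_int`,
    // so it cannot be stored in `x`, which outlives the closure.
    with_int(|y| x = Some(y));
    let _ = x;
}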
@@ -36,69 +36,75 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { /// ...because it cannot outlive this closure /// ``` pub(super) fn try_report_outlives_closure(&self) -> Option<ErrorReported> { - if let Some(SubSupConflict(_, - origin, - ref sub_origin, - _, - ref sup_origin, - sup_region)) = self.error { - + if let Some(SubSupConflict(_, origin, ref sub_origin, _, ref sup_origin, sup_region)) = + self.error + { // #45983: when trying to assign the contents of an argument to a binding outside of a // closure, provide a specific message pointing this out. - if let (&SubregionOrigin::BindingTypeIsNotValidAtDecl(ref external_span), - &RegionKind::ReFree(ref free_region)) = (&sub_origin, sup_region) { + if let ( + &SubregionOrigin::BindingTypeIsNotValidAtDecl(ref external_span), + &RegionKind::ReFree(ref free_region), + ) = (&sub_origin, sup_region) + { let hir = &self.tcx().hir(); if let Some(hir_id) = hir.as_local_hir_id(free_region.scope) { - if let Node::Expr(Expr { - kind: Closure(_, _, _, closure_span, None), - .. - }) = hir.get(hir_id) { + if let Node::Expr(Expr { kind: Closure(_, _, _, closure_span, None), .. }) = + hir.get(hir_id) + { let sup_sp = sup_origin.span(); let origin_sp = origin.span(); let mut err = self.tcx().sess.struct_span_err( sup_sp, - "borrowed data cannot be stored outside of its closure"); + "borrowed data cannot be stored outside of its closure", + ); err.span_label(sup_sp, "cannot be stored outside of its closure"); if origin_sp == sup_sp || origin_sp.contains(sup_sp) { -// // sup_sp == origin.span(): -// -// let mut x = None; -// ----- borrowed data cannot be stored into here... -// with_int(|y| x = Some(y)); -// --- ^ cannot be stored outside of its closure -// | -// ...because it cannot outlive this closure -// -// // origin.contains(&sup_sp): -// -// let mut f: Option<&u32> = None; -// ----- borrowed data cannot be stored into here... -// closure_expecting_bound(|x: &'x u32| { -// ------------ ... because it cannot outlive this closure -// f = Some(x); -// ^ cannot be stored outside of its closure - err.span_label(*external_span, - "borrowed data cannot be stored into here..."); - err.span_label(*closure_span, - "...because it cannot outlive this closure"); + // // sup_sp == origin.span(): + // + // let mut x = None; + // ----- borrowed data cannot be stored into here... + // with_int(|y| x = Some(y)); + // --- ^ cannot be stored outside of its closure + // | + // ...because it cannot outlive this closure + // + // // origin.contains(&sup_sp): + // + // let mut f: Option<&u32> = None; + // ----- borrowed data cannot be stored into here... + // closure_expecting_bound(|x: &'x u32| { + // ------------ ... because it cannot outlive this closure + // f = Some(x); + // ^ cannot be stored outside of its closure + err.span_label( + *external_span, + "borrowed data cannot be stored into here...", + ); + err.span_label( + *closure_span, + "...because it cannot outlive this closure", + ); } else { -// FIXME: the wording for this case could be much improved -// -// let mut lines_to_use: Vec<&CrateId> = Vec::new(); -// - cannot infer an appropriate lifetime... 
-// let push_id = |installed_id: &CrateId| { -// ------- ------------------------ borrowed data cannot outlive this closure -// | -// ...so that variable is valid at time of its declaration -// lines_to_use.push(installed_id); -// ^^^^^^^^^^^^ cannot be stored outside of its closure - err.span_label(origin_sp, - "cannot infer an appropriate lifetime..."); - err.span_label(*external_span, - "...so that variable is valid at time of its \ - declaration"); - err.span_label(*closure_span, - "borrowed data cannot outlive this closure"); + // FIXME: the wording for this case could be much improved + // + // let mut lines_to_use: Vec<&CrateId> = Vec::new(); + // - cannot infer an appropriate lifetime... + // let push_id = |installed_id: &CrateId| { + // ------- ------------------------ borrowed data cannot outlive this closure + // | + // ...so that variable is valid at time of its declaration + // lines_to_use.push(installed_id); + // ^^^^^^^^^^^^ cannot be stored outside of its closure + err.span_label(origin_sp, "cannot infer an appropriate lifetime..."); + err.span_label( + *external_span, + "...so that variable is valid at time of its \ + declaration", + ); + err.span_label( + *closure_span, + "borrowed data cannot outlive this closure", + ); } err.emit(); return Some(ErrorReported); diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index 3d6015ecfbf..6900ed4f475 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -23,26 +23,27 @@ pub use self::LintSource::*; use rustc_data_structures::sync; +use crate::hir; use crate::hir::def_id::{CrateNum, LOCAL_CRATE}; use crate::hir::intravisit; -use crate::hir; use crate::lint::builtin::BuiltinLintDiagnostics; -use crate::session::{Session, DiagnosticMessageId}; -use crate::ty::TyCtxt; +use crate::session::{DiagnosticMessageId, Session}; use crate::ty::query::Providers; +use crate::ty::TyCtxt; use crate::util::nodemap::NodeMap; use errors::{DiagnosticBuilder, DiagnosticId}; use syntax::ast; -use syntax::source_map::{MultiSpan, ExpnKind, DesugaringKind}; +use syntax::source_map::{DesugaringKind, ExpnKind, MultiSpan}; use syntax::symbol::Symbol; use syntax_pos::hygiene::MacroKind; use syntax_pos::Span; -pub use crate::lint::context::{LateContext, EarlyContext, LintContext, LintStore, - check_crate, check_ast_crate, late_lint_mod, CheckLintNameResult, - BufferedEarlyLint,}; +pub use crate::lint::context::{ + check_ast_crate, check_crate, late_lint_mod, BufferedEarlyLint, CheckLintNameResult, + EarlyContext, LateContext, LintContext, LintStore, +}; -pub use rustc_session::lint::{Lint, LintId, Level, FutureIncompatibleInfo}; +pub use rustc_session::lint::{FutureIncompatibleInfo, Level, Lint, LintId}; /// Declares a static `LintArray` and return it as an expression. #[macro_export] @@ -351,8 +352,8 @@ macro_rules! declare_combined_early_lint_pass { /// A lint pass boxed up as a trait object. pub type EarlyLintPassObject = Box<dyn EarlyLintPass + sync::Send + sync::Sync + 'static>; -pub type LateLintPassObject = Box<dyn for<'a, 'tcx> LateLintPass<'a, 'tcx> + sync::Send - + sync::Sync + 'static>; +pub type LateLintPassObject = + Box<dyn for<'a, 'tcx> LateLintPass<'a, 'tcx> + sync::Send + sync::Sync + 'static>; /// How a lint level was set. 
#[derive(Clone, Copy, PartialEq, Eq, HashStable)] @@ -371,11 +372,11 @@ pub enum LintSource { pub type LevelSource = (Level, LintSource); pub mod builtin; -pub mod internal; mod context; +pub mod internal; mod levels; -pub use self::levels::{LintLevelSets, LintLevelMap}; +pub use self::levels::{LintLevelMap, LintLevelSets}; #[derive(Default)] pub struct LintBuffer { @@ -383,18 +384,20 @@ pub struct LintBuffer { } impl LintBuffer { - pub fn add_lint(&mut self, - lint: &'static Lint, - id: ast::NodeId, - sp: MultiSpan, - msg: &str, - diagnostic: BuiltinLintDiagnostics) { + pub fn add_lint( + &mut self, + lint: &'static Lint, + id: ast::NodeId, + sp: MultiSpan, + msg: &str, + diagnostic: BuiltinLintDiagnostics, + ) { let early_lint = BufferedEarlyLint { lint_id: LintId::of(lint), ast_id: id, span: sp, msg: msg.to_string(), - diagnostic + diagnostic, }; let arr = self.map.entry(id).or_default(); if !arr.contains(&early_lint) { @@ -428,22 +431,20 @@ impl LintBuffer { } } -pub fn struct_lint_level<'a>(sess: &'a Session, - lint: &'static Lint, - level: Level, - src: LintSource, - span: Option<MultiSpan>, - msg: &str) - -> DiagnosticBuilder<'a> -{ +pub fn struct_lint_level<'a>( + sess: &'a Session, + lint: &'static Lint, + level: Level, + src: LintSource, + span: Option<MultiSpan>, + msg: &str, +) -> DiagnosticBuilder<'a> { let mut err = match (level, span) { (Level::Allow, _) => return sess.diagnostic().struct_dummy(), (Level::Warn, Some(span)) => sess.struct_span_warn(span, msg), (Level::Warn, None) => sess.struct_warn(msg), - (Level::Deny, Some(span)) | - (Level::Forbid, Some(span)) => sess.struct_span_err(span, msg), - (Level::Deny, None) | - (Level::Forbid, None) => sess.struct_err(msg), + (Level::Deny, Some(span)) | (Level::Forbid, Some(span)) => sess.struct_span_err(span, msg), + (Level::Deny, None) | (Level::Forbid, None) => sess.struct_err(msg), }; // Check for future incompatibility lints and issue a stronger warning. 
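// Illustrative sketch, not part of the patch: the notes emitted below depend on
// the `LintSource` of the level. Compiling this file as-is should attribute the
// warning to the `#[warn]` attribute ("lint level defined here"); with no
// attribute the default source applies ("`#[warn(unused_variables)]` on by
// default"); with no attribute and `-W unused-variables` on the command line,
// the command-line note is shown instead. Note wording is paraphrased from the
// code below; the function name `attribute_scoped` is made up.
#[warn(unused_variables)]
fn attribute_scoped() {
    let unused = 1; // warning: unused variable `unused`
}

fn main() {
    attribute_scoped();
}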
@@ -475,7 +476,8 @@ pub fn struct_lint_level<'a>(sess: &'a Session, sess.diag_note_once( &mut err, DiagnosticMessageId::from(lint), - &format!("`#[{}({})]` on by default", level.as_str(), name)); + &format!("`#[{}({})]` on by default", level.as_str(), name), + ); } LintSource::CommandLine(lint_flag_val) => { let flag = match level { @@ -489,29 +491,43 @@ pub fn struct_lint_level<'a>(sess: &'a Session, sess.diag_note_once( &mut err, DiagnosticMessageId::from(lint), - &format!("requested on the command line with `{} {}`", - flag, hyphen_case_lint_name)); + &format!( + "requested on the command line with `{} {}`", + flag, hyphen_case_lint_name + ), + ); } else { let hyphen_case_flag_val = lint_flag_val.as_str().replace("_", "-"); sess.diag_note_once( &mut err, DiagnosticMessageId::from(lint), - &format!("`{} {}` implied by `{} {}`", - flag, hyphen_case_lint_name, flag, - hyphen_case_flag_val)); + &format!( + "`{} {}` implied by `{} {}`", + flag, hyphen_case_lint_name, flag, hyphen_case_flag_val + ), + ); } } LintSource::Node(lint_attr_name, src, reason) => { if let Some(rationale) = reason { err.note(&rationale.as_str()); } - sess.diag_span_note_once(&mut err, DiagnosticMessageId::from(lint), - src, "lint level defined here"); + sess.diag_span_note_once( + &mut err, + DiagnosticMessageId::from(lint), + src, + "lint level defined here", + ); if lint_attr_name.as_str() != name { let level_str = level.as_str(); - sess.diag_note_once(&mut err, DiagnosticMessageId::from(lint), - &format!("`#[{}({})]` implied by `#[{}({})]`", - level_str, name, level_str, lint_attr_name)); + sess.diag_note_once( + &mut err, + DiagnosticMessageId::from(lint), + &format!( + "`#[{}({})]` implied by `#[{}({})]`", + level_str, name, level_str, lint_attr_name + ), + ); } } } @@ -519,8 +535,7 @@ pub fn struct_lint_level<'a>(sess: &'a Session, err.code(DiagnosticId::Lint(name)); if let Some(future_incompatible) = future_incompatible { - const STANDARD_MESSAGE: &str = - "this was previously accepted by the compiler but is being phased out; \ + const STANDARD_MESSAGE: &str = "this was previously accepted by the compiler but is being phased out; \ it will become a hard error"; let explanation = if lint_id == LintId::of(builtin::UNSTABLE_NAME_COLLISIONS) { @@ -536,13 +551,12 @@ pub fn struct_lint_level<'a>(sess: &'a Session, } else { format!("{} in a future release!", STANDARD_MESSAGE) }; - let citation = format!("for more information, see {}", - future_incompatible.reference); + let citation = format!("for more information, see {}", future_incompatible.reference); err.warn(&explanation); err.note(&citation); } - return err + return err; } pub fn maybe_lint_level_root(tcx: TyCtxt<'_>, id: hir::HirId) -> bool { @@ -563,7 +577,7 @@ fn lint_levels(tcx: TyCtxt<'_>, cnum: CrateNum) -> &LintLevelMap { let push = builder.levels.push(&krate.attrs, &store); builder.levels.register_id(hir::CRATE_HIR_ID); for macro_def in krate.exported_macros { - builder.levels.register_id(macro_def.hir_id); + builder.levels.register_id(macro_def.hir_id); } intravisit::walk_crate(&mut builder, krate); builder.levels.pop(push); @@ -578,11 +592,9 @@ struct LintLevelMapBuilder<'a, 'tcx> { } impl LintLevelMapBuilder<'_, '_> { - fn with_lint_attrs<F>(&mut self, - id: hir::HirId, - attrs: &[ast::Attribute], - f: F) - where F: FnOnce(&mut Self) + fn with_lint_attrs<F>(&mut self, id: hir::HirId, attrs: &[ast::Attribute], f: F) + where + F: FnOnce(&mut Self), { let push = self.levels.push(attrs, self.store); if push.changed { @@ -628,10 +640,12 @@ impl 
intravisit::Visitor<'tcx> for LintLevelMapBuilder<'_, 'tcx> { }) } - fn visit_variant(&mut self, - v: &'tcx hir::Variant<'tcx>, - g: &'tcx hir::Generics, - item_id: hir::HirId) { + fn visit_variant( + &mut self, + v: &'tcx hir::Variant<'tcx>, + g: &'tcx hir::Generics, + item_id: hir::HirId, + ) { self.with_lint_attrs(v.id, &v.attrs, |builder| { intravisit::walk_variant(builder, v, g, item_id); }) diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 4b838d04059..67630a75768 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -5,9 +5,7 @@ //! used between functions, and they operate in a purely top-down //! way. Therefore, we break lifetime name resolution into a separate pass. -// ignore-tidy-filelength - -use crate::hir::def::{Res, DefKind}; +use crate::hir::def::{DefKind, Res}; use crate::hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE}; use crate::hir::map::Map; use crate::hir::ptr::P; @@ -17,7 +15,7 @@ use crate::ty::{self, DefIdTree, GenericParamDefKind, TyCtxt}; use crate::rustc::lint; use crate::session::Session; use crate::util::nodemap::{DefIdMap, FxHashMap, FxHashSet, HirIdMap, HirIdSet}; -use errors::{Applicability, DiagnosticBuilder, pluralize}; +use errors::{pluralize, Applicability, DiagnosticBuilder}; use rustc_macros::HashStable; use std::borrow::Cow; use std::cell::Cell; @@ -69,16 +67,8 @@ pub enum LifetimeUseSet<'tcx> { #[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug, HashStable)] pub enum Region { Static, - EarlyBound( - /* index */ u32, - /* lifetime decl */ DefId, - LifetimeDefOrigin, - ), - LateBound( - ty::DebruijnIndex, - /* lifetime decl */ DefId, - LifetimeDefOrigin, - ), + EarlyBound(/* index */ u32, /* lifetime decl */ DefId, LifetimeDefOrigin), + LateBound(ty::DebruijnIndex, /* lifetime decl */ DefId, LifetimeDefOrigin), LateBoundAnon(ty::DebruijnIndex, /* anon index */ u32), Free(DefId, /* lifetime decl */ DefId), } @@ -101,10 +91,7 @@ impl Region { "Region::late: param={:?} depth={:?} def_id={:?} origin={:?}", param, depth, def_id, origin, ); - ( - param.name.modern(), - Region::LateBound(depth, def_id, origin), - ) + (param.name.modern(), Region::LateBound(depth, def_id, origin)) } fn late_anon(index: &Cell<u32>) -> Region { @@ -153,9 +140,7 @@ impl Region { L: Iterator<Item = &'a hir::Lifetime>, { if let Region::EarlyBound(index, _, _) = self { - params - .nth(index as usize) - .and_then(|lifetime| map.defs.get(&lifetime.hir_id).cloned()) + params.nth(index as usize).and_then(|lifetime| map.defs.get(&lifetime.hir_id).cloned()) } else { Some(self) } @@ -344,16 +329,12 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) { is_late_bound_map: |tcx, id| { let id = LocalDefId::from_def_id(DefId::local(id)); // (*) - tcx.resolve_lifetimes(LOCAL_CRATE) - .late_bound - .get(&id) + tcx.resolve_lifetimes(LOCAL_CRATE).late_bound.get(&id) }, object_lifetime_defaults_map: |tcx, id| { let id = LocalDefId::from_def_id(DefId::local(id)); // (*) - tcx.resolve_lifetimes(LOCAL_CRATE) - .object_lifetime_defaults - .get(&id) + tcx.resolve_lifetimes(LOCAL_CRATE).object_lifetime_defaults.get(&id) }, ..*providers @@ -378,15 +359,11 @@ fn resolve_lifetimes(tcx: TyCtxt<'_>, for_krate: CrateNum) -> &ResolveLifetimes map.insert(hir_id.local_id, v); } for hir_id in named_region_map.late_bound { - let map = rl.late_bound - .entry(hir_id.owner_local_def_id()) - .or_default(); + let map = 
rl.late_bound.entry(hir_id.owner_local_def_id()).or_default(); map.insert(hir_id.local_id); } for (hir_id, v) in named_region_map.object_lifetime_defaults { - let map = rl.object_lifetime_defaults - .entry(hir_id.owner_local_def_id()) - .or_default(); + let map = rl.object_lifetime_defaults.entry(hir_id.owner_local_def_id()).or_default(); map.insert(hir_id.local_id, v); } @@ -423,8 +400,7 @@ fn krate(tcx: TyCtxt<'_>) -> NamedRegionMap { /// This function returns whether there is such an implicit parameter defined on the given item. fn sub_items_have_self_param(node: &hir::ItemKind<'_>) -> bool { match *node { - hir::ItemKind::Trait(..) | - hir::ItemKind::TraitAlias(..) => true, + hir::ItemKind::Trait(..) | hir::ItemKind::TraitAlias(..) => true, _ => false, } } @@ -442,15 +418,9 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { let saved = take(&mut self.labels_in_fn); let body = self.tcx.hir().body(body); extract_labels(self, body); - self.with( - Scope::Body { - id: body.id(), - s: self.scope, - }, - |_, this| { - this.visit_body(body); - }, - ); + self.with(Scope::Body { id: body.id(), s: self.scope }, |_, this| { + this.visit_body(body); + }); replace(&mut self.labels_in_fn, saved); } @@ -472,25 +442,17 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { } hir::ItemKind::Static(..) | hir::ItemKind::Const(..) => { // No lifetime parameters, but implied 'static. - let scope = Scope::Elision { - elide: Elide::Exact(Region::Static), - s: ROOT_SCOPE, - }; + let scope = Scope::Elision { elide: Elide::Exact(Region::Static), s: ROOT_SCOPE }; self.with(scope, |_, this| intravisit::walk_item(this, item)); } - hir::ItemKind::OpaqueTy(hir::OpaqueTy { - impl_trait_fn: Some(_), - .. - }) => { + hir::ItemKind::OpaqueTy(hir::OpaqueTy { impl_trait_fn: Some(_), .. }) => { // Currently opaque type declarations are just generated from `impl Trait` // items. Doing anything on this node is irrelevant, as we currently don't need // it. } hir::ItemKind::TyAlias(_, ref generics) | hir::ItemKind::OpaqueTy(hir::OpaqueTy { - impl_trait_fn: None, - ref generics, - .. + impl_trait_fn: None, ref generics, .. }) | hir::ItemKind::Enum(_, ref generics) | hir::ItemKind::Struct(_, ref generics) @@ -511,16 +473,19 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { 0 }; let mut non_lifetime_count = 0; - let lifetimes = generics.params.iter().filter_map(|param| match param.kind { - GenericParamKind::Lifetime { .. } => { - Some(Region::early(&self.tcx.hir(), &mut index, param)) - } - GenericParamKind::Type { .. } | - GenericParamKind::Const { .. } => { - non_lifetime_count += 1; - None - } - }).collect(); + let lifetimes = generics + .params + .iter() + .filter_map(|param| match param.kind { + GenericParamKind::Lifetime { .. } => { + Some(Region::early(&self.tcx.hir(), &mut index, param)) + } + GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => { + non_lifetime_count += 1; + None + } + }) + .collect(); let scope = Scope::Binder { lifetimes, next_early_index: index + non_lifetime_count, @@ -561,7 +526,8 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { let was_in_fn_syntax = self.is_in_fn_syntax; self.is_in_fn_syntax = true; let scope = Scope::Binder { - lifetimes: c.generic_params + lifetimes: c + .generic_params .iter() .filter_map(|param| match param.kind { GenericParamKind::Lifetime { .. 
} => { @@ -592,10 +558,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { LifetimeName::Implicit => { // For types like `dyn Foo`, we should // generate a special form of elided. - span_bug!( - ty.span, - "object-lifetime-default expected, not implict", - ); + span_bug!(ty.span, "object-lifetime-default expected, not implict",); } LifetimeName::ImplicitObjectLifetimeDefault => { // If the user does not write *anything*, we @@ -636,19 +599,14 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { let (generics, bounds) = match self.tcx.hir().expect_item(item_id.id).kind { // Named opaque `impl Trait` types are reached via `TyKind::Path`. // This arm is for `impl Trait` in the types of statics, constants and locals. - hir::ItemKind::OpaqueTy(hir::OpaqueTy { - impl_trait_fn: None, - .. - }) => { + hir::ItemKind::OpaqueTy(hir::OpaqueTy { impl_trait_fn: None, .. }) => { intravisit::walk_ty(self, ty); return; } // RPIT (return position impl trait) - hir::ItemKind::OpaqueTy(hir::OpaqueTy { - ref generics, - ref bounds, - .. - }) => (generics, bounds), + hir::ItemKind::OpaqueTy(hir::OpaqueTy { ref generics, ref bounds, .. }) => { + (generics, bounds) + } ref i => bug!("`impl Trait` pointed to non-opaque type?? {:#?}", i), }; @@ -704,7 +662,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { match param.kind { GenericParamKind::Lifetime { .. } => { let (name, reg) = Region::early(&self.tcx.hir(), &mut index, ¶m); - let def_id = if let Region::EarlyBound(_ ,def_id , _) = reg { + let def_id = if let Region::EarlyBound(_, def_id, _) = reg { def_id } else { bug!(); @@ -723,8 +681,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { lifetimes.insert(name, reg); } } - GenericParamKind::Type { .. } | - GenericParamKind::Const { .. } => { + GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => { non_lifetime_count += 1; } } @@ -732,10 +689,8 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { let next_early_index = index + non_lifetime_count; if let Some(elision_region) = elision { - let scope = Scope::Elision { - elide: Elide::Exact(elision_region), - s: self.scope, - }; + let scope = + Scope::Elision { elide: Elide::Exact(elision_region), s: self.scope }; self.with(scope, |_old_scope, this| { let scope = Scope::Binder { lifetimes, @@ -788,16 +743,19 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { let mut index = self.next_early_index(); debug!("visit_ty: index = {}", index); let mut non_lifetime_count = 0; - let lifetimes = generics.params.iter().filter_map(|param| match param.kind { - GenericParamKind::Lifetime { .. } => { - Some(Region::early(&self.tcx.hir(), &mut index, param)) - } - GenericParamKind::Type { .. } | - GenericParamKind::Const { .. } => { - non_lifetime_count += 1; - None - } - }).collect(); + let lifetimes = generics + .params + .iter() + .filter_map(|param| match param.kind { + GenericParamKind::Lifetime { .. } => { + Some(Region::early(&self.tcx.hir(), &mut index, param)) + } + GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => { + non_lifetime_count += 1; + None + } + }) + .collect(); let scope = Scope::Binder { lifetimes, next_early_index: index + non_lifetime_count, @@ -840,16 +798,19 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { let mut index = self.next_early_index(); let mut non_lifetime_count = 0; debug!("visit_ty: index = {}", index); - let lifetimes = generics.params.iter().filter_map(|param| match param.kind { - GenericParamKind::Lifetime { .. 
} => { - Some(Region::early(&self.tcx.hir(), &mut index, param)) - } - GenericParamKind::Const { .. } | - GenericParamKind::Type { .. } => { - non_lifetime_count += 1; - None - } - }).collect(); + let lifetimes = generics + .params + .iter() + .filter_map(|param| match param.kind { + GenericParamKind::Lifetime { .. } => { + Some(Region::early(&self.tcx.hir(), &mut index, param)) + } + GenericParamKind::Const { .. } | GenericParamKind::Type { .. } => { + non_lifetime_count += 1; + None + } + }) + .collect(); let scope = Scope::Binder { lifetimes, next_early_index: index + non_lifetime_count, @@ -867,19 +828,23 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { let mut index = self.next_early_index(); let mut next_early_index = index; debug!("visit_ty: index = {}", index); - let lifetimes = generics.params.iter().filter_map(|param| match param.kind { - GenericParamKind::Lifetime { .. } => { - Some(Region::early(&self.tcx.hir(), &mut index, param)) - } - GenericParamKind::Type { .. } => { - next_early_index += 1; - None - } - GenericParamKind::Const { .. } => { - next_early_index += 1; - None - } - }).collect(); + let lifetimes = generics + .params + .iter() + .filter_map(|param| match param.kind { + GenericParamKind::Lifetime { .. } => { + Some(Region::early(&self.tcx.hir(), &mut index, param)) + } + GenericParamKind::Type { .. } => { + next_early_index += 1; + None + } + GenericParamKind::Const { .. } => { + next_early_index += 1; + None + } + }) + .collect(); let scope = Scope::Binder { lifetimes, @@ -1016,12 +981,12 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { ) { debug!("visit_poly_trait_ref(trait_ref={:?})", trait_ref); - if !self.trait_ref_hack || trait_ref.bound_generic_params.iter().any(|param| { - match param.kind { + if !self.trait_ref_hack + || trait_ref.bound_generic_params.iter().any(|param| match param.kind { GenericParamKind::Lifetime { .. 
} => true, _ => false, - } - }) { + }) + { if self.trait_ref_hack { span_err!( self.tcx.sess, @@ -1073,28 +1038,16 @@ struct Shadower { } fn original_label(span: Span) -> Original { - Original { - kind: ShadowKind::Label, - span: span, - } + Original { kind: ShadowKind::Label, span: span } } fn shadower_label(span: Span) -> Shadower { - Shadower { - kind: ShadowKind::Label, - span: span, - } + Shadower { kind: ShadowKind::Label, span: span } } fn original_lifetime(span: Span) -> Original { - Original { - kind: ShadowKind::Lifetime, - span: span, - } + Original { kind: ShadowKind::Lifetime, span: span } } fn shadower_lifetime(param: &hir::GenericParam) -> Shadower { - Shadower { - kind: ShadowKind::Lifetime, - span: param.span, - } + Shadower { kind: ShadowKind::Lifetime, span: param.span } } impl ShadowKind { @@ -1114,12 +1067,8 @@ fn check_mixed_explicit_and_in_band_defs(tcx: TyCtxt<'_>, params: &P<[hir::Gener _ => None, }) .collect(); - let explicit = lifetime_params - .iter() - .find(|(kind, _)| *kind == LifetimeParamKind::Explicit); - let in_band = lifetime_params - .iter() - .find(|(kind, _)| *kind == LifetimeParamKind::InBand); + let explicit = lifetime_params.iter().find(|(kind, _)| *kind == LifetimeParamKind::Explicit); + let in_band = lifetime_params.iter().find(|(kind, _)| *kind == LifetimeParamKind::InBand); if let (Some((_, explicit_span)), Some((_, in_band_span))) = (explicit, in_band) { struct_span_err!( @@ -1127,9 +1076,10 @@ fn check_mixed_explicit_and_in_band_defs(tcx: TyCtxt<'_>, params: &P<[hir::Gener *in_band_span, E0688, "cannot mix in-band and explicit lifetime definitions" - ).span_label(*in_band_span, "in-band lifetime definition here") - .span_label(*explicit_span, "explicit lifetime definition here") - .emit(); + ) + .span_label(*in_band_span, "in-band lifetime definition here") + .span_label(*explicit_span, "explicit lifetime definition here") + .emit(); } } @@ -1174,11 +1124,8 @@ fn extract_labels(ctxt: &mut LifetimeContext<'_, '_>, body: &hir::Body<'_>) { labels_in_fn: &'a mut Vec<ast::Ident>, } - let mut gather = GatherLabels { - tcx: ctxt.tcx, - scope: ctxt.scope, - labels_in_fn: &mut ctxt.labels_in_fn, - }; + let mut gather = + GatherLabels { tcx: ctxt.tcx, scope: ctxt.scope, labels_in_fn: &mut ctxt.labels_in_fn }; gather.visit_body(body); impl<'v, 'a, 'tcx> Visitor<'v> for GatherLabels<'a, 'tcx> { @@ -1209,11 +1156,7 @@ fn extract_labels(ctxt: &mut LifetimeContext<'_, '_>, body: &hir::Body<'_>) { } fn expression_label(ex: &hir::Expr) -> Option<ast::Ident> { - if let hir::ExprKind::Loop(_, Some(label), _) = ex.kind { - Some(label.ident) - } else { - None - } + if let hir::ExprKind::Loop(_, Some(label), _) = ex.kind { Some(label.ident) } else { None } } fn check_if_label_shadows_lifetime( @@ -1233,9 +1176,7 @@ fn extract_labels(ctxt: &mut LifetimeContext<'_, '_>, body: &hir::Body<'_>) { return; } - Scope::Binder { - ref lifetimes, s, .. - } => { + Scope::Binder { ref lifetimes, s, .. } => { // FIXME (#24278): non-hygienic comparison if let Some(def) = lifetimes.get(&hir::ParamName::Plain(label.modern())) { let hir_id = tcx.hir().as_local_hir_id(def.id().unwrap()).unwrap(); @@ -1263,9 +1204,7 @@ fn compute_object_lifetime_defaults(tcx: TyCtxt<'_>) -> HirIdMap<Vec<ObjectLifet | hir::ItemKind::Union(_, ref generics) | hir::ItemKind::Enum(_, ref generics) | hir::ItemKind::OpaqueTy(hir::OpaqueTy { - ref generics, - impl_trait_fn: None, - .. + ref generics, impl_trait_fn: None, .. 
}) | hir::ItemKind::TyAlias(_, ref generics) | hir::ItemKind::Trait(_, _, ref generics, ..) => { @@ -1406,12 +1345,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { where F: for<'b> FnOnce(ScopeRef<'_>, &mut LifetimeContext<'b, 'tcx>), { - let LifetimeContext { - tcx, - map, - lifetime_uses, - .. - } = self; + let LifetimeContext { tcx, map, lifetime_uses, .. } = self; let labels_in_fn = take(&mut self.labels_in_fn); let xcrate_object_lifetime_defaults = take(&mut self.xcrate_object_lifetime_defaults); let mut this = LifetimeContext { @@ -1468,7 +1402,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { // helper method to issue suggestions from `fn rah<'a>(&'a T)` to `fn rah(&T)` // or from `fn rah<'a>(T<'a>)` to `fn rah(T<'_>)` fn suggest_eliding_single_use_lifetime( - &self, err: &mut DiagnosticBuilder<'_>, def_id: DefId, lifetime: &hir::Lifetime + &self, + err: &mut DiagnosticBuilder<'_>, + def_id: DefId, + lifetime: &hir::Lifetime, ) { let name = lifetime.name.ident(); let mut remove_decl = None; @@ -1488,16 +1425,17 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { // include the trailing whitespace between the lifetime and type names let lt_through_ty_span = lifetime.span.to(input.span.shrink_to_hi()); remove_use = Some( - self.tcx.sess.source_map() - .span_until_non_whitespace(lt_through_ty_span) + self.tcx + .sess + .source_map() + .span_until_non_whitespace(lt_through_ty_span), ); break; } } hir::TyKind::Path(ref qpath) => { if let QPath::Resolved(_, path) = qpath { - - let last_segment = &path.segments[path.segments.len()-1]; + let last_segment = &path.segments[path.segments.len() - 1]; let generics = last_segment.generic_args(); for arg in generics.args.iter() { if let GenericArg::Lifetime(lt) = arg { @@ -1509,21 +1447,21 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } break; } - }, + } _ => {} } } }; if let Node::Lifetime(hir_lifetime) = self.tcx.hir().get(lifetime.hir_id) { - if let Some(parent) = self.tcx.hir().find( - self.tcx.hir().get_parent_item(hir_lifetime.hir_id)) + if let Some(parent) = + self.tcx.hir().find(self.tcx.hir().get_parent_item(hir_lifetime.hir_id)) { match parent { Node::Item(item) => { if let hir::ItemKind::Fn(sig, _, _) = &item.kind { find_arg_use_span(&sig.decl.inputs); } - }, + } Node::ImplItem(impl_item) => { if let hir::ImplItemKind::Method(sig, _) = &impl_item.kind { find_arg_use_span(&sig.decl.inputs); @@ -1590,10 +1528,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { def_ids.sort_by_cached_key(|&def_id| self.tcx.def_path_hash(def_id)); for def_id in def_ids { - debug!( - "check_uses_for_lifetimes_defined_by_scope: def_id = {:?}", - def_id - ); + debug!("check_uses_for_lifetimes_defined_by_scope: def_id = {:?}", def_id); let lifetimeuseset = self.lifetime_uses.remove(&def_id); @@ -1623,14 +1558,20 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } if let Some(parent_def_id) = self.tcx.parent(def_id) { - if let Some(parent_hir_id) = self.tcx.hir() - .as_local_hir_id(parent_def_id) { - // lifetimes in `derive` expansions don't count (Issue #53738) - if self.tcx.hir().attrs(parent_hir_id).iter() - .any(|attr| attr.check_name(sym::automatically_derived)) { - continue; - } + if let Some(parent_hir_id) = + self.tcx.hir().as_local_hir_id(parent_def_id) + { + // lifetimes in `derive` expansions don't count (Issue #53738) + if self + .tcx + .hir() + .attrs(parent_hir_id) + .iter() + .any(|attr| attr.check_name(sym::automatically_derived)) + { + continue; } + } } let mut err = self.tcx.struct_span_lint_hir( @@ -1740,20 +1681,23 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> 
{ } let mut non_lifetime_count = 0; - let lifetimes = generics.params.iter().filter_map(|param| match param.kind { - GenericParamKind::Lifetime { .. } => { - if self.map.late_bound.contains(¶m.hir_id) { - Some(Region::late(&self.tcx.hir(), param)) - } else { - Some(Region::early(&self.tcx.hir(), &mut index, param)) + let lifetimes = generics + .params + .iter() + .filter_map(|param| match param.kind { + GenericParamKind::Lifetime { .. } => { + if self.map.late_bound.contains(¶m.hir_id) { + Some(Region::late(&self.tcx.hir(), param)) + } else { + Some(Region::early(&self.tcx.hir(), &mut index, param)) + } } - } - GenericParamKind::Type { .. } | - GenericParamKind::Const { .. } => { - non_lifetime_count += 1; - None - } - }).collect(); + GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => { + non_lifetime_count += 1; + None + } + }) + .collect(); let next_early_index = index + non_lifetime_count; let scope = Scope::Binder { @@ -1775,13 +1719,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { match *scope { Scope::Root => return 0, - Scope::Binder { - next_early_index, - opaque_type_parent, - .. - } if (!only_opaque_type_parent || opaque_type_parent) => + Scope::Binder { next_early_index, opaque_type_parent, .. } + if (!only_opaque_type_parent || opaque_type_parent) => { - return next_early_index + return next_early_index; } Scope::Binder { s, .. } @@ -1832,9 +1773,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { break None; } - Scope::Binder { - ref lifetimes, s, .. - } => { + Scope::Binder { ref lifetimes, s, .. } => { match lifetime_ref.name { LifetimeName::Param(param_name) => { if let Some(&def) = lifetimes.get(¶m_name.modern()) { @@ -1860,17 +1799,13 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } else if let Some(body_id) = outermost_body { let fn_id = self.tcx.hir().body_owner(body_id); match self.tcx.hir().get(fn_id) { - Node::Item(&hir::Item { - kind: hir::ItemKind::Fn(..), - .. - }) + Node::Item(&hir::Item { kind: hir::ItemKind::Fn(..), .. }) | Node::TraitItem(&hir::TraitItem { kind: hir::TraitItemKind::Method(..), .. }) | Node::ImplItem(&hir::ImplItem { - kind: hir::ImplItemKind::Method(..), - .. + kind: hir::ImplItemKind::Method(..), .. }) => { let scope = self.tcx.hir().local_def_id(fn_id); def = Region::Free(scope, def.id().unwrap()); @@ -1890,8 +1825,9 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { E0687, "lifetimes used in `fn` or `Fn` syntax must be \ explicitly declared using `<...>` binders" - ).span_label(lifetime_ref.span, "in-band lifetime definition") - .emit(); + ) + .span_label(lifetime_ref.span, "in-band lifetime definition") + .emit(); } Region::Static @@ -1912,17 +1848,16 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { E0261, "use of undeclared lifetime name `{}`", lifetime_ref - ).span_label(lifetime_ref.span, "undeclared lifetime") - .emit(); + ) + .span_label(lifetime_ref.span, "undeclared lifetime") + .emit(); } } fn visit_segment_args(&mut self, res: Res, depth: usize, generic_args: &'tcx hir::GenericArgs) { debug!( "visit_segment_args(res={:?}, depth={:?}, generic_args={:?})", - res, - depth, - generic_args, + res, depth, generic_args, ); if generic_args.parenthesized { @@ -1957,21 +1892,17 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { // which requires object lifetime defaults. 
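// Illustrative sketch, not part of the patch: the "object lifetime defaults"
// resolved below are the rules that fill in a trait object's lifetime when it
// is left implicit. Roughly, `Box<dyn Display>` defaults to
// `Box<dyn Display + 'static>`, while `&dyn Display` ties the object lifetime
// to the reference's lifetime, which is what this standalone example relies on.
// The function names are made up.
use std::fmt::Display;

fn boxed(x: i32) -> Box<dyn Display> {
    // Here `dyn Display` means `dyn Display + 'static`.
    Box::new(x)
}

fn borrowed(x: &i32) -> &dyn Display {
    // Here `&dyn Display` means `&'_ (dyn Display + '_)`, with both lifetimes
    // elided to the same one.
    x
}

fn main() {
    println!("{} {}", boxed(1), borrowed(&2));
}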
let parent_def_id = |this: &mut Self, def_id: DefId| { let def_key = this.tcx.def_key(def_id); - DefId { - krate: def_id.krate, - index: def_key.parent.expect("missing parent"), - } + DefId { krate: def_id.krate, index: def_key.parent.expect("missing parent") } }; let type_def_id = match res { - Res::Def(DefKind::AssocTy, def_id) - if depth == 1 => Some(parent_def_id(self, def_id)), - Res::Def(DefKind::Variant, def_id) - if depth == 0 => Some(parent_def_id(self, def_id)), + Res::Def(DefKind::AssocTy, def_id) if depth == 1 => Some(parent_def_id(self, def_id)), + Res::Def(DefKind::Variant, def_id) if depth == 0 => Some(parent_def_id(self, def_id)), Res::Def(DefKind::Struct, def_id) | Res::Def(DefKind::Union, def_id) | Res::Def(DefKind::Enum, def_id) | Res::Def(DefKind::TyAlias, def_id) - | Res::Def(DefKind::Trait, def_id) if depth == 0 => + | Res::Def(DefKind::Trait, def_id) + if depth == 0 => { Some(def_id) } @@ -2018,31 +1949,30 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { &map.object_lifetime_defaults[&id] } else { let tcx = self.tcx; - self.xcrate_object_lifetime_defaults - .entry(def_id) - .or_insert_with(|| { - tcx.generics_of(def_id) - .params - .iter() - .filter_map(|param| match param.kind { - GenericParamDefKind::Type { - object_lifetime_default, - .. - } => Some(object_lifetime_default), - GenericParamDefKind::Lifetime | GenericParamDefKind::Const => None, - }) - .collect() - }) + self.xcrate_object_lifetime_defaults.entry(def_id).or_insert_with(|| { + tcx.generics_of(def_id) + .params + .iter() + .filter_map(|param| match param.kind { + GenericParamDefKind::Type { object_lifetime_default, .. } => { + Some(object_lifetime_default) + } + GenericParamDefKind::Lifetime | GenericParamDefKind::Const => None, + }) + .collect() + }) }; debug!("visit_segment_args: unsubst={:?}", unsubst); unsubst .iter() .map(|set| match *set { - Set1::Empty => if in_body { - None - } else { - Some(Region::Static) - }, + Set1::Empty => { + if in_body { + None + } else { + Some(Region::Static) + } + } Set1::One(r) => { let lifetimes = generic_args.args.iter().filter_map(|arg| match arg { GenericArg::Lifetime(lt) => Some(lt), @@ -2063,10 +1993,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { GenericArg::Lifetime(_) => {} GenericArg::Type(ty) => { if let Some(<) = object_lifetime_defaults.get(i) { - let scope = Scope::ObjectLifetimeDefault { - lifetime: lt, - s: self.scope, - }; + let scope = Scope::ObjectLifetimeDefault { lifetime: lt, s: self.scope }; self.with(scope, |_, this| this.visit_ty(ty)); } else { self.visit_ty(ty); @@ -2103,22 +2030,15 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { // // This is intended to leave room for us to implement the // correct behavior in the future. - let has_lifetime_parameter = generic_args - .args - .iter() - .any(|arg| match arg { - GenericArg::Lifetime(_) => true, - _ => false, - }); + let has_lifetime_parameter = generic_args.args.iter().any(|arg| match arg { + GenericArg::Lifetime(_) => true, + _ => false, + }); // Resolve lifetimes found in the type `XX` from `Item = XX` bindings. 
for b in &generic_args.bindings { let scope = Scope::ObjectLifetimeDefault { - lifetime: if has_lifetime_parameter { - None - } else { - Some(Region::Static) - }, + lifetime: if has_lifetime_parameter { None } else { Some(Region::Static) }, s: self.scope, }; self.with(scope, |_, this| this.visit_assoc_type_binding(b)); @@ -2128,10 +2048,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { fn visit_fn_like_elision(&mut self, inputs: &'tcx [hir::Ty], output: Option<&'tcx hir::Ty>) { debug!("visit_fn_like_elision: enter"); let mut arg_elide = Elide::FreshLateAnon(Cell::new(0)); - let arg_scope = Scope::Elision { - elide: arg_elide.clone(), - s: self.scope, - }; + let arg_scope = Scope::Elision { elide: arg_elide.clone(), s: self.scope }; self.with(arg_scope, |_, this| { for input in inputs { this.visit_ty(input); @@ -2158,24 +2075,16 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { let parent = self.tcx.hir().get_parent_node(output.hir_id); let body = match self.tcx.hir().get(parent) { // `fn` definitions and methods. - Node::Item(&hir::Item { - kind: hir::ItemKind::Fn(.., body), - .. - }) => Some(body), + Node::Item(&hir::Item { kind: hir::ItemKind::Fn(.., body), .. }) => Some(body), Node::TraitItem(&hir::TraitItem { - kind: hir::TraitItemKind::Method(_, ref m), - .. + kind: hir::TraitItemKind::Method(_, ref m), .. }) => { - if let hir::ItemKind::Trait(.., ref trait_items) = self.tcx - .hir() - .expect_item(self.tcx.hir().get_parent_item(parent)) - .kind + if let hir::ItemKind::Trait(.., ref trait_items) = + self.tcx.hir().expect_item(self.tcx.hir().get_parent_item(parent)).kind { - assoc_item_kind = trait_items - .iter() - .find(|ti| ti.id.hir_id == parent) - .map(|ti| ti.kind); + assoc_item_kind = + trait_items.iter().find(|ti| ti.id.hir_id == parent).map(|ti| ti.kind); } match *m { hir::TraitMethod::Required(_) => None, @@ -2183,20 +2092,13 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } } - Node::ImplItem(&hir::ImplItem { - kind: hir::ImplItemKind::Method(_, body), - .. - }) => { - if let hir::ItemKind::Impl(.., ref self_ty, ref impl_items) = self.tcx - .hir() - .expect_item(self.tcx.hir().get_parent_item(parent)) - .kind + Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Method(_, body), .. 
}) => { + if let hir::ItemKind::Impl(.., ref self_ty, ref impl_items) = + self.tcx.hir().expect_item(self.tcx.hir().get_parent_item(parent)).kind { impl_self = Some(self_ty); - assoc_item_kind = impl_items - .iter() - .find(|ii| ii.id.hir_id == parent) - .map(|ii| ii.kind); + assoc_item_kind = + impl_items.iter().find(|ii| ii.id.hir_id == parent).map(|ii| ii.kind); } Some(body) } @@ -2247,9 +2149,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { Res::Def(DefKind::Struct, _) | Res::Def(DefKind::Union, _) | Res::Def(DefKind::Enum, _) - | Res::PrimTy(_) => { - return res == path.res - } + | Res::PrimTy(_) => return res == path.res, _ => {} } } @@ -2285,10 +2185,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { }; visitor.visit_ty(&inputs[0]); if let Set1::One(lifetime) = visitor.lifetime { - let scope = Scope::Elision { - elide: Elide::Exact(lifetime), - s: self.scope, - }; + let scope = Scope::Elision { elide: Elide::Exact(lifetime), s: self.scope }; self.with(scope, |_, this| this.visit_ty(output)); return; } @@ -2338,10 +2235,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { debug!("visit_fn_like_elision: elide={:?}", elide); - let scope = Scope::Elision { - elide, - s: self.scope, - }; + let scope = Scope::Elision { elide, s: self.scope }; self.with(scope, |_, this| this.visit_ty(output)); debug!("visit_fn_like_elision: exit"); @@ -2411,8 +2305,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { self.have_bound_regions = true; } _ => { - self.lifetimes - .insert(lifetime.shifted_out_to_binder(self.outer_index)); + self.lifetimes.insert(lifetime.shifted_out_to_binder(self.outer_index)); } } } @@ -2533,25 +2426,17 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { let mut m = String::new(); let len = params.len(); - let elided_params: Vec<_> = params - .iter() - .cloned() - .filter(|info| info.lifetime_count > 0) - .collect(); + let elided_params: Vec<_> = + params.iter().cloned().filter(|info| info.lifetime_count > 0).collect(); let elided_len = elided_params.len(); for (i, info) in elided_params.into_iter().enumerate() { - let ElisionFailureInfo { - parent, - index, - lifetime_count: n, - have_bound_regions, - } = info; - - let help_name = if let Some(ident) = parent.and_then(|body| { - self.tcx.hir().body(body).params[index].pat.simple_ident() - }) { + let ElisionFailureInfo { parent, index, lifetime_count: n, have_bound_regions } = info; + + let help_name = if let Some(ident) = + parent.and_then(|body| self.tcx.hir().body(body).params[index].pat.simple_ident()) + { format!("`{}`", ident) } else { format!("argument {}", index + 1) @@ -2629,9 +2514,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { Scope::Body { .. } | Scope::ObjectLifetimeDefault { lifetime: None, .. } => return, - Scope::ObjectLifetimeDefault { - lifetime: Some(l), .. - } => break l, + Scope::ObjectLifetimeDefault { lifetime: Some(l), .. 
} => break l, } }; self.insert_lifetime(lifetime_ref, lifetime.shifted(late_depth)); @@ -2652,9 +2535,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { for (i, (lifetime_i, lifetime_i_name)) in lifetimes.iter().enumerate() { if let hir::ParamName::Plain(_) = lifetime_i_name { let name = lifetime_i_name.ident().name; - if name == kw::UnderscoreLifetime - || name == kw::StaticLifetime - { + if name == kw::UnderscoreLifetime || name == kw::StaticLifetime { let mut err = struct_span_err!( self.tcx.sess, lifetime_i.span, @@ -2679,9 +2560,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { E0263, "lifetime name `{}` declared twice in the same scope", lifetime_j.name.ident() - ).span_label(lifetime_j.span, "declared twice") - .span_label(lifetime_i.span, "previous declaration here") - .emit(); + ) + .span_label(lifetime_j.span, "declared twice") + .span_label(lifetime_i.span, "previous declaration here") + .emit(); } } @@ -2762,9 +2644,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { return; } - Scope::Binder { - ref lifetimes, s, .. - } => { + Scope::Binder { ref lifetimes, s, .. } => { if let Some(&def) = lifetimes.get(¶m.name.modern()) { let hir_id = self.tcx.hir().as_local_hir_id(def.id().unwrap()).unwrap(); @@ -2792,10 +2672,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { Scope::Root => break false, // Inside of items, it depends on the kind of item. - Scope::Binder { - track_lifetime_uses, - .. - } => break track_lifetime_uses, + Scope::Binder { track_lifetime_uses, .. } => break track_lifetime_uses, // Inside a body, `'_` will use an inference variable, // should be fine. @@ -2804,23 +2681,14 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { // A lifetime only used in a fn argument could as well // be replaced with `'_`, as that would generate a // fresh name, too. - Scope::Elision { - elide: Elide::FreshLateAnon(_), - .. - } => break true, + Scope::Elision { elide: Elide::FreshLateAnon(_), .. } => break true, // In the return type or other such place, `'_` is not // going to make a fresh name, so we cannot // necessarily replace a single-use lifetime with // `'_`. - Scope::Elision { - elide: Elide::Exact(_), - .. - } => break false, - Scope::Elision { - elide: Elide::Error(_), - .. - } => break false, + Scope::Elision { elide: Elide::Exact(_), .. } => break false, + Scope::Elision { elide: Elide::Error(_), .. } => break false, Scope::ObjectLifetimeDefault { s, .. } => scope = s, } @@ -2854,14 +2722,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { | Region::EarlyBound(_, def_id, _) => { // A lifetime declared by the user. 
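// Illustrative sketch, not part of the patch: the `LifetimeUseSet::One`/`Many`
// bookkeeping below feeds the allow-by-default `single_use_lifetimes` lint.
// With the lint enabled, `'a` in `only_once` should be reported because it is
// used exactly once and could be elided, while `'b` in `tied` is not, since it
// links the input lifetime to the output. Function names are made up.
#![warn(single_use_lifetimes)]

fn only_once<'a>(x: &'a u32) -> u32 {
    *x
}

fn tied<'b>(x: &'b u32) -> &'b u32 {
    x
}

fn main() {
    println!("{}", only_once(&1) + tied(&2));
}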
let track_lifetime_uses = self.track_lifetime_uses(); - debug!( - "insert_lifetime: track_lifetime_uses={}", - track_lifetime_uses - ); + debug!("insert_lifetime: track_lifetime_uses={}", track_lifetime_uses); if track_lifetime_uses && !self.lifetime_uses.contains_key(&def_id) { debug!("insert_lifetime: first use of {:?}", def_id); - self.lifetime_uses - .insert(def_id, LifetimeUseSet::One(lifetime_ref)); + self.lifetime_uses.insert(def_id, LifetimeUseSet::One(lifetime_ref)); } else { debug!("insert_lifetime: many uses of {:?}", def_id); self.lifetime_uses.insert(def_id, LifetimeUseSet::Many); @@ -2894,10 +2758,7 @@ fn insert_late_bound_lifetimes( decl: &hir::FnDecl, generics: &hir::Generics, ) { - debug!( - "insert_late_bound_lifetimes(decl={:?}, generics={:?})", - decl, generics - ); + debug!("insert_late_bound_lifetimes(decl={:?}, generics={:?})", decl, generics); let mut constrained_by_input = ConstrainedCollector::default(); for arg_ty in &decl.inputs { @@ -2907,10 +2768,7 @@ fn insert_late_bound_lifetimes( let mut appears_in_output = AllCollector::default(); intravisit::walk_fn_ret_ty(&mut appears_in_output, &decl.output); - debug!( - "insert_late_bound_lifetimes: constrained_by_input={:?}", - constrained_by_input.regions - ); + debug!("insert_late_bound_lifetimes: constrained_by_input={:?}", constrained_by_input.regions); // Walk the lifetimes that appear in where clauses. // @@ -2944,8 +2802,7 @@ fn insert_late_bound_lifetimes( hir::GenericParamKind::Lifetime { .. } => { /* fall through */ } // Neither types nor consts are late-bound. - hir::GenericParamKind::Type { .. } - | hir::GenericParamKind::Const { .. } => continue, + hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => continue, } let lt_name = hir::LifetimeName::Param(param.name.modern()); @@ -3035,13 +2892,7 @@ pub fn report_missing_lifetime_specifiers( span: Span, count: usize, ) -> DiagnosticBuilder<'_> { - struct_span_err!( - sess, - span, - E0106, - "missing lifetime specifier{}", - pluralize!(count) - ) + struct_span_err!(sess, span, E0106, "missing lifetime specifier{}", pluralize!(count)) } fn add_missing_lifetime_specifiers_label( @@ -3053,11 +2904,9 @@ fn add_missing_lifetime_specifiers_label( ) { if count > 1 { err.span_label(span, format!("expected {} lifetime parameters", count)); - } else if let (1, Some(name), Some("&")) = ( - lifetime_names.len(), - lifetime_names.iter().next(), - snippet, - ) { + } else if let (1, Some(name), Some("&")) = + (lifetime_names.len(), lifetime_names.iter().next(), snippet) + { err.span_suggestion( span, "consider using the named lifetime", diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 8baedfed9d6..e6ecf1b676e 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -5,73 +5,72 @@ #[allow(dead_code)] pub mod auto_trait; mod chalk_fulfill; +pub mod codegen; mod coherence; -pub mod error_reporting; mod engine; +pub mod error_reporting; mod fulfill; -mod project; mod object_safety; mod on_unimplemented; +mod project; +pub mod query; mod select; mod specialize; mod structural_impls; -pub mod codegen; mod util; -pub mod query; -use chalk_engine; use crate::hir; use crate::hir::def_id::DefId; -use crate::infer::{InferCtxt, SuppressRegionErrors}; use crate::infer::outlives::env::OutlivesEnvironment; +use crate::infer::{InferCtxt, SuppressRegionErrors}; use crate::middle::region; use crate::mir::interpret::ErrorHandled; +use crate::ty::error::{ExpectedFound, TypeError}; +use crate::ty::fold::{TypeFoldable, 
TypeFolder, TypeVisitor}; +use crate::ty::subst::{InternalSubsts, SubstsRef}; +use crate::ty::{self, AdtKind, GenericParamDefKind, List, ToPredicate, Ty, TyCtxt}; +use crate::util::common::ErrorReported; +use chalk_engine; use rustc_macros::HashStable; use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; -use crate::ty::subst::{InternalSubsts, SubstsRef}; -use crate::ty::{self, AdtKind, List, Ty, TyCtxt, GenericParamDefKind, ToPredicate}; -use crate::ty::error::{ExpectedFound, TypeError}; -use crate::ty::fold::{TypeFolder, TypeFoldable, TypeVisitor}; -use crate::util::common::ErrorReported; use std::fmt::Debug; use std::rc::Rc; -pub use self::SelectionError::*; pub use self::FulfillmentErrorCode::*; -pub use self::Vtable::*; pub use self::ObligationCauseCode::*; +pub use self::SelectionError::*; +pub use self::Vtable::*; pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls}; pub use self::coherence::{OrphanCheckErr, OverlapResult}; +pub use self::engine::{TraitEngine, TraitEngineExt}; pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation}; -pub use self::project::MismatchedProjectionTypes; -pub use self::project::{normalize, normalize_projection_type, poly_project_and_unify_type}; -pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal, Normalized}; -pub use self::object_safety::ObjectSafetyViolation; pub use self::object_safety::MethodViolationCode; +pub use self::object_safety::ObjectSafetyViolation; pub use self::on_unimplemented::{OnUnimplementedDirective, OnUnimplementedNote}; -pub use self::select::{EvaluationCache, SelectionContext, SelectionCache}; +pub use self::project::MismatchedProjectionTypes; +pub use self::project::{normalize, normalize_projection_type, poly_project_and_unify_type}; +pub use self::project::{Normalized, ProjectionCache, ProjectionCacheSnapshot, Reveal}; +pub use self::select::{EvaluationCache, SelectionCache, SelectionContext}; pub use self::select::{EvaluationResult, IntercrateAmbiguityCause, OverflowError}; -pub use self::specialize::{OverlapError, specialization_graph, translate_substs}; pub use self::specialize::find_associated_item; pub use self::specialize::specialization_graph::FutureCompatOverlapError; pub use self::specialize::specialization_graph::FutureCompatOverlapErrorKind; -pub use self::engine::{TraitEngine, TraitEngineExt}; +pub use self::specialize::{specialization_graph, translate_substs, OverlapError}; pub use self::util::{elaborate_predicates, elaborate_trait_ref, elaborate_trait_refs}; +pub use self::util::{expand_trait_aliases, TraitAliasExpander}; pub use self::util::{ - supertraits, supertrait_def_ids, transitive_bounds, Supertraits, SupertraitDefIds, + supertrait_def_ids, supertraits, transitive_bounds, SupertraitDefIds, Supertraits, }; -pub use self::util::{expand_trait_aliases, TraitAliasExpander}; pub use self::chalk_fulfill::{ - CanonicalGoal as ChalkCanonicalGoal, - FulfillmentContext as ChalkFulfillmentContext + CanonicalGoal as ChalkCanonicalGoal, FulfillmentContext as ChalkFulfillmentContext, }; -pub use self::ObligationCauseCode::*; pub use self::FulfillmentErrorCode::*; +pub use self::ObligationCauseCode::*; pub use self::SelectionError::*; pub use self::Vtable::*; @@ -79,7 +78,7 @@ pub use self::Vtable::*; #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum IntercrateMode { Issue43355, - Fixed + Fixed, } /// The mode that trait queries run in. @@ -140,19 +139,19 @@ pub struct ObligationCause<'tcx> { /// information. 
pub body_id: hir::HirId, - pub code: ObligationCauseCode<'tcx> + pub code: ObligationCauseCode<'tcx>, } impl<'tcx> ObligationCause<'tcx> { pub fn span(&self, tcx: TyCtxt<'tcx>) -> Span { match self.code { - ObligationCauseCode::CompareImplMethodObligation { .. } | - ObligationCauseCode::MainFunctionType | - ObligationCauseCode::StartFunctionType => { - tcx.sess.source_map().def_span(self.span) - } - ObligationCauseCode::MatchExpressionArm( - box MatchExpressionArmCause { arm_span, .. }) => arm_span, + ObligationCauseCode::CompareImplMethodObligation { .. } + | ObligationCauseCode::MainFunctionType + | ObligationCauseCode::StartFunctionType => tcx.sess.source_map().def_span(self.span), + ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause { + arm_span, + .. + }) => arm_span, _ => self.span, } } @@ -189,7 +188,10 @@ pub enum ObligationCauseCode<'tcx> { ObjectCastObligation(/* Object type */ Ty<'tcx>), /// Obligation incurred due to a coercion. - Coercion { source: Ty<'tcx>, target: Ty<'tcx> }, + Coercion { + source: Ty<'tcx>, + target: Ty<'tcx>, + }, /// Various cases where expressions must be `Sized` / `Copy` / etc. /// `L = X` implies that `L` is `Sized`. @@ -211,7 +213,10 @@ pub enum ObligationCauseCode<'tcx> { RepeatVec(bool), /// Types of fields (other than the last, except for packed structs) in a struct must be sized. - FieldSized { adt_kind: AdtKind, last: bool }, + FieldSized { + adt_kind: AdtKind, + last: bool, + }, /// Constant expressions must be sized. ConstSized, @@ -245,7 +250,10 @@ pub enum ObligationCauseCode<'tcx> { MatchExpressionArm(Box<MatchExpressionArmCause<'tcx>>), /// Computing common supertype in the pattern guard for the arms of a match expression - MatchExpressionArmPattern { span: Span, ty: Ty<'tcx> }, + MatchExpressionArmPattern { + span: Span, + ty: Ty<'tcx>, + }, /// Constants in patterns must have `Structural` type. ConstPatternStructural, @@ -322,7 +330,7 @@ pub struct DerivedObligationCause<'tcx> { parent_trait_ref: ty::PolyTraitRef<'tcx>, /// The parent trait had this cause. 
- parent_code: Rc<ObligationCauseCode<'tcx>> + parent_code: Rc<ObligationCauseCode<'tcx>>, } pub type Obligations<'tcx, O> = Vec<Obligation<'tcx, O>>; @@ -415,7 +423,7 @@ impl<'tcx> GoalKind<'tcx> { Some(p) => p.into_goal(), None => GoalKind::Quantified( QuantifierKind::Universal, - domain_goal.map_bound(|p| tcx.mk_goal(p.into_goal())) + domain_goal.map_bound(|p| tcx.mk_goal(p.into_goal())), ), } } @@ -474,10 +482,7 @@ pub struct Environment<'tcx> { impl Environment<'tcx> { pub fn with<G>(self, goal: G) -> InEnvironment<'tcx, G> { - InEnvironment { - environment: self, - goal, - } + InEnvironment { environment: self, goal } } } @@ -490,12 +495,14 @@ pub struct InEnvironment<'tcx, G> { pub type Selection<'tcx> = Vtable<'tcx, PredicateObligation<'tcx>>; -#[derive(Clone,Debug,TypeFoldable)] +#[derive(Clone, Debug, TypeFoldable)] pub enum SelectionError<'tcx> { Unimplemented, - OutputTypeParameterMismatch(ty::PolyTraitRef<'tcx>, - ty::PolyTraitRef<'tcx>, - ty::error::TypeError<'tcx>), + OutputTypeParameterMismatch( + ty::PolyTraitRef<'tcx>, + ty::PolyTraitRef<'tcx>, + ty::error::TypeError<'tcx>, + ), TraitNotObjectSafe(DefId), ConstEvalFailure(ErrorHandled), Overflow, @@ -514,8 +521,7 @@ pub struct FulfillmentError<'tcx> { pub enum FulfillmentErrorCode<'tcx> { CodeSelectionError(SelectionError<'tcx>), CodeProjectionError(MismatchedProjectionTypes<'tcx>), - CodeSubtypeError(ExpectedFound<Ty<'tcx>>, - TypeError<'tcx>), // always comes from a SubtypePredicate + CodeSubtypeError(ExpectedFound<Ty<'tcx>>, TypeError<'tcx>), // always comes from a SubtypePredicate CodeAmbiguity, } @@ -617,7 +623,7 @@ pub enum Vtable<'tcx, N> { pub struct VtableImplData<'tcx, N> { pub impl_def_id: DefId, pub substs: SubstsRef<'tcx>, - pub nested: Vec<N> + pub nested: Vec<N>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)] @@ -626,7 +632,7 @@ pub struct VtableGeneratorData<'tcx, N> { pub substs: SubstsRef<'tcx>, /// Nested obligations. This can be non-empty if the generator /// signature contains associated types. - pub nested: Vec<N> + pub nested: Vec<N>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)] @@ -635,18 +641,18 @@ pub struct VtableClosureData<'tcx, N> { pub substs: SubstsRef<'tcx>, /// Nested obligations. This can be non-empty if the closure /// signature contains associated types. 
- pub nested: Vec<N> + pub nested: Vec<N>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)] pub struct VtableAutoImplData<N> { pub trait_def_id: DefId, - pub nested: Vec<N> + pub nested: Vec<N>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)] pub struct VtableBuiltinData<N> { - pub nested: Vec<N> + pub nested: Vec<N>, } /// A vtable for some object-safe trait `Foo` automatically derived @@ -667,7 +673,7 @@ pub struct VtableObjectData<'tcx, N> { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)] pub struct VtableFnPointerData<'tcx, N> { pub fn_ty: Ty<'tcx>, - pub nested: Vec<N> + pub nested: Vec<N>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)] @@ -698,14 +704,13 @@ pub fn type_known_to_meet_bound_modulo_regions<'a, 'tcx>( def_id: DefId, span: Span, ) -> bool { - debug!("type_known_to_meet_bound_modulo_regions(ty={:?}, bound={:?})", - ty, - infcx.tcx.def_path_str(def_id)); + debug!( + "type_known_to_meet_bound_modulo_regions(ty={:?}, bound={:?})", + ty, + infcx.tcx.def_path_str(def_id) + ); - let trait_ref = ty::TraitRef { - def_id, - substs: infcx.tcx.mk_substs_trait(ty, &[]), - }; + let trait_ref = ty::TraitRef { def_id, substs: infcx.tcx.mk_substs_trait(ty, &[]) }; let obligation = Obligation { param_env, cause: ObligationCause::misc(span, hir::DUMMY_HIR_ID), @@ -714,8 +719,12 @@ pub fn type_known_to_meet_bound_modulo_regions<'a, 'tcx>( }; let result = infcx.predicate_must_hold_modulo_regions(&obligation); - debug!("type_known_to_meet_ty={:?} bound={} => {:?}", - ty, infcx.tcx.def_path_str(def_id), result); + debug!( + "type_known_to_meet_ty={:?} bound={} => {:?}", + ty, + infcx.tcx.def_path_str(def_id), + result + ); if result && (ty.has_infer_types() || ty.has_closure_types()) { // Because of inference "guessing", selection can sometimes claim @@ -740,16 +749,20 @@ pub fn type_known_to_meet_bound_modulo_regions<'a, 'tcx>( // assume it is move; linear is always ok. match fulfill_cx.select_all_or_error(infcx) { Ok(()) => { - debug!("type_known_to_meet_bound_modulo_regions: ty={:?} bound={} success", - ty, - infcx.tcx.def_path_str(def_id)); + debug!( + "type_known_to_meet_bound_modulo_regions: ty={:?} bound={} success", + ty, + infcx.tcx.def_path_str(def_id) + ); true } Err(e) => { - debug!("type_known_to_meet_bound_modulo_regions: ty={:?} bound={} errors={:?}", - ty, - infcx.tcx.def_path_str(def_id), - e); + debug!( + "type_known_to_meet_bound_modulo_regions: ty={:?} bound={} errors={:?}", + ty, + infcx.tcx.def_path_str(def_id), + e + ); false } } @@ -767,9 +780,7 @@ fn do_normalize_predicates<'tcx>( ) -> Result<Vec<ty::Predicate<'tcx>>, ErrorReported> { debug!( "do_normalize_predicates(predicates={:?}, region_context={:?}, cause={:?})", - predicates, - region_context, - cause, + predicates, region_context, cause, ); let span = cause.span; tcx.infer_ctxt().enter(|infcx| { @@ -787,19 +798,14 @@ fn do_normalize_predicates<'tcx>( // them here too, and we will remove this function when // we move over to lazy normalization *anyway*. 
let fulfill_cx = FulfillmentContext::new_ignoring_regions(); - let predicates = match fully_normalize( - &infcx, - fulfill_cx, - cause, - elaborated_env, - &predicates, - ) { - Ok(predicates) => predicates, - Err(errors) => { - infcx.report_fulfillment_errors(&errors, None, false); - return Err(ErrorReported) - } - }; + let predicates = + match fully_normalize(&infcx, fulfill_cx, cause, elaborated_env, &predicates) { + Ok(predicates) => predicates, + Err(errors) => { + infcx.report_fulfillment_errors(&errors, None, false); + return Err(ErrorReported); + } + }; debug!("do_normalize_predictes: normalized predicates = {:?}", predicates); @@ -827,7 +833,7 @@ fn do_normalize_predicates<'tcx>( // unconstrained variable, and it seems better not to ICE, // all things considered. tcx.sess.span_err(span, &fixup_err.to_string()); - return Err(ErrorReported) + return Err(ErrorReported); } }; if predicates.has_local_value() { @@ -862,20 +868,20 @@ pub fn normalize_param_env_or_error<'tcx>( // and errors will get reported then; so after typeck we // can be sure that no errors should occur. - debug!("normalize_param_env_or_error(region_context={:?}, unnormalized_env={:?}, cause={:?})", - region_context, unnormalized_env, cause); + debug!( + "normalize_param_env_or_error(region_context={:?}, unnormalized_env={:?}, cause={:?})", + region_context, unnormalized_env, cause + ); let mut predicates: Vec<_> = - util::elaborate_predicates(tcx, unnormalized_env.caller_bounds.to_vec()) - .collect(); + util::elaborate_predicates(tcx, unnormalized_env.caller_bounds.to_vec()).collect(); - debug!("normalize_param_env_or_error: elaborated-predicates={:?}", - predicates); + debug!("normalize_param_env_or_error: elaborated-predicates={:?}", predicates); let elaborated_env = ty::ParamEnv::new( tcx.intern_predicates(&predicates), unnormalized_env.reveal, - unnormalized_env.def_id + unnormalized_env.def_id, ); // HACK: we are trying to normalize the param-env inside *itself*. The problem is that @@ -896,25 +902,31 @@ pub fn normalize_param_env_or_error<'tcx>( // // This works fairly well because trait matching does not actually care about param-env // TypeOutlives predicates - these are normally used by regionck. - let outlives_predicates: Vec<_> = predicates.drain_filter(|predicate| { - match predicate { + let outlives_predicates: Vec<_> = predicates + .drain_filter(|predicate| match predicate { ty::Predicate::TypeOutlives(..) => true, - _ => false - } - }).collect(); + _ => false, + }) + .collect(); - debug!("normalize_param_env_or_error: predicates=(non-outlives={:?}, outlives={:?})", - predicates, outlives_predicates); - let non_outlives_predicates = - match do_normalize_predicates(tcx, region_context, cause.clone(), - elaborated_env, predicates) { - Ok(predicates) => predicates, - // An unnormalized env is better than nothing. - Err(ErrorReported) => { - debug!("normalize_param_env_or_error: errored resolving non-outlives predicates"); - return elaborated_env - } - }; + debug!( + "normalize_param_env_or_error: predicates=(non-outlives={:?}, outlives={:?})", + predicates, outlives_predicates + ); + let non_outlives_predicates = match do_normalize_predicates( + tcx, + region_context, + cause.clone(), + elaborated_env, + predicates, + ) { + Ok(predicates) => predicates, + // An unnormalized env is better than nothing. 
+ Err(ErrorReported) => { + debug!("normalize_param_env_or_error: errored resolving non-outlives predicates"); + return elaborated_env; + } + }; debug!("normalize_param_env_or_error: non-outlives predicates={:?}", non_outlives_predicates); @@ -923,21 +935,22 @@ pub fn normalize_param_env_or_error<'tcx>( // predicates here anyway. Keeping them here anyway because it seems safer. let outlives_env: Vec<_> = non_outlives_predicates.iter().chain(&outlives_predicates).cloned().collect(); - let outlives_env = ty::ParamEnv::new( - tcx.intern_predicates(&outlives_env), - unnormalized_env.reveal, - None - ); - let outlives_predicates = - match do_normalize_predicates(tcx, region_context, cause, - outlives_env, outlives_predicates) { - Ok(predicates) => predicates, - // An unnormalized env is better than nothing. - Err(ErrorReported) => { - debug!("normalize_param_env_or_error: errored resolving outlives predicates"); - return elaborated_env - } - }; + let outlives_env = + ty::ParamEnv::new(tcx.intern_predicates(&outlives_env), unnormalized_env.reveal, None); + let outlives_predicates = match do_normalize_predicates( + tcx, + region_context, + cause, + outlives_env, + outlives_predicates, + ) { + Ok(predicates) => predicates, + // An unnormalized env is better than nothing. + Err(ErrorReported) => { + debug!("normalize_param_env_or_error: errored resolving outlives predicates"); + return elaborated_env; + } + }; debug!("normalize_param_env_or_error: outlives predicates={:?}", outlives_predicates); let mut predicates = non_outlives_predicates; @@ -946,7 +959,7 @@ pub fn normalize_param_env_or_error<'tcx>( ty::ParamEnv::new( tcx.intern_predicates(&predicates), unnormalized_env.reveal, - unnormalized_env.def_id + unnormalized_env.def_id, ) } @@ -964,9 +977,10 @@ where let selcx = &mut SelectionContext::new(infcx); let Normalized { value: normalized_value, obligations } = project::normalize(selcx, param_env, cause, value); - debug!("fully_normalize: normalized_value={:?} obligations={:?}", - normalized_value, - obligations); + debug!( + "fully_normalize: normalized_value={:?} obligations={:?}", + normalized_value, obligations + ); for obligation in obligations { fulfill_cx.register_predicate_obligation(selcx.infcx(), obligation); } @@ -987,8 +1001,7 @@ fn normalize_and_test_predicates<'tcx>( tcx: TyCtxt<'tcx>, predicates: Vec<ty::Predicate<'tcx>>, ) -> bool { - debug!("normalize_and_test_predicates(predicates={:?})", - predicates); + debug!("normalize_and_test_predicates(predicates={:?})", predicates); let result = tcx.infer_ctxt().enter(|infcx| { let param_env = ty::ParamEnv::reveal_all(); @@ -1007,8 +1020,7 @@ fn normalize_and_test_predicates<'tcx>( fulfill_cx.select_all_or_error(&infcx).is_ok() }); - debug!("normalize_and_test_predicates(predicates={:?}) = {:?}", - predicates, result); + debug!("normalize_and_test_predicates(predicates={:?}) = {:?}", predicates, result); result } @@ -1016,14 +1028,12 @@ fn substitute_normalize_and_test_predicates<'tcx>( tcx: TyCtxt<'tcx>, key: (DefId, SubstsRef<'tcx>), ) -> bool { - debug!("substitute_normalize_and_test_predicates(key={:?})", - key); + debug!("substitute_normalize_and_test_predicates(key={:?})", key); let predicates = tcx.predicates_of(key.0).instantiate(tcx, key.1).predicates; let result = normalize_and_test_predicates(tcx, predicates); - debug!("substitute_normalize_and_test_predicates(key={:?}) = {:?}", - key, result); + debug!("substitute_normalize_and_test_predicates(key={:?}) = {:?}", key, result); result } @@ -1036,100 +1046,98 @@ fn 
vtable_methods<'tcx>( ) -> &'tcx [Option<(DefId, SubstsRef<'tcx>)>] { debug!("vtable_methods({:?})", trait_ref); - tcx.arena.alloc_from_iter( - supertraits(tcx, trait_ref).flat_map(move |trait_ref| { - let trait_methods = tcx.associated_items(trait_ref.def_id()) - .filter(|item| item.kind == ty::AssocKind::Method); - - // Now list each method's DefId and InternalSubsts (for within its trait). - // If the method can never be called from this object, produce None. - trait_methods.map(move |trait_method| { - debug!("vtable_methods: trait_method={:?}", trait_method); - let def_id = trait_method.def_id; - - // Some methods cannot be called on an object; skip those. - if !tcx.is_vtable_safe_method(trait_ref.def_id(), &trait_method) { - debug!("vtable_methods: not vtable safe"); - return None; - } - - // The method may have some early-bound lifetimes; add regions for those. - let substs = trait_ref.map_bound(|trait_ref| - InternalSubsts::for_item(tcx, def_id, |param, _| - match param.kind { - GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(), - GenericParamDefKind::Type { .. } | - GenericParamDefKind::Const => { - trait_ref.substs[param.index as usize] - } - } - ) - ); - - // The trait type may have higher-ranked lifetimes in it; - // erase them if they appear, so that we get the type - // at some particular call site. - let substs = tcx.normalize_erasing_late_bound_regions( - ty::ParamEnv::reveal_all(), - &substs - ); + tcx.arena.alloc_from_iter(supertraits(tcx, trait_ref).flat_map(move |trait_ref| { + let trait_methods = tcx + .associated_items(trait_ref.def_id()) + .filter(|item| item.kind == ty::AssocKind::Method); + + // Now list each method's DefId and InternalSubsts (for within its trait). + // If the method can never be called from this object, produce None. + trait_methods.map(move |trait_method| { + debug!("vtable_methods: trait_method={:?}", trait_method); + let def_id = trait_method.def_id; + + // Some methods cannot be called on an object; skip those. + if !tcx.is_vtable_safe_method(trait_ref.def_id(), &trait_method) { + debug!("vtable_methods: not vtable safe"); + return None; + } - // It's possible that the method relies on where-clauses that - // do not hold for this particular set of type parameters. - // Note that this method could then never be called, so we - // do not want to try and codegen it, in that case (see #23435). - let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs); - if !normalize_and_test_predicates(tcx, predicates.predicates) { - debug!("vtable_methods: predicates do not hold"); - return None; - } + // The method may have some early-bound lifetimes; add regions for those. + let substs = trait_ref.map_bound(|trait_ref| { + InternalSubsts::for_item(tcx, def_id, |param, _| match param.kind { + GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(), + GenericParamDefKind::Type { .. } | GenericParamDefKind::Const => { + trait_ref.substs[param.index as usize] + } + }) + }); + + // The trait type may have higher-ranked lifetimes in it; + // erase them if they appear, so that we get the type + // at some particular call site. + let substs = + tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &substs); + + // It's possible that the method relies on where-clauses that + // do not hold for this particular set of type parameters. + // Note that this method could then never be called, so we + // do not want to try and codegen it, in that case (see #23435). 
+ let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs); + if !normalize_and_test_predicates(tcx, predicates.predicates) { + debug!("vtable_methods: predicates do not hold"); + return None; + } - Some((def_id, substs)) - }) + Some((def_id, substs)) }) - ) + })) } impl<'tcx, O> Obligation<'tcx, O> { - pub fn new(cause: ObligationCause<'tcx>, - param_env: ty::ParamEnv<'tcx>, - predicate: O) - -> Obligation<'tcx, O> - { + pub fn new( + cause: ObligationCause<'tcx>, + param_env: ty::ParamEnv<'tcx>, + predicate: O, + ) -> Obligation<'tcx, O> { Obligation { cause, param_env, recursion_depth: 0, predicate } } - fn with_depth(cause: ObligationCause<'tcx>, - recursion_depth: usize, - param_env: ty::ParamEnv<'tcx>, - predicate: O) - -> Obligation<'tcx, O> - { + fn with_depth( + cause: ObligationCause<'tcx>, + recursion_depth: usize, + param_env: ty::ParamEnv<'tcx>, + predicate: O, + ) -> Obligation<'tcx, O> { Obligation { cause, param_env, recursion_depth, predicate } } - pub fn misc(span: Span, - body_id: hir::HirId, - param_env: ty::ParamEnv<'tcx>, - trait_ref: O) - -> Obligation<'tcx, O> { + pub fn misc( + span: Span, + body_id: hir::HirId, + param_env: ty::ParamEnv<'tcx>, + trait_ref: O, + ) -> Obligation<'tcx, O> { Obligation::new(ObligationCause::misc(span, body_id), param_env, trait_ref) } - pub fn with<P>(&self, value: P) -> Obligation<'tcx,P> { - Obligation { cause: self.cause.clone(), - param_env: self.param_env, - recursion_depth: self.recursion_depth, - predicate: value } + pub fn with<P>(&self, value: P) -> Obligation<'tcx, P> { + Obligation { + cause: self.cause.clone(), + param_env: self.param_env, + recursion_depth: self.recursion_depth, + predicate: value, + } } } impl<'tcx> ObligationCause<'tcx> { #[inline] - pub fn new(span: Span, - body_id: hir::HirId, - code: ObligationCauseCode<'tcx>) - -> ObligationCause<'tcx> { + pub fn new( + span: Span, + body_id: hir::HirId, + code: ObligationCauseCode<'tcx>, + ) -> ObligationCause<'tcx> { ObligationCause { span, body_id, code } } @@ -1157,7 +1165,10 @@ impl<'tcx, N> Vtable<'tcx, N> { } } - pub fn map<M, F>(self, f: F) -> Vtable<'tcx, M> where F: FnMut(N) -> M { + pub fn map<M, F>(self, f: F) -> Vtable<'tcx, M> + where + F: FnMut(N) -> M, + { match self { VtableImpl(i) => VtableImpl(VtableImplData { impl_def_id: i.impl_def_id, @@ -1165,9 +1176,9 @@ impl<'tcx, N> Vtable<'tcx, N> { nested: i.nested.into_iter().map(f).collect(), }), VtableParam(n) => VtableParam(n.into_iter().map(f).collect()), - VtableBuiltin(i) => VtableBuiltin(VtableBuiltinData { - nested: i.nested.into_iter().map(f).collect(), - }), + VtableBuiltin(i) => { + VtableBuiltin(VtableBuiltinData { nested: i.nested.into_iter().map(f).collect() }) + } VtableObject(o) => VtableObject(VtableObjectData { upcast_trait_ref: o.upcast_trait_ref, vtable_base: o.vtable_base, @@ -1201,10 +1212,10 @@ impl<'tcx, N> Vtable<'tcx, N> { } impl<'tcx> FulfillmentError<'tcx> { - fn new(obligation: PredicateObligation<'tcx>, - code: FulfillmentErrorCode<'tcx>) - -> FulfillmentError<'tcx> - { + fn new( + obligation: PredicateObligation<'tcx>, + code: FulfillmentErrorCode<'tcx>, + ) -> FulfillmentError<'tcx> { FulfillmentError { obligation: obligation, code: code, points_at_arg_span: false } } } diff --git a/src/librustc/ty/constness.rs b/src/librustc/ty/constness.rs index 7fe950ef7b7..35d5bdaf182 100644 --- a/src/librustc/ty/constness.rs +++ b/src/librustc/ty/constness.rs @@ -1,41 +1,38 @@ -use crate::ty::query::Providers; -use crate::hir::def_id::DefId; use crate::hir; +use 
crate::hir::def_id::DefId; +use crate::hir::map::blocks::FnLikeNode; +use crate::ty::query::Providers; use crate::ty::TyCtxt; -use syntax_pos::symbol::Symbol; use rustc_target::spec::abi::Abi; -use crate::hir::map::blocks::FnLikeNode; use syntax::attr; +use syntax_pos::symbol::Symbol; impl<'tcx> TyCtxt<'tcx> { /// Whether the `def_id` counts as const fn in your current crate, considering all active /// feature gates pub fn is_const_fn(self, def_id: DefId) -> bool { - self.is_const_fn_raw(def_id) && match self.is_unstable_const_fn(def_id) { - Some(feature_name) => { - // has a `rustc_const_unstable` attribute, check whether the user enabled the - // corresponding feature gate. - self.features() - .declared_lib_features - .iter() - .any(|&(sym, _)| sym == feature_name) - }, - // functions without const stability are either stable user written - // const fn or the user is using feature gates and we thus don't - // care what they do - None => true, - } + self.is_const_fn_raw(def_id) + && match self.is_unstable_const_fn(def_id) { + Some(feature_name) => { + // has a `rustc_const_unstable` attribute, check whether the user enabled the + // corresponding feature gate. + self.features() + .declared_lib_features + .iter() + .any(|&(sym, _)| sym == feature_name) + } + // functions without const stability are either stable user written + // const fn or the user is using feature gates and we thus don't + // care what they do + None => true, + } } /// Whether the `def_id` is an unstable const fn and what feature gate is necessary to enable it pub fn is_unstable_const_fn(self, def_id: DefId) -> Option<Symbol> { if self.is_const_fn_raw(def_id) { let const_stab = self.lookup_const_stability(def_id)?; - if const_stab.level.is_unstable() { - Some(const_stab.feature) - } else { - None - } + if const_stab.level.is_unstable() { Some(const_stab.feature) } else { None } } else { None } @@ -54,29 +51,31 @@ impl<'tcx> TyCtxt<'tcx> { match self.lookup_const_stability(def_id) { // `rustc_const_unstable` functions don't need to conform. Some(&attr::ConstStability { ref level, .. }) if level.is_unstable() => false, - None => if let Some(stab) = self.lookup_stability(def_id) { - if stab.level.is_stable() { - self.sess.span_err( - self.def_span(def_id), - "stable const functions must have either `rustc_const_stable` or \ + None => { + if let Some(stab) = self.lookup_stability(def_id) { + if stab.level.is_stable() { + self.sess.span_err( + self.def_span(def_id), + "stable const functions must have either `rustc_const_stable` or \ `rustc_const_unstable` attribute", - ); - // While we errored above, because we don't know if we need to conform, we - // err on the "safe" side and require min_const_fn. - true + ); + // While we errored above, because we don't know if we need to conform, we + // err on the "safe" side and require min_const_fn. + true + } else { + // Unstable functions need not conform to min_const_fn. + false + } } else { - // Unstable functions need not conform to min_const_fn. - false + // Internal functions are forced to conform to min_const_fn. + // Annotate the internal function with a const stability attribute if + // you need to use unstable features. + // Note: this is an arbitrary choice that does not affect stability or const + // safety or anything, it just changes whether we need to annotate some + // internal functions with `rustc_const_stable` or with `rustc_const_unstable` + true } - } else { - // Internal functions are forced to conform to min_const_fn. 
- // Annotate the internal function with a const stability attribute if - // you need to use unstable features. - // Note: this is an arbitrary choice that does not affect stability or const - // safety or anything, it just changes whether we need to annotate some - // internal functions with `rustc_const_stable` or with `rustc_const_unstable` - true - }, + } // Everything else needs to conform, because it would be callable from // other `min_const_fn` functions. _ => true, @@ -88,23 +87,25 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn provide(providers: &mut Providers<'_>) { /// Const evaluability whitelist is here to check evaluability at the /// top level beforehand. fn is_const_intrinsic(tcx: TyCtxt<'_>, def_id: DefId) -> Option<bool> { match tcx.fn_sig(def_id).abi() { - Abi::RustIntrinsic | - Abi::PlatformIntrinsic => Some(tcx.lookup_const_stability(def_id).is_some()), - _ => None + Abi::RustIntrinsic | Abi::PlatformIntrinsic => { + Some(tcx.lookup_const_stability(def_id).is_some()) + } + _ => None, } } /// Checks whether the function has a `const` modifier or, in case it is an intrinsic, whether /// said intrinsic is on the whitelist for being const callable. fn is_const_fn_raw(tcx: TyCtxt<'_>, def_id: DefId) -> bool { - let hir_id = tcx.hir().as_local_hir_id(def_id) - .expect("Non-local call to local provider is_const_fn"); + let hir_id = tcx + .hir() + .as_local_hir_id(def_id) + .expect("Non-local call to local provider is_const_fn"); let node = tcx.hir().get(hir_id); @@ -120,27 +121,30 @@ pub fn provide(providers: &mut Providers<'_>) { } fn is_promotable_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> bool { - tcx.is_const_fn(def_id) && match tcx.lookup_const_stability(def_id) { - Some(stab) => { - if cfg!(debug_assertions) && stab.promotable { - let sig = tcx.fn_sig(def_id); - assert_eq!( - sig.unsafety(), - hir::Unsafety::Normal, - "don't mark const unsafe fns as promotable", - // https://github.com/rust-lang/rust/pull/53851#issuecomment-418760682 - ); + tcx.is_const_fn(def_id) + && match tcx.lookup_const_stability(def_id) { + Some(stab) => { + if cfg!(debug_assertions) && stab.promotable { + let sig = tcx.fn_sig(def_id); + assert_eq!( + sig.unsafety(), + hir::Unsafety::Normal, + "don't mark const unsafe fns as promotable", + // https://github.com/rust-lang/rust/pull/53851#issuecomment-418760682 + ); + } + stab.promotable } - stab.promotable - }, - None => false, - } + None => false, + } } fn const_fn_is_allowed_fn_ptr(tcx: TyCtxt<'_>, def_id: DefId) -> bool { - tcx.is_const_fn(def_id) && - tcx.lookup_const_stability(def_id) - .map(|stab| stab.allow_const_fn_ptr).unwrap_or(false) + tcx.is_const_fn(def_id) + && tcx + .lookup_const_stability(def_id) + .map(|stab| stab.allow_const_fn_ptr) + .unwrap_or(false) } *providers = Providers { diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index e36b11ae005..39341de6367 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1,81 +1,82 @@ -// ignore-tidy-filelength //! Type context book-keeping. 
use crate::arena::Arena; use crate::dep_graph::DepGraph; -use crate::dep_graph::{self, DepNode, DepConstructor}; -use crate::session::Session; -use crate::session::config::{BorrowckMode, OutputFilenames}; -use crate::session::config::CrateType; -use crate::middle; -use crate::middle::lang_items::PanicLocationLangItem; -use crate::hir::{self, TraitCandidate, HirId, ItemKind, ItemLocalId, Node}; -use crate::hir::def::{Res, DefKind, Export}; +use crate::dep_graph::{self, DepConstructor, DepNode}; +use crate::hir::def::{DefKind, Export, Res}; use crate::hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use crate::hir::map as hir_map; use crate::hir::map::DefPathHash; -use crate::lint::{self, Lint}; -use crate::ich::{StableHashingContext, NodeIdHashingMode}; +use crate::hir::{self, HirId, ItemKind, ItemLocalId, Node, TraitCandidate}; +use crate::ich::{NodeIdHashingMode, StableHashingContext}; use crate::infer::canonical::{Canonical, CanonicalVarInfo, CanonicalVarInfos}; use crate::infer::outlives::free_region_map::FreeRegionMap; +use crate::lint::{self, Lint}; +use crate::middle; use crate::middle::cstore::CrateStoreDyn; use crate::middle::cstore::EncodedMetadata; use crate::middle::lang_items; +use crate::middle::lang_items::PanicLocationLangItem; use crate::middle::resolve_lifetime::{self, ObjectLifetimeDefault}; use crate::middle::stability; -use crate::mir::{BodyAndCache, Field, interpret, Local, Place, PlaceElem, ProjectionKind, Promoted}; -use crate::mir::interpret::{ConstValue, Allocation, Scalar}; -use crate::ty::subst::{GenericArg, InternalSubsts, SubstsRef, Subst}; -use crate::ty::ReprOptions; +use crate::mir::interpret::{Allocation, ConstValue, Scalar}; +use crate::mir::{ + interpret, BodyAndCache, Field, Local, Place, PlaceElem, ProjectionKind, Promoted, +}; +use crate::session::config::CrateType; +use crate::session::config::{BorrowckMode, OutputFilenames}; +use crate::session::Session; use crate::traits; -use crate::traits::{Clause, Clauses, GoalKind, Goal, Goals}; -use crate::ty::{self, DefIdTree, Ty, TypeAndMut}; -use crate::ty::{TyS, TyKind, List}; -use crate::ty::{AdtKind, AdtDef, Region, Const}; -use crate::ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate}; -use crate::ty::RegionKind; -use crate::ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid, ConstVid}; -use crate::ty::TyKind::*; -use crate::ty::{InferConst, ParamConst}; -use crate::ty::GenericParamDefKind; +use crate::traits::{Clause, Clauses, Goal, GoalKind, Goals}; use crate::ty::layout::{LayoutDetails, TargetDataLayout, VariantIdx}; use crate::ty::query; use crate::ty::steal::Steal; -use crate::ty::subst::{UserSubsts, GenericArgKind}; -use crate::ty::{BoundVar, BindingMode}; +use crate::ty::subst::{GenericArg, InternalSubsts, Subst, SubstsRef}; +use crate::ty::subst::{GenericArgKind, UserSubsts}; use crate::ty::CanonicalPolyFnSig; +use crate::ty::GenericParamDefKind; +use crate::ty::RegionKind; +use crate::ty::ReprOptions; +use crate::ty::TyKind::*; +use crate::ty::{self, DefIdTree, Ty, TypeAndMut}; +use crate::ty::{AdtDef, AdtKind, Const, Region}; +use crate::ty::{BindingMode, BoundVar}; +use crate::ty::{ConstVid, FloatVar, FloatVid, IntVar, IntVid, TyVar, TyVid}; +use crate::ty::{ExistentialPredicate, InferTy, ParamTy, PolyFnSig, Predicate, ProjectionTy}; +use crate::ty::{InferConst, ParamConst}; +use crate::ty::{List, TyKind, TyS}; use crate::util::common::ErrorReported; use crate::util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap, ItemLocalSet, NodeMap}; use 
crate::util::nodemap::{FxHashMap, FxHashSet}; -use errors::DiagnosticBuilder; use arena::SyncDroplessArena; -use smallvec::SmallVec; +use errors::DiagnosticBuilder; use rustc_data_structures::profiling::SelfProfilerRef; +use rustc_data_structures::sharded::ShardedHashMap; use rustc_data_structures::stable_hasher::{ - HashStable, StableHasher, StableVec, hash_stable_hashmap, + hash_stable_hashmap, HashStable, StableHasher, StableVec, }; +use rustc_data_structures::sync::{Lock, Lrc, WorkerLocal}; use rustc_index::vec::{Idx, IndexVec}; -use rustc_data_structures::sharded::ShardedHashMap; -use rustc_data_structures::sync::{Lrc, Lock, WorkerLocal}; +use rustc_macros::HashStable; +use rustc_target::spec::abi; +use smallvec::SmallVec; use std::any::Any; use std::borrow::Borrow; use std::cmp::Ordering; use std::collections::hash_map::{self, Entry}; -use std::hash::{Hash, Hasher}; use std::fmt; -use std::mem; -use std::ops::{Deref, Bound}; +use std::hash::{Hash, Hasher}; use std::iter; +use std::mem; +use std::ops::{Bound, Deref}; use std::sync::Arc; -use rustc_target::spec::abi; -use rustc_macros::HashStable; use syntax::ast; use syntax::attr; +use syntax::expand::allocator::AllocatorKind; use syntax::source_map::MultiSpan; -use syntax::symbol::{Symbol, kw, sym}; +use syntax::symbol::{kw, sym, Symbol}; use syntax_pos::Span; -use syntax::expand::allocator::AllocatorKind; pub struct AllArenas { pub interner: SyncDroplessArena, @@ -83,9 +84,7 @@ pub struct AllArenas { impl AllArenas { pub fn new() -> Self { - AllArenas { - interner: SyncDroplessArena::default(), - } + AllArenas { interner: SyncDroplessArena::default() } } } @@ -135,20 +134,20 @@ impl<'tcx> CtxtInterners<'tcx> { /// Interns a type. #[allow(rustc::usage_of_ty_tykind)] #[inline(never)] - fn intern_ty(&self, - kind: TyKind<'tcx> - ) -> Ty<'tcx> { - self.type_.intern(kind, |kind| { - let flags = super::flags::FlagComputation::for_kind(&kind); - - let ty_struct = TyS { - kind, - flags: flags.flags, - outer_exclusive_binder: flags.outer_exclusive_binder, - }; + fn intern_ty(&self, kind: TyKind<'tcx>) -> Ty<'tcx> { + self.type_ + .intern(kind, |kind| { + let flags = super::flags::FlagComputation::for_kind(&kind); + + let ty_struct = TyS { + kind, + flags: flags.flags, + outer_exclusive_binder: flags.outer_exclusive_binder, + }; - Interned(self.arena.alloc(ty_struct)) - }).0 + Interned(self.arena.alloc(ty_struct)) + }) + .0 } } @@ -192,7 +191,7 @@ pub struct CommonConsts<'tcx> { pub struct LocalTableInContext<'a, V> { local_id_root: Option<DefId>, - data: &'a ItemLocalMap<V> + data: &'a ItemLocalMap<V>, } /// Validate that the given HirId (respectively its `local_id` part) can be @@ -202,17 +201,21 @@ pub struct LocalTableInContext<'a, V> { /// would be in a different frame of reference and using its `local_id` /// would result in lookup errors, or worse, in silently wrong data being /// stored/returned. 
-fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>, - hir_id: hir::HirId, - mut_access: bool) { +fn validate_hir_id_for_typeck_tables( + local_id_root: Option<DefId>, + hir_id: hir::HirId, + mut_access: bool, +) { if let Some(local_id_root) = local_id_root { if hir_id.owner != local_id_root.index { ty::tls::with(|tcx| { - bug!("node {} with HirId::owner {:?} cannot be placed in \ + bug!( + "node {} with HirId::owner {:?} cannot be placed in \ TypeckTables with local_id_root {:?}", - tcx.hir().node_to_string(hir_id), - DefId::local(hir_id.owner), - local_id_root) + tcx.hir().node_to_string(hir_id), + DefId::local(hir_id.owner), + local_id_root + ) }); } } else { @@ -253,7 +256,7 @@ impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> { pub struct LocalTableInContextMut<'a, V> { local_id_root: Option<DefId>, - data: &'a mut ItemLocalMap<V> + data: &'a mut ItemLocalMap<V>, } impl<'a, V> LocalTableInContextMut<'a, V> { @@ -464,7 +467,8 @@ impl<'tcx> TypeckTables<'tcx> { pub fn qpath_res(&self, qpath: &hir::QPath, id: hir::HirId) -> Res { match *qpath { hir::QPath::Resolved(_, ref path) => path.res, - hir::QPath::TypeRelative(..) => self.type_dependent_def(id) + hir::QPath::TypeRelative(..) => self + .type_dependent_def(id) .map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)), } } @@ -472,10 +476,7 @@ impl<'tcx> TypeckTables<'tcx> { pub fn type_dependent_defs( &self, ) -> LocalTableInContext<'_, Result<(DefKind, DefId), ErrorReported>> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.type_dependent_defs - } + LocalTableInContext { local_id_root: self.local_id_root, data: &self.type_dependent_defs } } pub fn type_dependent_def(&self, id: HirId) -> Option<(DefKind, DefId)> { @@ -492,61 +493,43 @@ impl<'tcx> TypeckTables<'tcx> { ) -> LocalTableInContextMut<'_, Result<(DefKind, DefId), ErrorReported>> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.type_dependent_defs + data: &mut self.type_dependent_defs, } } pub fn field_indices(&self) -> LocalTableInContext<'_, usize> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.field_indices - } + LocalTableInContext { local_id_root: self.local_id_root, data: &self.field_indices } } pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> { - LocalTableInContextMut { - local_id_root: self.local_id_root, - data: &mut self.field_indices - } + LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.field_indices } } - pub fn user_provided_types( - &self - ) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.user_provided_types - } + pub fn user_provided_types(&self) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> { + LocalTableInContext { local_id_root: self.local_id_root, data: &self.user_provided_types } } pub fn user_provided_types_mut( - &mut self + &mut self, ) -> LocalTableInContextMut<'_, CanonicalUserType<'tcx>> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.user_provided_types + data: &mut self.user_provided_types, } } pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.node_types - } + LocalTableInContext { local_id_root: self.local_id_root, data: &self.node_types } } pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> { - LocalTableInContextMut { - local_id_root: 
self.local_id_root, - data: &mut self.node_types - } + LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.node_types } } pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> { - self.node_type_opt(id).unwrap_or_else(|| - bug!("node_type: no type for node `{}`", - tls::with(|tcx| tcx.hir().node_to_string(id))) - ) + self.node_type_opt(id).unwrap_or_else(|| { + bug!("node_type: no type for node `{}`", tls::with(|tcx| tcx.hir().node_to_string(id))) + }) } pub fn node_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> { @@ -555,10 +538,7 @@ impl<'tcx> TypeckTables<'tcx> { } pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, SubstsRef<'tcx>> { - LocalTableInContextMut { - local_id_root: self.local_id_root, - data: &mut self.node_substs - } + LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.node_substs } } pub fn node_substs(&self, id: hir::HirId) -> SubstsRef<'tcx> { @@ -600,22 +580,16 @@ impl<'tcx> TypeckTables<'tcx> { } pub fn adjustments(&self) -> LocalTableInContext<'_, Vec<ty::adjustment::Adjustment<'tcx>>> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.adjustments - } + LocalTableInContext { local_id_root: self.local_id_root, data: &self.adjustments } } - pub fn adjustments_mut(&mut self) - -> LocalTableInContextMut<'_, Vec<ty::adjustment::Adjustment<'tcx>>> { - LocalTableInContextMut { - local_id_root: self.local_id_root, - data: &mut self.adjustments - } + pub fn adjustments_mut( + &mut self, + ) -> LocalTableInContextMut<'_, Vec<ty::adjustment::Adjustment<'tcx>>> { + LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.adjustments } } - pub fn expr_adjustments(&self, expr: &hir::Expr) - -> &[ty::adjustment::Adjustment<'tcx>] { + pub fn expr_adjustments(&self, expr: &hir::Expr) -> &[ty::adjustment::Adjustment<'tcx>] { validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false); self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..]) } @@ -623,16 +597,11 @@ impl<'tcx> TypeckTables<'tcx> { /// Returns the type of `expr`, considering any `Adjustment` /// entry recorded for that expression. 
pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> { - self.expr_adjustments(expr) - .last() - .map_or_else(|| self.expr_ty(expr), |adj| adj.target) + self.expr_adjustments(expr).last().map_or_else(|| self.expr_ty(expr), |adj| adj.target) } pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> { - self.expr_adjustments(expr) - .last() - .map(|adj| adj.target) - .or_else(|| self.expr_ty_opt(expr)) + self.expr_adjustments(expr).last().map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr)) } pub fn is_method_call(&self, expr: &hir::Expr) -> bool { @@ -644,7 +613,7 @@ impl<'tcx> TypeckTables<'tcx> { match self.type_dependent_defs().get(expr.hir_id) { Some(Ok((DefKind::Method, _))) => true, - _ => false + _ => false, } } @@ -656,29 +625,21 @@ impl<'tcx> TypeckTables<'tcx> { } pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.pat_binding_modes - } + LocalTableInContext { local_id_root: self.local_id_root, data: &self.pat_binding_modes } } - pub fn pat_binding_modes_mut(&mut self) - -> LocalTableInContextMut<'_, BindingMode> { + pub fn pat_binding_modes_mut(&mut self) -> LocalTableInContextMut<'_, BindingMode> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.pat_binding_modes + data: &mut self.pat_binding_modes, } } pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.pat_adjustments, - } + LocalTableInContext { local_id_root: self.local_id_root, data: &self.pat_adjustments } } - pub fn pat_adjustments_mut(&mut self) - -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> { + pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.pat_adjustments, @@ -690,44 +651,35 @@ impl<'tcx> TypeckTables<'tcx> { } pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, ast::Name)> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.closure_kind_origins - } + LocalTableInContext { local_id_root: self.local_id_root, data: &self.closure_kind_origins } } pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<'_, (Span, ast::Name)> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.closure_kind_origins + data: &mut self.closure_kind_origins, } } pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.liberated_fn_sigs - } + LocalTableInContext { local_id_root: self.local_id_root, data: &self.liberated_fn_sigs } } pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.liberated_fn_sigs + data: &mut self.liberated_fn_sigs, } } pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> { - LocalTableInContext { - local_id_root: self.local_id_root, - data: &self.fru_field_types - } + LocalTableInContext { local_id_root: self.local_id_root, data: &self.fru_field_types } } pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.fru_field_types + data: &mut self.fru_field_types, } } @@ -743,7 +695,6 @@ impl<'tcx> TypeckTables<'tcx> { pub fn coercion_casts(&self) -> 
&ItemLocalSet { &self.coercion_casts } - } impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckTables<'tcx> { @@ -772,7 +723,6 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckTables<'tcx> { ref concrete_opaque_types, ref upvar_list, ref generator_interior_types, - } = *self; hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { @@ -786,25 +736,19 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckTables<'tcx> { pat_binding_modes.hash_stable(hcx, hasher); pat_adjustments.hash_stable(hcx, hasher); hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| { - let ty::UpvarId { - var_path, - closure_expr_id - } = *up_var_id; - - let local_id_root = - local_id_root.expect("trying to hash invalid TypeckTables"); - - let var_owner_def_id = DefId { - krate: local_id_root.krate, - index: var_path.hir_id.owner, - }; - let closure_def_id = DefId { - krate: local_id_root.krate, - index: closure_expr_id.to_def_id().index, - }; - (hcx.def_path_hash(var_owner_def_id), - var_path.hir_id.local_id, - hcx.def_path_hash(closure_def_id)) + let ty::UpvarId { var_path, closure_expr_id } = *up_var_id; + + let local_id_root = local_id_root.expect("trying to hash invalid TypeckTables"); + + let var_owner_def_id = + DefId { krate: local_id_root.krate, index: var_path.hir_id.owner }; + let closure_def_id = + DefId { krate: local_id_root.krate, index: closure_expr_id.to_def_id().index }; + ( + hcx.def_path_hash(var_owner_def_id), + var_path.hir_id.local_id, + hcx.def_path_hash(closure_def_id), + ) }); closure_kind_origins.hash_stable(hcx, hasher); @@ -884,7 +828,7 @@ impl CanonicalUserType<'tcx> { }, } }) - }, + } } } } @@ -926,10 +870,7 @@ impl<'tcx> CommonTypes<'tcx> { u128: mk(Uint(ast::UintTy::U128)), f32: mk(Float(ast::FloatTy::F32)), f64: mk(Float(ast::FloatTy::F64)), - self_param: mk(ty::Param(ty::ParamTy { - index: 0, - name: kw::SelfUpper, - })), + self_param: mk(ty::Param(ty::ParamTy { index: 0, name: kw::SelfUpper })), trait_object_dummy_self: mk(Infer(ty::FreshTy(0))), } @@ -938,11 +879,7 @@ impl<'tcx> CommonTypes<'tcx> { impl<'tcx> CommonLifetimes<'tcx> { fn new(interners: &CtxtInterners<'tcx>) -> CommonLifetimes<'tcx> { - let mk = |r| { - interners.region.intern(r, |r| { - Interned(interners.arena.alloc(r)) - }).0 - }; + let mk = |r| interners.region.intern(r, |r| Interned(interners.arena.alloc(r))).0; CommonLifetimes { re_empty: mk(RegionKind::ReEmpty), @@ -954,11 +891,7 @@ impl<'tcx> CommonLifetimes<'tcx> { impl<'tcx> CommonConsts<'tcx> { fn new(interners: &CtxtInterners<'tcx>, types: &CommonTypes<'tcx>) -> CommonConsts<'tcx> { - let mk_const = |c| { - interners.const_.intern(c, |c| { - Interned(interners.arena.alloc(c)) - }).0 - }; + let mk_const = |c| interners.const_.intern(c, |c| Interned(interners.arena.alloc(c))).0; CommonConsts { err: mk_const(ty::Const { @@ -1030,9 +963,7 @@ pub struct GlobalCtxt<'tcx> { /// Map indicating what traits are in scope for places where this /// is relevant; generated by resolve. - trait_map: FxHashMap<DefIndex, - FxHashMap<ItemLocalId, - StableVec<TraitCandidate>>>, + trait_map: FxHashMap<DefIndex, FxHashMap<ItemLocalId, StableVec<TraitCandidate>>>, /// Export map produced by name resolution. 
export_map: FxHashMap<DefId, Vec<Export<hir::HirId>>>, @@ -1099,13 +1030,17 @@ impl<'tcx> TyCtxt<'tcx> { self.arena.alloc(Steal::new(mir)) } - pub fn alloc_steal_promoted(self, promoted: IndexVec<Promoted, BodyAndCache<'tcx>>) -> - &'tcx Steal<IndexVec<Promoted, BodyAndCache<'tcx>>> { + pub fn alloc_steal_promoted( + self, + promoted: IndexVec<Promoted, BodyAndCache<'tcx>>, + ) -> &'tcx Steal<IndexVec<Promoted, BodyAndCache<'tcx>>> { self.arena.alloc(Steal::new(promoted)) } - pub fn intern_promoted(self, promoted: IndexVec<Promoted, BodyAndCache<'tcx>>) -> - &'tcx IndexVec<Promoted, BodyAndCache<'tcx>> { + pub fn intern_promoted( + self, + promoted: IndexVec<Promoted, BodyAndCache<'tcx>>, + ) -> &'tcx IndexVec<Promoted, BodyAndCache<'tcx>> { self.arena.alloc(promoted) } @@ -1121,9 +1056,7 @@ impl<'tcx> TyCtxt<'tcx> { } pub fn intern_const_alloc(self, alloc: Allocation) -> &'tcx Allocation { - self.allocation_interner.intern(alloc, |alloc| { - self.arena.alloc(alloc) - }) + self.allocation_interner.intern(alloc, |alloc| self.arena.alloc(alloc)) } /// Allocates a read-only byte or string literal for `mir::interpret`. @@ -1135,21 +1068,15 @@ impl<'tcx> TyCtxt<'tcx> { } pub fn intern_stability(self, stab: attr::Stability) -> &'tcx attr::Stability { - self.stability_interner.intern(stab, |stab| { - self.arena.alloc(stab) - }) + self.stability_interner.intern(stab, |stab| self.arena.alloc(stab)) } pub fn intern_const_stability(self, stab: attr::ConstStability) -> &'tcx attr::ConstStability { - self.const_stability_interner.intern(stab, |stab| { - self.arena.alloc(stab) - }) + self.const_stability_interner.intern(stab, |stab| self.arena.alloc(stab)) } pub fn intern_layout(self, layout: LayoutDetails) -> &'tcx LayoutDetails { - self.layout_interner.intern(layout, |layout| { - self.arena.alloc(layout) - }) + self.layout_interner.intern(layout, |layout| self.arena.alloc(layout)) } /// Returns a range of the start/end indices specified with the @@ -1169,8 +1096,10 @@ impl<'tcx> TyCtxt<'tcx> { } span_bug!(attr.span, "no arguments to `rustc_layout_scalar_valid_range` attribute"); }; - (get(sym::rustc_layout_scalar_valid_range_start), - get(sym::rustc_layout_scalar_valid_range_end)) + ( + get(sym::rustc_layout_scalar_valid_range_start), + get(sym::rustc_layout_scalar_valid_range_end), + ) } pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> { @@ -1218,10 +1147,8 @@ impl<'tcx> TyCtxt<'tcx> { // re-allocate when populating it. 
let capacity = def_path_tables.clone().map(|(_, t)| t.size()).sum::<usize>(); - let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher( - capacity, - ::std::default::Default::default() - ); + let mut map: FxHashMap<_, _> = + FxHashMap::with_capacity_and_hasher(capacity, ::std::default::Default::default()); for (cnum, def_path_table) in def_path_tables { def_path_table.add_def_path_hashes_to(cnum, &mut map); @@ -1252,33 +1179,34 @@ impl<'tcx> TyCtxt<'tcx> { consts: common_consts, extern_crate_map: resolutions.extern_crate_map, trait_map, - export_map: resolutions.export_map.into_iter().map(|(k, v)| { - let exports: Vec<_> = v.into_iter().map(|e| { - e.map_id(|id| hir.node_to_hir_id(id)) - }).collect(); - (k, exports) - }).collect(), - maybe_unused_trait_imports: - resolutions.maybe_unused_trait_imports - .into_iter() - .map(|id| hir.local_def_id_from_node_id(id)) - .collect(), - maybe_unused_extern_crates: - resolutions.maybe_unused_extern_crates - .into_iter() - .map(|(id, sp)| (hir.local_def_id_from_node_id(id), sp)) - .collect(), - glob_map: resolutions.glob_map.into_iter().map(|(id, names)| { - (hir.local_def_id_from_node_id(id), names) - }).collect(), + export_map: resolutions + .export_map + .into_iter() + .map(|(k, v)| { + let exports: Vec<_> = + v.into_iter().map(|e| e.map_id(|id| hir.node_to_hir_id(id))).collect(); + (k, exports) + }) + .collect(), + maybe_unused_trait_imports: resolutions + .maybe_unused_trait_imports + .into_iter() + .map(|id| hir.local_def_id_from_node_id(id)) + .collect(), + maybe_unused_extern_crates: resolutions + .maybe_unused_extern_crates + .into_iter() + .map(|(id, sp)| (hir.local_def_id_from_node_id(id), sp)) + .collect(), + glob_map: resolutions + .glob_map + .into_iter() + .map(|(id, names)| (hir.local_def_id_from_node_id(id), names)) + .collect(), extern_prelude: resolutions.extern_prelude, hir_map: hir, def_path_hash_to_def_id, - queries: query::Queries::new( - providers, - extern_providers, - on_disk_query_result_cache, - ), + queries: query::Queries::new(providers, extern_providers, on_disk_query_result_cache), rcache: Default::default(), selection_cache: Default::default(), evaluation_cache: Default::default(), @@ -1335,11 +1263,7 @@ impl<'tcx> TyCtxt<'tcx> { } pub fn def_key(self, id: DefId) -> hir_map::DefKey { - if id.is_local() { - self.hir().def_key(id) - } else { - self.cstore.def_key(id) - } + if id.is_local() { self.hir().def_key(id) } else { self.cstore.def_key(id) } } /// Converts a `DefId` into its fully expanded `DefPath` (every @@ -1348,21 +1272,13 @@ impl<'tcx> TyCtxt<'tcx> { /// Note that if `id` is not local to this crate, the result will /// be a non-local `DefPath`. pub fn def_path(self, id: DefId) -> hir_map::DefPath { - if id.is_local() { - self.hir().def_path(id) - } else { - self.cstore.def_path(id) - } + if id.is_local() { self.hir().def_path(id) } else { self.cstore.def_path(id) } } /// Returns whether or not the crate with CrateNum 'cnum' /// is marked as a private dependency pub fn is_private_dep(self, cnum: CrateNum) -> bool { - if cnum == LOCAL_CRATE { - false - } else { - self.cstore.crate_is_private_dep_untracked(cnum) - } + if cnum == LOCAL_CRATE { false } else { self.cstore.crate_is_private_dep_untracked(cnum) } } #[inline] @@ -1380,26 +1296,29 @@ impl<'tcx> TyCtxt<'tcx> { // statements within the query system and we'd run into endless // recursion otherwise. 
let (crate_name, crate_disambiguator) = if def_id.is_local() { - (self.crate_name.clone(), - self.sess.local_crate_disambiguator()) + (self.crate_name.clone(), self.sess.local_crate_disambiguator()) } else { - (self.cstore.crate_name_untracked(def_id.krate), - self.cstore.crate_disambiguator_untracked(def_id.krate)) + ( + self.cstore.crate_name_untracked(def_id.krate), + self.cstore.crate_disambiguator_untracked(def_id.krate), + ) }; - format!("{}[{}]{}", - crate_name, - // Don't print the whole crate disambiguator. That's just - // annoying in debug output. - &(crate_disambiguator.to_fingerprint().to_hex())[..4], - self.def_path(def_id).to_string_no_crate()) + format!( + "{}[{}]{}", + crate_name, + // Don't print the whole crate disambiguator. That's just + // annoying in debug output. + &(crate_disambiguator.to_fingerprint().to_hex())[..4], + self.def_path(def_id).to_string_no_crate() + ) } pub fn metadata_encoding_version(self) -> Vec<u8> { self.cstore.metadata_encoding_version().to_vec() } - pub fn encode_metadata(self)-> EncodedMetadata { + pub fn encode_metadata(self) -> EncodedMetadata { let _prof_timer = self.prof.generic_activity("generate_crate_metadata"); self.cstore.encode_metadata(self) } @@ -1414,10 +1333,7 @@ impl<'tcx> TyCtxt<'tcx> { pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> { let krate = self.gcx.hir_map.forest.untracked_krate(); - StableHashingContext::new(self.sess, - krate, - self.hir().definitions(), - &*self.cstore) + StableHashingContext::new(self.sess, krate, self.hir().definitions(), &*self.cstore) } // This method makes sure that we have a DepNode and a Fingerprint for @@ -1431,19 +1347,19 @@ impl<'tcx> TyCtxt<'tcx> { for cnum in self.cstore.crates_untracked() { let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum)); let crate_hash = self.cstore.crate_hash_untracked(cnum); - self.dep_graph.with_task(dep_node, - self, - crate_hash, - |_, x| x, // No transformation needed - dep_graph::hash_result, + self.dep_graph.with_task( + dep_node, + self, + crate_hash, + |_, x| x, // No transformation needed + dep_graph::hash_result, ); } } - pub fn serialize_query_result_cache<E>(self, - encoder: &mut E) - -> Result<(), E::Error> - where E: ty::codec::TyEncoder + pub fn serialize_query_result_cache<E>(self, encoder: &mut E) -> Result<(), E::Error> + where + E: ty::codec::TyEncoder, { self.queries.on_disk_cache.serialize(self, encoder) } @@ -1492,7 +1408,9 @@ impl<'tcx> TyCtxt<'tcx> { // // * Otherwise, use the behavior requested via `-Z borrowck=...` - if self.features().nll { return BorrowckMode::Mir; } + if self.features().nll { + return BorrowckMode::Mir; + } self.sess.opts.borrowck_mode } @@ -1503,18 +1421,18 @@ impl<'tcx> TyCtxt<'tcx> { self.sess.crate_types.borrow().iter().any(|crate_type| { match crate_type { - CrateType::Executable | - CrateType::Staticlib | - CrateType::ProcMacro | - CrateType::Cdylib => false, + CrateType::Executable + | CrateType::Staticlib + | CrateType::ProcMacro + | CrateType::Cdylib => false, // FIXME rust-lang/rust#64319, rust-lang/rust#64872: // We want to block export of generics from dylibs, // but we must fix rust-lang/rust#65890 before we can // do that robustly. 
- CrateType::Dylib => true, + CrateType::Dylib => true, - CrateType::Rlib => true, + CrateType::Rlib => true, } }) } @@ -1523,16 +1441,13 @@ impl<'tcx> TyCtxt<'tcx> { pub fn is_suitable_region(&self, region: Region<'tcx>) -> Option<FreeRegionInfo> { let (suitable_region_binding_scope, bound_region) = match *region { ty::ReFree(ref free_region) => (free_region.scope, free_region.bound_region), - ty::ReEarlyBound(ref ebr) => ( - self.parent(ebr.def_id).unwrap(), - ty::BoundRegion::BrNamed(ebr.def_id, ebr.name), - ), + ty::ReEarlyBound(ref ebr) => { + (self.parent(ebr.def_id).unwrap(), ty::BoundRegion::BrNamed(ebr.def_id, ebr.name)) + } _ => return None, // not a free region }; - let hir_id = self.hir() - .as_local_hir_id(suitable_region_binding_scope) - .unwrap(); + let hir_id = self.hir().as_local_hir_id(suitable_region_binding_scope).unwrap(); let is_impl_item = match self.hir().find(hir_id) { Some(Node::Item(..)) | Some(Node::TraitItem(..)) => false, Some(Node::ImplItem(..)) => { @@ -1548,10 +1463,7 @@ impl<'tcx> TyCtxt<'tcx> { }); } - pub fn return_type_impl_trait( - &self, - scope_def_id: DefId, - ) -> Option<(Ty<'tcx>, Span)> { + pub fn return_type_impl_trait(&self, scope_def_id: DefId) -> Option<(Ty<'tcx>, Span)> { // HACK: `type_of_def_id()` will fail on these (#55796), so return `None`. let hir_id = self.hir().as_local_hir_id(scope_def_id).unwrap(); match self.hir().get(hir_id) { @@ -1578,18 +1490,13 @@ impl<'tcx> TyCtxt<'tcx> { None } } - _ => None + _ => None, } } // Checks if the bound region is in Impl Item. - pub fn is_bound_region_in_impl_item( - &self, - suitable_region_binding_scope: DefId, - ) -> bool { - let container_id = self.associated_item(suitable_region_binding_scope) - .container - .id(); + pub fn is_bound_region_in_impl_item(&self, suitable_region_binding_scope: DefId) -> bool { + let container_id = self.associated_item(suitable_region_binding_scope).container.id(); if self.impl_trait_ref(container_id).is_some() { // For now, we do not try to target impls of traits. This is // because this message is going to suggest that the user @@ -1626,9 +1533,7 @@ impl<'tcx> GlobalCtxt<'tcx> { where F: FnOnce(TyCtxt<'tcx>) -> R, { - let tcx = TyCtxt { - gcx: self, - }; + let tcx = TyCtxt { gcx: self }; ty::tls::with_related_context(tcx, |icx| { let new_icx = ty::tls::ImplicitCtxt { tcx, @@ -1637,9 +1542,7 @@ impl<'tcx> GlobalCtxt<'tcx> { layout_depth: icx.layout_depth, task_deps: icx.task_deps, }; - ty::tls::enter_context(&new_icx, |_| { - f(tcx) - }) + ty::tls::enter_context(&new_icx, |_| f(tcx)) }) } } @@ -1669,64 +1572,64 @@ pub trait Lift<'tcx>: fmt::Debug { macro_rules! nop_lift { ($ty:ty => $lifted:ty) => { impl<'a, 'tcx> Lift<'tcx> for $ty { - type Lifted = $lifted; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> { - if tcx.interners.arena.in_arena(*self as *const _) { - Some(unsafe { mem::transmute(*self) }) - } else { - None - } - } + type Lifted = $lifted; + fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> { + if tcx.interners.arena.in_arena(*self as *const _) { + Some(unsafe { mem::transmute(*self) }) + } else { + None } + } + } }; } macro_rules! 
nop_list_lift { ($ty:ty => $lifted:ty) => { impl<'a, 'tcx> Lift<'tcx> for &'a List<$ty> { - type Lifted = &'tcx List<$lifted>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> { - if self.is_empty() { - return Some(List::empty()); - } - if tcx.interners.arena.in_arena(*self as *const _) { - Some(unsafe { mem::transmute(*self) }) - } else { - None - } - } + type Lifted = &'tcx List<$lifted>; + fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> { + if self.is_empty() { + return Some(List::empty()); } + if tcx.interners.arena.in_arena(*self as *const _) { + Some(unsafe { mem::transmute(*self) }) + } else { + None + } + } + } }; } -nop_lift!{Ty<'a> => Ty<'tcx>} -nop_lift!{Region<'a> => Region<'tcx>} -nop_lift!{Goal<'a> => Goal<'tcx>} -nop_lift!{&'a Const<'a> => &'tcx Const<'tcx>} +nop_lift! {Ty<'a> => Ty<'tcx>} +nop_lift! {Region<'a> => Region<'tcx>} +nop_lift! {Goal<'a> => Goal<'tcx>} +nop_lift! {&'a Const<'a> => &'tcx Const<'tcx>} -nop_list_lift!{Goal<'a> => Goal<'tcx>} -nop_list_lift!{Clause<'a> => Clause<'tcx>} -nop_list_lift!{Ty<'a> => Ty<'tcx>} -nop_list_lift!{ExistentialPredicate<'a> => ExistentialPredicate<'tcx>} -nop_list_lift!{Predicate<'a> => Predicate<'tcx>} -nop_list_lift!{CanonicalVarInfo => CanonicalVarInfo} -nop_list_lift!{ProjectionKind => ProjectionKind} +nop_list_lift! {Goal<'a> => Goal<'tcx>} +nop_list_lift! {Clause<'a> => Clause<'tcx>} +nop_list_lift! {Ty<'a> => Ty<'tcx>} +nop_list_lift! {ExistentialPredicate<'a> => ExistentialPredicate<'tcx>} +nop_list_lift! {Predicate<'a> => Predicate<'tcx>} +nop_list_lift! {CanonicalVarInfo => CanonicalVarInfo} +nop_list_lift! {ProjectionKind => ProjectionKind} // This is the impl for `&'a InternalSubsts<'a>`. -nop_list_lift!{GenericArg<'a> => GenericArg<'tcx>} +nop_list_lift! {GenericArg<'a> => GenericArg<'tcx>} pub mod tls { - use super::{GlobalCtxt, TyCtxt, ptr_eq}; + use super::{ptr_eq, GlobalCtxt, TyCtxt}; - use std::fmt; - use std::mem; - use syntax_pos; + use crate::dep_graph::TaskDeps; use crate::ty::query; use errors::{Diagnostic, TRACK_DIAGNOSTICS}; - use rustc_data_structures::OnDrop; - use rustc_data_structures::sync::{self, Lrc, Lock}; + use rustc_data_structures::sync::{self, Lock, Lrc}; use rustc_data_structures::thin_vec::ThinVec; - use crate::dep_graph::TaskDeps; + use rustc_data_structures::OnDrop; + use std::fmt; + use std::mem; + use syntax_pos; #[cfg(not(parallel_compiler))] use std::cell::Cell; @@ -1830,7 +1733,8 @@ pub mod tls { /// Sets up the callbacks from libsyntax on the current thread. pub fn with_thread_locals<F, R>(f: F) -> R - where F: FnOnce() -> R + where + F: FnOnce() -> R, { syntax_pos::SPAN_DEBUG.with(|span_dbg| { let original_span_debug = span_dbg.get(); @@ -1859,9 +1763,7 @@ pub mod tls { where F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, { - set_tlv(context as *const _ as usize, || { - f(&context) - }) + set_tlv(context as *const _ as usize, || f(&context)) } /// Enters `GlobalCtxt` by setting up libsyntax callbacks and @@ -1881,19 +1783,10 @@ pub mod tls { GCX_PTR.with(|lock| *lock.lock() = 0); }); - let tcx = TyCtxt { - gcx, - }; - let icx = ImplicitCtxt { - tcx, - query: None, - diagnostics: None, - layout_depth: 0, - task_deps: None, - }; - enter_context(&icx, |_| { - f(tcx) - }) + let tcx = TyCtxt { gcx }; + let icx = + ImplicitCtxt { tcx, query: None, diagnostics: None, layout_depth: 0, task_deps: None }; + enter_context(&icx, |_| f(tcx)) } scoped_thread_local! 
{ @@ -1911,16 +1804,9 @@ pub mod tls { let gcx = GCX_PTR.with(|lock| *lock.lock()); assert!(gcx != 0); let gcx = &*(gcx as *const GlobalCtxt<'_>); - let tcx = TyCtxt { - gcx, - }; - let icx = ImplicitCtxt { - query: None, - diagnostics: None, - tcx, - layout_depth: 0, - task_deps: None, - }; + let tcx = TyCtxt { gcx }; + let icx = + ImplicitCtxt { query: None, diagnostics: None, tcx, layout_depth: 0, task_deps: None }; enter_context(&icx, |_| f(tcx)) } @@ -1962,12 +1848,10 @@ pub mod tls { where F: FnOnce(&ImplicitCtxt<'_, 'tcx>) -> R, { - with_context(|context| { - unsafe { - assert!(ptr_eq(context.tcx.gcx, tcx.gcx)); - let context: &ImplicitCtxt<'_, '_> = mem::transmute(context); - f(context) - } + with_context(|context| unsafe { + assert!(ptr_eq(context.tcx.gcx, tcx.gcx)); + let context: &ImplicitCtxt<'_, '_> = mem::transmute(context); + f(context) }) } @@ -2069,9 +1953,27 @@ impl<'tcx> TyCtxt<'tcx> { pub fn print_debug_stats(self) { sty_debug_print!( self, - Adt, Array, Slice, RawPtr, Ref, FnDef, FnPtr, Placeholder, - Generator, GeneratorWitness, Dynamic, Closure, Tuple, Bound, - Param, Infer, UnnormalizedProjection, Projection, Opaque, Foreign); + Adt, + Array, + Slice, + RawPtr, + Ref, + FnDef, + FnPtr, + Placeholder, + Generator, + GeneratorWitness, + Dynamic, + Closure, + Tuple, + Bound, + Param, + Infer, + UnnormalizedProjection, + Projection, + Opaque, + Foreign + ); println!("InternalSubsts interner: #{}", self.interners.substs.len()); println!("Region interner: #{}", self.interners.region.len()); @@ -2082,16 +1984,15 @@ impl<'tcx> TyCtxt<'tcx> { } } - /// An entry in an interner. struct Interned<'tcx, T: ?Sized>(&'tcx T); -impl<'tcx, T: 'tcx+?Sized> Clone for Interned<'tcx, T> { +impl<'tcx, T: 'tcx + ?Sized> Clone for Interned<'tcx, T> { fn clone(&self) -> Self { Interned(self.0) } } -impl<'tcx, T: 'tcx+?Sized> Copy for Interned<'tcx, T> {} +impl<'tcx, T: 'tcx + ?Sized> Copy for Interned<'tcx, T> {} // N.B., an `Interned<Ty>` compares and hashes as a `TyKind`. impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> { @@ -2148,15 +2049,13 @@ impl<'tcx> Borrow<[GenericArg<'tcx>]> for Interned<'tcx, InternalSubsts<'tcx>> { } } -impl<'tcx> Borrow<[ProjectionKind]> - for Interned<'tcx, List<ProjectionKind>> { +impl<'tcx> Borrow<[ProjectionKind]> for Interned<'tcx, List<ProjectionKind>> { fn borrow(&self) -> &[ProjectionKind] { &self.0[..] } } -impl<'tcx> Borrow<[PlaceElem<'tcx>]> - for Interned<'tcx, List<PlaceElem<'tcx>>> { +impl<'tcx> Borrow<[PlaceElem<'tcx>]> for Interned<'tcx, List<PlaceElem<'tcx>>> { fn borrow(&self) -> &[PlaceElem<'tcx>] { &self.0[..] } @@ -2272,10 +2171,7 @@ impl<'tcx> TyCtxt<'tcx> { /// unsafe. 
pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> { assert_eq!(sig.unsafety(), hir::Unsafety::Normal); - self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig { - unsafety: hir::Unsafety::Unsafe, - ..sig - })) + self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig { unsafety: hir::Unsafety::Unsafe, ..sig })) } /// Given a closure signature `sig`, returns an equivalent `fn` @@ -2289,18 +2185,10 @@ impl<'tcx> TyCtxt<'tcx> { pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>, unsafety: hir::Unsafety) -> Ty<'tcx> { let converted_sig = sig.map_bound(|s| { let params_iter = match s.inputs()[0].kind { - ty::Tuple(params) => { - params.into_iter().map(|k| k.expect_ty()) - } + ty::Tuple(params) => params.into_iter().map(|k| k.expect_ty()), _ => bug!(), }; - self.mk_fn_sig( - params_iter, - s.output(), - s.c_variadic, - unsafety, - abi::Abi::Rust, - ) + self.mk_fn_sig(params_iter, s.output(), s.c_variadic, unsafety, abi::Abi::Rust) }); self.mk_fn_ptr(converted_sig) @@ -2314,30 +2202,30 @@ impl<'tcx> TyCtxt<'tcx> { pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> { match tm { - ast::IntTy::Isize => self.types.isize, - ast::IntTy::I8 => self.types.i8, - ast::IntTy::I16 => self.types.i16, - ast::IntTy::I32 => self.types.i32, - ast::IntTy::I64 => self.types.i64, - ast::IntTy::I128 => self.types.i128, + ast::IntTy::Isize => self.types.isize, + ast::IntTy::I8 => self.types.i8, + ast::IntTy::I16 => self.types.i16, + ast::IntTy::I32 => self.types.i32, + ast::IntTy::I64 => self.types.i64, + ast::IntTy::I128 => self.types.i128, } } pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> { match tm { - ast::UintTy::Usize => self.types.usize, - ast::UintTy::U8 => self.types.u8, - ast::UintTy::U16 => self.types.u16, - ast::UintTy::U32 => self.types.u32, - ast::UintTy::U64 => self.types.u64, - ast::UintTy::U128 => self.types.u128, + ast::UintTy::Usize => self.types.usize, + ast::UintTy::U8 => self.types.u8, + ast::UintTy::U16 => self.types.u16, + ast::UintTy::U32 => self.types.u32, + ast::UintTy::U64 => self.types.u64, + ast::UintTy::U128 => self.types.u128, } } pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> { match tm { - ast::FloatTy::F32 => self.types.f32, - ast::FloatTy::F64 => self.types.f64, + ast::FloatTy::F32 => self.types.f32, + ast::FloatTy::F64 => self.types.f64, } } @@ -2364,12 +2252,9 @@ impl<'tcx> TyCtxt<'tcx> { fn mk_generic_adt(self, wrapper_def_id: DefId, ty_param: Ty<'tcx>) -> Ty<'tcx> { let adt_def = self.adt_def(wrapper_def_id); - let substs = InternalSubsts::for_item(self, wrapper_def_id, |param, substs| { - match param.kind { - GenericParamDefKind::Lifetime | - GenericParamDefKind::Const => { - bug!() - } + let substs = + InternalSubsts::for_item(self, wrapper_def_id, |param, substs| match param.kind { + GenericParamDefKind::Lifetime | GenericParamDefKind::Const => bug!(), GenericParamDefKind::Type { has_default, .. 
} => { if param.index == 0 { ty_param.into() @@ -2378,8 +2263,7 @@ impl<'tcx> TyCtxt<'tcx> { self.type_of(param.def_id).subst(self, substs).into() } } - } - }); + }); self.mk_ty(Adt(adt_def, substs)) } @@ -2390,7 +2274,7 @@ impl<'tcx> TyCtxt<'tcx> { } #[inline] - pub fn mk_lang_item(self, ty: Ty<'tcx>, item: lang_items::LangItem) -> Option<Ty<'tcx>> { + pub fn mk_lang_item(self, ty: Ty<'tcx>, item: lang_items::LangItem) -> Option<Ty<'tcx>> { let def_id = self.lang_items().require(item).ok()?; Some(self.mk_generic_adt(def_id, ty)) } @@ -2413,22 +2297,22 @@ impl<'tcx> TyCtxt<'tcx> { #[inline] pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { - self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::Mutability::Mut }) + self.mk_ref(r, TypeAndMut { ty: ty, mutbl: hir::Mutability::Mut }) } #[inline] pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { - self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::Mutability::Not }) + self.mk_ref(r, TypeAndMut { ty: ty, mutbl: hir::Mutability::Not }) } #[inline] pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { - self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::Mutability::Mut }) + self.mk_ptr(TypeAndMut { ty: ty, mutbl: hir::Mutability::Mut }) } #[inline] pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { - self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::Mutability::Not }) + self.mk_ptr(TypeAndMut { ty: ty, mutbl: hir::Mutability::Not }) } #[inline] @@ -2466,11 +2350,7 @@ impl<'tcx> TyCtxt<'tcx> { #[inline] pub fn mk_diverging_default(self) -> Ty<'tcx> { - if self.features().never_type_fallback { - self.types.never - } else { - self.types.unit - } + if self.features().never_type_fallback { self.types.never } else { self.types.unit } } #[inline] @@ -2479,8 +2359,7 @@ impl<'tcx> TyCtxt<'tcx> { } #[inline] - pub fn mk_fn_def(self, def_id: DefId, - substs: SubstsRef<'tcx>) -> Ty<'tcx> { + pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(FnDef(def_id, substs)) } @@ -2493,34 +2372,28 @@ impl<'tcx> TyCtxt<'tcx> { pub fn mk_dynamic( self, obj: ty::Binder<&'tcx List<ExistentialPredicate<'tcx>>>, - reg: ty::Region<'tcx> + reg: ty::Region<'tcx>, ) -> Ty<'tcx> { self.mk_ty(Dynamic(obj, reg)) } #[inline] - pub fn mk_projection(self, - item_def_id: DefId, - substs: SubstsRef<'tcx>) - -> Ty<'tcx> { - self.mk_ty(Projection(ProjectionTy { - item_def_id, - substs, - })) - } + pub fn mk_projection(self, item_def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { + self.mk_ty(Projection(ProjectionTy { item_def_id, substs })) + } #[inline] - pub fn mk_closure(self, closure_id: DefId, closure_substs: SubstsRef<'tcx>) - -> Ty<'tcx> { + pub fn mk_closure(self, closure_id: DefId, closure_substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(Closure(closure_id, closure_substs)) } #[inline] - pub fn mk_generator(self, - id: DefId, - generator_substs: SubstsRef<'tcx>, - movability: hir::Movability) - -> Ty<'tcx> { + pub fn mk_generator( + self, + id: DefId, + generator_substs: SubstsRef<'tcx>, + movability: hir::Movability, + ) -> Ty<'tcx> { self.mk_ty(Generator(id, generator_substs, movability)) } @@ -2536,10 +2409,7 @@ impl<'tcx> TyCtxt<'tcx> { #[inline] pub fn mk_const_var(self, v: ConstVid<'tcx>, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { - self.mk_const(ty::Const { - val: ty::ConstKind::Infer(InferConst::Var(v)), - ty, - }) + self.mk_const(ty::Const { val: ty::ConstKind::Infer(InferConst::Var(v)), ty }) } #[inline] @@ -2558,15 +2428,8 @@ impl<'tcx> TyCtxt<'tcx> { } #[inline] - pub fn mk_const_infer( - self, - ic: InferConst<'tcx>, - 
ty: Ty<'tcx>, - ) -> &'tcx ty::Const<'tcx> { - self.mk_const(ty::Const { - val: ty::ConstKind::Infer(ic), - ty, - }) + pub fn mk_const_infer(self, ic: InferConst<'tcx>, ty: Ty<'tcx>) -> &'tcx ty::Const<'tcx> { + self.mk_const(ty::Const { val: ty::ConstKind::Infer(ic), ty }) } #[inline] @@ -2575,19 +2438,10 @@ impl<'tcx> TyCtxt<'tcx> { } #[inline] - pub fn mk_const_param( - self, - index: u32, - name: Symbol, - ty: Ty<'tcx> - ) -> &'tcx Const<'tcx> { - self.mk_const(ty::Const { - val: ty::ConstKind::Param(ParamConst { index, name }), - ty, - }) + pub fn mk_const_param(self, index: u32, name: Symbol, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { + self.mk_const(ty::Const { val: ty::ConstKind::Param(ParamConst { index, name }), ty }) } - pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> { match param.kind { GenericParamDefKind::Lifetime => { @@ -2647,15 +2501,16 @@ impl<'tcx> TyCtxt<'tcx> { Place { base: place.base, projection: self.intern_place_elems(&projection) } } - pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>]) - -> &'tcx List<ExistentialPredicate<'tcx>> { + pub fn intern_existential_predicates( + self, + eps: &[ExistentialPredicate<'tcx>], + ) -> &'tcx List<ExistentialPredicate<'tcx>> { assert!(!eps.is_empty()); assert!(eps.windows(2).all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater)); self._intern_existential_predicates(eps) } - pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) - -> &'tcx List<Predicate<'tcx>> { + pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List<Predicate<'tcx>> { // FIXME consider asking the input slice to be sorted to avoid // re-interning permutations, in which case that would be asserted // here. @@ -2668,109 +2523,87 @@ impl<'tcx> TyCtxt<'tcx> { } pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> { - if ts.len() == 0 { - List::empty() - } else { - self._intern_type_list(ts) - } + if ts.len() == 0 { List::empty() } else { self._intern_type_list(ts) } } pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List<GenericArg<'tcx>> { - if ts.len() == 0 { - List::empty() - } else { - self._intern_substs(ts) - } + if ts.len() == 0 { List::empty() } else { self._intern_substs(ts) } } pub fn intern_projs(self, ps: &[ProjectionKind]) -> &'tcx List<ProjectionKind> { - if ps.len() == 0 { - List::empty() - } else { - self._intern_projs(ps) - } + if ps.len() == 0 { List::empty() } else { self._intern_projs(ps) } } pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List<PlaceElem<'tcx>> { - if ts.len() == 0 { - List::empty() - } else { - self._intern_place_elems(ts) - } + if ts.len() == 0 { List::empty() } else { self._intern_place_elems(ts) } } pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'tcx> { - if ts.len() == 0 { - List::empty() - } else { - self._intern_canonical_var_infos(ts) - } + if ts.len() == 0 { List::empty() } else { self._intern_canonical_var_infos(ts) } } pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> { - if ts.len() == 0 { - List::empty() - } else { - self._intern_clauses(ts) - } + if ts.len() == 0 { List::empty() } else { self._intern_clauses(ts) } } pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> { - if ts.len() == 0 { - List::empty() - } else { - self._intern_goals(ts) - } + if ts.len() == 0 { List::empty() } else { self._intern_goals(ts) } } - pub fn mk_fn_sig<I>(self, - inputs: I, - output: I::Item, - c_variadic: bool, - unsafety: hir::Unsafety, - abi: 
abi::Abi) - -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output + pub fn mk_fn_sig<I>( + self, + inputs: I, + output: I::Item, + c_variadic: bool, + unsafety: hir::Unsafety, + abi: abi::Abi, + ) -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output where I: Iterator<Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>, { inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig { inputs_and_output: self.intern_type_list(xs), - c_variadic, unsafety, abi + c_variadic, + unsafety, + abi, }) } - pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>], - &'tcx List<ExistentialPredicate<'tcx>>>>(self, iter: I) - -> I::Output { + pub fn mk_existential_predicates< + I: InternAs<[ExistentialPredicate<'tcx>], &'tcx List<ExistentialPredicate<'tcx>>>, + >( + self, + iter: I, + ) -> I::Output { iter.intern_with(|xs| self.intern_existential_predicates(xs)) } - pub fn mk_predicates<I: InternAs<[Predicate<'tcx>], - &'tcx List<Predicate<'tcx>>>>(self, iter: I) - -> I::Output { + pub fn mk_predicates<I: InternAs<[Predicate<'tcx>], &'tcx List<Predicate<'tcx>>>>( + self, + iter: I, + ) -> I::Output { iter.intern_with(|xs| self.intern_predicates(xs)) } - pub fn mk_type_list<I: InternAs<[Ty<'tcx>], - &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output { + pub fn mk_type_list<I: InternAs<[Ty<'tcx>], &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_type_list(xs)) } - pub fn mk_substs<I: InternAs<[GenericArg<'tcx>], - &'tcx List<GenericArg<'tcx>>>>(self, iter: I) -> I::Output { + pub fn mk_substs<I: InternAs<[GenericArg<'tcx>], &'tcx List<GenericArg<'tcx>>>>( + self, + iter: I, + ) -> I::Output { iter.intern_with(|xs| self.intern_substs(xs)) } - pub fn mk_place_elems<I: InternAs<[PlaceElem<'tcx>], - &'tcx List<PlaceElem<'tcx>>>>(self, iter: I) -> I::Output { + pub fn mk_place_elems<I: InternAs<[PlaceElem<'tcx>], &'tcx List<PlaceElem<'tcx>>>>( + self, + iter: I, + ) -> I::Output { iter.intern_with(|xs| self.intern_place_elems(xs)) } - pub fn mk_substs_trait(self, - self_ty: Ty<'tcx>, - rest: &[GenericArg<'tcx>]) - -> SubstsRef<'tcx> - { + pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> { self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned())) } @@ -2782,31 +2615,37 @@ impl<'tcx> TyCtxt<'tcx> { iter.intern_with(|xs| self.intern_goals(xs)) } - pub fn lint_hir<S: Into<MultiSpan>>(self, - lint: &'static Lint, - hir_id: HirId, - span: S, - msg: &str) { + pub fn lint_hir<S: Into<MultiSpan>>( + self, + lint: &'static Lint, + hir_id: HirId, + span: S, + msg: &str, + ) { self.struct_span_lint_hir(lint, hir_id, span.into(), msg).emit() } - pub fn lint_hir_note<S: Into<MultiSpan>>(self, - lint: &'static Lint, - hir_id: HirId, - span: S, - msg: &str, - note: &str) { + pub fn lint_hir_note<S: Into<MultiSpan>>( + self, + lint: &'static Lint, + hir_id: HirId, + span: S, + msg: &str, + note: &str, + ) { let mut err = self.struct_span_lint_hir(lint, hir_id, span.into(), msg); err.note(note); err.emit() } - pub fn lint_node_note<S: Into<MultiSpan>>(self, - lint: &'static Lint, - id: hir::HirId, - span: S, - msg: &str, - note: &str) { + pub fn lint_node_note<S: Into<MultiSpan>>( + self, + lint: &'static Lint, + id: hir::HirId, + span: S, + msg: &str, + note: &str, + ) { let mut err = self.struct_span_lint_hir(lint, id, span.into(), msg); err.note(note); err.emit() @@ -2837,12 +2676,12 @@ impl<'tcx> TyCtxt<'tcx> { pub fn lint_level_at_node( self, lint: &'static Lint, - mut id: 
hir::HirId + mut id: hir::HirId, ) -> (lint::Level, lint::LintSource) { let sets = self.lint_levels(LOCAL_CRATE); loop { if let Some(pair) = sets.level_and_source(lint, id, self.sess) { - return pair + return pair; } let next = self.hir().get_parent_node(id); if next == id { @@ -2852,38 +2691,37 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn struct_span_lint_hir<S: Into<MultiSpan>>(self, - lint: &'static Lint, - hir_id: HirId, - span: S, - msg: &str) - -> DiagnosticBuilder<'tcx> - { + pub fn struct_span_lint_hir<S: Into<MultiSpan>>( + self, + lint: &'static Lint, + hir_id: HirId, + span: S, + msg: &str, + ) -> DiagnosticBuilder<'tcx> { let (level, src) = self.lint_level_at_node(lint, hir_id); lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg) } - pub fn struct_lint_node(self, lint: &'static Lint, id: HirId, msg: &str) - -> DiagnosticBuilder<'tcx> - { + pub fn struct_lint_node( + self, + lint: &'static Lint, + id: HirId, + msg: &str, + ) -> DiagnosticBuilder<'tcx> { let (level, src) = self.lint_level_at_node(lint, id); lint::struct_lint_level(self.sess, lint, level, src, None, msg) } pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx StableVec<TraitCandidate>> { - self.in_scope_traits_map(id.owner) - .and_then(|map| map.get(&id.local_id)) + self.in_scope_traits_map(id.owner).and_then(|map| map.get(&id.local_id)) } pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> { - self.named_region_map(id.owner) - .and_then(|map| map.get(&id.local_id).cloned()) + self.named_region_map(id.owner).and_then(|map| map.get(&id.local_id).cloned()) } pub fn is_late_bound(self, id: HirId) -> bool { - self.is_late_bound_map(id.owner) - .map(|set| set.contains(&id.local_id)) - .unwrap_or(false) + self.is_late_bound_map(id.owner).map(|set| set.contains(&id.local_id)).unwrap_or(false) } pub fn object_lifetime_defaults(self, id: HirId) -> Option<&'tcx [ObjectLifetimeDefault]> { @@ -2895,44 +2733,52 @@ impl<'tcx> TyCtxt<'tcx> { pub trait InternAs<T: ?Sized, R> { type Output; fn intern_with<F>(self, f: F) -> Self::Output - where F: FnOnce(&T) -> R; + where + F: FnOnce(&T) -> R; } impl<I, T, R, E> InternAs<[T], R> for I - where E: InternIteratorElement<T, R>, - I: Iterator<Item=E> { +where + E: InternIteratorElement<T, R>, + I: Iterator<Item = E>, +{ type Output = E::Output; fn intern_with<F>(self, f: F) -> Self::Output - where F: FnOnce(&[T]) -> R { + where + F: FnOnce(&[T]) -> R, + { E::intern_with(self, f) } } pub trait InternIteratorElement<T, R>: Sized { type Output; - fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; + fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; } impl<T, R> InternIteratorElement<T, R> for T { type Output = R; - fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { + fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { f(&iter.collect::<SmallVec<[_; 8]>>()) } } impl<'a, T, R> InternIteratorElement<T, R> for &'a T - where T: Clone + 'a +where + T: Clone + 'a, { type Output = R; - fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { + fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { f(&iter.cloned().collect::<SmallVec<[_; 8]>>()) } } impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> { type Output = Result<R, E>; - fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(mut iter: 
I, f: F) - -> Self::Output { + fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>( + mut iter: I, + f: F, + ) -> Self::Output { // This code is hot enough that it's worth specializing for the most // common length lists, to avoid the overhead of `SmallVec` creation. // The match arms are in order of frequency. The 1, 2, and 0 cases are @@ -2954,9 +2800,7 @@ impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> { assert!(iter.next().is_none()); f(&[]) } - _ => { - f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?) - } + _ => f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?), }) } } @@ -2990,9 +2834,7 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) { assert_eq!(id, LOCAL_CRATE); middle::diagnostic_items::collect_all(tcx) }; - providers.maybe_unused_trait_import = |tcx, id| { - tcx.maybe_unused_trait_imports.contains(&id) - }; + providers.maybe_unused_trait_import = |tcx, id| tcx.maybe_unused_trait_imports.contains(&id); providers.maybe_unused_extern_crates = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); &tcx.maybe_unused_extern_crates[..] diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs index 8af98c6117e..900c425fac2 100644 --- a/src/librustc/ty/wf.rs +++ b/src/librustc/ty/wf.rs @@ -1,13 +1,13 @@ use crate::hir; use crate::hir::def_id::DefId; use crate::infer::InferCtxt; -use crate::ty::subst::SubstsRef; +use crate::middle::lang_items; use crate::traits::{self, AssocTypeBoundData}; +use crate::ty::subst::SubstsRef; use crate::ty::{self, ToPredicate, Ty, TyCtxt, TypeFoldable}; use std::iter::once; use syntax::symbol::{kw, Ident}; use syntax_pos::Span; -use crate::middle::lang_items; /// Returns the set of obligations needed to make `ty` well-formed. /// If `ty` contains unresolved inference variables, this may include @@ -22,14 +22,7 @@ pub fn obligations<'a, 'tcx>( ty: Ty<'tcx>, span: Span, ) -> Option<Vec<traits::PredicateObligation<'tcx>>> { - let mut wf = WfPredicates { - infcx, - param_env, - body_id, - span, - out: vec![], - item: None, - }; + let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![], item: None }; if wf.compute(ty) { debug!("wf::obligations({:?}, body_id={:?}) = {:?}", ty, body_id, wf.out); let result = wf.normalize(); @@ -71,8 +64,7 @@ pub fn predicate_obligations<'a, 'tcx>( ty::Predicate::Trait(ref t) => { wf.compute_trait_ref(&t.skip_binder().trait_ref, Elaborate::None); // (*) } - ty::Predicate::RegionOutlives(..) => { - } + ty::Predicate::RegionOutlives(..) => {} ty::Predicate::TypeOutlives(ref t) => { wf.compute(t.skip_binder().0); } @@ -84,10 +76,8 @@ pub fn predicate_obligations<'a, 'tcx>( ty::Predicate::WellFormed(t) => { wf.compute(t); } - ty::Predicate::ObjectSafe(_) => { - } - ty::Predicate::ClosureKind(..) => { - } + ty::Predicate::ObjectSafe(_) => {} + ty::Predicate::ClosureKind(..) 
=> {} ty::Predicate::Subtype(ref data) => { wf.compute(data.skip_binder().a); // (*) wf.compute(data.skip_binder().b); // (*) @@ -152,14 +142,15 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { let cause = self.cause(traits::MiscObligation); let infcx = &mut self.infcx; let param_env = self.param_env; - self.out.iter() - .inspect(|pred| assert!(!pred.has_escaping_bound_vars())) - .flat_map(|pred| { - let mut selcx = traits::SelectionContext::new(infcx); - let pred = traits::normalize(&mut selcx, param_env, cause.clone(), pred); - once(pred.value).chain(pred.obligations) - }) - .collect() + self.out + .iter() + .inspect(|pred| assert!(!pred.has_escaping_bound_vars())) + .flat_map(|pred| { + let mut selcx = traits::SelectionContext::new(infcx); + let pred = traits::normalize(&mut selcx, param_env, cause.clone(), pred); + once(pred.value).chain(pred.obligations) + }) + .collect() } /// Pushes the obligations required for `trait_ref` to be WF into `self.out`. @@ -171,154 +162,163 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { let param_env = self.param_env; let item = &self.item; - let extend_cause_with_original_assoc_item_obligation = | - cause: &mut traits::ObligationCause<'_>, - pred: &ty::Predicate<'_>, - trait_assoc_items: ty::AssocItemsIterator<'_>, - | { - let trait_item = tcx.hir().as_local_hir_id(trait_ref.def_id).and_then(|trait_id| { - tcx.hir().find(trait_id) - }); - let (trait_name, trait_generics) = match trait_item { - Some(hir::Node::Item(hir::Item { - ident, - kind: hir::ItemKind::Trait(.., generics, _, _), - .. - })) | - Some(hir::Node::Item(hir::Item { - ident, - kind: hir::ItemKind::TraitAlias(generics, _), - .. - })) => (Some(ident), Some(generics)), - _ => (None, None), - }; - - let item_span = item.map(|i| tcx.sess.source_map().def_span(i.span)); - match pred { - ty::Predicate::Projection(proj) => { - // The obligation comes not from the current `impl` nor the `trait` being - // implemented, but rather from a "second order" obligation, like in - // `src/test/ui/associated-types/point-at-type-on-obligation-failure.rs`: - // - // error[E0271]: type mismatch resolving `<Foo2 as Bar2>::Ok == ()` - // --> $DIR/point-at-type-on-obligation-failure.rs:13:5 - // | - // LL | type Ok; - // | -- associated type defined here - // ... - // LL | impl Bar for Foo { - // | ---------------- in this `impl` item - // LL | type Ok = (); - // | ^^^^^^^^^^^^^ expected `u32`, found `()` - // | - // = note: expected type `u32` - // found type `()` - // - // FIXME: we would want to point a span to all places that contributed to this - // obligation. In the case above, it should be closer to: - // - // error[E0271]: type mismatch resolving `<Foo2 as Bar2>::Ok == ()` - // --> $DIR/point-at-type-on-obligation-failure.rs:13:5 - // | - // LL | type Ok; - // | -- associated type defined here - // LL | type Sibling: Bar2<Ok=Self::Ok>; - // | -------------------------------- obligation set here - // ... - // LL | impl Bar for Foo { - // | ---------------- in this `impl` item - // LL | type Ok = (); - // | ^^^^^^^^^^^^^ expected `u32`, found `()` - // ... 
- // LL | impl Bar2 for Foo2 { - // | ---------------- in this `impl` item - // LL | type Ok = u32; - // | -------------- obligation set here - // | - // = note: expected type `u32` - // found type `()` - if let Some(hir::ItemKind::Impl(.., impl_items)) = item.map(|i| &i.kind) { - let trait_assoc_item = tcx.associated_item(proj.projection_def_id()); - if let Some(impl_item) = impl_items.iter().filter(|item| { - item.ident == trait_assoc_item.ident - }).next() { - cause.span = impl_item.span; - cause.code = traits::AssocTypeBound(Box::new(AssocTypeBoundData { - impl_span: item_span, - original: trait_assoc_item.ident.span, - bounds: vec![], - })); + let extend_cause_with_original_assoc_item_obligation = + |cause: &mut traits::ObligationCause<'_>, + pred: &ty::Predicate<'_>, + trait_assoc_items: ty::AssocItemsIterator<'_>| { + let trait_item = tcx + .hir() + .as_local_hir_id(trait_ref.def_id) + .and_then(|trait_id| tcx.hir().find(trait_id)); + let (trait_name, trait_generics) = match trait_item { + Some(hir::Node::Item(hir::Item { + ident, + kind: hir::ItemKind::Trait(.., generics, _, _), + .. + })) + | Some(hir::Node::Item(hir::Item { + ident, + kind: hir::ItemKind::TraitAlias(generics, _), + .. + })) => (Some(ident), Some(generics)), + _ => (None, None), + }; + + let item_span = item.map(|i| tcx.sess.source_map().def_span(i.span)); + match pred { + ty::Predicate::Projection(proj) => { + // The obligation comes not from the current `impl` nor the `trait` being + // implemented, but rather from a "second order" obligation, like in + // `src/test/ui/associated-types/point-at-type-on-obligation-failure.rs`: + // + // error[E0271]: type mismatch resolving `<Foo2 as Bar2>::Ok == ()` + // --> $DIR/point-at-type-on-obligation-failure.rs:13:5 + // | + // LL | type Ok; + // | -- associated type defined here + // ... + // LL | impl Bar for Foo { + // | ---------------- in this `impl` item + // LL | type Ok = (); + // | ^^^^^^^^^^^^^ expected `u32`, found `()` + // | + // = note: expected type `u32` + // found type `()` + // + // FIXME: we would want to point a span to all places that contributed to this + // obligation. In the case above, it should be closer to: + // + // error[E0271]: type mismatch resolving `<Foo2 as Bar2>::Ok == ()` + // --> $DIR/point-at-type-on-obligation-failure.rs:13:5 + // | + // LL | type Ok; + // | -- associated type defined here + // LL | type Sibling: Bar2<Ok=Self::Ok>; + // | -------------------------------- obligation set here + // ... + // LL | impl Bar for Foo { + // | ---------------- in this `impl` item + // LL | type Ok = (); + // | ^^^^^^^^^^^^^ expected `u32`, found `()` + // ... + // LL | impl Bar2 for Foo2 { + // | ---------------- in this `impl` item + // LL | type Ok = u32; + // | -------------- obligation set here + // | + // = note: expected type `u32` + // found type `()` + if let Some(hir::ItemKind::Impl(.., impl_items)) = item.map(|i| &i.kind) { + let trait_assoc_item = tcx.associated_item(proj.projection_def_id()); + if let Some(impl_item) = impl_items + .iter() + .filter(|item| item.ident == trait_assoc_item.ident) + .next() + { + cause.span = impl_item.span; + cause.code = traits::AssocTypeBound(Box::new(AssocTypeBoundData { + impl_span: item_span, + original: trait_assoc_item.ident.span, + bounds: vec![], + })); + } } } - } - ty::Predicate::Trait(proj) => { - // An associated item obligation born out of the `trait` failed to be met. 
- // Point at the `impl` that failed the obligation, the associated item that - // needed to meet the obligation, and the definition of that associated item, - // which should hold the obligation in most cases. An example can be seen in - // `src/test/ui/associated-types/point-at-type-on-obligation-failure-2.rs`: - // - // error[E0277]: the trait bound `bool: Bar` is not satisfied - // --> $DIR/point-at-type-on-obligation-failure-2.rs:8:5 - // | - // LL | type Assoc: Bar; - // | ----- associated type defined here - // ... - // LL | impl Foo for () { - // | --------------- in this `impl` item - // LL | type Assoc = bool; - // | ^^^^^^^^^^^^^^^^^^ the trait `Bar` is not implemented for `bool` - // - // If the obligation comes from the where clause in the `trait`, we point at it: - // - // error[E0277]: the trait bound `bool: Bar` is not satisfied - // --> $DIR/point-at-type-on-obligation-failure-2.rs:8:5 - // | - // | trait Foo where <Self as Foo>>::Assoc: Bar { - // | -------------------------- restricted in this bound - // LL | type Assoc; - // | ----- associated type defined here - // ... - // LL | impl Foo for () { - // | --------------- in this `impl` item - // LL | type Assoc = bool; - // | ^^^^^^^^^^^^^^^^^^ the trait `Bar` is not implemented for `bool` - if let ( - ty::Projection(ty::ProjectionTy { item_def_id, .. }), - Some(hir::ItemKind::Impl(.., impl_items)), - ) = (&proj.skip_binder().self_ty().kind, item.map(|i| &i.kind)) { - if let Some((impl_item, trait_assoc_item)) = trait_assoc_items - .filter(|i| i.def_id == *item_def_id) - .next() - .and_then(|trait_assoc_item| impl_items.iter() - .filter(|i| i.ident == trait_assoc_item.ident) - .next() - .map(|impl_item| (impl_item, trait_assoc_item))) + ty::Predicate::Trait(proj) => { + // An associated item obligation born out of the `trait` failed to be met. + // Point at the `impl` that failed the obligation, the associated item that + // needed to meet the obligation, and the definition of that associated item, + // which should hold the obligation in most cases. An example can be seen in + // `src/test/ui/associated-types/point-at-type-on-obligation-failure-2.rs`: + // + // error[E0277]: the trait bound `bool: Bar` is not satisfied + // --> $DIR/point-at-type-on-obligation-failure-2.rs:8:5 + // | + // LL | type Assoc: Bar; + // | ----- associated type defined here + // ... + // LL | impl Foo for () { + // | --------------- in this `impl` item + // LL | type Assoc = bool; + // | ^^^^^^^^^^^^^^^^^^ the trait `Bar` is not implemented for `bool` + // + // If the obligation comes from the where clause in the `trait`, we point at it: + // + // error[E0277]: the trait bound `bool: Bar` is not satisfied + // --> $DIR/point-at-type-on-obligation-failure-2.rs:8:5 + // | + // | trait Foo where <Self as Foo>>::Assoc: Bar { + // | -------------------------- restricted in this bound + // LL | type Assoc; + // | ----- associated type defined here + // ... + // LL | impl Foo for () { + // | --------------- in this `impl` item + // LL | type Assoc = bool; + // | ^^^^^^^^^^^^^^^^^^ the trait `Bar` is not implemented for `bool` + if let ( + ty::Projection(ty::ProjectionTy { item_def_id, .. 
}), + Some(hir::ItemKind::Impl(.., impl_items)), + ) = (&proj.skip_binder().self_ty().kind, item.map(|i| &i.kind)) { - let bounds = trait_generics.map(|generics| get_generic_bound_spans( - &generics, - trait_name, - trait_assoc_item.ident, - )).unwrap_or_else(Vec::new); - cause.span = impl_item.span; - cause.code = traits::AssocTypeBound(Box::new(AssocTypeBoundData { - impl_span: item_span, - original: trait_assoc_item.ident.span, - bounds, - })); + if let Some((impl_item, trait_assoc_item)) = trait_assoc_items + .filter(|i| i.def_id == *item_def_id) + .next() + .and_then(|trait_assoc_item| { + impl_items + .iter() + .filter(|i| i.ident == trait_assoc_item.ident) + .next() + .map(|impl_item| (impl_item, trait_assoc_item)) + }) + { + let bounds = trait_generics + .map(|generics| { + get_generic_bound_spans( + &generics, + trait_name, + trait_assoc_item.ident, + ) + }) + .unwrap_or_else(Vec::new); + cause.span = impl_item.span; + cause.code = traits::AssocTypeBound(Box::new(AssocTypeBoundData { + impl_span: item_span, + original: trait_assoc_item.ident.span, + bounds, + })); + } } } + _ => {} } - _ => {} - } - }; + }; if let Elaborate::All = elaborate { let trait_assoc_items = tcx.associated_items(trait_ref.def_id); - let predicates = obligations.iter() - .map(|obligation| obligation.predicate.clone()) - .collect(); + let predicates = + obligations.iter().map(|obligation| obligation.predicate.clone()).collect(); let implied_obligations = traits::elaborate_predicates(tcx, predicates); let implied_obligations = implied_obligations.map(|pred| { let mut cause = cause.clone(); @@ -334,13 +334,9 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { self.out.extend(obligations); - self.out.extend(trait_ref.substs.types() - .filter(|ty| !ty.has_escaping_bound_vars()) - .map(|ty| traits::Obligation::new( - cause.clone(), - param_env, - ty::Predicate::WellFormed(ty), - ))); + self.out.extend(trait_ref.substs.types().filter(|ty| !ty.has_escaping_bound_vars()).map( + |ty| traits::Obligation::new(cause.clone(), param_env, ty::Predicate::WellFormed(ty)), + )); } /// Pushes the obligations required for `trait_ref::Item` to be WF @@ -368,9 +364,7 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { let predicate = ty::Predicate::ConstEvaluatable(def_id, substs); let cause = self.cause(traits::MiscObligation); - self.out.push(traits::Obligation::new(cause, - self.param_env, - predicate)); + self.out.push(traits::Obligation::new(cause, self.param_env, predicate)); } } @@ -394,19 +388,19 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { let param_env = self.param_env; while let Some(ty) = subtys.next() { match ty.kind { - ty::Bool | - ty::Char | - ty::Int(..) | - ty::Uint(..) | - ty::Float(..) | - ty::Error | - ty::Str | - ty::GeneratorWitness(..) | - ty::Never | - ty::Param(_) | - ty::Bound(..) | - ty::Placeholder(..) | - ty::Foreign(..) => { + ty::Bool + | ty::Char + | ty::Int(..) + | ty::Uint(..) + | ty::Float(..) + | ty::Error + | ty::Str + | ty::GeneratorWitness(..) + | ty::Never + | ty::Param(_) + | ty::Bound(..) + | ty::Placeholder(..) + | ty::Foreign(..) 
=> { // WfScalar, WfParameter, etc } @@ -453,13 +447,13 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { // WfReference if !r.has_escaping_bound_vars() && !rty.has_escaping_bound_vars() { let cause = self.cause(traits::ReferenceOutlivesReferent(ty)); - self.out.push( - traits::Obligation::new( - cause, - param_env, - ty::Predicate::TypeOutlives( - ty::Binder::dummy( - ty::OutlivesPredicate(rty, r))))); + self.out.push(traits::Obligation::new( + cause, + param_env, + ty::Predicate::TypeOutlives(ty::Binder::dummy(ty::OutlivesPredicate( + rty, r, + ))), + )); } } @@ -537,20 +531,18 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { // obligations that don't refer to Self and // checking those - let defer_to_coercion = - self.infcx.tcx.features().object_safe_for_dispatch; + let defer_to_coercion = self.infcx.tcx.features().object_safe_for_dispatch; if !defer_to_coercion { let cause = self.cause(traits::MiscObligation); - let component_traits = - data.auto_traits().chain(data.principal_def_id()); - self.out.extend( - component_traits.map(|did| traits::Obligation::new( + let component_traits = data.auto_traits().chain(data.principal_def_id()); + self.out.extend(component_traits.map(|did| { + traits::Obligation::new( cause.clone(), param_env, - ty::Predicate::ObjectSafe(did) - )) - ); + ty::Predicate::ObjectSafe(did), + ) + })); } } @@ -569,16 +561,22 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { // is satisfied to ensure termination.) ty::Infer(_) => { let ty = self.infcx.shallow_resolve(ty); - if let ty::Infer(_) = ty.kind { // not yet resolved... - if ty == ty0 { // ...this is the type we started from! no progress. + if let ty::Infer(_) = ty.kind { + // not yet resolved... + if ty == ty0 { + // ...this is the type we started from! no progress. return false; } let cause = self.cause(traits::MiscObligation); - self.out.push( // ...not the type we started from, so we made progress. - traits::Obligation::new(cause, - self.param_env, - ty::Predicate::WellFormed(ty))); + self.out.push( + // ...not the type we started from, so we made progress. + traits::Obligation::new( + cause, + self.param_env, + ty::Predicate::WellFormed(ty), + ), + ); } else { // Yes, resolved, proceed with the // result. 
Should never return false because @@ -593,27 +591,27 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { return true; } - fn nominal_obligations(&mut self, - def_id: DefId, - substs: SubstsRef<'tcx>) - -> Vec<traits::PredicateObligation<'tcx>> - { - let predicates = - self.infcx.tcx.predicates_of(def_id) - .instantiate(self.infcx.tcx, substs); + fn nominal_obligations( + &mut self, + def_id: DefId, + substs: SubstsRef<'tcx>, + ) -> Vec<traits::PredicateObligation<'tcx>> { + let predicates = self.infcx.tcx.predicates_of(def_id).instantiate(self.infcx.tcx, substs); let cause = self.cause(traits::ItemObligation(def_id)); - predicates.predicates - .into_iter() - .map(|pred| traits::Obligation::new(cause.clone(), - self.param_env, - pred)) - .filter(|pred| !pred.has_escaping_bound_vars()) - .collect() + predicates + .predicates + .into_iter() + .map(|pred| traits::Obligation::new(cause.clone(), self.param_env, pred)) + .filter(|pred| !pred.has_escaping_bound_vars()) + .collect() } - fn from_object_ty(&mut self, ty: Ty<'tcx>, - data: ty::Binder<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>, - region: ty::Region<'tcx>) { + fn from_object_ty( + &mut self, + ty: Ty<'tcx>, + data: ty::Binder<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>, + region: ty::Region<'tcx>, + ) { // Imagine a type like this: // // trait Foo { } @@ -646,19 +644,20 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { // Note: in fact we only permit builtin traits, not `Bar<'d>`, I // am looking forward to the future here. if !data.has_escaping_bound_vars() && !region.has_escaping_bound_vars() { - let implicit_bounds = - object_region_bounds(self.infcx.tcx, data); + let implicit_bounds = object_region_bounds(self.infcx.tcx, data); let explicit_bound = region; self.out.reserve(implicit_bounds.len()); for implicit_bound in implicit_bounds { let cause = self.cause(traits::ObjectTypeBound(ty, explicit_bound)); - let outlives = ty::Binder::dummy( - ty::OutlivesPredicate(explicit_bound, implicit_bound)); - self.out.push(traits::Obligation::new(cause, - self.param_env, - outlives.to_predicate())); + let outlives = + ty::Binder::dummy(ty::OutlivesPredicate(explicit_bound, implicit_bound)); + self.out.push(traits::Obligation::new( + cause, + self.param_env, + outlives.to_predicate(), + )); } } } @@ -679,13 +678,16 @@ pub fn object_region_bounds<'tcx>( // a placeholder type. 
let open_ty = tcx.mk_ty_infer(ty::FreshTy(0)); - let predicates = existential_predicates.iter().filter_map(|predicate| { - if let ty::ExistentialPredicate::Projection(_) = *predicate.skip_binder() { - None - } else { - Some(predicate.with_self_ty(tcx, open_ty)) - } - }).collect(); + let predicates = existential_predicates + .iter() + .filter_map(|predicate| { + if let ty::ExistentialPredicate::Projection(_) = *predicate.skip_binder() { + None + } else { + Some(predicate.with_self_ty(tcx, open_ty)) + } + }) + .collect(); tcx.required_region_bounds(open_ty, predicates) } diff --git a/src/librustc_codegen_llvm/back/write.rs b/src/librustc_codegen_llvm/back/write.rs index 796ea7aac36..818d09ce691 100644 --- a/src/librustc_codegen_llvm/back/write.rs +++ b/src/librustc_codegen_llvm/back/write.rs @@ -2,38 +2,38 @@ use crate::attributes; use crate::back::bytecode; use crate::back::lto::ThinBuffer; use crate::base; +use crate::common; use crate::consts; +use crate::context::{get_reloc_model, is_pie_binary}; use crate::llvm::{self, DiagnosticInfo, PassManager, SMDiagnostic}; use crate::llvm_util; -use crate::ModuleLlvm; use crate::type_::Type; -use crate::context::{is_pie_binary, get_reloc_model}; -use crate::common; use crate::LlvmCodegenBackend; +use crate::ModuleLlvm; +use log::debug; use rustc::bug; use rustc::hir::def_id::LOCAL_CRATE; -use rustc_codegen_ssa::back::write::{CodegenContext, ModuleConfig, run_assembler}; -use rustc_codegen_ssa::traits::*; -use rustc::session::config::{self, OutputType, Passes, Lto, Sanitizer, SwitchWithOptPath}; +use rustc::session::config::{self, Lto, OutputType, Passes, Sanitizer, SwitchWithOptPath}; use rustc::session::Session; use rustc::ty::TyCtxt; -use rustc_codegen_ssa::{RLIB_BYTECODE_EXTENSION, ModuleCodegen, CompiledModule}; use rustc::util::common::time_ext; -use rustc_fs_util::{path_to_c_string, link_or_copy}; +use rustc_codegen_ssa::back::write::{run_assembler, CodegenContext, ModuleConfig}; +use rustc_codegen_ssa::traits::*; +use rustc_codegen_ssa::{CompiledModule, ModuleCodegen, RLIB_BYTECODE_EXTENSION}; use rustc_data_structures::small_c_str::SmallCStr; -use rustc_errors::{Handler, FatalError}; -use log::debug; +use rustc_errors::{FatalError, Handler}; +use rustc_fs_util::{link_or_copy, path_to_c_string}; +use libc::{c_char, c_int, c_uint, c_void, size_t}; use std::ffi::CString; use std::fs; use std::io::{self, Write}; use std::path::{Path, PathBuf}; +use std::slice; use std::str; use std::sync::Arc; -use std::slice; -use libc::{c_int, c_uint, c_void, c_char, size_t}; -pub const RELOC_MODEL_ARGS : [(&str, llvm::RelocMode); 7] = [ +pub const RELOC_MODEL_ARGS: [(&str, llvm::RelocMode); 7] = [ ("pic", llvm::RelocMode::PIC), ("static", llvm::RelocMode::Static), ("default", llvm::RelocMode::Default), @@ -50,7 +50,7 @@ pub const CODE_GEN_MODEL_ARGS: &[(&str, llvm::CodeModel)] = &[ ("large", llvm::CodeModel::Large), ]; -pub const TLS_MODEL_ARGS : [(&str, llvm::ThreadLocalMode); 4] = [ +pub const TLS_MODEL_ARGS: [(&str, llvm::ThreadLocalMode); 4] = [ ("global-dynamic", llvm::ThreadLocalMode::GeneralDynamic), ("local-dynamic", llvm::ThreadLocalMode::LocalDynamic), ("initial-exec", llvm::ThreadLocalMode::InitialExec), @@ -65,12 +65,13 @@ pub fn llvm_err(handler: &rustc_errors::Handler, msg: &str) -> FatalError { } pub fn write_output_file( - handler: &rustc_errors::Handler, - target: &'ll llvm::TargetMachine, - pm: &llvm::PassManager<'ll>, - m: &'ll llvm::Module, - output: &Path, - file_type: llvm::FileType) -> Result<(), FatalError> { + handler: 
&rustc_errors::Handler, + target: &'ll llvm::TargetMachine, + pm: &llvm::PassManager<'ll>, + m: &'ll llvm::Module, + output: &Path, + file_type: llvm::FileType, +) -> Result<(), FatalError> { unsafe { let output_c = path_to_c_string(output); let result = llvm::LLVMRustWriteOutputFile(target, pm, m, output_c.as_ptr(), file_type); @@ -85,9 +86,8 @@ pub fn create_informational_target_machine( sess: &Session, find_features: bool, ) -> &'static mut llvm::TargetMachine { - target_machine_factory(sess, config::OptLevel::No, find_features)().unwrap_or_else(|err| { - llvm_err(sess.diagnostic(), &err).raise() - }) + target_machine_factory(sess, config::OptLevel::No, find_features)() + .unwrap_or_else(|err| llvm_err(sess.diagnostic(), &err).raise()) } pub fn create_target_machine( @@ -95,13 +95,12 @@ pub fn create_target_machine( find_features: bool, ) -> &'static mut llvm::TargetMachine { target_machine_factory(&tcx.sess, tcx.backend_optimization_level(LOCAL_CRATE), find_features)() - .unwrap_or_else(|err| { - llvm_err(tcx.sess.diagnostic(), &err).raise() - }) + .unwrap_or_else(|err| llvm_err(tcx.sess.diagnostic(), &err).raise()) } -pub fn to_llvm_opt_settings(cfg: config::OptLevel) -> (llvm::CodeGenOptLevel, llvm::CodeGenOptSize) -{ +pub fn to_llvm_opt_settings( + cfg: config::OptLevel, +) -> (llvm::CodeGenOptLevel, llvm::CodeGenOptSize) { use self::config::OptLevel::*; match cfg { No => (llvm::CodeGenOptLevel::None, llvm::CodeGenOptSizeNone), @@ -116,9 +115,11 @@ pub fn to_llvm_opt_settings(cfg: config::OptLevel) -> (llvm::CodeGenOptLevel, ll // If find_features is true this won't access `sess.crate_types` by assuming // that `is_pie_binary` is false. When we discover LLVM target features // `sess.crate_types` is uninitialized so we cannot access it. -pub fn target_machine_factory(sess: &Session, optlvl: config::OptLevel, find_features: bool) - -> Arc<dyn Fn() -> Result<&'static mut llvm::TargetMachine, String> + Send + Sync> -{ +pub fn target_machine_factory( + sess: &Session, + optlvl: config::OptLevel, + find_features: bool, +) -> Arc<dyn Fn() -> Result<&'static mut llvm::TargetMachine, String> + Send + Sync> { let reloc_model = get_reloc_model(sess); let (opt_level, _) = to_llvm_opt_settings(optlvl); @@ -127,22 +128,18 @@ pub fn target_machine_factory(sess: &Session, optlvl: config::OptLevel, find_fea let ffunction_sections = sess.target.target.options.function_sections; let fdata_sections = ffunction_sections; - let code_model_arg = sess.opts.cg.code_model.as_ref().or( - sess.target.target.options.code_model.as_ref(), - ); + let code_model_arg = + sess.opts.cg.code_model.as_ref().or(sess.target.target.options.code_model.as_ref()); let code_model = match code_model_arg { - Some(s) => { - match CODE_GEN_MODEL_ARGS.iter().find(|arg| arg.0 == s) { - Some(x) => x.1, - _ => { - sess.err(&format!("{:?} is not a valid code model", - code_model_arg)); - sess.abort_if_errors(); - bug!(); - } + Some(s) => match CODE_GEN_MODEL_ARGS.iter().find(|arg| arg.0 == s) { + Some(x) => x.1, + _ => { + sess.err(&format!("{:?} is not a valid code model", code_model_arg)); + sess.abort_if_errors(); + bug!(); } - } + }, None => llvm::CodeModel::None, }; @@ -152,9 +149,9 @@ pub fn target_machine_factory(sess: &Session, optlvl: config::OptLevel, find_fea // On the wasm target once the `atomics` feature is enabled that means that // we're no longer single-threaded, or otherwise we don't want LLVM to // lower atomic operations to single-threaded operations. 
- if singlethread && - sess.target.target.llvm_target.contains("wasm32") && - features.iter().any(|s| *s == "+atomics") + if singlethread + && sess.target.target.llvm_target.contains("wasm32") + && features.iter().any(|s| *s == "+atomics") { singlethread = false; } @@ -173,7 +170,10 @@ pub fn target_machine_factory(sess: &Session, optlvl: config::OptLevel, find_fea Arc::new(move || { let tm = unsafe { llvm::LLVMRustCreateTargetMachine( - triple.as_ptr(), cpu.as_ptr(), features.as_ptr(), abi.as_ptr(), + triple.as_ptr(), + cpu.as_ptr(), + features.as_ptr(), + abi.as_ptr(), code_model, reloc_model, opt_level, @@ -190,8 +190,7 @@ pub fn target_machine_factory(sess: &Session, optlvl: config::OptLevel, find_fea }; tm.ok_or_else(|| { - format!("Could not create LLVM TargetMachine for triple: {}", - triple.to_str().unwrap()) + format!("Could not create LLVM TargetMachine for triple: {}", triple.to_str().unwrap()) }) }) } @@ -199,10 +198,10 @@ pub fn target_machine_factory(sess: &Session, optlvl: config::OptLevel, find_fea pub(crate) fn save_temp_bitcode( cgcx: &CodegenContext<LlvmCodegenBackend>, module: &ModuleCodegen<ModuleLlvm>, - name: &str + name: &str, ) { if !cgcx.save_temps { - return + return; } unsafe { let ext = format!("{}.bc", name); @@ -220,9 +219,11 @@ pub struct DiagnosticHandlers<'a> { } impl<'a> DiagnosticHandlers<'a> { - pub fn new(cgcx: &'a CodegenContext<LlvmCodegenBackend>, - handler: &'a Handler, - llcx: &'a llvm::Context) -> Self { + pub fn new( + cgcx: &'a CodegenContext<LlvmCodegenBackend>, + handler: &'a Handler, + llcx: &'a llvm::Context, + ) -> Self { let data = Box::into_raw(Box::new((cgcx, handler))); unsafe { llvm::LLVMRustSetInlineAsmDiagnosticHandler(llcx, inline_asm_handler, data.cast()); @@ -243,17 +244,17 @@ impl<'a> Drop for DiagnosticHandlers<'a> { } } -unsafe extern "C" fn report_inline_asm(cgcx: &CodegenContext<LlvmCodegenBackend>, - msg: &str, - cookie: c_uint) { +unsafe extern "C" fn report_inline_asm( + cgcx: &CodegenContext<LlvmCodegenBackend>, + msg: &str, + cookie: c_uint, +) { cgcx.diag_emitter.inline_asm_error(cookie as u32, msg.to_owned()); } -unsafe extern "C" fn inline_asm_handler(diag: &SMDiagnostic, - user: *const c_void, - cookie: c_uint) { +unsafe extern "C" fn inline_asm_handler(diag: &SMDiagnostic, user: *const c_void, cookie: c_uint) { if user.is_null() { - return + return; } let (cgcx, _) = *(user as *const (&CodegenContext<LlvmCodegenBackend>, &Handler)); @@ -265,15 +266,13 @@ unsafe extern "C" fn inline_asm_handler(diag: &SMDiagnostic, unsafe extern "C" fn diagnostic_handler(info: &DiagnosticInfo, user: *mut c_void) { if user.is_null() { - return + return; } let (cgcx, diag_handler) = *(user as *const (&CodegenContext<LlvmCodegenBackend>, &Handler)); match llvm::diagnostic::Diagnostic::unpack(info) { llvm::diagnostic::InlineAsm(inline) => { - report_inline_asm(cgcx, - &llvm::twine_to_string(inline.message), - inline.cookie); + report_inline_asm(cgcx, &llvm::twine_to_string(inline.message), inline.cookie); } llvm::diagnostic::Optimization(opt) => { @@ -283,33 +282,35 @@ unsafe extern "C" fn diagnostic_handler(info: &DiagnosticInfo, user: *mut c_void }; if enabled { - diag_handler.note_without_error(&format!("optimization {} for {} at {}:{}:{}: {}", - opt.kind.describe(), - opt.pass_name, - opt.filename, - opt.line, - opt.column, - opt.message)); + diag_handler.note_without_error(&format!( + "optimization {} for {} at {}:{}:{}: {}", + opt.kind.describe(), + opt.pass_name, + opt.filename, + opt.line, + opt.column, + opt.message + )); } } - 
llvm::diagnostic::PGO(diagnostic_ref) | - llvm::diagnostic::Linker(diagnostic_ref) => { + llvm::diagnostic::PGO(diagnostic_ref) | llvm::diagnostic::Linker(diagnostic_ref) => { let msg = llvm::build_string(|s| { llvm::LLVMRustWriteDiagnosticInfoToString(diagnostic_ref, s) - }).expect("non-UTF8 diagnostic"); + }) + .expect("non-UTF8 diagnostic"); diag_handler.warn(&msg); } - llvm::diagnostic::UnknownDiagnostic(..) => {}, + llvm::diagnostic::UnknownDiagnostic(..) => {} } } // Unsafe due to LLVM calls. -pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>, - diag_handler: &Handler, - module: &ModuleCodegen<ModuleLlvm>, - config: &ModuleConfig) - -> Result<(), FatalError> -{ +pub(crate) unsafe fn optimize( + cgcx: &CodegenContext<LlvmCodegenBackend>, + diag_handler: &Handler, + module: &ModuleCodegen<ModuleLlvm>, + config: &ModuleConfig, +) -> Result<(), FatalError> { let _timer = cgcx.prof.generic_activity("LLVM_module_optimize"); let llmod = module.module_llvm.llmod(); @@ -376,11 +377,15 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>, llvm::LLVMAddAnalysisPasses(tm, fpm); llvm::LLVMAddAnalysisPasses(tm, mpm); let opt_level = to_llvm_opt_settings(opt_level).0; - let prepare_for_thin_lto = cgcx.lto == Lto::Thin || cgcx.lto == Lto::ThinLocal || - (cgcx.lto != Lto::Fat && cgcx.opts.cg.linker_plugin_lto.enabled()); + let prepare_for_thin_lto = cgcx.lto == Lto::Thin + || cgcx.lto == Lto::ThinLocal + || (cgcx.lto != Lto::Fat && cgcx.opts.cg.linker_plugin_lto.enabled()); with_llvm_pmb(llmod, &config, opt_level, prepare_for_thin_lto, &mut |b| { llvm::LLVMRustAddLastExtensionPasses( - b, extra_passes.as_ptr(), extra_passes.len() as size_t); + b, + extra_passes.as_ptr(), + extra_passes.len() as size_t, + ); llvm::LLVMPassManagerBuilderPopulateFunctionPassManager(b, fpm); llvm::LLVMPassManagerBuilderPopulateModulePassManager(b, mpm); }); @@ -401,13 +406,17 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>, if using_thin_buffers && !have_name_anon_globals_pass { // As described above, this will probably cause an error in LLVM if config.no_prepopulate_passes { - diag_handler.err("The current compilation is going to use thin LTO buffers \ + diag_handler.err( + "The current compilation is going to use thin LTO buffers \ without running LLVM's NameAnonGlobals pass. \ This will likely cause errors in LLVM. Consider adding \ - -C passes=name-anon-globals to the compiler command line."); + -C passes=name-anon-globals to the compiler command line.", + ); } else { - bug!("We are using thin LTO buffers without running the NameAnonGlobals pass. \ - This will likely cause errors in LLVM and should never happen."); + bug!( + "We are using thin LTO buffers without running the NameAnonGlobals pass. \ + This will likely cause errors in LLVM and should never happen." 
+ ); } } } @@ -417,19 +426,19 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>, // Finally, run the actual optimization passes { let _timer = cgcx.prof.generic_activity("LLVM_module_optimize_function_passes"); - time_ext(config.time_passes, - &format!("llvm function passes [{}]", module_name.unwrap()), - || { - llvm::LLVMRustRunFunctionPassManager(fpm, llmod) - }); + time_ext( + config.time_passes, + &format!("llvm function passes [{}]", module_name.unwrap()), + || llvm::LLVMRustRunFunctionPassManager(fpm, llmod), + ); } { let _timer = cgcx.prof.generic_activity("LLVM_module_optimize_module_passes"); - time_ext(config.time_passes, - &format!("llvm module passes [{}]", module_name.unwrap()), - || { - llvm::LLVMRunPassManager(mpm, llmod) - }); + time_ext( + config.time_passes, + &format!("llvm module passes [{}]", module_name.unwrap()), + || llvm::LLVMRunPassManager(mpm, llmod), + ); } // Deallocate managers that we're now done with @@ -439,9 +448,7 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>, Ok(()) } -unsafe fn add_sanitizer_passes(config: &ModuleConfig, - passes: &mut Vec<&'static mut llvm::Pass>) { - +unsafe fn add_sanitizer_passes(config: &ModuleConfig, passes: &mut Vec<&'static mut llvm::Pass>) { let sanitizer = match &config.sanitizer { None => return, Some(s) => s, @@ -464,12 +471,12 @@ unsafe fn add_sanitizer_passes(config: &ModuleConfig, } } -pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>, - diag_handler: &Handler, - module: ModuleCodegen<ModuleLlvm>, - config: &ModuleConfig) - -> Result<CompiledModule, FatalError> -{ +pub(crate) unsafe fn codegen( + cgcx: &CodegenContext<LlvmCodegenBackend>, + diag_handler: &Handler, + module: ModuleCodegen<ModuleLlvm>, + config: &ModuleConfig, +) -> Result<CompiledModule, FatalError> { let _timer = cgcx.prof.generic_activity("LLVM_module_codegen"); { let llmod = module.module_llvm.llmod(); @@ -491,11 +498,14 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>, // pass manager passed to the closure should be ensured to not // escape the closure itself, and the manager should only be // used once. 
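// A minimal sketch of the create-use-dispose pattern the comment above describes: the
// resource is built, lent to a closure exactly once, and dropped before the helper returns,
// so it cannot escape the closure. `Scratch` is a hypothetical stand-in for illustration,
// not the LLVM codegen pass manager itself.
struct Scratch(Vec<u8>);

fn with_scratch<R>(f: impl FnOnce(&mut Scratch) -> R) -> R {
    let mut scratch = Scratch(Vec::new()); // create
    let result = f(&mut scratch);          // use once; the borrow cannot outlive this call
    drop(scratch);                         // dispose before returning
    result
}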
- unsafe fn with_codegen<'ll, F, R>(tm: &'ll llvm::TargetMachine, - llmod: &'ll llvm::Module, - no_builtins: bool, - f: F) -> R - where F: FnOnce(&'ll mut PassManager<'ll>) -> R, + unsafe fn with_codegen<'ll, F, R>( + tm: &'ll llvm::TargetMachine, + llmod: &'ll llvm::Module, + no_builtins: bool, + f: F, + ) -> R + where + F: FnOnce(&'ll mut PassManager<'ll>) -> R, { let cpm = llvm::LLVMCreatePassManager(); llvm::LLVMAddAnalysisPasses(tm, cpm); @@ -519,7 +529,6 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>, let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name); let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name); - if write_bc || config.emit_bc_compressed || config.embed_bitcode { let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_make_bitcode"); let thin = ThinBuffer::new(llmod); @@ -552,88 +561,103 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>, embed_bitcode(cgcx, llcx, llmod, None); } - time_ext(config.time_passes, &format!("codegen passes [{}]", module_name.unwrap()), + time_ext( + config.time_passes, + &format!("codegen passes [{}]", module_name.unwrap()), || -> Result<(), FatalError> { - if config.emit_ir { - let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_ir"); - let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name); - let out_c = path_to_c_string(&out); - - extern "C" fn demangle_callback(input_ptr: *const c_char, - input_len: size_t, - output_ptr: *mut c_char, - output_len: size_t) -> size_t { - let input = unsafe { - slice::from_raw_parts(input_ptr as *const u8, input_len as usize) - }; - - let input = match str::from_utf8(input) { - Ok(s) => s, - Err(_) => return 0, - }; - - let output = unsafe { - slice::from_raw_parts_mut(output_ptr as *mut u8, output_len as usize) - }; - let mut cursor = io::Cursor::new(output); - - let demangled = match rustc_demangle::try_demangle(input) { - Ok(d) => d, - Err(_) => return 0, - }; - - if let Err(_) = write!(cursor, "{:#}", demangled) { - // Possible only if provided buffer is not big enough - return 0; + if config.emit_ir { + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_ir"); + let out = + cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name); + let out_c = path_to_c_string(&out); + + extern "C" fn demangle_callback( + input_ptr: *const c_char, + input_len: size_t, + output_ptr: *mut c_char, + output_len: size_t, + ) -> size_t { + let input = unsafe { + slice::from_raw_parts(input_ptr as *const u8, input_len as usize) + }; + + let input = match str::from_utf8(input) { + Ok(s) => s, + Err(_) => return 0, + }; + + let output = unsafe { + slice::from_raw_parts_mut(output_ptr as *mut u8, output_len as usize) + }; + let mut cursor = io::Cursor::new(output); + + let demangled = match rustc_demangle::try_demangle(input) { + Ok(d) => d, + Err(_) => return 0, + }; + + if let Err(_) = write!(cursor, "{:#}", demangled) { + // Possible only if provided buffer is not big enough + return 0; + } + + cursor.position() as size_t } - cursor.position() as size_t + let result = + llvm::LLVMRustPrintModule(llmod, out_c.as_ptr(), demangle_callback); + result.into_result().map_err(|()| { + let msg = format!("failed to write LLVM IR to {}", out.display()); + llvm_err(diag_handler, &msg) + })?; } - let result = - llvm::LLVMRustPrintModule(llmod, out_c.as_ptr(), demangle_callback); - result.into_result().map_err(|()| { - let msg = format!("failed to write LLVM IR to {}", 
out.display()); - llvm_err(diag_handler, &msg) - })?; - } - - if config.emit_asm || asm_to_obj { - let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_asm"); - let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); - - // We can't use the same module for asm and binary output, because that triggers - // various errors like invalid IR or broken binaries, so we might have to clone the - // module to produce the asm output - let llmod = if config.emit_obj { - llvm::LLVMCloneModule(llmod) - } else { - llmod - }; - with_codegen(tm, llmod, config.no_builtins, |cpm| { - write_output_file(diag_handler, tm, cpm, llmod, &path, - llvm::FileType::AssemblyFile) - })?; - } + if config.emit_asm || asm_to_obj { + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_asm"); + let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); + + // We can't use the same module for asm and binary output, because that triggers + // various errors like invalid IR or broken binaries, so we might have to clone the + // module to produce the asm output + let llmod = if config.emit_obj { llvm::LLVMCloneModule(llmod) } else { llmod }; + with_codegen(tm, llmod, config.no_builtins, |cpm| { + write_output_file( + diag_handler, + tm, + cpm, + llmod, + &path, + llvm::FileType::AssemblyFile, + ) + })?; + } - if write_obj { - let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_obj"); - with_codegen(tm, llmod, config.no_builtins, |cpm| { - write_output_file(diag_handler, tm, cpm, llmod, &obj_out, - llvm::FileType::ObjectFile) - })?; - } else if asm_to_obj { - let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_asm_to_obj"); - let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); - run_assembler(cgcx, diag_handler, &assembly, &obj_out); - - if !config.emit_asm && !cgcx.save_temps { - drop(fs::remove_file(&assembly)); + if write_obj { + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_obj"); + with_codegen(tm, llmod, config.no_builtins, |cpm| { + write_output_file( + diag_handler, + tm, + cpm, + llmod, + &obj_out, + llvm::FileType::ObjectFile, + ) + })?; + } else if asm_to_obj { + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_asm_to_obj"); + let assembly = + cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); + run_assembler(cgcx, diag_handler, &assembly, &obj_out); + + if !config.emit_asm && !cgcx.save_temps { + drop(fs::remove_file(&assembly)); + } } - } - Ok(()) - })?; + Ok(()) + }, + )?; if copy_bc_to_obj { debug!("copying bitcode {:?} to obj {:?}", bc_out, obj_out); @@ -651,10 +675,12 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>, drop(handlers); } - Ok(module.into_compiled_module(config.emit_obj, - config.emit_bc, - config.emit_bc_compressed, - &cgcx.output_filenames)) + Ok(module.into_compiled_module( + config.emit_obj, + config.emit_bc, + config.emit_bc_compressed, + &cgcx.output_filenames, + )) } /// Embed the bitcode of an LLVM module in the LLVM module itself. @@ -675,10 +701,12 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>, /// /// Basically all of this is us attempting to follow in the footsteps of clang /// on iOS. See #35968 for lots more info. 
-unsafe fn embed_bitcode(cgcx: &CodegenContext<LlvmCodegenBackend>, - llcx: &llvm::Context, - llmod: &llvm::Module, - bitcode: Option<&[u8]>) { +unsafe fn embed_bitcode( + cgcx: &CodegenContext<LlvmCodegenBackend>, + llcx: &llvm::Context, + llmod: &llvm::Module, + bitcode: Option<&[u8]>, +) { let llconst = common::bytes_in_context(llcx, bitcode.unwrap_or(&[])); let llglobal = llvm::LLVMAddGlobal( llmod, @@ -687,14 +715,10 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext<LlvmCodegenBackend>, ); llvm::LLVMSetInitializer(llglobal, llconst); - let is_apple = cgcx.opts.target_triple.triple().contains("-ios") || - cgcx.opts.target_triple.triple().contains("-darwin"); + let is_apple = cgcx.opts.target_triple.triple().contains("-ios") + || cgcx.opts.target_triple.triple().contains("-darwin"); - let section = if is_apple { - "__LLVM,__bitcode\0" - } else { - ".llvmbc\0" - }; + let section = if is_apple { "__LLVM,__bitcode\0" } else { ".llvmbc\0" }; llvm::LLVMSetSection(llglobal, section.as_ptr().cast()); llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage); llvm::LLVMSetGlobalConstant(llglobal, llvm::True); @@ -706,28 +730,26 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext<LlvmCodegenBackend>, "rustc.embedded.cmdline\0".as_ptr().cast(), ); llvm::LLVMSetInitializer(llglobal, llconst); - let section = if is_apple { - "__LLVM,__cmdline\0" - } else { - ".llvmcmd\0" - }; + let section = if is_apple { "__LLVM,__cmdline\0" } else { ".llvmcmd\0" }; llvm::LLVMSetSection(llglobal, section.as_ptr().cast()); llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage); } -pub unsafe fn with_llvm_pmb(llmod: &llvm::Module, - config: &ModuleConfig, - opt_level: llvm::CodeGenOptLevel, - prepare_for_thin_lto: bool, - f: &mut dyn FnMut(&llvm::PassManagerBuilder)) { +pub unsafe fn with_llvm_pmb( + llmod: &llvm::Module, + config: &ModuleConfig, + opt_level: llvm::CodeGenOptLevel, + prepare_for_thin_lto: bool, + f: &mut dyn FnMut(&llvm::PassManagerBuilder), +) { use std::ptr; // Create the PassManagerBuilder for LLVM. We configure it with // reasonable defaults and prepare it to actually populate the pass // manager. let builder = llvm::LLVMPassManagerBuilderCreate(); - let opt_size = config.opt_size.map(|x| to_llvm_opt_settings(x).1) - .unwrap_or(llvm::CodeGenOptSizeNone); + let opt_size = + config.opt_size.map(|x| to_llvm_opt_settings(x).1).unwrap_or(llvm::CodeGenOptSizeNone); let inline_threshold = config.inline_threshold; let pgo_gen_path = match config.pgo_gen { @@ -740,14 +762,13 @@ pub unsafe fn with_llvm_pmb(llmod: &llvm::Module, Some(CString::new(format!("{}", path.display())).unwrap()) } - SwitchWithOptPath::Disabled => { - None - } + SwitchWithOptPath::Disabled => None, }; - let pgo_use_path = config.pgo_use.as_ref().map(|path_buf| { - CString::new(path_buf.to_string_lossy().as_bytes()).unwrap() - }); + let pgo_use_path = config + .pgo_use + .as_ref() + .map(|path_buf| CString::new(path_buf.to_string_lossy().as_bytes()).unwrap()); llvm::LLVMRustConfigurePassManagerBuilder( builder, @@ -794,9 +815,7 @@ pub unsafe fn with_llvm_pmb(llmod: &llvm::Module, (llvm::CodeGenOptLevel::Default, ..) => { llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 225); } - (llvm::CodeGenOptLevel::Other, ..) => { - bug!("CodeGenOptLevel::Other selected") - } + (llvm::CodeGenOptLevel::Other, ..) 
=> bug!("CodeGenOptLevel::Other selected"), } f(builder); @@ -811,36 +830,28 @@ pub unsafe fn with_llvm_pmb(llmod: &llvm::Module, fn create_msvc_imps( cgcx: &CodegenContext<LlvmCodegenBackend>, llcx: &llvm::Context, - llmod: &llvm::Module + llmod: &llvm::Module, ) { if !cgcx.msvc_imps_needed { - return + return; } // The x86 ABI seems to require that leading underscores are added to symbol // names, so we need an extra underscore on x86. There's also a leading // '\x01' here which disables LLVM's symbol mangling (e.g., no extra // underscores added in front). - let prefix = if cgcx.target_arch == "x86" { - "\x01__imp__" - } else { - "\x01__imp_" - }; + let prefix = if cgcx.target_arch == "x86" { "\x01__imp__" } else { "\x01__imp_" }; unsafe { let i8p_ty = Type::i8p_llcx(llcx); let globals = base::iter_globals(llmod) .filter(|&val| { - llvm::LLVMRustGetLinkage(val) == llvm::Linkage::ExternalLinkage && - llvm::LLVMIsDeclaration(val) == 0 + llvm::LLVMRustGetLinkage(val) == llvm::Linkage::ExternalLinkage + && llvm::LLVMIsDeclaration(val) == 0 }) .filter_map(|val| { // Exclude some symbols that we know are not Rust symbols. let name = llvm::get_value_name(val); - if ignored(name) { - None - } else { - Some((val, name)) - } + if ignored(name) { None } else { Some((val, name)) } }) .map(move |(val, name)| { let mut imp_name = prefix.as_bytes().to_vec(); @@ -851,9 +862,7 @@ fn create_msvc_imps( .collect::<Vec<_>>(); for (imp_name, val) in globals { - let imp = llvm::LLVMAddGlobal(llmod, - i8p_ty, - imp_name.as_ptr().cast()); + let imp = llvm::LLVMAddGlobal(llmod, i8p_ty, imp_name.as_ptr().cast()); llvm::LLVMSetInitializer(imp, consts::ptrcast(val, i8p_ty)); llvm::LLVMRustSetLinkage(imp, llvm::Linkage::ExternalLinkage); } diff --git a/src/librustc_codegen_llvm/consts.rs b/src/librustc_codegen_llvm/consts.rs index 78a86d33a14..12c44513159 100644 --- a/src/librustc_codegen_llvm/consts.rs +++ b/src/librustc_codegen_llvm/consts.rs @@ -1,27 +1,26 @@ -use crate::llvm::{self, SetUnnamedAddr, True}; -use crate::debuginfo; -use crate::common::CodegenCx; use crate::base; +use crate::common::CodegenCx; +use crate::debuginfo; +use crate::llvm::{self, SetUnnamedAddr, True}; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; use crate::value::Value; use libc::c_uint; +use log::debug; use rustc::hir::def_id::DefId; -use rustc::mir::interpret::{ConstValue, Allocation, read_target_uint, - Pointer, ErrorHandled}; -use rustc::mir::mono::MonoItem; use rustc::hir::Node; -use rustc_target::abi::HasDataLayout; -use rustc::ty::{self, Ty, Instance}; +use rustc::mir::interpret::{read_target_uint, Allocation, ConstValue, ErrorHandled, Pointer}; +use rustc::mir::mono::MonoItem; +use rustc::ty::{self, Instance, Ty}; +use rustc::{bug, span_bug}; use rustc_codegen_ssa::traits::*; -use syntax::symbol::{Symbol, sym}; +use rustc_target::abi::HasDataLayout; +use syntax::symbol::{sym, Symbol}; use syntax_pos::Span; -use rustc::{bug, span_bug}; -use log::debug; -use rustc::ty::layout::{self, Size, Align, LayoutOf}; +use rustc::ty::layout::{self, Align, LayoutOf, Size}; -use rustc::hir::{self, CodegenFnAttrs, CodegenFnAttrFlags}; +use rustc::hir::{self, CodegenFnAttrFlags, CodegenFnAttrs}; use std::ffi::CStr; @@ -51,14 +50,13 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll // affect interpreter execution (we inspect the result after interpreter execution), // and we properly interpret the relocation as a relocation pointer offset. 
alloc.inspect_with_undef_and_ptr_outside_interpreter(offset..(offset + pointer_size)), - ).expect("const_alloc_to_llvm: could not read relocation pointer") as u64; + ) + .expect("const_alloc_to_llvm: could not read relocation pointer") + as u64; llvals.push(cx.scalar_to_backend( Pointer::new(alloc_id, Size::from_bytes(ptr_offset)).into(), - &layout::Scalar { - value: layout::Primitive::Pointer, - valid_range: 0..=!0 - }, - cx.type_i8p() + &layout::Scalar { value: layout::Primitive::Pointer, valid_range: 0..=!0 }, + cx.type_i8p(), )); next_offset = offset + pointer_size; } @@ -84,19 +82,13 @@ pub fn codegen_static_initializer( let static_ = cx.tcx.const_eval_poly(def_id)?; let alloc = match static_.val { - ty::ConstKind::Value(ConstValue::ByRef { - alloc, offset, - }) if offset.bytes() == 0 => { - alloc - }, + ty::ConstKind::Value(ConstValue::ByRef { alloc, offset }) if offset.bytes() == 0 => alloc, _ => bug!("static const eval returned {:#?}", static_), }; Ok((const_alloc_to_llvm(cx, alloc), alloc)) } -fn set_global_alignment(cx: &CodegenCx<'ll, '_>, - gv: &'ll Value, - mut align: Align) { +fn set_global_alignment(cx: &CodegenCx<'ll, '_>, gv: &'ll Value, mut align: Align) { // The target may require greater alignment for globals than the type does. // Note: GCC and Clang also allow `__attribute__((aligned))` on variables, // which can force it to be smaller. Rust doesn't support this yet. @@ -118,7 +110,7 @@ fn check_and_apply_linkage( attrs: &CodegenFnAttrs, ty: Ty<'tcx>, sym: Symbol, - span: Span + span: Span, ) -> &'ll Value { let llty = cx.layout_of(ty).llvm_type(cx); let sym = sym.as_str(); @@ -134,7 +126,9 @@ fn check_and_apply_linkage( cx.layout_of(mt.ty).llvm_type(cx) } else { cx.sess().span_fatal( - span, "must have type `*const T` or `*mut T` due to `#[linkage]` attribute") + span, + "must have type `*const T` or `*mut T` due to `#[linkage]` attribute", + ) }; unsafe { // Declare a symbol `foo` with the desired linkage. @@ -149,7 +143,7 @@ fn check_and_apply_linkage( // zero. 
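// In short, the scheme the comments above describe: the user-visible symbol keeps the
// requested linkage as a declaration, while a second, internal symbol derived from it holds
// the real storage. A name-building sketch of that derived symbol (string handling only; the
// LLVM global and linkage calls are left out):
fn linkage_wrapper_name(sym: &str) -> String {
    format!("_rust_extern_with_linkage_{}", sym)
}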
let mut real_name = "_rust_extern_with_linkage_".to_string(); real_name.push_str(&sym); - let g2 = cx.define_global(&real_name, llty).unwrap_or_else(||{ + let g2 = cx.define_global(&real_name, llty).unwrap_or_else(|| { cx.sess().span_fatal(span, &format!("symbol `{}` is already defined", &sym)) }); llvm::LLVMRustSetLinkage(g2, llvm::Linkage::InternalLinkage); @@ -164,16 +158,12 @@ fn check_and_apply_linkage( } pub fn ptrcast(val: &'ll Value, ty: &'ll Type) -> &'ll Value { - unsafe { - llvm::LLVMConstPointerCast(val, ty) - } + unsafe { llvm::LLVMConstPointerCast(val, ty) } } impl CodegenCx<'ll, 'tcx> { crate fn const_bitcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value { - unsafe { - llvm::LLVMConstBitCast(val, ty) - } + unsafe { llvm::LLVMConstBitCast(val, ty) } } crate fn static_addr_of_mut( @@ -186,13 +176,12 @@ impl CodegenCx<'ll, 'tcx> { let gv = match kind { Some(kind) if !self.tcx.sess.fewer_names() => { let name = self.generate_local_symbol_name(kind); - let gv = self.define_global(&name[..], - self.val_ty(cv)).unwrap_or_else(||{ - bug!("symbol `{}` is already defined", name); + let gv = self.define_global(&name[..], self.val_ty(cv)).unwrap_or_else(|| { + bug!("symbol `{}` is already defined", name); }); llvm::LLVMRustSetLinkage(gv, llvm::Linkage::PrivateLinkage); gv - }, + } _ => self.define_private_global(self.val_ty(cv)), }; llvm::LLVMSetInitializer(gv, cv); @@ -208,13 +197,14 @@ impl CodegenCx<'ll, 'tcx> { return g; } - let defined_in_current_codegen_unit = self.codegen_unit - .items() - .contains_key(&MonoItem::Static(def_id)); - assert!(!defined_in_current_codegen_unit, - "consts::get_static() should always hit the cache for \ + let defined_in_current_codegen_unit = + self.codegen_unit.items().contains_key(&MonoItem::Static(def_id)); + assert!( + !defined_in_current_codegen_unit, + "consts::get_static() should always hit the cache for \ statics defined in the same CGU, but did not for `{:?}`", - def_id); + def_id + ); let ty = instance.ty(self.tcx); let sym = self.tcx.symbol_name(instance).name; @@ -222,12 +212,9 @@ impl CodegenCx<'ll, 'tcx> { debug!("get_static: sym={} instance={:?}", sym, instance); let g = if let Some(id) = self.tcx.hir().as_local_hir_id(def_id) { - let llty = self.layout_of(ty).llvm_type(self); let (g, attrs) = match self.tcx.hir().get(id) { - Node::Item(&hir::Item { - attrs, span, kind: hir::ItemKind::Static(..), .. - }) => { + Node::Item(&hir::Item { attrs, span, kind: hir::ItemKind::Static(..), .. }) => { let sym_str = sym.as_str(); if let Some(g) = self.get_declared_value(&sym_str) { if self.val_ty(g) != self.type_ptr_to(llty) { @@ -247,13 +234,16 @@ impl CodegenCx<'ll, 'tcx> { } Node::ForeignItem(&hir::ForeignItem { - ref attrs, span, kind: hir::ForeignItemKind::Static(..), .. + ref attrs, + span, + kind: hir::ForeignItemKind::Static(..), + .. }) => { let fn_attrs = self.tcx.codegen_fn_attrs(def_id); (check_and_apply_linkage(&self, &fn_attrs, ty, sym, span), &**attrs) } - item => bug!("get_static: expected static, found {:?}", item) + item => bug!("get_static: expected static, found {:?}", item), }; debug!("get_static: sym={} attrs={:?}", sym, attrs); @@ -283,8 +273,7 @@ impl CodegenCx<'ll, 'tcx> { llvm::set_thread_local_mode(g, self.tls_model); } - let needs_dll_storage_attr = - self.use_dll_storage_attrs && !self.tcx.is_foreign_item(def_id) && + let needs_dll_storage_attr = self.use_dll_storage_attrs && !self.tcx.is_foreign_item(def_id) && // ThinLTO can't handle this workaround in all cases, so we don't // emit the attrs. 
Instead we make them unnecessary by disallowing // dynamic linking when linker plugin based LTO is enabled. @@ -292,9 +281,11 @@ impl CodegenCx<'ll, 'tcx> { // If this assertion triggers, there's something wrong with commandline // argument validation. - debug_assert!(!(self.tcx.sess.opts.cg.linker_plugin_lto.enabled() && - self.tcx.sess.target.target.options.is_like_msvc && - self.tcx.sess.opts.cg.prefer_dynamic)); + debug_assert!( + !(self.tcx.sess.opts.cg.linker_plugin_lto.enabled() + && self.tcx.sess.target.target.options.is_like_msvc + && self.tcx.sess.opts.cg.prefer_dynamic) + ); if needs_dll_storage_attr { // This item is external but not foreign, i.e., it originates from an external Rust @@ -329,12 +320,7 @@ impl CodegenCx<'ll, 'tcx> { } impl StaticMethods for CodegenCx<'ll, 'tcx> { - fn static_addr_of( - &self, - cv: &'ll Value, - align: Align, - kind: Option<&str>, - ) -> &'ll Value { + fn static_addr_of(&self, cv: &'ll Value, align: Align, kind: Option<&str>) -> &'ll Value { if let Some(&gv) = self.const_globals.borrow().get(&cv) { unsafe { // Upgrade the alignment in cases where the same constant is used with different @@ -354,11 +340,7 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> { gv } - fn codegen_static( - &self, - def_id: DefId, - is_mutable: bool, - ) { + fn codegen_static(&self, def_id: DefId, is_mutable: bool) { unsafe { let attrs = self.tcx.codegen_fn_attrs(def_id); @@ -395,7 +377,11 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> { let visibility = llvm::LLVMRustGetVisibility(g); let new_g = llvm::LLVMRustGetOrInsertGlobal( - self.llmod, name.as_ptr().cast(), name.len(), val_llty); + self.llmod, + name.as_ptr().cast(), + name.len(), + val_llty, + ); llvm::LLVMRustSetLinkage(new_g, linkage); llvm::LLVMRustSetVisibility(new_g, visibility); @@ -464,7 +450,8 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> { // The `inspect` method is okay here because we checked relocations, and // because we are doing this access to inspect the final interpreter state // (not as part of the interpreter execution). - alloc.inspect_with_undef_and_ptr_outside_interpreter(0..alloc.len()) + alloc + .inspect_with_undef_and_ptr_outside_interpreter(0..alloc.len()) .iter() .all(|b| *b == 0) }; @@ -477,7 +464,6 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> { } } - // Wasm statics with custom link sections get special treatment as they // go into custom sections of the wasm executable. if self.tcx.sess.opts.target_triple.triple().starts_with("wasm32") { @@ -492,8 +478,8 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> { // The `inspect` method is okay here because we checked relocations, and // because we are doing this access to inspect the final interpreter state (not // as part of the interpreter execution). 
- let bytes = alloc.inspect_with_undef_and_ptr_outside_interpreter( - 0..alloc.len()); + let bytes = + alloc.inspect_with_undef_and_ptr_outside_interpreter(0..alloc.len()); let alloc = llvm::LLVMMDStringInContext( self.llcx, bytes.as_ptr().cast(), diff --git a/src/librustc_codegen_llvm/debuginfo/metadata.rs b/src/librustc_codegen_llvm/debuginfo/metadata.rs index 8327ff257c2..e3c2dfac58b 100644 --- a/src/librustc_codegen_llvm/debuginfo/metadata.rs +++ b/src/librustc_codegen_llvm/debuginfo/metadata.rs @@ -1,56 +1,60 @@ -use self::RecursiveTypeDescription::*; -use self::MemberDescriptionFactory::*; use self::EnumDiscriminantInfo::*; +use self::MemberDescriptionFactory::*; +use self::RecursiveTypeDescription::*; -use super::utils::{debug_context, DIB, span_start, - get_namespace_for_item, create_DIArray, is_node_local_to_unit}; use super::namespace::mangled_name_of_instance; use super::type_names::compute_debuginfo_type_name; +use super::utils::{ + create_DIArray, debug_context, get_namespace_for_item, is_node_local_to_unit, span_start, DIB, +}; use super::CrateDebugContext; use crate::abi; use crate::common::CodegenCx; use crate::llvm; -use crate::llvm::debuginfo::{DIArray, DIType, DIFile, DIScope, DIDescriptor, - DICompositeType, DILexicalBlock, DIFlags, DebugEmissionKind}; +use crate::llvm::debuginfo::{ + DIArray, DICompositeType, DIDescriptor, DIFile, DIFlags, DILexicalBlock, DIScope, DIType, + DebugEmissionKind, +}; use crate::llvm_util; use crate::value::Value; -use rustc_codegen_ssa::traits::*; -use rustc_index::vec::{Idx, IndexVec}; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc::hir::CodegenFnAttrFlags; +use log::debug; use rustc::hir::def::CtorKind; -use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE}; +use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; +use rustc::hir::CodegenFnAttrFlags; use rustc::ich::NodeIdHashingMode; -use rustc::mir::{self, Field, GeneratorLayout}; use rustc::mir::interpret::truncate; -use rustc_data_structures::fingerprint::Fingerprint; +use rustc::mir::{self, Field, GeneratorLayout}; +use rustc::session::config::{self, DebugInfo}; +use rustc::ty::layout::{ + self, Align, Integer, IntegerExt, LayoutOf, PrimitiveExt, Size, TyLayout, VariantIdx, +}; +use rustc::ty::subst::{GenericArgKind, SubstsRef}; use rustc::ty::Instance; use rustc::ty::{self, AdtKind, ParamEnv, Ty, TyCtxt}; -use rustc::ty::layout::{self, Align, Integer, IntegerExt, LayoutOf, - PrimitiveExt, Size, TyLayout, VariantIdx}; -use rustc::ty::subst::{GenericArgKind, SubstsRef}; -use rustc::session::config::{self, DebugInfo}; use rustc::util::nodemap::FxHashMap; -use rustc_fs_util::path_to_c_string; -use rustc_data_structures::small_c_str::SmallCStr; +use rustc::{bug, span_bug}; +use rustc_codegen_ssa::traits::*; use rustc_data_structures::const_cstr; +use rustc_data_structures::fingerprint::Fingerprint; +use rustc_data_structures::small_c_str::SmallCStr; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_fs_util::path_to_c_string; +use rustc_index::vec::{Idx, IndexVec}; use rustc_target::abi::HasDataLayout; use syntax::ast; use syntax::symbol::{Interner, Symbol}; -use syntax_pos::{self, Span, FileName}; -use rustc::{bug, span_bug}; -use log::debug; +use syntax_pos::{self, FileName, Span}; -use libc::{c_uint, c_longlong}; +use libc::{c_longlong, c_uint}; use std::collections::hash_map::Entry; use std::ffi::CString; use std::fmt::{self, Write}; use std::hash::{Hash, Hasher}; use std::iter; -use std::ptr; use std::path::{Path, 
PathBuf}; +use std::ptr; impl PartialEq for llvm::Metadata { fn eq(&self, other: &Self) -> bool { @@ -107,17 +111,13 @@ pub struct TypeMap<'ll, 'tcx> { /// A map from types to debuginfo metadata. This is an N:1 mapping. type_to_metadata: FxHashMap<Ty<'tcx>, &'ll DIType>, /// A map from types to `UniqueTypeId`. This is an N:1 mapping. - type_to_unique_id: FxHashMap<Ty<'tcx>, UniqueTypeId> + type_to_unique_id: FxHashMap<Ty<'tcx>, UniqueTypeId>, } impl TypeMap<'ll, 'tcx> { /// Adds a Ty to metadata mapping to the TypeMap. The method will fail if /// the mapping already exists. - fn register_type_with_metadata( - &mut self, - type_: Ty<'tcx>, - metadata: &'ll DIType, - ) { + fn register_type_with_metadata(&mut self, type_: Ty<'tcx>, metadata: &'ll DIType) { if self.type_to_metadata.insert(type_, metadata).is_some() { bug!("type metadata for `Ty` '{}' is already in the `TypeMap`!", type_); } @@ -140,10 +140,7 @@ impl TypeMap<'ll, 'tcx> { /// /// This function is used to remove the temporary metadata /// mapping after we've computed the actual metadata. - fn remove_type( - &mut self, - type_: Ty<'tcx>, - ) { + fn remove_type(&mut self, type_: Ty<'tcx>) { if self.type_to_metadata.remove(type_).is_none() { bug!("type metadata `Ty` '{}' is not in the `TypeMap`!", type_); } @@ -157,8 +154,10 @@ impl TypeMap<'ll, 'tcx> { metadata: &'ll DIType, ) { if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() { - bug!("type metadata for unique ID '{}' is already in the `TypeMap`!", - self.get_unique_type_id_as_string(unique_type_id)); + bug!( + "type metadata for unique ID '{}' is already in the `TypeMap`!", + self.get_unique_type_id_as_string(unique_type_id) + ); } } @@ -180,8 +179,11 @@ impl TypeMap<'ll, 'tcx> { /// Gets the `UniqueTypeId` for the given type. If the `UniqueTypeId` for the given /// type has been requested before, this is just a table lookup. Otherwise, an /// ID will be generated and stored for later lookup. - fn get_unique_type_id_of_type<'a>(&mut self, cx: &CodegenCx<'a, 'tcx>, - type_: Ty<'tcx>) -> UniqueTypeId { + fn get_unique_type_id_of_type<'a>( + &mut self, + cx: &CodegenCx<'a, 'tcx>, + type_: Ty<'tcx>, + ) -> UniqueTypeId { // Let's see if we already have something in the cache. if let Some(unique_type_id) = self.type_to_unique_id.get(&type_).cloned() { return unique_type_id; @@ -209,15 +211,15 @@ impl TypeMap<'ll, 'tcx> { /// Gets the `UniqueTypeId` for an enum variant. Enum variants are not really /// types of their own, so they need special handling. We still need a /// `UniqueTypeId` for them, since to debuginfo they *are* real types. - fn get_unique_type_id_of_enum_variant<'a>(&mut self, - cx: &CodegenCx<'a, 'tcx>, - enum_type: Ty<'tcx>, - variant_name: &str) - -> UniqueTypeId { + fn get_unique_type_id_of_enum_variant<'a>( + &mut self, + cx: &CodegenCx<'a, 'tcx>, + enum_type: Ty<'tcx>, + variant_name: &str, + ) -> UniqueTypeId { let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type); - let enum_variant_type_id = format!("{}::{}", - self.get_unique_type_id_as_string(enum_type_id), - variant_name); + let enum_variant_type_id = + format!("{}::{}", self.get_unique_type_id_as_string(enum_type_id), variant_name); let interner_key = self.unique_id_interner.intern(&enum_variant_type_id); UniqueTypeId(interner_key) } @@ -226,8 +228,8 @@ impl TypeMap<'ll, 'tcx> { /// Variant parts are not types and shouldn't really have their own ID, /// but it makes `set_members_of_composite_type()` simpler. 
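// A compact sketch of the bookkeeping the `TypeMap` above performs: an N:1 map from types to
// debuginfo metadata in which double registration is treated as a bug. `u64` and `String`
// stand in for the compiler's `Ty` and `DIType` handles; this is an illustration, not the
// actual structure.
struct SimpleTypeMap {
    type_to_metadata: std::collections::HashMap<u64, String>,
}

impl SimpleTypeMap {
    fn register_type_with_metadata(&mut self, type_id: u64, metadata: String) {
        if self.type_to_metadata.insert(type_id, metadata).is_some() {
            panic!("type metadata for id {} is already in the map", type_id);
        }
    }
}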
fn get_unique_type_id_str_of_enum_variant_part(&mut self, enum_type_id: UniqueTypeId) -> &str { - let variant_part_type_id = format!("{}_variant_part", - self.get_unique_type_id_as_string(enum_type_id)); + let variant_part_type_id = + format!("{}_variant_part", self.get_unique_type_id_as_string(enum_type_id)); let interner_key = self.unique_id_interner.intern(&variant_part_type_id); self.unique_id_interner.get(interner_key) } @@ -246,7 +248,7 @@ enum RecursiveTypeDescription<'ll, 'tcx> { member_holding_stub: &'ll DICompositeType, member_description_factory: MemberDescriptionFactory<'ll, 'tcx>, }, - FinalMetadata(&'ll DICompositeType) + FinalMetadata(&'ll DICompositeType), } fn create_and_register_recursive_type_forward_declaration( @@ -257,7 +259,6 @@ fn create_and_register_recursive_type_forward_declaration( member_holding_stub: &'ll DICompositeType, member_description_factory: MemberDescriptionFactory<'ll, 'tcx>, ) -> RecursiveTypeDescription<'ll, 'tcx> { - // Insert the stub into the `TypeMap` in order to allow for recursive references. let mut type_map = debug_context(cx).type_map.borrow_mut(); type_map.register_unique_id_with_metadata(unique_type_id, metadata_stub); @@ -294,23 +295,27 @@ impl RecursiveTypeDescription<'ll, 'tcx> { // function. { let type_map = debug_context(cx).type_map.borrow(); - if type_map.find_metadata_for_unique_id(unique_type_id).is_none() || - type_map.find_metadata_for_type(unfinished_type).is_none() { - bug!("Forward declaration of potentially recursive type \ + if type_map.find_metadata_for_unique_id(unique_type_id).is_none() + || type_map.find_metadata_for_type(unfinished_type).is_none() + { + bug!( + "Forward declaration of potentially recursive type \ '{:?}' was not found in TypeMap!", - unfinished_type); + unfinished_type + ); } } // ... then create the member descriptions ... - let member_descriptions = - member_description_factory.create_member_descriptions(cx); + let member_descriptions = member_description_factory.create_member_descriptions(cx); // ... and attach them to the stub to complete it. - set_members_of_composite_type(cx, - unfinished_type, - member_holding_stub, - member_descriptions); + set_members_of_composite_type( + cx, + unfinished_type, + member_holding_stub, + member_descriptions, + ); return MetadataCreationResult::new(metadata_stub, true); } } @@ -320,14 +325,13 @@ impl RecursiveTypeDescription<'ll, 'tcx> { /// Returns from the enclosing function if the type metadata with the given /// unique ID can be found in the type map. macro_rules! 
return_if_metadata_created_in_meantime { - ($cx: expr, $unique_type_id: expr) => ( - if let Some(metadata) = debug_context($cx).type_map - .borrow() - .find_metadata_for_unique_id($unique_type_id) + ($cx: expr, $unique_type_id: expr) => { + if let Some(metadata) = + debug_context($cx).type_map.borrow().find_metadata_for_unique_id($unique_type_id) { return MetadataCreationResult::new(metadata, true); } - ) + }; } fn fixed_vec_metadata( @@ -345,12 +349,11 @@ fn fixed_vec_metadata( let upper_bound = match array_or_slice_type.kind { ty::Array(_, len) => len.eval_usize(cx.tcx, ty::ParamEnv::reveal_all()) as c_longlong, - _ => -1 + _ => -1, }; - let subrange = unsafe { - Some(llvm::LLVMRustDIBuilderGetOrCreateSubrange(DIB(cx), 0, upper_bound)) - }; + let subrange = + unsafe { Some(llvm::LLVMRustDIBuilderGetOrCreateSubrange(DIB(cx), 0, upper_bound)) }; let subscripts = create_DIArray(DIB(cx), &[subrange]); let metadata = unsafe { @@ -359,7 +362,8 @@ fn fixed_vec_metadata( size.bits(), align.bits() as u32, element_type_metadata, - subscripts) + subscripts, + ) }; return MetadataCreationResult::new(metadata, false); @@ -406,14 +410,16 @@ fn vec_slice_metadata( let file_metadata = unknown_file_metadata(cx); - let metadata = composite_type_metadata(cx, - slice_ptr_type, - &slice_type_name[..], - unique_type_id, - member_descriptions, - NO_SCOPE_METADATA, - file_metadata, - span); + let metadata = composite_type_metadata( + cx, + slice_ptr_type, + &slice_type_name[..], + unique_type_id, + member_descriptions, + NO_SCOPE_METADATA, + file_metadata, + span, + ); MetadataCreationResult::new(metadata, false) } @@ -423,23 +429,21 @@ fn subroutine_type_metadata( signature: ty::PolyFnSig<'tcx>, span: Span, ) -> MetadataCreationResult<'ll> { - let signature = cx.tcx.normalize_erasing_late_bound_regions( - ty::ParamEnv::reveal_all(), - &signature, - ); + let signature = + cx.tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &signature); let signature_metadata: Vec<_> = iter::once( // return type match signature.output().kind { ty::Tuple(ref tys) if tys.is_empty() => None, - _ => Some(type_metadata(cx, signature.output(), span)) - } - ).chain( + _ => Some(type_metadata(cx, signature.output(), span)), + }, + ) + .chain( // regular arguments - signature.inputs().iter().map(|argument_type| { - Some(type_metadata(cx, argument_type, span)) - }) - ).collect(); + signature.inputs().iter().map(|argument_type| Some(type_metadata(cx, argument_type, span))), + ) + .collect(); return_if_metadata_created_in_meantime!(cx, unique_type_id); @@ -448,9 +452,11 @@ fn subroutine_type_metadata( llvm::LLVMRustDIBuilderCreateSubroutineType( DIB(cx), unknown_file_metadata(cx), - create_DIArray(DIB(cx), &signature_metadata[..])) + create_DIArray(DIB(cx), &signature_metadata[..]), + ) }, - false); + false, + ); } // FIXME(1563): This is all a bit of a hack because 'trait pointer' is an ill- @@ -470,18 +476,20 @@ fn trait_pointer_metadata( // However, it does not describe the trait's methods. let containing_scope = match trait_type.kind { - ty::Dynamic(ref data, ..) => - data.principal_def_id().map(|did| get_namespace_for_item(cx, did)), + ty::Dynamic(ref data, ..) 
=> { + data.principal_def_id().map(|did| get_namespace_for_item(cx, did)) + } _ => { - bug!("debuginfo: unexpected trait-object type in \ + bug!( + "debuginfo: unexpected trait-object type in \ trait_pointer_metadata(): {:?}", - trait_type); + trait_type + ); } }; let trait_object_type = trait_object_type.unwrap_or(trait_type); - let trait_type_name = - compute_debuginfo_type_name(cx.tcx, trait_object_type, false); + let trait_type_name = compute_debuginfo_type_name(cx.tcx, trait_object_type, false); let file_metadata = unknown_file_metadata(cx); @@ -495,9 +503,11 @@ fn trait_pointer_metadata( let member_descriptions = vec![ MemberDescription { name: "pointer".to_owned(), - type_metadata: type_metadata(cx, + type_metadata: type_metadata( + cx, cx.tcx.mk_mut_ptr(cx.tcx.types.u8), - syntax_pos::DUMMY_SP), + syntax_pos::DUMMY_SP, + ), offset: layout.fields.offset(0), size: data_ptr_field.size, align: data_ptr_field.align.abi, @@ -515,21 +525,19 @@ fn trait_pointer_metadata( }, ]; - composite_type_metadata(cx, - trait_object_type, - &trait_type_name[..], - unique_type_id, - member_descriptions, - containing_scope, - file_metadata, - syntax_pos::DUMMY_SP) + composite_type_metadata( + cx, + trait_object_type, + &trait_type_name[..], + unique_type_id, + member_descriptions, + containing_scope, + file_metadata, + syntax_pos::DUMMY_SP, + ) } -pub fn type_metadata( - cx: &CodegenCx<'ll, 'tcx>, - t: Ty<'tcx>, - usage_site_span: Span, -) -> &'ll DIType { +pub fn type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>, usage_site_span: Span) -> &'ll DIType { // Get the unique type ID of this type. let unique_type_id = { let mut type_map = debug_context(cx).type_map.borrow_mut(); @@ -538,7 +546,7 @@ pub fn type_metadata( match type_map.find_metadata_for_type(t) { Some(metadata) => { return metadata; - }, + } None => { // The Ty is not in the `TypeMap` but maybe we have already seen // an equivalent type (e.g., only differing in region arguments). @@ -552,7 +560,7 @@ pub fn type_metadata( // return the cached metadata. type_map.register_type_with_metadata(t, metadata); return metadata; - }, + } None => { // There really is no type metadata for this type, so // proceed by creating it. @@ -565,82 +573,54 @@ pub fn type_metadata( debug!("type_metadata: {:?}", t); - let ptr_metadata = |ty: Ty<'tcx>| { - match ty.kind { - ty::Slice(typ) => { - Ok(vec_slice_metadata(cx, t, typ, unique_type_id, usage_site_span)) - } - ty::Str => { - Ok(vec_slice_metadata(cx, t, cx.tcx.types.u8, unique_type_id, usage_site_span)) - } - ty::Dynamic(..) => { - Ok(MetadataCreationResult::new( - trait_pointer_metadata(cx, ty, Some(t), unique_type_id), - false)) - } - _ => { - let pointee_metadata = type_metadata(cx, ty, usage_site_span); - - if let Some(metadata) = debug_context(cx).type_map - .borrow() - .find_metadata_for_unique_id(unique_type_id) - { - return Err(metadata); - } + let ptr_metadata = |ty: Ty<'tcx>| match ty.kind { + ty::Slice(typ) => Ok(vec_slice_metadata(cx, t, typ, unique_type_id, usage_site_span)), + ty::Str => Ok(vec_slice_metadata(cx, t, cx.tcx.types.u8, unique_type_id, usage_site_span)), + ty::Dynamic(..) 
=> Ok(MetadataCreationResult::new( + trait_pointer_metadata(cx, ty, Some(t), unique_type_id), + false, + )), + _ => { + let pointee_metadata = type_metadata(cx, ty, usage_site_span); - Ok(MetadataCreationResult::new(pointer_type_metadata(cx, t, pointee_metadata), - false)) + if let Some(metadata) = + debug_context(cx).type_map.borrow().find_metadata_for_unique_id(unique_type_id) + { + return Err(metadata); } + + Ok(MetadataCreationResult::new(pointer_type_metadata(cx, t, pointee_metadata), false)) } }; let MetadataCreationResult { metadata, already_stored_in_typemap } = match t.kind { - ty::Never | - ty::Bool | - ty::Char | - ty::Int(_) | - ty::Uint(_) | - ty::Float(_) => { + ty::Never | ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) => { MetadataCreationResult::new(basic_type_metadata(cx, t), false) } ty::Tuple(ref elements) if elements.is_empty() => { MetadataCreationResult::new(basic_type_metadata(cx, t), false) } - ty::Array(typ, _) | - ty::Slice(typ) => { + ty::Array(typ, _) | ty::Slice(typ) => { fixed_vec_metadata(cx, unique_type_id, t, typ, usage_site_span) } - ty::Str => { - fixed_vec_metadata(cx, unique_type_id, t, cx.tcx.types.i8, usage_site_span) - } + ty::Str => fixed_vec_metadata(cx, unique_type_id, t, cx.tcx.types.i8, usage_site_span), ty::Dynamic(..) => { - MetadataCreationResult::new( - trait_pointer_metadata(cx, t, None, unique_type_id), - false) + MetadataCreationResult::new(trait_pointer_metadata(cx, t, None, unique_type_id), false) } ty::Foreign(..) => { - MetadataCreationResult::new( - foreign_type_metadata(cx, t, unique_type_id), - false) - } - ty::RawPtr(ty::TypeAndMut{ty, ..}) | - ty::Ref(_, ty, _) => { - match ptr_metadata(ty) { - Ok(res) => res, - Err(metadata) => return metadata, - } - } - ty::Adt(def, _) if def.is_box() => { - match ptr_metadata(t.boxed_ty()) { - Ok(res) => res, - Err(metadata) => return metadata, - } + MetadataCreationResult::new(foreign_type_metadata(cx, t, unique_type_id), false) } + ty::RawPtr(ty::TypeAndMut { ty, .. }) | ty::Ref(_, ty, _) => match ptr_metadata(ty) { + Ok(res) => res, + Err(metadata) => return metadata, + }, + ty::Adt(def, _) if def.is_box() => match ptr_metadata(t.boxed_ty()) { + Ok(res) => res, + Err(metadata) => return metadata, + }, ty::FnDef(..) | ty::FnPtr(_) => { - - if let Some(metadata) = debug_context(cx).type_map - .borrow() - .find_metadata_for_unique_id(unique_type_id) + if let Some(metadata) = + debug_context(cx).type_map.borrow().find_metadata_for_unique_id(unique_type_id) { return metadata; } @@ -665,81 +645,63 @@ pub fn type_metadata( SmallCStr::new("<recur_type>").as_ptr(), size.bits(), align.bits() as u32, - DW_ATE_unsigned) + DW_ATE_unsigned, + ) } }; let type_map = &debug_context(cx).type_map; type_map.borrow_mut().register_type_with_metadata(t, temp_type); - let fn_metadata = subroutine_type_metadata(cx, - unique_type_id, - t.fn_sig(cx.tcx), - usage_site_span).metadata; + let fn_metadata = + subroutine_type_metadata(cx, unique_type_id, t.fn_sig(cx.tcx), usage_site_span) + .metadata; type_map.borrow_mut().remove_type(t); - // This is actually a function pointer, so wrap it in pointer DI. 
MetadataCreationResult::new(pointer_type_metadata(cx, t, fn_metadata), false) - } ty::Closure(def_id, substs) => { - let upvar_tys : Vec<_> = substs.as_closure().upvar_tys(def_id, cx.tcx).collect(); + let upvar_tys: Vec<_> = substs.as_closure().upvar_tys(def_id, cx.tcx).collect(); let containing_scope = get_namespace_for_item(cx, def_id); - prepare_tuple_metadata(cx, - t, - &upvar_tys, - unique_type_id, - usage_site_span, - Some(containing_scope)).finalize(cx) + prepare_tuple_metadata( + cx, + t, + &upvar_tys, + unique_type_id, + usage_site_span, + Some(containing_scope), + ) + .finalize(cx) } - ty::Generator(def_id, substs, _) => { - let upvar_tys : Vec<_> = substs - .as_generator().prefix_tys(def_id, cx.tcx).map(|t| { - cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), t) - }).collect(); - prepare_enum_metadata(cx, - t, - def_id, - unique_type_id, - usage_site_span, - upvar_tys).finalize(cx) + ty::Generator(def_id, substs, _) => { + let upvar_tys: Vec<_> = substs + .as_generator() + .prefix_tys(def_id, cx.tcx) + .map(|t| cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), t)) + .collect(); + prepare_enum_metadata(cx, t, def_id, unique_type_id, usage_site_span, upvar_tys) + .finalize(cx) } ty::Adt(def, ..) => match def.adt_kind() { AdtKind::Struct => { - prepare_struct_metadata(cx, - t, - unique_type_id, - usage_site_span).finalize(cx) + prepare_struct_metadata(cx, t, unique_type_id, usage_site_span).finalize(cx) } AdtKind::Union => { - prepare_union_metadata(cx, - t, - unique_type_id, - usage_site_span).finalize(cx) + prepare_union_metadata(cx, t, unique_type_id, usage_site_span).finalize(cx) } AdtKind::Enum => { - prepare_enum_metadata(cx, - t, - def.did, - unique_type_id, - usage_site_span, - vec![]).finalize(cx) + prepare_enum_metadata(cx, t, def.did, unique_type_id, usage_site_span, vec![]) + .finalize(cx) } }, ty::Tuple(ref elements) => { let tys: Vec<_> = elements.iter().map(|k| k.expect_ty()).collect(); - prepare_tuple_metadata(cx, - t, - &tys, - unique_type_id, - usage_site_span, - NO_SCOPE_METADATA).finalize(cx) - } - _ => { - bug!("debuginfo: unexpected type in type_metadata: {:?}", t) + prepare_tuple_metadata(cx, t, &tys, unique_type_id, usage_site_span, NO_SCOPE_METADATA) + .finalize(cx) } + _ => bug!("debuginfo: unexpected type in type_metadata: {:?}", t), }; { @@ -750,26 +712,30 @@ pub fn type_metadata( let metadata_for_uid = match type_map.find_metadata_for_unique_id(unique_type_id) { Some(metadata) => metadata, None => { - span_bug!(usage_site_span, - "expected type metadata for unique \ + span_bug!( + usage_site_span, + "expected type metadata for unique \ type ID '{}' to already be in \ the `debuginfo::TypeMap` but it \ was not. (Ty = {})", - type_map.get_unique_type_id_as_string(unique_type_id), - t); + type_map.get_unique_type_id_as_string(unique_type_id), + t + ); } }; match type_map.find_metadata_for_type(t) { Some(metadata) => { if metadata != metadata_for_uid { - span_bug!(usage_site_span, - "mismatch between `Ty` and \ + span_bug!( + usage_site_span, + "mismatch between `Ty` and \ `UniqueTypeId` maps in \ `debuginfo::TypeMap`. 
\ UniqueTypeId={}, Ty={}", - type_map.get_unique_type_id_as_string(unique_type_id), - t); + type_map.get_unique_type_id_as_string(unique_type_id), + t + ); } } None => { @@ -785,12 +751,12 @@ pub fn type_metadata( metadata } -pub fn file_metadata(cx: &CodegenCx<'ll, '_>, - file_name: &FileName, - defining_crate: CrateNum) -> &'ll DIFile { - debug!("file_metadata: file_name: {}, defining_crate: {}", - file_name, - defining_crate); +pub fn file_metadata( + cx: &CodegenCx<'ll, '_>, + file_name: &FileName, + defining_crate: CrateNum, +) -> &'ll DIFile { + debug!("file_metadata: file_name: {}, defining_crate: {}", file_name, defining_crate); let file_name = Some(file_name.to_string()); let directory = if defining_crate == LOCAL_CRATE { @@ -807,10 +773,11 @@ pub fn unknown_file_metadata(cx: &CodegenCx<'ll, '_>) -> &'ll DIFile { file_metadata_raw(cx, None, None) } -fn file_metadata_raw(cx: &CodegenCx<'ll, '_>, - file_name: Option<String>, - directory: Option<String>) - -> &'ll DIFile { +fn file_metadata_raw( + cx: &CodegenCx<'ll, '_>, + file_name: Option<String>, + directory: Option<String>, +) -> &'ll DIFile { let key = (file_name, directory); match debug_context(cx).created_files.borrow_mut().entry(key) { @@ -819,15 +786,16 @@ fn file_metadata_raw(cx: &CodegenCx<'ll, '_>, let (file_name, directory) = v.key(); debug!("file_metadata: file_name: {:?}, directory: {:?}", file_name, directory); - let file_name = SmallCStr::new( - if let Some(file_name) = file_name { &file_name } else { "<unknown>" }); - let directory = SmallCStr::new( - if let Some(directory) = directory { &directory } else { "" }); + let file_name = SmallCStr::new(if let Some(file_name) = file_name { + &file_name + } else { + "<unknown>" + }); + let directory = + SmallCStr::new(if let Some(directory) = directory { &directory } else { "" }); let file_metadata = unsafe { - llvm::LLVMRustDIBuilderCreateFile(DIB(cx), - file_name.as_ptr(), - directory.as_ptr()) + llvm::LLVMRustDIBuilderCreateFile(DIB(cx), file_name.as_ptr(), directory.as_ptr()) }; v.insert(file_metadata); @@ -841,20 +809,13 @@ fn basic_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType { let (name, encoding) = match t.kind { ty::Never => ("!", DW_ATE_unsigned), - ty::Tuple(ref elements) if elements.is_empty() => - ("()", DW_ATE_unsigned), + ty::Tuple(ref elements) if elements.is_empty() => ("()", DW_ATE_unsigned), ty::Bool => ("bool", DW_ATE_boolean), ty::Char => ("char", DW_ATE_unsigned_char), - ty::Int(int_ty) => { - (int_ty.name_str(), DW_ATE_signed) - }, - ty::Uint(uint_ty) => { - (uint_ty.name_str(), DW_ATE_unsigned) - }, - ty::Float(float_ty) => { - (float_ty.name_str(), DW_ATE_float) - }, - _ => bug!("debuginfo::basic_type_metadata - `t` is invalid type") + ty::Int(int_ty) => (int_ty.name_str(), DW_ATE_signed), + ty::Uint(uint_ty) => (uint_ty.name_str(), DW_ATE_unsigned), + ty::Float(float_ty) => (float_ty.name_str(), DW_ATE_float), + _ => bug!("debuginfo::basic_type_metadata - `t` is invalid type"), }; let (size, align) = cx.size_and_align_of(t); @@ -865,7 +826,8 @@ fn basic_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType { name.as_ptr(), size.bits(), align.bits() as u32, - encoding) + encoding, + ) }; return ty_metadata; @@ -896,7 +858,8 @@ fn pointer_type_metadata( pointee_type_metadata, pointer_size.bits(), pointer_align.bits() as u32, - name.as_ptr()) + name.as_ptr(), + ) } } @@ -920,10 +883,8 @@ pub fn compile_unit_metadata( } debug!("compile_unit_metadata: {:?}", name_in_debuginfo); - let rustc_producer = format!( - 
"rustc version {}", - option_env!("CFG_VERSION").expect("CFG_VERSION"), - ); + let rustc_producer = + format!("rustc version {}", option_env!("CFG_VERSION").expect("CFG_VERSION"),); // FIXME(#41252) Remove "clang LLVM" if we can get GDB and LLVM to play nice. let producer = format!("clang LLVM ({})", rustc_producer); @@ -954,7 +915,10 @@ pub fn compile_unit_metadata( unsafe { let file_metadata = llvm::LLVMRustDIBuilderCreateFile( - debug_context.builder, name_in_debuginfo.as_ptr(), work_dir.as_ptr()); + debug_context.builder, + name_in_debuginfo.as_ptr(), + work_dir.as_ptr(), + ); let unit_metadata = llvm::LLVMRustDIBuilderCreateCompileUnit( debug_context.builder, @@ -965,27 +929,36 @@ pub fn compile_unit_metadata( flags.as_ptr().cast(), 0, split_name.as_ptr().cast(), - kind); + kind, + ); if tcx.sess.opts.debugging_opts.profile { - let cu_desc_metadata = llvm::LLVMRustMetadataAsValue(debug_context.llcontext, - unit_metadata); + let cu_desc_metadata = + llvm::LLVMRustMetadataAsValue(debug_context.llcontext, unit_metadata); let gcov_cu_info = [ - path_to_mdstring(debug_context.llcontext, - &tcx.output_filenames(LOCAL_CRATE).with_extension("gcno")), - path_to_mdstring(debug_context.llcontext, - &tcx.output_filenames(LOCAL_CRATE).with_extension("gcda")), + path_to_mdstring( + debug_context.llcontext, + &tcx.output_filenames(LOCAL_CRATE).with_extension("gcno"), + ), + path_to_mdstring( + debug_context.llcontext, + &tcx.output_filenames(LOCAL_CRATE).with_extension("gcda"), + ), cu_desc_metadata, ]; - let gcov_metadata = llvm::LLVMMDNodeInContext(debug_context.llcontext, - gcov_cu_info.as_ptr(), - gcov_cu_info.len() as c_uint); + let gcov_metadata = llvm::LLVMMDNodeInContext( + debug_context.llcontext, + gcov_cu_info.as_ptr(), + gcov_cu_info.len() as c_uint, + ); let llvm_gcov_ident = const_cstr!("llvm.gcov"); - llvm::LLVMAddNamedMetadataOperand(debug_context.llmod, - llvm_gcov_ident.as_ptr(), - gcov_metadata); + llvm::LLVMAddNamedMetadataOperand( + debug_context.llmod, + llvm_gcov_ident.as_ptr(), + gcov_metadata, + ); } // Insert `llvm.ident` metadata on the wasm32 targets since that will @@ -1009,24 +982,23 @@ pub fn compile_unit_metadata( fn path_to_mdstring(llcx: &'ll llvm::Context, path: &Path) -> &'ll Value { let path_str = path_to_c_string(path); unsafe { - llvm::LLVMMDStringInContext(llcx, - path_str.as_ptr(), - path_str.as_bytes().len() as c_uint) + llvm::LLVMMDStringInContext( + llcx, + path_str.as_ptr(), + path_str.as_bytes().len() as c_uint, + ) } } } struct MetadataCreationResult<'ll> { metadata: &'ll DIType, - already_stored_in_typemap: bool + already_stored_in_typemap: bool, } impl MetadataCreationResult<'ll> { fn new(metadata: &'ll DIType, already_stored_in_typemap: bool) -> Self { - MetadataCreationResult { - metadata, - already_stored_in_typemap, - } + MetadataCreationResult { metadata, already_stored_in_typemap } } } @@ -1044,9 +1016,11 @@ struct MemberDescription<'ll> { } impl<'ll> MemberDescription<'ll> { - fn into_metadata(self, - cx: &CodegenCx<'ll, '_>, - composite_type_metadata: &'ll DIScope) -> &'ll DIType { + fn into_metadata( + self, + cx: &CodegenCx<'ll, '_>, + composite_type_metadata: &'ll DIScope, + ) -> &'ll DIType { let member_name = CString::new(self.name).unwrap(); unsafe { llvm::LLVMRustDIBuilderCreateVariantMemberType( @@ -1063,7 +1037,8 @@ impl<'ll> MemberDescription<'ll> { Some(value) => Some(cx.const_u64(value)), }, self.flags, - self.type_metadata) + self.type_metadata, + ) } } } @@ -1077,28 +1052,17 @@ enum MemberDescriptionFactory<'ll, 'tcx> { 
TupleMDF(TupleMemberDescriptionFactory<'tcx>), EnumMDF(EnumMemberDescriptionFactory<'ll, 'tcx>), UnionMDF(UnionMemberDescriptionFactory<'tcx>), - VariantMDF(VariantMemberDescriptionFactory<'ll, 'tcx>) + VariantMDF(VariantMemberDescriptionFactory<'ll, 'tcx>), } impl MemberDescriptionFactory<'ll, 'tcx> { - fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) - -> Vec<MemberDescription<'ll>> { + fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> { match *self { - StructMDF(ref this) => { - this.create_member_descriptions(cx) - } - TupleMDF(ref this) => { - this.create_member_descriptions(cx) - } - EnumMDF(ref this) => { - this.create_member_descriptions(cx) - } - UnionMDF(ref this) => { - this.create_member_descriptions(cx) - } - VariantMDF(ref this) => { - this.create_member_descriptions(cx) - } + StructMDF(ref this) => this.create_member_descriptions(cx), + TupleMDF(ref this) => this.create_member_descriptions(cx), + EnumMDF(ref this) => this.create_member_descriptions(cx), + UnionMDF(ref this) => this.create_member_descriptions(cx), + VariantMDF(ref this) => this.create_member_descriptions(cx), } } } @@ -1115,30 +1079,33 @@ struct StructMemberDescriptionFactory<'tcx> { } impl<'tcx> StructMemberDescriptionFactory<'tcx> { - fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) - -> Vec<MemberDescription<'ll>> { + fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> { let layout = cx.layout_of(self.ty); - self.variant.fields.iter().enumerate().map(|(i, f)| { - let name = if self.variant.ctor_kind == CtorKind::Fn { - format!("__{}", i) - } else { - f.ident.to_string() - }; - let field = layout.field(cx, i); - MemberDescription { - name, - type_metadata: type_metadata(cx, field.ty, self.span), - offset: layout.fields.offset(i), - size: field.size, - align: field.align.abi, - flags: DIFlags::FlagZero, - discriminant: None, - } - }).collect() + self.variant + .fields + .iter() + .enumerate() + .map(|(i, f)| { + let name = if self.variant.ctor_kind == CtorKind::Fn { + format!("__{}", i) + } else { + f.ident.to_string() + }; + let field = layout.field(cx, i); + MemberDescription { + name, + type_metadata: type_metadata(cx, field.ty, self.span), + offset: layout.fields.offset(i), + size: field.size, + align: field.align.abi, + flags: DIFlags::FlagZero, + discriminant: None, + } + }) + .collect() } } - fn prepare_struct_metadata( cx: &CodegenCx<'ll, 'tcx>, struct_type: Ty<'tcx>, @@ -1149,16 +1116,13 @@ fn prepare_struct_metadata( let (struct_def_id, variant) = match struct_type.kind { ty::Adt(def, _) => (def.did, def.non_enum_variant()), - _ => bug!("prepare_struct_metadata on a non-ADT") + _ => bug!("prepare_struct_metadata on a non-ADT"), }; let containing_scope = get_namespace_for_item(cx, struct_def_id); - let struct_metadata_stub = create_struct_stub(cx, - struct_type, - &struct_name, - unique_type_id, - Some(containing_scope)); + let struct_metadata_stub = + create_struct_stub(cx, struct_type, &struct_name, unique_type_id, Some(containing_scope)); create_and_register_recursive_type_forward_declaration( cx, @@ -1166,11 +1130,7 @@ fn prepare_struct_metadata( unique_type_id, struct_metadata_stub, struct_metadata_stub, - StructMDF(StructMemberDescriptionFactory { - ty: struct_type, - variant, - span, - }) + StructMDF(StructMemberDescriptionFactory { ty: struct_type, variant, span }), ) } @@ -1186,21 +1146,24 @@ struct TupleMemberDescriptionFactory<'tcx> { } impl<'tcx> 
TupleMemberDescriptionFactory<'tcx> { - fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) - -> Vec<MemberDescription<'ll>> { + fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> { let layout = cx.layout_of(self.ty); - self.component_types.iter().enumerate().map(|(i, &component_type)| { - let (size, align) = cx.size_and_align_of(component_type); - MemberDescription { - name: format!("__{}", i), - type_metadata: type_metadata(cx, component_type, self.span), - offset: layout.fields.offset(i), - size, - align, - flags: DIFlags::FlagZero, - discriminant: None, - } - }).collect() + self.component_types + .iter() + .enumerate() + .map(|(i, &component_type)| { + let (size, align) = cx.size_and_align_of(component_type); + MemberDescription { + name: format!("__{}", i), + type_metadata: type_metadata(cx, component_type, self.span), + offset: layout.fields.offset(i), + size, + align, + flags: DIFlags::FlagZero, + discriminant: None, + } + }) + .collect() } } @@ -1214,11 +1177,8 @@ fn prepare_tuple_metadata( ) -> RecursiveTypeDescription<'ll, 'tcx> { let tuple_name = compute_debuginfo_type_name(cx.tcx, tuple_type, false); - let struct_stub = create_struct_stub(cx, - tuple_type, - &tuple_name[..], - unique_type_id, - containing_scope); + let struct_stub = + create_struct_stub(cx, tuple_type, &tuple_name[..], unique_type_id, containing_scope); create_and_register_recursive_type_forward_declaration( cx, @@ -1230,7 +1190,7 @@ fn prepare_tuple_metadata( ty: tuple_type, component_types: component_types.to_vec(), span, - }) + }), ) } @@ -1245,20 +1205,24 @@ struct UnionMemberDescriptionFactory<'tcx> { } impl<'tcx> UnionMemberDescriptionFactory<'tcx> { - fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) - -> Vec<MemberDescription<'ll>> { - self.variant.fields.iter().enumerate().map(|(i, f)| { - let field = self.layout.field(cx, i); - MemberDescription { - name: f.ident.to_string(), - type_metadata: type_metadata(cx, field.ty, self.span), - offset: Size::ZERO, - size: field.size, - align: field.align.abi, - flags: DIFlags::FlagZero, - discriminant: None, - } - }).collect() + fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> { + self.variant + .fields + .iter() + .enumerate() + .map(|(i, f)| { + let field = self.layout.field(cx, i); + MemberDescription { + name: f.ident.to_string(), + type_metadata: type_metadata(cx, field.ty, self.span), + offset: Size::ZERO, + size: field.size, + align: field.align.abi, + flags: DIFlags::FlagZero, + discriminant: None, + } + }) + .collect() } } @@ -1272,16 +1236,13 @@ fn prepare_union_metadata( let (union_def_id, variant) = match union_type.kind { ty::Adt(def, _) => (def.did, def.non_enum_variant()), - _ => bug!("prepare_union_metadata on a non-ADT") + _ => bug!("prepare_union_metadata on a non-ADT"), }; let containing_scope = get_namespace_for_item(cx, union_def_id); - let union_metadata_stub = create_union_stub(cx, - union_type, - &union_name, - unique_type_id, - containing_scope); + let union_metadata_stub = + create_union_stub(cx, union_type, &union_name, unique_type_id, containing_scope); create_and_register_recursive_type_forward_declaration( cx, @@ -1289,11 +1250,7 @@ fn prepare_union_metadata( unique_type_id, union_metadata_stub, union_metadata_stub, - UnionMDF(UnionMemberDescriptionFactory { - layout: cx.layout_of(union_type), - variant, - span, - }) + UnionMDF(UnionMemberDescriptionFactory { layout: cx.layout_of(union_type), variant, span }), ) } @@ 
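UnionMemberDescriptionFactory above records every member at offset Size::ZERO because union fields all share the same storage. A quick illustration in plain Rust; the Storage union is a made-up example:

use std::mem::size_of;

// All fields of a repr(C) union start at offset 0 and overlap.
#[repr(C)]
union Storage {
    byte: u8,
    word: u64,
}

fn main() {
    assert_eq!(size_of::<Storage>(), size_of::<u64>());
    let s = Storage { word: 0x0102_0304_0506_0708 };
    // Reading another field reinterprets the same bytes (hence the unsafe).
    let first = unsafe { s.byte };
    assert!(first == 0x08 || first == 0x01); // little- vs big-endian
}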
-1327,8 +1284,7 @@ fn generator_layout_and_saved_local_names( ) -> (&'tcx GeneratorLayout<'tcx>, IndexVec<mir::GeneratorSavedLocal, Option<ast::Name>>) { let body = tcx.optimized_mir(def_id); let generator_layout = body.generator_layout.as_ref().unwrap(); - let mut generator_saved_local_names = - IndexVec::from_elem(None, &generator_layout.field_tys); + let mut generator_saved_local_names = IndexVec::from_elem(None, &generator_layout.field_tys); let state_arg = mir::PlaceBase::Local(mir::Local::new(1)); for var in &body.var_debug_info { @@ -1372,8 +1328,7 @@ struct EnumMemberDescriptionFactory<'ll, 'tcx> { } impl EnumMemberDescriptionFactory<'ll, 'tcx> { - fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) - -> Vec<MemberDescription<'ll>> { + fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> { let generator_variant_info_data = match self.enum_type.kind { ty::Generator(def_id, ..) => { Some(generator_layout_and_saved_local_names(cx.tcx, def_id)) @@ -1381,21 +1336,19 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { _ => None, }; - let variant_info_for = |index: VariantIdx| { - match self.enum_type.kind { - ty::Adt(adt, _) => VariantInfo::Adt(&adt.variants[index]), - ty::Generator(_, substs, _) => { - let (generator_layout, generator_saved_local_names) = - generator_variant_info_data.as_ref().unwrap(); - VariantInfo::Generator { - substs, - generator_layout: *generator_layout, - generator_saved_local_names, - variant_index: index, - } + let variant_info_for = |index: VariantIdx| match self.enum_type.kind { + ty::Adt(adt, _) => VariantInfo::Adt(&adt.variants[index]), + ty::Generator(_, substs, _) => { + let (generator_layout, generator_saved_local_names) = + generator_variant_info_data.as_ref().unwrap(); + VariantInfo::Generator { + substs, + generator_layout: *generator_layout, + generator_saved_local_names, + variant_index: index, } - _ => bug!(), } + _ => bug!(), }; // This will always find the metadata in the type map. 
@@ -1415,36 +1368,32 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { } let variant_info = variant_info_for(index); - let (variant_type_metadata, member_description_factory) = - describe_enum_variant(cx, - self.layout, - variant_info, - NoDiscriminant, - self_metadata, - self.span); - - let member_descriptions = - member_description_factory.create_member_descriptions(cx); - - set_members_of_composite_type(cx, - self.enum_type, - variant_type_metadata, - member_descriptions); - vec![ - MemberDescription { - name: if fallback { - String::new() - } else { - variant_info.variant_name() - }, - type_metadata: variant_type_metadata, - offset: Size::ZERO, - size: self.layout.size, - align: self.layout.align.abi, - flags: DIFlags::FlagZero, - discriminant: None, - } - ] + let (variant_type_metadata, member_description_factory) = describe_enum_variant( + cx, + self.layout, + variant_info, + NoDiscriminant, + self_metadata, + self.span, + ); + + let member_descriptions = member_description_factory.create_member_descriptions(cx); + + set_members_of_composite_type( + cx, + self.enum_type, + variant_type_metadata, + member_descriptions, + ); + vec![MemberDescription { + name: if fallback { String::new() } else { variant_info.variant_name() }, + type_metadata: variant_type_metadata, + offset: Size::ZERO, + size: self.layout.size, + align: self.layout.align.abi, + flags: DIFlags::FlagZero, + discriminant: None, + }] } layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Tag, @@ -1455,54 +1404,58 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { let discriminant_info = if fallback { RegularDiscriminant { discr_field: Field::from(discr_index), - discr_type_metadata: self.discriminant_type_metadata.unwrap() + discr_type_metadata: self.discriminant_type_metadata.unwrap(), } } else { // This doesn't matter in this case. 
NoDiscriminant }; - variants.iter_enumerated().map(|(i, _)| { - let variant = self.layout.for_variant(cx, i); - let variant_info = variant_info_for(i); - let (variant_type_metadata, member_desc_factory) = - describe_enum_variant(cx, - variant, - variant_info, - discriminant_info, - self_metadata, - self.span); - - let member_descriptions = member_desc_factory - .create_member_descriptions(cx); - - set_members_of_composite_type(cx, - self.enum_type, - variant_type_metadata, - member_descriptions); - - MemberDescription { - name: if fallback { - String::new() - } else { - variant_info.variant_name() - }, - type_metadata: variant_type_metadata, - offset: Size::ZERO, - size: self.layout.size, - align: self.layout.align.abi, - flags: DIFlags::FlagZero, - discriminant: Some( - self.layout.ty.discriminant_for_variant(cx.tcx, i).unwrap().val as u64 - ), - } - }).collect() + variants + .iter_enumerated() + .map(|(i, _)| { + let variant = self.layout.for_variant(cx, i); + let variant_info = variant_info_for(i); + let (variant_type_metadata, member_desc_factory) = describe_enum_variant( + cx, + variant, + variant_info, + discriminant_info, + self_metadata, + self.span, + ); + + let member_descriptions = + member_desc_factory.create_member_descriptions(cx); + + set_members_of_composite_type( + cx, + self.enum_type, + variant_type_metadata, + member_descriptions, + ); + + MemberDescription { + name: if fallback { + String::new() + } else { + variant_info.variant_name() + }, + type_metadata: variant_type_metadata, + offset: Size::ZERO, + size: self.layout.size, + align: self.layout.align.abi, + flags: DIFlags::FlagZero, + discriminant: Some( + self.layout.ty.discriminant_for_variant(cx.tcx, i).unwrap().val + as u64, + ), + } + }) + .collect() } layout::Variants::Multiple { - discr_kind: layout::DiscriminantKind::Niche { - ref niche_variants, - niche_start, - dataful_variant, - }, + discr_kind: + layout::DiscriminantKind::Niche { ref niche_variants, niche_start, dataful_variant }, ref discr, ref variants, discr_index, @@ -1510,32 +1463,37 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { if fallback { let variant = self.layout.for_variant(cx, dataful_variant); // Create a description of the non-null variant. - let (variant_type_metadata, member_description_factory) = - describe_enum_variant(cx, - variant, - variant_info_for(dataful_variant), - OptimizedDiscriminant, - self.containing_scope, - self.span); + let (variant_type_metadata, member_description_factory) = describe_enum_variant( + cx, + variant, + variant_info_for(dataful_variant), + OptimizedDiscriminant, + self.containing_scope, + self.span, + ); let variant_member_descriptions = member_description_factory.create_member_descriptions(cx); - set_members_of_composite_type(cx, - self.enum_type, - variant_type_metadata, - variant_member_descriptions); + set_members_of_composite_type( + cx, + self.enum_type, + variant_type_metadata, + variant_member_descriptions, + ); // Encode the information about the null variant in the union // member's name. let mut name = String::from("RUST$ENCODED$ENUM$"); // Right now it's not even going to work for `niche_start > 0`, // and for multiple niche variants it only supports the first. 
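In the Tag arm above, each member description carries the variant's discriminant from discriminant_for_variant; in surface Rust those are the values an `as` cast exposes. A tiny example with a made-up enum:

enum Opcode {
    Nop = 0,
    Load = 10,
    Store = 11,
}

fn main() {
    assert_eq!(Opcode::Nop as u64, 0);
    assert_eq!(Opcode::Load as u64, 10);
    assert_eq!(Opcode::Store as u64, 11);
}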
- fn compute_field_path<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, - name: &mut String, - layout: TyLayout<'tcx>, - offset: Size, - size: Size) { + fn compute_field_path<'a, 'tcx>( + cx: &CodegenCx<'a, 'tcx>, + name: &mut String, + layout: TyLayout<'tcx>, + offset: Size, + size: Size, + ) { for i in 0..layout.fields.count() { let field_offset = layout.fields.offset(i); if field_offset > offset { @@ -1549,70 +1507,78 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { } } } - compute_field_path(cx, &mut name, - self.layout, - self.layout.fields.offset(discr_index), - self.layout.field(cx, discr_index).size); + compute_field_path( + cx, + &mut name, + self.layout, + self.layout.fields.offset(discr_index), + self.layout.field(cx, discr_index).size, + ); variant_info_for(*niche_variants.start()).map_struct_name(|variant_name| { name.push_str(variant_name); }); // Create the (singleton) list of descriptions of union members. - vec![ - MemberDescription { - name, - type_metadata: variant_type_metadata, - offset: Size::ZERO, - size: variant.size, - align: variant.align.abi, - flags: DIFlags::FlagZero, - discriminant: None, - } - ] + vec![MemberDescription { + name, + type_metadata: variant_type_metadata, + offset: Size::ZERO, + size: variant.size, + align: variant.align.abi, + flags: DIFlags::FlagZero, + discriminant: None, + }] } else { - variants.iter_enumerated().map(|(i, _)| { - let variant = self.layout.for_variant(cx, i); - let variant_info = variant_info_for(i); - let (variant_type_metadata, member_desc_factory) = - describe_enum_variant(cx, - variant, - variant_info, - OptimizedDiscriminant, - self_metadata, - self.span); - - let member_descriptions = member_desc_factory - .create_member_descriptions(cx); - - set_members_of_composite_type(cx, - self.enum_type, - variant_type_metadata, - member_descriptions); - - let niche_value = if i == dataful_variant { - None - } else { - let value = (i.as_u32() as u128) - .wrapping_sub(niche_variants.start().as_u32() as u128) - .wrapping_add(niche_start); - let value = truncate(value, discr.value.size(cx)); - // NOTE(eddyb) do *NOT* remove this assert, until - // we pass the full 128-bit value to LLVM, otherwise - // truncation will be silent and remain undetected. - assert_eq!(value as u64 as u128, value); - Some(value as u64) - }; - - MemberDescription { - name: variant_info.variant_name(), - type_metadata: variant_type_metadata, - offset: Size::ZERO, - size: self.layout.size, - align: self.layout.align.abi, - flags: DIFlags::FlagZero, - discriminant: niche_value, - } - }).collect() + variants + .iter_enumerated() + .map(|(i, _)| { + let variant = self.layout.for_variant(cx, i); + let variant_info = variant_info_for(i); + let (variant_type_metadata, member_desc_factory) = + describe_enum_variant( + cx, + variant, + variant_info, + OptimizedDiscriminant, + self_metadata, + self.span, + ); + + let member_descriptions = + member_desc_factory.create_member_descriptions(cx); + + set_members_of_composite_type( + cx, + self.enum_type, + variant_type_metadata, + member_descriptions, + ); + + let niche_value = if i == dataful_variant { + None + } else { + let value = (i.as_u32() as u128) + .wrapping_sub(niche_variants.start().as_u32() as u128) + .wrapping_add(niche_start); + let value = truncate(value, discr.value.size(cx)); + // NOTE(eddyb) do *NOT* remove this assert, until + // we pass the full 128-bit value to LLVM, otherwise + // truncation will be silent and remain undetected. 
+ assert_eq!(value as u64 as u128, value); + Some(value as u64) + }; + + MemberDescription { + name: variant_info.variant_name(), + type_metadata: variant_type_metadata, + offset: Size::ZERO, + size: self.layout.size, + align: self.layout.align.abi, + flags: DIFlags::FlagZero, + discriminant: niche_value, + } + }) + .collect() } } } @@ -1629,37 +1595,40 @@ struct VariantMemberDescriptionFactory<'ll, 'tcx> { } impl VariantMemberDescriptionFactory<'ll, 'tcx> { - fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) - -> Vec<MemberDescription<'ll>> { - self.args.iter().enumerate().map(|(i, &(ref name, ty))| { - let (size, align) = cx.size_and_align_of(ty); - MemberDescription { - name: name.to_string(), - type_metadata: if use_enum_fallback(cx) { - match self.discriminant_type_metadata { - // Discriminant is always the first field of our variant - // when using the enum fallback. - Some(metadata) if i == 0 => metadata, - _ => type_metadata(cx, ty, self.span) - } - } else { - type_metadata(cx, ty, self.span) - }, - offset: self.offsets[i], - size, - align, - flags: DIFlags::FlagZero, - discriminant: None, - } - }).collect() + fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> { + self.args + .iter() + .enumerate() + .map(|(i, &(ref name, ty))| { + let (size, align) = cx.size_and_align_of(ty); + MemberDescription { + name: name.to_string(), + type_metadata: if use_enum_fallback(cx) { + match self.discriminant_type_metadata { + // Discriminant is always the first field of our variant + // when using the enum fallback. + Some(metadata) if i == 0 => metadata, + _ => type_metadata(cx, ty, self.span), + } + } else { + type_metadata(cx, ty, self.span) + }, + offset: self.offsets[i], + size, + align, + flags: DIFlags::FlagZero, + discriminant: None, + } + }) + .collect() } } #[derive(Copy, Clone)] enum EnumDiscriminantInfo<'ll> { - RegularDiscriminant{ discr_field: Field, discr_type_metadata: &'ll DIType }, + RegularDiscriminant { discr_field: Field, discr_type_metadata: &'ll DIType }, OptimizedDiscriminant, - NoDiscriminant + NoDiscriminant, } #[derive(Copy, Clone)] @@ -1677,8 +1646,9 @@ impl<'tcx> VariantInfo<'_, 'tcx> { fn map_struct_name<R>(&self, f: impl FnOnce(&str) -> R) -> R { match self { VariantInfo::Adt(variant) => f(&variant.ident.as_str()), - VariantInfo::Generator { substs, variant_index, .. } => - f(&substs.as_generator().variant_name(*variant_index)), + VariantInfo::Generator { substs, variant_index, .. } => { + f(&substs.as_generator().variant_name(*variant_index)) + } } } @@ -1697,16 +1667,18 @@ impl<'tcx> VariantInfo<'_, 'tcx> { fn field_name(&self, i: usize) -> String { let field_name = match *self { - VariantInfo::Adt(variant) if variant.ctor_kind != CtorKind::Fn => - Some(variant.fields[i].ident.name), + VariantInfo::Adt(variant) if variant.ctor_kind != CtorKind::Fn => { + Some(variant.fields[i].ident.name) + } VariantInfo::Generator { generator_layout, generator_saved_local_names, variant_index, .. 
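A hedged model of the niche-value computation in the hunk above: the variant's position relative to niche_variants.start(), shifted by niche_start and truncated to the tag size, guarded by the same lossless-truncation assert. The concrete numbers below are made up; truncate and niche_value are stand-ins, not compiler APIs.

use std::mem::size_of;

// Stand-in for rustc's truncate(value, size): keep only `tag_bits` low bits.
fn truncate(value: u128, tag_bits: u32) -> u128 {
    value & (u128::MAX >> (128 - tag_bits))
}

fn niche_value(variant_index: u32, niche_start_variant: u32, niche_start: u128, tag_bits: u32) -> u64 {
    let value = (variant_index as u128)
        .wrapping_sub(niche_start_variant as u128)
        .wrapping_add(niche_start);
    let value = truncate(value, tag_bits);
    // Same check as the NOTE(eddyb) assert above: truncation must be lossless here.
    assert_eq!(value as u64 as u128, value);
    value as u64
}

fn main() {
    // Made-up numbers: the first niche variant maps to niche_start itself.
    assert_eq!(niche_value(3, 3, 1, 8), 1);
    assert_eq!(niche_value(4, 3, 1, 8), 2);
    // Niche layouts are why e.g. Option<&u8> needs no separate tag at all.
    assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
}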
- } => generator_saved_local_names[ - generator_layout.variant_fields[variant_index][i.into()] - ], + } => { + generator_saved_local_names + [generator_layout.variant_fields[variant_index][i.into()]] + } _ => None, }; field_name.map(|name| name.to_string()).unwrap_or_else(|| format!("__{}", i)) @@ -1726,17 +1698,11 @@ fn describe_enum_variant( span: Span, ) -> (&'ll DICompositeType, MemberDescriptionFactory<'ll, 'tcx>) { let metadata_stub = variant.map_struct_name(|variant_name| { - let unique_type_id = debug_context(cx).type_map - .borrow_mut() - .get_unique_type_id_of_enum_variant( - cx, - layout.ty, - &variant_name); - create_struct_stub(cx, - layout.ty, - &variant_name, - unique_type_id, - Some(containing_scope)) + let unique_type_id = debug_context(cx) + .type_map + .borrow_mut() + .get_unique_type_id_of_enum_variant(cx, layout.ty, &variant_name); + create_struct_stub(cx, layout.ty, &variant_name, unique_type_id, Some(containing_scope)) }); // Build an array of (field name, field type) pairs to be captured in the factory closure. @@ -1747,44 +1713,43 @@ fn describe_enum_variant( // We have the layout of an enum variant, we need the layout of the outer enum let enum_layout = cx.layout_of(layout.ty); let offset = enum_layout.fields.offset(discr_field.as_usize()); - let args = ( - "RUST$ENUM$DISR".to_owned(), - enum_layout.field(cx, discr_field.as_usize()).ty); + let args = + ("RUST$ENUM$DISR".to_owned(), enum_layout.field(cx, discr_field.as_usize()).ty); (Some(offset), Some(args)) } _ => (None, None), }; ( - discr_offset.into_iter().chain((0..layout.fields.count()).map(|i| { - layout.fields.offset(i) - })).collect(), - discr_arg.into_iter().chain((0..layout.fields.count()).map(|i| { - (variant.field_name(i), layout.field(cx, i).ty) - })).collect() + discr_offset + .into_iter() + .chain((0..layout.fields.count()).map(|i| layout.fields.offset(i))) + .collect(), + discr_arg + .into_iter() + .chain( + (0..layout.fields.count()) + .map(|i| (variant.field_name(i), layout.field(cx, i).ty)), + ) + .collect(), ) } else { ( - (0..layout.fields.count()).map(|i| { - layout.fields.offset(i) - }).collect(), - (0..layout.fields.count()).map(|i| { - (variant.field_name(i), layout.field(cx, i).ty) - }).collect() + (0..layout.fields.count()).map(|i| layout.fields.offset(i)).collect(), + (0..layout.fields.count()) + .map(|i| (variant.field_name(i), layout.field(cx, i).ty)) + .collect(), ) }; - let member_description_factory = - VariantMDF(VariantMemberDescriptionFactory { - offsets, - args, - discriminant_type_metadata: match discriminant_info { - RegularDiscriminant { discr_type_metadata, .. } => { - Some(discr_type_metadata) - } - _ => None - }, - span, - }); + let member_description_factory = VariantMDF(VariantMemberDescriptionFactory { + offsets, + args, + discriminant_type_metadata: match discriminant_info { + RegularDiscriminant { discr_type_metadata, .. } => Some(discr_type_metadata), + _ => None, + }, + span, + }); (metadata_stub, member_description_factory) } @@ -1820,7 +1785,8 @@ fn prepare_enum_metadata( DIB(cx), name.as_ptr(), // FIXME: what if enumeration has i128 discriminant? - discr.val as u64)) + discr.val as u64, + )) } }) .collect(), @@ -1834,7 +1800,8 @@ fn prepare_enum_metadata( DIB(cx), name.as_ptr(), // FIXME: what if enumeration has i128 discriminant? 
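describe_enum_variant above prepends the synthetic RUST$ENUM$DISR entry by chaining an Option's iterator in front of the real field iterators. The same Option::into_iter().chain(..) pattern in miniature, with made-up field names:

fn main() {
    // Prepend an optional synthetic entry in front of the real fields.
    let synthetic: Option<&str> = Some("RUST$ENUM$DISR");
    let fields = ["x", "y"];

    let with_disr: Vec<&str> = synthetic.into_iter().chain(fields.iter().copied()).collect();
    assert_eq!(with_disr, ["RUST$ENUM$DISR", "x", "y"]);

    // With None, nothing is prepended.
    let no_synthetic: Option<&str> = None;
    let without: Vec<&str> = no_synthetic.into_iter().chain(fields.iter().copied()).collect();
    assert_eq!(without, ["x", "y"]);
}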
- variant_index.as_usize() as u64)) + variant_index.as_usize() as u64, + )) } }) .collect(), @@ -1842,14 +1809,12 @@ fn prepare_enum_metadata( }; let disr_type_key = (enum_def_id, discr); - let cached_discriminant_type_metadata = debug_context(cx).created_enum_disr_types - .borrow() - .get(&disr_type_key).cloned(); + let cached_discriminant_type_metadata = + debug_context(cx).created_enum_disr_types.borrow().get(&disr_type_key).cloned(); match cached_discriminant_type_metadata { Some(discriminant_type_metadata) => discriminant_type_metadata, None => { - let (discriminant_size, discriminant_align) = - (discr.size(cx), discr.align(cx)); + let (discriminant_size, discriminant_align) = (discr.size(cx), discr.align(cx)); let discriminant_base_type_metadata = type_metadata(cx, discr.to_ty(cx.tcx), syntax_pos::DUMMY_SP); @@ -1869,12 +1834,15 @@ fn prepare_enum_metadata( discriminant_size.bits(), discriminant_align.abi.bits() as u32, create_DIArray(DIB(cx), &enumerators_metadata), - discriminant_base_type_metadata, true) + discriminant_base_type_metadata, + true, + ) }; - debug_context(cx).created_enum_disr_types - .borrow_mut() - .insert(disr_type_key, discriminant_type_metadata); + debug_context(cx) + .created_enum_disr_types + .borrow_mut() + .insert(disr_type_key, discriminant_type_metadata); discriminant_type_metadata } @@ -1884,23 +1852,26 @@ fn prepare_enum_metadata( let layout = cx.layout_of(enum_type); match (&layout.abi, &layout.variants) { - (&layout::Abi::Scalar(_), &layout::Variants::Multiple { - discr_kind: layout::DiscriminantKind::Tag, - ref discr, - .. - }) => return FinalMetadata(discriminant_type_metadata(discr.value)), + ( + &layout::Abi::Scalar(_), + &layout::Variants::Multiple { + discr_kind: layout::DiscriminantKind::Tag, + ref discr, + .. + }, + ) => return FinalMetadata(discriminant_type_metadata(discr.value)), _ => {} } let enum_name = SmallCStr::new(&enum_name); let unique_type_id_str = SmallCStr::new( - debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id) + debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id), ); if use_enum_fallback(cx) { let discriminant_type_metadata = match layout.variants { - layout::Variants::Single { .. } | - layout::Variants::Multiple { + layout::Variants::Single { .. } + | layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Niche { .. }, .. } => None, @@ -1908,9 +1879,7 @@ fn prepare_enum_metadata( discr_kind: layout::DiscriminantKind::Tag, ref discr, .. 
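The created_enum_disr_types lookup above is a plain interior-mutability memoization: consult a RefCell-wrapped map and only build (and insert) the metadata on a miss. A self-contained sketch of the pattern; Cache, its key tuple, and the String value are stand-ins for the real debug-context types.

use std::cell::RefCell;
use std::collections::HashMap;

struct Cache {
    created: RefCell<HashMap<(u32, u8), String>>,
}

impl Cache {
    fn get_or_create(&self, key: (u32, u8)) -> String {
        // Fast path: reuse what was built earlier.
        if let Some(cached) = self.created.borrow().get(&key).cloned() {
            return cached;
        }
        // Slow path: build once and remember it.
        let fresh = format!("discriminant metadata for {:?}", key);
        self.created.borrow_mut().insert(key, fresh.clone());
        fresh
    }
}

fn main() {
    let cache = Cache { created: RefCell::new(HashMap::new()) };
    let a = cache.get_or_create((7, 4));
    let b = cache.get_or_create((7, 4));
    assert_eq!(a, b);
}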
- } => { - Some(discriminant_type_metadata(discr.value)) - } + } => Some(discriminant_type_metadata(discr.value)), }; let enum_metadata = unsafe { @@ -1925,7 +1894,8 @@ fn prepare_enum_metadata( DIFlags::FlagZero, None, 0, // RuntimeLang - unique_type_id_str.as_ptr()) + unique_type_id_str.as_ptr(), + ) }; return create_and_register_recursive_type_forward_declaration( @@ -1968,7 +1938,8 @@ fn prepare_enum_metadata( layout::F32 => Integer::I32, layout::F64 => Integer::I64, layout::Pointer => cx.data_layout().ptr_sized_integer(), - }.to_ty(cx.tcx, false); + } + .to_ty(cx.tcx, false); let discr_metadata = basic_type_metadata(cx, discr_type); unsafe { @@ -1982,9 +1953,10 @@ fn prepare_enum_metadata( align.abi.bits() as u32, layout.fields.offset(discr_index).bits(), DIFlags::FlagArtificial, - discr_metadata)) + discr_metadata, + )) } - }, + } layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Tag, @@ -2007,9 +1979,10 @@ fn prepare_enum_metadata( align.bits() as u32, layout.fields.offset(discr_index).bits(), DIFlags::FlagArtificial, - discr_metadata)) + discr_metadata, + )) } - }, + } }; let mut outer_fields = match layout.variants { @@ -2018,7 +1991,7 @@ fn prepare_enum_metadata( let tuple_mdf = TupleMemberDescriptionFactory { ty: enum_type, component_types: outer_field_tys, - span + span, }; tuple_mdf .create_member_descriptions(cx) @@ -2029,9 +2002,10 @@ fn prepare_enum_metadata( }; let variant_part_unique_type_id_str = SmallCStr::new( - debug_context(cx).type_map + debug_context(cx) + .type_map .borrow_mut() - .get_unique_type_id_str_of_enum_variant_part(unique_type_id) + .get_unique_type_id_str_of_enum_variant_part(unique_type_id), ); let empty_array = create_DIArray(DIB(cx), &[]); let variant_part = unsafe { @@ -2046,7 +2020,8 @@ fn prepare_enum_metadata( DIFlags::FlagZero, discriminator_metadata, empty_array, - variant_part_unique_type_id_str.as_ptr()) + variant_part_unique_type_id_str.as_ptr(), + ) }; outer_fields.push(Some(variant_part)); @@ -2066,7 +2041,8 @@ fn prepare_enum_metadata( type_array, 0, None, - unique_type_id_str.as_ptr()) + unique_type_id_str.as_ptr(), + ) }; return create_and_register_recursive_type_forward_declaration( @@ -2103,24 +2079,25 @@ fn composite_type_metadata( _definition_span: Span, ) -> &'ll DICompositeType { // Create the (empty) struct metadata node ... - let composite_type_metadata = create_struct_stub(cx, - composite_type, - composite_type_name, - composite_type_unique_id, - containing_scope); + let composite_type_metadata = create_struct_stub( + cx, + composite_type, + composite_type_name, + composite_type_unique_id, + containing_scope, + ); // ... and immediately create and add the member descriptions. - set_members_of_composite_type(cx, - composite_type, - composite_type_metadata, - member_descriptions); + set_members_of_composite_type(cx, composite_type, composite_type_metadata, member_descriptions); composite_type_metadata } -fn set_members_of_composite_type(cx: &CodegenCx<'ll, 'tcx>, - composite_type: Ty<'tcx>, - composite_type_metadata: &'ll DICompositeType, - member_descriptions: Vec<MemberDescription<'ll>>) { +fn set_members_of_composite_type( + cx: &CodegenCx<'ll, 'tcx>, + composite_type: Ty<'tcx>, + composite_type_metadata: &'ll DICompositeType, + member_descriptions: Vec<MemberDescription<'ll>>, +) { // In some rare cases LLVM metadata uniquing would lead to an existing type // description being used instead of a new one created in // create_struct_stub. 
This would cause a hard to trace assertion in @@ -2131,8 +2108,10 @@ fn set_members_of_composite_type(cx: &CodegenCx<'ll, 'tcx>, let mut composite_types_completed = debug_context(cx).composite_types_completed.borrow_mut(); if !composite_types_completed.insert(&composite_type_metadata) { - bug!("debuginfo::set_members_of_composite_type() - \ - Already completed forward declaration re-encountered."); + bug!( + "debuginfo::set_members_of_composite_type() - \ + Already completed forward declaration re-encountered." + ); } } @@ -2145,7 +2124,11 @@ fn set_members_of_composite_type(cx: &CodegenCx<'ll, 'tcx>, unsafe { let type_array = create_DIArray(DIB(cx), &member_metadata[..]); llvm::LLVMRustDICompositeTypeReplaceArrays( - DIB(cx), composite_type_metadata, Some(type_array), type_params); + DIB(cx), + composite_type_metadata, + Some(type_array), + type_params, + ); } } @@ -2155,40 +2138,42 @@ fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> Option<&' if !substs.types().next().is_none() { let generics = cx.tcx.generics_of(def.did); let names = get_parameter_names(cx, generics); - let template_params: Vec<_> = substs.iter().zip(names).filter_map(|(kind, name)| { - if let GenericArgKind::Type(ty) = kind.unpack() { - let actual_type = cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), ty); - let actual_type_metadata = - type_metadata(cx, actual_type, syntax_pos::DUMMY_SP); - let name = SmallCStr::new(&name.as_str()); - Some(unsafe { - - Some(llvm::LLVMRustDIBuilderCreateTemplateTypeParameter( - DIB(cx), - None, - name.as_ptr(), - actual_type_metadata, - unknown_file_metadata(cx), - 0, - 0, - )) - }) - } else { - None - } - }).collect(); + let template_params: Vec<_> = substs + .iter() + .zip(names) + .filter_map(|(kind, name)| { + if let GenericArgKind::Type(ty) = kind.unpack() { + let actual_type = + cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), ty); + let actual_type_metadata = + type_metadata(cx, actual_type, syntax_pos::DUMMY_SP); + let name = SmallCStr::new(&name.as_str()); + Some(unsafe { + Some(llvm::LLVMRustDIBuilderCreateTemplateTypeParameter( + DIB(cx), + None, + name.as_ptr(), + actual_type_metadata, + unknown_file_metadata(cx), + 0, + 0, + )) + }) + } else { + None + } + }) + .collect(); return Some(create_DIArray(DIB(cx), &template_params[..])); } } return Some(create_DIArray(DIB(cx), &[])); - fn get_parameter_names(cx: &CodegenCx<'_, '_>, - generics: &ty::Generics) - -> Vec<Symbol> { - let mut names = generics.parent.map_or(vec![], |def_id| { - get_parameter_names(cx, cx.tcx.generics_of(def_id)) - }); + fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> { + let mut names = generics + .parent + .map_or(vec![], |def_id| get_parameter_names(cx, cx.tcx.generics_of(def_id))); names.extend(generics.params.iter().map(|param| param.name)); names } @@ -2208,7 +2193,7 @@ fn create_struct_stub( let name = SmallCStr::new(struct_type_name); let unique_type_id = SmallCStr::new( - debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id) + debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id), ); let metadata_stub = unsafe { // `LLVMRustDIBuilderCreateStructType()` wants an empty array. 
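The composite_types_completed guard above leans on the set's insert returning false for an element that is already present; that is what turns a repeated completion into the bug!() call. The same behaviour with a plain HashSet:

use std::collections::HashSet;

fn main() {
    let mut completed: HashSet<&str> = HashSet::new();
    assert!(completed.insert("struct Foo"));   // first completion succeeds
    assert!(!completed.insert("struct Foo"));  // re-completion is the bug!() case above
}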
A null @@ -2229,7 +2214,8 @@ fn create_struct_stub( empty_array, 0, None, - unique_type_id.as_ptr()) + unique_type_id.as_ptr(), + ) }; metadata_stub @@ -2246,7 +2232,7 @@ fn create_union_stub( let name = SmallCStr::new(union_type_name); let unique_type_id = SmallCStr::new( - debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id) + debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id), ); let metadata_stub = unsafe { // `LLVMRustDIBuilderCreateUnionType()` wants an empty array. A null @@ -2265,7 +2251,8 @@ fn create_union_stub( DIFlags::FlagZero, Some(empty_array), 0, // RuntimeLang - unique_type_id.as_ptr()) + unique_type_id.as_ptr(), + ) }; metadata_stub @@ -2274,11 +2261,7 @@ fn create_union_stub( /// Creates debug information for the given global variable. /// /// Adds the created metadata nodes directly to the crate's IR. -pub fn create_global_var_metadata( - cx: &CodegenCx<'ll, '_>, - def_id: DefId, - global: &'ll Value, -) { +pub fn create_global_var_metadata(cx: &CodegenCx<'ll, '_>, def_id: DefId, global: &'ll Value) { if cx.dbg_cx.is_none() { return; } @@ -2317,20 +2300,20 @@ pub fn create_global_var_metadata( let global_align = cx.align_of(variable_type); unsafe { - llvm::LLVMRustDIBuilderCreateStaticVariable(DIB(cx), - Some(var_scope), - var_name.as_ptr(), - // If null, linkage_name field is omitted, - // which is what we want for no_mangle statics - linkage_name.as_ref() - .map_or(ptr::null(), |name| name.as_ptr()), - file_metadata, - line_number, - type_metadata, - is_local_to_unit, - global, - None, - global_align.bytes() as u32, + llvm::LLVMRustDIBuilderCreateStaticVariable( + DIB(cx), + Some(var_scope), + var_name.as_ptr(), + // If null, linkage_name field is omitted, + // which is what we want for no_mangle statics + linkage_name.as_ref().map_or(ptr::null(), |name| name.as_ptr()), + file_metadata, + line_number, + type_metadata, + is_local_to_unit, + global, + None, + global_align.bytes() as u32, ); } } @@ -2370,20 +2353,22 @@ pub fn create_vtable_metadata(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>, vtable: & empty_array, 0, Some(type_metadata), - name.as_ptr() + name.as_ptr(), ); - llvm::LLVMRustDIBuilderCreateStaticVariable(DIB(cx), - NO_SCOPE_METADATA, - name.as_ptr(), - ptr::null(), - unknown_file_metadata(cx), - UNKNOWN_LINE_NUMBER, - vtable_type, - true, - vtable, - None, - 0); + llvm::LLVMRustDIBuilderCreateStaticVariable( + DIB(cx), + NO_SCOPE_METADATA, + name.as_ptr(), + ptr::null(), + unknown_file_metadata(cx), + UNKNOWN_LINE_NUMBER, + vtable_type, + true, + vtable, + None, + 0, + ); } } @@ -2395,10 +2380,5 @@ pub fn extend_scope_to_file( defining_crate: CrateNum, ) -> &'ll DILexicalBlock { let file_metadata = file_metadata(cx, &file.name, defining_crate); - unsafe { - llvm::LLVMRustDIBuilderCreateLexicalBlockFile( - DIB(cx), - scope_metadata, - file_metadata) - } + unsafe { llvm::LLVMRustDIBuilderCreateLexicalBlockFile(DIB(cx), scope_metadata, file_metadata) } } diff --git a/src/librustc_codegen_ssa/base.rs b/src/librustc_codegen_ssa/base.rs index f6725e66f03..2d783d6d713 100644 --- a/src/librustc_codegen_ssa/base.rs +++ b/src/librustc_codegen_ssa/base.rs @@ -13,58 +13,80 @@ //! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int, //! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`. 
-use crate::{CachedModuleCodegen, CrateInfo, MemFlags, ModuleCodegen, ModuleKind}; use crate::back::write::{ - OngoingCodegen, start_async_codegen, submit_pre_lto_module_to_llvm, - submit_post_lto_module_to_llvm, + start_async_codegen, submit_post_lto_module_to_llvm, submit_pre_lto_module_to_llvm, + OngoingCodegen, }; -use crate::common::{RealPredicate, TypeKind, IntPredicate}; +use crate::common::{IntPredicate, RealPredicate, TypeKind}; use crate::meth; use crate::mir; use crate::mir::operand::OperandValue; use crate::mir::place::PlaceRef; use crate::traits::*; +use crate::{CachedModuleCodegen, CrateInfo, MemFlags, ModuleCodegen, ModuleKind}; use rustc::hir; -use rustc_session::cgu_reuse_tracker::CguReuse; use rustc::hir::def_id::{DefId, LOCAL_CRATE}; use rustc::middle::cstore::EncodedMetadata; +use rustc::middle::cstore::{self, LinkagePreference}; use rustc::middle::lang_items::StartFnLangItem; use rustc::middle::weak_lang_items; -use rustc::mir::mono::{CodegenUnitNameBuilder, CodegenUnit, MonoItem}; -use rustc::ty::{self, Ty, TyCtxt, Instance}; -use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt}; -use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA}; -use rustc::ty::query::Providers; -use rustc::middle::cstore::{self, LinkagePreference}; -use rustc::util::common::{time, print_time_passes_entry, set_time_depth, time_depth}; +use rustc::mir::mono::{CodegenUnit, CodegenUnitNameBuilder, MonoItem}; use rustc::session::config::{self, EntryFnType, Lto}; use rustc::session::Session; +use rustc::ty::layout::{self, Align, HasTyCtxt, LayoutOf, TyLayout, VariantIdx}; +use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA}; +use rustc::ty::query::Providers; +use rustc::ty::{self, Instance, Ty, TyCtxt}; +use rustc::util::common::{print_time_passes_entry, set_time_depth, time, time_depth}; use rustc::util::nodemap::FxHashMap; +use rustc_codegen_utils::{check_for_rustc_errors_attr, symbol_names_test}; use rustc_index::vec::Idx; -use rustc_codegen_utils::{symbol_names_test, check_for_rustc_errors_attr}; +use rustc_session::cgu_reuse_tracker::CguReuse; use syntax::attr; use syntax_pos::Span; use std::cmp; use std::ops::{Deref, DerefMut}; -use std::time::{Instant, Duration}; +use std::time::{Duration, Instant}; -pub fn bin_op_to_icmp_predicate(op: hir::BinOpKind, - signed: bool) - -> IntPredicate { +pub fn bin_op_to_icmp_predicate(op: hir::BinOpKind, signed: bool) -> IntPredicate { match op { hir::BinOpKind::Eq => IntPredicate::IntEQ, hir::BinOpKind::Ne => IntPredicate::IntNE, - hir::BinOpKind::Lt => if signed { IntPredicate::IntSLT } else { IntPredicate::IntULT }, - hir::BinOpKind::Le => if signed { IntPredicate::IntSLE } else { IntPredicate::IntULE }, - hir::BinOpKind::Gt => if signed { IntPredicate::IntSGT } else { IntPredicate::IntUGT }, - hir::BinOpKind::Ge => if signed { IntPredicate::IntSGE } else { IntPredicate::IntUGE }, - op => { - bug!("comparison_op_to_icmp_predicate: expected comparison operator, \ - found {:?}", - op) + hir::BinOpKind::Lt => { + if signed { + IntPredicate::IntSLT + } else { + IntPredicate::IntULT + } + } + hir::BinOpKind::Le => { + if signed { + IntPredicate::IntSLE + } else { + IntPredicate::IntULE + } + } + hir::BinOpKind::Gt => { + if signed { + IntPredicate::IntSGT + } else { + IntPredicate::IntUGT + } + } + hir::BinOpKind::Ge => { + if signed { + IntPredicate::IntSGE + } else { + IntPredicate::IntUGE + } } + op => bug!( + "comparison_op_to_icmp_predicate: expected comparison operator, \ + found {:?}", + op + ), } } @@ -77,9 +99,11 @@ pub fn 
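bin_op_to_icmp_predicate above needs the signed flag because the same bit pattern orders differently under signed and unsigned comparison, hence the IntSLT/IntULT (and friends) split:

fn main() {
    let bits: u8 = 0xFF;
    assert!(bits > 1);          // unsigned view: 255 > 1  (IntUGT)
    assert!((bits as i8) < 1);  // signed view:   -1 < 1   (IntSLT)
}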
bin_op_to_fcmp_predicate(op: hir::BinOpKind) -> RealPredicate { hir::BinOpKind::Gt => RealPredicate::RealOGT, hir::BinOpKind::Ge => RealPredicate::RealOGE, op => { - bug!("comparison_op_to_fcmp_predicate: expected comparison operator, \ + bug!( + "comparison_op_to_fcmp_predicate: expected comparison operator, \ found {:?}", - op); + op + ); } } } @@ -97,7 +121,7 @@ pub fn compare_simd_types<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let cmp = bin_op_to_fcmp_predicate(op); let cmp = bx.fcmp(cmp, lhs, rhs); return bx.sext(cmp, ret_ty); - }, + } ty::Uint(_) => false, ty::Int(_) => true, _ => bug!("compare_simd_types: invalid SIMD type"), @@ -136,17 +160,13 @@ pub fn unsized_info<'tcx, Cx: CodegenMethods<'tcx>>( old_info.expect("unsized_info: missing old info for trait upcast") } (_, &ty::Dynamic(ref data, ..)) => { - let vtable_ptr = cx.layout_of(cx.tcx().mk_mut_ptr(target)) - .field(cx, FAT_PTR_EXTRA); + let vtable_ptr = cx.layout_of(cx.tcx().mk_mut_ptr(target)).field(cx, FAT_PTR_EXTRA); cx.const_ptrcast( meth::get_vtable(cx, source, data.principal()), cx.backend_type(vtable_ptr), ) } - _ => bug!( - "unsized_info: invalid unsizing {:?} -> {:?}", - source, target - ), + _ => bug!("unsized_info: invalid unsizing {:?} -> {:?}", source, target), } } @@ -159,12 +179,9 @@ pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( ) -> (Bx::Value, Bx::Value) { debug!("unsize_thin_ptr: {:?} => {:?}", src_ty, dst_ty); match (&src_ty.kind, &dst_ty.kind) { - (&ty::Ref(_, a, _), - &ty::Ref(_, b, _)) | - (&ty::Ref(_, a, _), - &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) | - (&ty::RawPtr(ty::TypeAndMut { ty: a, .. }), - &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) => { + (&ty::Ref(_, a, _), &ty::Ref(_, b, _)) + | (&ty::Ref(_, a, _), &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) + | (&ty::RawPtr(ty::TypeAndMut { ty: a, .. }), &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) => { assert!(bx.cx().type_is_sized(a)); let ptr_ty = bx.cx().type_ptr_to(bx.cx().backend_type(bx.cx().layout_of(b))); (bx.pointercast(src, ptr_ty), unsized_info(bx.cx(), a, b, None)) @@ -193,8 +210,10 @@ pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( // HACK(eddyb) have to bitcast pointers until LLVM removes pointee types. // FIXME(eddyb) move these out of this `match` arm, so they're always // applied, uniformly, no matter the source/destination types. 
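unsized_info and unsize_thin_ptr above build the extra word of a fat pointer (slice length or vtable). The effect is visible from safe Rust, where the unsize coercion doubles the pointer size:

use std::fmt::Debug;
use std::mem::size_of;

fn main() {
    let array: &[u32; 3] = &[1, 2, 3];
    let slice: &[u32] = array; // unsize coercion: the length becomes the extra word
    assert_eq!(slice.len(), 3);
    assert_eq!(size_of::<&[u32; 3]>(), size_of::<usize>());
    assert_eq!(size_of::<&[u32]>(), 2 * size_of::<usize>());

    let obj: &dyn Debug = &5u32; // trait-object coercion: the vtable is the extra word
    assert_eq!(size_of::<&dyn Debug>(), 2 * size_of::<usize>());
    println!("{:?}", obj);
}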
- (bx.bitcast(lldata, bx.cx().scalar_pair_element_backend_type(dst_layout, 0, true)), - bx.bitcast(llextra, bx.cx().scalar_pair_element_backend_type(dst_layout, 1, true))) + ( + bx.bitcast(lldata, bx.cx().scalar_pair_element_backend_type(dst_layout, 0, true)), + bx.bitcast(llextra, bx.cx().scalar_pair_element_backend_type(dst_layout, 1, true)), + ) } _ => bug!("unsize_thin_ptr: called on bad types"), } @@ -210,9 +229,9 @@ pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let src_ty = src.layout.ty; let dst_ty = dst.layout.ty; match (&src_ty.kind, &dst_ty.kind) { - (&ty::Ref(..), &ty::Ref(..)) | - (&ty::Ref(..), &ty::RawPtr(..)) | - (&ty::RawPtr(..), &ty::RawPtr(..)) => { + (&ty::Ref(..), &ty::Ref(..)) + | (&ty::Ref(..), &ty::RawPtr(..)) + | (&ty::RawPtr(..), &ty::RawPtr(..)) => { let (base, info) = match bx.load_operand(src).val { OperandValue::Pair(base, info) => { // fat-ptr to fat-ptr unsize preserves the vtable @@ -224,10 +243,8 @@ pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let thin_ptr = dst.layout.field(bx.cx(), FAT_PTR_ADDR); (bx.pointercast(base, bx.cx().backend_type(thin_ptr)), info) } - OperandValue::Immediate(base) => { - unsize_thin_ptr(bx, base, src_ty, dst_ty) - } - OperandValue::Ref(..) => bug!() + OperandValue::Immediate(base) => unsize_thin_ptr(bx, base, src_ty, dst_ty), + OperandValue::Ref(..) => bug!(), }; OperandValue::Pair(base, info).store(bx, dst); } @@ -244,18 +261,21 @@ pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( } if src_f.layout.ty == dst_f.layout.ty { - memcpy_ty(bx, dst_f.llval, dst_f.align, src_f.llval, src_f.align, - src_f.layout, MemFlags::empty()); + memcpy_ty( + bx, + dst_f.llval, + dst_f.align, + src_f.llval, + src_f.align, + src_f.layout, + MemFlags::empty(), + ); } else { coerce_unsized_into(bx, src_f, dst_f); } } } - _ => bug!( - "coerce_unsized_into: invalid coercion {:?} -> {:?}", - src_ty, - dst_ty, - ), + _ => bug!("coerce_unsized_into: invalid coercion {:?} -> {:?}", src_ty, dst_ty,), } } @@ -313,11 +333,7 @@ pub fn from_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( bx: &mut Bx, val: Bx::Value, ) -> Bx::Value { - if bx.cx().val_ty(val) == bx.cx().type_i1() { - bx.zext(val, bx.cx().type_i8()) - } else { - val - } + if bx.cx().val_ty(val) == bx.cx().type_i1() { bx.zext(val, bx.cx().type_i8()) } else { val } } pub fn to_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( @@ -375,7 +391,7 @@ pub fn codegen_instance<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( /// users main function. pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &'a Bx::CodegenCx) { let (main_def_id, span) = match cx.tcx().entry_fn(LOCAL_CRATE) { - Some((def_id, _)) => { (def_id, cx.tcx().def_span(def_id)) }, + Some((def_id, _)) => (def_id, cx.tcx().def_span(def_id)), None => return, }; @@ -393,7 +409,7 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &' match et { Some(EntryFnType::Main) => create_entry_fn::<Bx>(cx, span, main_llfn, main_def_id, true), Some(EntryFnType::Start) => create_entry_fn::<Bx>(cx, span, main_llfn, main_def_id, false), - None => {} // Do nothing. + None => {} // Do nothing. } fn create_entry_fn<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( @@ -417,15 +433,14 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &' // late-bound regions, since late-bound // regions must appear in the argument // listing. 
- let main_ret_ty = cx.tcx().erase_regions( - &main_ret_ty.no_bound_vars().unwrap(), - ); + let main_ret_ty = cx.tcx().erase_regions(&main_ret_ty.no_bound_vars().unwrap()); if cx.get_defined_value("main").is_some() { // FIXME: We should be smart and show a better diagnostic here. - cx.sess().struct_span_err(sp, "entry symbol `main` defined multiple times") - .help("did you use `#[no_mangle]` on `fn main`? Use `#[start]` instead") - .emit(); + cx.sess() + .struct_span_err(sp, "entry symbol `main` defined multiple times") + .help("did you use `#[no_mangle]` on `fn main`? Use `#[start]` instead") + .emit(); cx.sess().abort_if_errors(); bug!(); } @@ -449,10 +464,13 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &' ty::ParamEnv::reveal_all(), start_def_id, cx.tcx().intern_substs(&[main_ret_ty.into()]), - ).unwrap() + ) + .unwrap(), ); - (start_fn, vec![bx.pointercast(rust_main, cx.type_ptr_to(cx.type_i8p())), - arg_argc, arg_argv]) + ( + start_fn, + vec![bx.pointercast(rust_main, cx.type_ptr_to(cx.type_i8p())), arg_argc, arg_argv], + ) } else { debug!("using user-defined start fn"); (rust_main, vec![arg_argc, arg_argv]) @@ -467,9 +485,8 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &' /// Obtain the `argc` and `argv` values to pass to the rust start function. fn get_argc_argv<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( cx: &'a Bx::CodegenCx, - bx: &mut Bx -) -> (Bx::Value, Bx::Value) -{ + bx: &mut Bx, +) -> (Bx::Value, Bx::Value) { if cx.sess().target.target.options.main_needs_argc_argv { // Params from native `main()` used as args for rust start function let param_argc = bx.get_param(0); @@ -496,8 +513,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>( check_for_rustc_errors_attr(tcx); // Skip crate items and just output metadata in -Z no-codegen mode. - if tcx.sess.opts.debugging_opts.no_codegen || - !tcx.sess.opts.output_types.should_codegen() { + if tcx.sess.opts.debugging_opts.no_codegen || !tcx.sess.opts.output_types.should_codegen() { let ongoing_codegen = start_async_codegen(backend, tcx, metadata, 1); ongoing_codegen.codegen_finished(tcx); @@ -538,28 +554,21 @@ pub fn codegen_crate<B: ExtraBackendMethods>( // linkage, then it's already got an allocator shim and we'll be using that // one instead. If nothing exists then it's our job to generate the // allocator! 
- let any_dynamic_crate = tcx.dependency_formats(LOCAL_CRATE) - .iter() - .any(|(_, list)| { - use rustc::middle::dependency_format::Linkage; - list.iter().any(|&linkage| linkage == Linkage::Dynamic) - }); + let any_dynamic_crate = tcx.dependency_formats(LOCAL_CRATE).iter().any(|(_, list)| { + use rustc::middle::dependency_format::Linkage; + list.iter().any(|&linkage| linkage == Linkage::Dynamic) + }); let allocator_module = if any_dynamic_crate { None } else if let Some(kind) = tcx.allocator_kind() { - let llmod_id = cgu_name_builder.build_cgu_name(LOCAL_CRATE, - &["crate"], - Some("allocator")).to_string(); + let llmod_id = + cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("allocator")).to_string(); let mut modules = backend.new_metadata(tcx, &llmod_id); time(tcx.sess, "write allocator module", || { backend.codegen_allocator(tcx, &mut modules, kind) }); - Some(ModuleCodegen { - name: llmod_id, - module_llvm: modules, - kind: ModuleKind::Allocator, - }) + Some(ModuleCodegen { name: llmod_id, module_llvm: modules, kind: ModuleKind::Allocator }) } else { None }; @@ -570,13 +579,15 @@ pub fn codegen_crate<B: ExtraBackendMethods>( if need_metadata_module { // Codegen the encoded metadata. - let metadata_cgu_name = cgu_name_builder.build_cgu_name(LOCAL_CRATE, - &["crate"], - Some("metadata")).to_string(); + let metadata_cgu_name = + cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("metadata")).to_string(); let mut metadata_llvm_module = backend.new_metadata(tcx, &metadata_cgu_name); time(tcx.sess, "write compressed metadata", || { - backend.write_compressed_metadata(tcx, &ongoing_codegen.metadata, - &mut metadata_llvm_module); + backend.write_compressed_metadata( + tcx, + &ongoing_codegen.metadata, + &mut metadata_llvm_module, + ); }); let metadata_module = ModuleCodegen { @@ -612,19 +623,26 @@ pub fn codegen_crate<B: ExtraBackendMethods>( false } CguReuse::PreLto => { - submit_pre_lto_module_to_llvm(&backend, tcx, &ongoing_codegen.coordinator_send, - CachedModuleCodegen { - name: cgu.name().to_string(), - source: cgu.work_product(tcx), - }); + submit_pre_lto_module_to_llvm( + &backend, + tcx, + &ongoing_codegen.coordinator_send, + CachedModuleCodegen { + name: cgu.name().to_string(), + source: cgu.work_product(tcx), + }, + ); true } CguReuse::PostLto => { - submit_post_lto_module_to_llvm(&backend, &ongoing_codegen.coordinator_send, - CachedModuleCodegen { - name: cgu.name().to_string(), - source: cgu.work_product(tcx), - }); + submit_post_lto_module_to_llvm( + &backend, + &ongoing_codegen.coordinator_send, + CachedModuleCodegen { + name: cgu.name().to_string(), + source: cgu.work_product(tcx), + }, + ); true } }; @@ -636,9 +654,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>( // -Ztime-passes output manually. 
let time_depth = time_depth(); set_time_depth(time_depth + 1); - print_time_passes_entry(tcx.sess.time_passes(), - "codegen to LLVM IR", - total_codegen_time); + print_time_passes_entry(tcx.sess.time_passes(), "codegen to LLVM IR", total_codegen_time); set_time_depth(time_depth); ::rustc_incremental::assert_module_sources::assert_module_sources(tcx); @@ -699,13 +715,9 @@ impl<B: ExtraBackendMethods> Drop for AbortCodegenOnDrop<B> { } fn assert_and_save_dep_graph(tcx: TyCtxt<'_>) { - time(tcx.sess, - "assert dep graph", - || ::rustc_incremental::assert_dep_graph(tcx)); + time(tcx.sess, "assert dep graph", || ::rustc_incremental::assert_dep_graph(tcx)); - time(tcx.sess, - "serialize dep graph", - || ::rustc_incremental::save_dep_graph(tcx)); + time(tcx.sess, "serialize dep graph", || ::rustc_incremental::save_dep_graph(tcx)); } impl CrateInfo { @@ -765,7 +777,8 @@ impl CrateInfo { // No need to look for lang items that are whitelisted and don't // actually need to exist. - let missing = missing.iter() + let missing = missing + .iter() .cloned() .filter(|&l| !weak_lang_items::whitelisted(tcx, l)) .collect(); @@ -812,15 +825,15 @@ pub fn provide_both(providers: &mut Providers<'_>) { providers.dllimport_foreign_items = |tcx, krate| { let module_map = tcx.foreign_modules(krate); - let module_map = module_map.iter() - .map(|lib| (lib.def_id, lib)) - .collect::<FxHashMap<_, _>>(); + let module_map = + module_map.iter().map(|lib| (lib.def_id, lib)).collect::<FxHashMap<_, _>>(); - let dllimports = tcx.native_libraries(krate) + let dllimports = tcx + .native_libraries(krate) .iter() .filter(|lib| { if lib.kind != cstore::NativeLibraryKind::NativeUnknown { - return false + return false; } let cfg = match lib.cfg { Some(ref cfg) => cfg, @@ -835,21 +848,20 @@ pub fn provide_both(providers: &mut Providers<'_>) { tcx.arena.alloc(dllimports) }; - providers.is_dllimport_foreign_item = |tcx, def_id| { - tcx.dllimport_foreign_items(def_id.krate).contains(&def_id) - }; + providers.is_dllimport_foreign_item = + |tcx, def_id| tcx.dllimport_foreign_items(def_id.krate).contains(&def_id); } fn determine_cgu_reuse<'tcx>(tcx: TyCtxt<'tcx>, cgu: &CodegenUnit<'tcx>) -> CguReuse { if !tcx.dep_graph.is_fully_enabled() { - return CguReuse::No + return CguReuse::No; } let work_product_id = &cgu.work_product_id(); if tcx.dep_graph.previous_work_product(work_product_id).is_none() { // We don't have anything cached for this CGU. This can happen // if the CGU did not exist in the previous session. - return CguReuse::No + return CguReuse::No; } // Try to mark the CGU as green. If it we can do so, it means that nothing @@ -859,17 +871,15 @@ fn determine_cgu_reuse<'tcx>(tcx: TyCtxt<'tcx>, cgu: &CodegenUnit<'tcx>) -> CguR // know that later). If we are not doing LTO, there is only one optimized // version of each module, so we re-use that. 
let dep_node = cgu.codegen_dep_node(tcx); - assert!(!tcx.dep_graph.dep_node_exists(&dep_node), + assert!( + !tcx.dep_graph.dep_node_exists(&dep_node), "CompileCodegenUnit dep-node for CGU `{}` already exists before marking.", - cgu.name()); + cgu.name() + ); if tcx.dep_graph.try_mark_green(tcx, &dep_node).is_some() { // We can re-use either the pre- or the post-thinlto state - if tcx.sess.lto() != Lto::No { - CguReuse::PreLto - } else { - CguReuse::PostLto - } + if tcx.sess.lto() != Lto::No { CguReuse::PreLto } else { CguReuse::PostLto } } else { CguReuse::No } diff --git a/src/librustc_codegen_ssa/mir/place.rs b/src/librustc_codegen_ssa/mir/place.rs index 98d239d353d..7399db1f2b9 100644 --- a/src/librustc_codegen_ssa/mir/place.rs +++ b/src/librustc_codegen_ssa/mir/place.rs @@ -1,15 +1,15 @@ -use super::{FunctionCx, LocalRef}; use super::operand::OperandValue; +use super::{FunctionCx, LocalRef}; -use crate::MemFlags; use crate::common::IntPredicate; use crate::glue; use crate::traits::*; +use crate::MemFlags; -use rustc::ty::{self, Instance, Ty}; -use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt}; use rustc::mir; use rustc::mir::tcx::PlaceTy; +use rustc::ty::layout::{self, Align, HasTyCtxt, LayoutOf, TyLayout, VariantIdx}; +use rustc::ty::{self, Instance, Ty}; #[derive(Copy, Clone, Debug)] pub struct PlaceRef<'tcx, V> { @@ -27,31 +27,14 @@ pub struct PlaceRef<'tcx, V> { } impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { - pub fn new_sized( - llval: V, - layout: TyLayout<'tcx>, - ) -> PlaceRef<'tcx, V> { + pub fn new_sized(llval: V, layout: TyLayout<'tcx>) -> PlaceRef<'tcx, V> { assert!(!layout.is_unsized()); - PlaceRef { - llval, - llextra: None, - layout, - align: layout.align.abi - } + PlaceRef { llval, llextra: None, layout, align: layout.align.abi } } - pub fn new_sized_aligned( - llval: V, - layout: TyLayout<'tcx>, - align: Align, - ) -> PlaceRef<'tcx, V> { + pub fn new_sized_aligned(llval: V, layout: TyLayout<'tcx>, align: Align) -> PlaceRef<'tcx, V> { assert!(!layout.is_unsized()); - PlaceRef { - llval, - llextra: None, - layout, - align - } + PlaceRef { llval, llextra: None, layout, align } } fn new_thin_place<Bx: BuilderMethods<'a, 'tcx, Value = V>>( @@ -60,12 +43,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { layout: TyLayout<'tcx>, ) -> PlaceRef<'tcx, V> { assert!(!bx.cx().type_has_metadata(layout.ty)); - PlaceRef { - llval, - llextra: None, - layout, - align: layout.align.abi - } + PlaceRef { llval, llextra: None, layout, align: layout.align.abi } } // FIXME(eddyb) pass something else for the name so no work is done @@ -92,10 +70,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { Self::alloca(bx, ptr_layout) } - pub fn len<Cx: ConstMethods<'tcx, Value = V>>( - &self, - cx: &Cx - ) -> V { + pub fn len<Cx: ConstMethods<'tcx, Value = V>>(&self, cx: &Cx) -> V { if let layout::FieldPlacement::Array { count, .. } = self.layout.fields { if self.layout.is_unsized() { assert_eq!(count, 0); @@ -112,7 +87,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { /// Access a field, at a point when the value's case is known. pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>( - self, bx: &mut Bx, + self, + bx: &mut Bx, ix: usize, ) -> Self { let field = self.layout.field(bx.cx(), ix); @@ -133,11 +109,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { PlaceRef { // HACK(eddyb): have to bitcast pointers until LLVM removes pointee types. 
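A hedged model of determine_cgu_reuse's decision above: without incremental data or a green dep-node the CGU is recompiled; a green CGU is reused pre-LTO when LTO still has to run, post-LTO otherwise. The boolean parameters are stand-ins for the dep-graph and session queries used in the real function.

#[derive(Debug, PartialEq)]
enum CguReuse {
    No,
    PreLto,
    PostLto,
}

fn determine_reuse(
    incr_enabled: bool,
    has_cached_work_product: bool,
    marked_green: bool,
    lto_enabled: bool,
) -> CguReuse {
    if !incr_enabled || !has_cached_work_product || !marked_green {
        return CguReuse::No;
    }
    if lto_enabled { CguReuse::PreLto } else { CguReuse::PostLto }
}

fn main() {
    assert_eq!(determine_reuse(true, true, true, true), CguReuse::PreLto);
    assert_eq!(determine_reuse(true, true, true, false), CguReuse::PostLto);
    assert_eq!(determine_reuse(true, false, true, false), CguReuse::No);
}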
llval: bx.pointercast(llval, bx.cx().type_ptr_to(bx.cx().backend_type(field))), - llextra: if bx.cx().type_has_metadata(field.ty) { - self.llextra - } else { - None - }, + llextra: if bx.cx().type_has_metadata(field.ty) { self.llextra } else { None }, layout: field, align: effective_field_align, } @@ -149,8 +121,10 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { // * packed struct - there is no alignment padding match field.ty.kind { _ if self.llextra.is_none() => { - debug!("unsized field `{}`, of `{:?}` has no metadata for adjustment", - ix, self.llval); + debug!( + "unsized field `{}`, of `{:?}` has no metadata for adjustment", + ix, self.llval + ); return simple(); } _ if !field.is_unsized() => return simple(), @@ -222,7 +196,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>( self, bx: &mut Bx, - cast_to: Ty<'tcx> + cast_to: Ty<'tcx>, ) -> V { let cast_to = bx.cx().immediate_backend_type(bx.cx().layout_of(cast_to)); if self.layout.abi.is_uninhabited() { @@ -230,7 +204,10 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { } let (discr_scalar, discr_kind, discr_index) = match self.layout.variants { layout::Variants::Single { index } => { - let discr_val = self.layout.ty.discriminant_for_variant(bx.cx().tcx(), index) + let discr_val = self + .layout + .ty + .discriminant_for_variant(bx.cx().tcx(), index) .map_or(index.as_u32() as u128, |discr| discr.val); return bx.cx().const_uint_big(cast_to, discr_val); } @@ -252,7 +229,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { // let LLVM interpret the `i1` as signed, because // then `i1 1` (i.e., `E::B`) is effectively `i8 -1`. layout::Int(_, signed) => !discr_scalar.is_bool() && signed, - _ => false + _ => false, }; bx.intcast(encoded_discr.immediate(), cast_to, signed) } @@ -330,7 +307,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>( &self, bx: &mut Bx, - variant_index: VariantIdx + variant_index: VariantIdx, ) { if self.layout.for_variant(bx.cx(), variant_index).abi.is_uninhabited() { // We play it safe by using a well-defined `abort`, but we could go for immediate UB @@ -353,20 +330,19 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { bx.store( bx.cx().const_uint_big(bx.cx().backend_type(ptr.layout), to), ptr.llval, - ptr.align); + ptr.align, + ); } layout::Variants::Multiple { - discr_kind: layout::DiscriminantKind::Niche { - dataful_variant, - ref niche_variants, - niche_start, - }, + discr_kind: + layout::DiscriminantKind::Niche { dataful_variant, ref niche_variants, niche_start }, discr_index, .. } => { if variant_index != dataful_variant { - if bx.cx().sess().target.target.arch == "arm" || - bx.cx().sess().target.target.arch == "aarch64" { + if bx.cx().sess().target.target.arch == "arm" + || bx.cx().sess().target.target.arch == "aarch64" + { // FIXME(#34427): as workaround for LLVM bug on ARM, // use memset of 0 before assigning niche value. let fill_byte = bx.cx().const_u8(0); @@ -377,8 +353,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { let niche = self.project_field(bx, discr_index); let niche_llty = bx.cx().immediate_backend_type(niche.layout); let niche_value = variant_index.as_u32() - niche_variants.start().as_u32(); - let niche_value = (niche_value as u128) - .wrapping_add(niche_start); + let niche_value = (niche_value as u128).wrapping_add(niche_start); // FIXME(eddyb): check the actual primitive type here. 
let niche_llval = if niche_value == 0 { // HACK(eddyb): using `c_null` as it works on all types. @@ -395,7 +370,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>( &self, bx: &mut Bx, - llindex: V + llindex: V, ) -> Self { // Statically compute the offset if we can, otherwise just use the element size, // as this will yield the lowest alignment. @@ -417,7 +392,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { pub fn project_downcast<Bx: BuilderMethods<'a, 'tcx, Value = V>>( &self, bx: &mut Bx, - variant_index: VariantIdx + variant_index: VariantIdx, ) -> Self { let mut downcast = *self; downcast.layout = self.layout.for_variant(bx.cx(), variant_index); @@ -442,17 +417,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { pub fn codegen_place( &mut self, bx: &mut Bx, - place_ref: &mir::PlaceRef<'_, 'tcx> + place_ref: &mir::PlaceRef<'_, 'tcx>, ) -> PlaceRef<'tcx, Bx::Value> { debug!("codegen_place(place_ref={:?})", place_ref); let cx = self.cx; let tcx = self.cx.tcx(); let result = match place_ref { - mir::PlaceRef { - base: mir::PlaceBase::Local(index), - projection: [], - } => { + mir::PlaceRef { base: mir::PlaceBase::Local(index), projection: [] } => { match self.locals[*index] { LocalRef::Place(place) => { return place; @@ -466,11 +438,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } } mir::PlaceRef { - base: mir::PlaceBase::Static(box mir::Static { - ty, - kind: mir::StaticKind::Promoted(promoted, substs), - def_id, - }), + base: + mir::PlaceBase::Static(box mir::Static { + ty, + kind: mir::StaticKind::Promoted(promoted, substs), + def_id, + }), projection: [], } => { let instance = Instance::new(*def_id, self.monomorphize(substs)); @@ -478,10 +451,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { match bx.tcx().const_eval_promoted(instance, *promoted) { Ok(val) => match val.val { ty::ConstKind::Value(mir::interpret::ConstValue::ByRef { - alloc, offset - }) => { - bx.cx().from_const_alloc(layout, alloc, offset) - } + alloc, + offset, + }) => bx.cx().from_const_alloc(layout, alloc, offset), _ => bug!("promoteds should have an allocation: {:?}", val), }, Err(_) => { @@ -492,19 +464,19 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { bx.abort(); // We still have to return a place but it doesn't matter, // this code is unreachable. - let llval = bx.cx().const_undef( - bx.cx().type_ptr_to(bx.cx().backend_type(layout)) - ); + let llval = + bx.cx().const_undef(bx.cx().type_ptr_to(bx.cx().backend_type(layout))); PlaceRef::new_sized(llval, layout) } } } mir::PlaceRef { - base: mir::PlaceBase::Static(box mir::Static { - ty, - kind: mir::StaticKind::Static, - def_id, - }), + base: + mir::PlaceBase::Static(box mir::Static { + ty, + kind: mir::StaticKind::Static, + def_id, + }), projection: [], } => { // NB: The layout of a static may be unsized as is the case when working @@ -512,26 +484,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let layout = cx.layout_of(self.monomorphize(&ty)); let static_ = bx.get_static(*def_id); PlaceRef::new_thin_place(bx, static_, layout) - }, - mir::PlaceRef { - base, - projection: [proj_base @ .., mir::ProjectionElem::Deref], - } => { + } + mir::PlaceRef { base, projection: [proj_base @ .., mir::ProjectionElem::Deref] } => { // Load the pointer from its location. 
- self.codegen_consume(bx, &mir::PlaceRef { - base, - projection: proj_base, - }).deref(bx.cx()) + self.codegen_consume(bx, &mir::PlaceRef { base, projection: proj_base }) + .deref(bx.cx()) } - mir::PlaceRef { - base, - projection: [proj_base @ .., elem], - } => { + mir::PlaceRef { base, projection: [proj_base @ .., elem] } => { // FIXME turn this recursion into iteration - let cg_base = self.codegen_place(bx, &mir::PlaceRef { - base, - projection: proj_base, - }); + let cg_base = + self.codegen_place(bx, &mir::PlaceRef { base, projection: proj_base }); match elem { mir::ProjectionElem::Deref => bug!(), @@ -539,50 +501,54 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { cg_base.project_field(bx, field.index()) } mir::ProjectionElem::Index(index) => { - let index = &mir::Operand::Copy( - mir::Place::from(*index) - ); + let index = &mir::Operand::Copy(mir::Place::from(*index)); let index = self.codegen_operand(bx, index); let llindex = index.immediate(); cg_base.project_index(bx, llindex) } - mir::ProjectionElem::ConstantIndex { offset, - from_end: false, - min_length: _ } => { + mir::ProjectionElem::ConstantIndex { + offset, + from_end: false, + min_length: _, + } => { let lloffset = bx.cx().const_usize(*offset as u64); cg_base.project_index(bx, lloffset) } - mir::ProjectionElem::ConstantIndex { offset, - from_end: true, - min_length: _ } => { + mir::ProjectionElem::ConstantIndex { + offset, + from_end: true, + min_length: _, + } => { let lloffset = bx.cx().const_usize(*offset as u64); let lllen = cg_base.len(bx.cx()); let llindex = bx.sub(lllen, lloffset); cg_base.project_index(bx, llindex) } mir::ProjectionElem::Subslice { from, to, from_end } => { - let mut subslice = cg_base.project_index(bx, - bx.cx().const_usize(*from as u64)); - let projected_ty = PlaceTy::from_ty(cg_base.layout.ty) - .projection_ty(tcx, elem).ty; + let mut subslice = + cg_base.project_index(bx, bx.cx().const_usize(*from as u64)); + let projected_ty = + PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx, elem).ty; subslice.layout = bx.cx().layout_of(self.monomorphize(&projected_ty)); if subslice.layout.is_unsized() { assert!(from_end, "slice subslices should be `from_end`"); - subslice.llextra = Some(bx.sub(cg_base.llextra.unwrap(), - bx.cx().const_usize((*from as u64) + (*to as u64)))); + subslice.llextra = Some(bx.sub( + cg_base.llextra.unwrap(), + bx.cx().const_usize((*from as u64) + (*to as u64)), + )); } // Cast the place pointer type to the new // array or slice type (`*[%_; new_len]`). 
- subslice.llval = bx.pointercast(subslice.llval, - bx.cx().type_ptr_to(bx.cx().backend_type(subslice.layout))); + subslice.llval = bx.pointercast( + subslice.llval, + bx.cx().type_ptr_to(bx.cx().backend_type(subslice.layout)), + ); subslice } - mir::ProjectionElem::Downcast(_, v) => { - cg_base.project_downcast(bx, *v) - } + mir::ProjectionElem::Downcast(_, v) => cg_base.project_downcast(bx, *v), } } }; @@ -592,12 +558,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { pub fn monomorphized_place_ty(&self, place_ref: &mir::PlaceRef<'_, 'tcx>) -> Ty<'tcx> { let tcx = self.cx.tcx(); - let place_ty = mir::Place::ty_from( - place_ref.base, - place_ref.projection, - *self.mir, - tcx, - ); + let place_ty = mir::Place::ty_from(place_ref.base, place_ref.projection, *self.mir, tcx); self.monomorphize(&place_ty.ty) } } diff --git a/src/librustc_codegen_utils/symbol_names/v0.rs b/src/librustc_codegen_utils/symbol_names/v0.rs index 858ad9f1cfd..045d06a2e1d 100644 --- a/src/librustc_codegen_utils/symbol_names/v0.rs +++ b/src/librustc_codegen_utils/symbol_names/v0.rs @@ -1,13 +1,13 @@ use rustc::hir; use rustc::hir::def_id::{CrateNum, DefId}; use rustc::hir::map::{DefPathData, DisambiguatedDefPathData}; -use rustc::ty::{self, Ty, TyCtxt, TypeFoldable, Instance}; -use rustc::ty::print::{Printer, Print}; -use rustc::ty::subst::{GenericArg, Subst, GenericArgKind}; +use rustc::ty::print::{Print, Printer}; +use rustc::ty::subst::{GenericArg, GenericArgKind, Subst}; +use rustc::ty::{self, Instance, Ty, TyCtxt, TypeFoldable}; use rustc_data_structures::base_n; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_target::spec::abi::Abi; -use syntax::ast::{IntTy, UintTy, FloatTy}; +use syntax::ast::{FloatTy, IntTy, UintTy}; use std::fmt::Write; use std::ops::Range; @@ -19,8 +19,7 @@ pub(super) fn mangle( ) -> String { let def_id = instance.def_id(); // FIXME(eddyb) this should ideally not be needed. - let substs = - tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), instance.substs); + let substs = tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), instance.substs); let prefix = "_R"; let mut cx = SymbolMangler { @@ -36,12 +35,7 @@ pub(super) fn mangle( out: String::from(prefix), }; cx = if instance.is_vtable_shim() { - cx.path_append_ns( - |cx| cx.print_def_path(def_id, substs), - 'S', - 0, - "", - ).unwrap() + cx.path_append_ns(|cx| cx.print_def_path(def_id, substs), 'S', 0, "").unwrap() } else { cx.print_def_path(def_id, substs).unwrap() }; @@ -183,9 +177,10 @@ impl SymbolMangler<'tcx> { fn in_binder<T>( mut self, value: &ty::Binder<T>, - print_value: impl FnOnce(Self, &T) -> Result<Self, !> + print_value: impl FnOnce(Self, &T) -> Result<Self, !>, ) -> Result<Self, !> - where T: TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { let regions = if value.has_late_bound_regions() { self.tcx.collect_referenced_late_bound_regions(value) @@ -196,16 +191,20 @@ impl SymbolMangler<'tcx> { let mut lifetime_depths = self.binders.last().map(|b| b.lifetime_depths.end).map_or(0..0, |i| i..i); - let lifetimes = regions.into_iter().map(|br| { - match br { - ty::BrAnon(i) => { - // FIXME(eddyb) for some reason, `anonymize_late_bound_regions` starts at `1`. 
- assert_ne!(i, 0); - i - 1 - }, - _ => bug!("symbol_names: non-anonymized region `{:?}` in `{:?}`", br, value), - } - }).max().map_or(0, |max| max + 1); + let lifetimes = regions + .into_iter() + .map(|br| { + match br { + ty::BrAnon(i) => { + // FIXME(eddyb) for some reason, `anonymize_late_bound_regions` starts at `1`. + assert_ne!(i, 0); + i - 1 + } + _ => bug!("symbol_names: non-anonymized region `{:?}` in `{:?}`", br, value), + } + }) + .max() + .map_or(0, |max| max + 1); self.push_opt_integer_62("G", lifetimes as u64); lifetime_depths.end += lifetimes; @@ -263,8 +262,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { let key = self.tcx.def_key(impl_def_id); let parent_def_id = DefId { index: key.parent.unwrap(), ..impl_def_id }; - let mut param_env = self.tcx.param_env(impl_def_id) - .with_reveal_all(); + let mut param_env = self.tcx.param_env(impl_def_id).with_reveal_all(); if !substs.is_empty() { param_env = param_env.subst(self.tcx, substs); } @@ -272,8 +270,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { match &mut impl_trait_ref { Some(impl_trait_ref) => { assert_eq!(impl_trait_ref.self_ty(), self_ty); - *impl_trait_ref = - self.tcx.normalize_erasing_regions(param_env, *impl_trait_ref); + *impl_trait_ref = self.tcx.normalize_erasing_regions(param_env, *impl_trait_ref); self_ty = impl_trait_ref.self_ty(); } None => { @@ -289,10 +286,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { ) } - fn print_region( - mut self, - region: ty::Region<'_>, - ) -> Result<Self::Region, Self::Error> { + fn print_region(mut self, region: ty::Region<'_>) -> Result<Self::Region, Self::Error> { let i = match *region { // Erased lifetimes use the index 0, for a // shorter mangling of `L_`. @@ -318,10 +312,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { Ok(self) } - fn print_type( - mut self, - ty: Ty<'tcx>, - ) -> Result<Self::Type, Self::Error> { + fn print_type(mut self, ty: Ty<'tcx>) -> Result<Self::Type, Self::Error> { // Basic types, never cached (single-character). let basic_type = match ty.kind { ty::Bool => "b", @@ -345,8 +336,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { ty::Never => "z", // Placeholders (should be demangled as `_`). - ty::Param(_) | ty::Bound(..) | ty::Placeholder(_) | - ty::Infer(_) | ty::Error => "p", + ty::Param(_) | ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) | ty::Error => "p", _ => "", }; @@ -362,14 +352,15 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { match ty.kind { // Basic types, handled above. - ty::Bool | ty::Char | ty::Str | - ty::Int(_) | ty::Uint(_) | ty::Float(_) | - ty::Never => unreachable!(), + ty::Bool | ty::Char | ty::Str | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Never => { + unreachable!() + } ty::Tuple(_) if ty.is_unit() => unreachable!(), // Placeholders, also handled as part of basic types. - ty::Param(_) | ty::Bound(..) | ty::Placeholder(_) | - ty::Infer(_) | ty::Error => unreachable!(), + ty::Param(_) | ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) | ty::Error => { + unreachable!() + } ty::Ref(r, ty, mutbl) => { self.push(match mutbl { @@ -409,13 +400,13 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { } // Mangle all nominal types as paths. - ty::Adt(&ty::AdtDef { did: def_id, .. }, substs) | - ty::FnDef(def_id, substs) | - ty::Opaque(def_id, substs) | - ty::Projection(ty::ProjectionTy { item_def_id: def_id, substs }) | - ty::UnnormalizedProjection(ty::ProjectionTy { item_def_id: def_id, substs }) | - ty::Closure(def_id, substs) | - ty::Generator(def_id, substs, _) => { + ty::Adt(&ty::AdtDef { did: def_id, .. 
}, substs) + | ty::FnDef(def_id, substs) + | ty::Opaque(def_id, substs) + | ty::Projection(ty::ProjectionTy { item_def_id: def_id, substs }) + | ty::UnnormalizedProjection(ty::ProjectionTy { item_def_id: def_id, substs }) + | ty::Closure(def_id, substs) + | ty::Generator(def_id, substs, _) => { self = self.print_def_path(def_id, substs)?; } ty::Foreign(def_id) => { @@ -460,9 +451,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { self = r.print(self)?; } - ty::GeneratorWitness(_) => { - bug!("symbol_names: unexpected `GeneratorWitness`") - } + ty::GeneratorWitness(_) => bug!("symbol_names: unexpected `GeneratorWitness`"), } // Only cache types that do not refer to an enclosing @@ -502,10 +491,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { Ok(self) } - fn print_const( - mut self, - ct: &'tcx ty::Const<'tcx>, - ) -> Result<Self::Const, Self::Error> { + fn print_const(mut self, ct: &'tcx ty::Const<'tcx>) -> Result<Self::Const, Self::Error> { if let Some(&i) = self.compress.as_ref().and_then(|c| c.consts.get(&ct)) { return self.print_backref(i); } @@ -514,8 +500,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { match ct.ty.kind { ty::Uint(_) => {} _ => { - bug!("symbol_names: unsupported constant of type `{}` ({:?})", - ct.ty, ct); + bug!("symbol_names: unsupported constant of type `{}` ({:?})", ct.ty, ct); } } self = ct.ty.print(self)?; @@ -539,10 +524,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { Ok(self) } - fn path_crate( - mut self, - cnum: CrateNum, - ) -> Result<Self::Path, Self::Error> { + fn path_crate(mut self, cnum: CrateNum) -> Result<Self::Path, Self::Error> { self.push("C"); let fingerprint = self.tcx.crate_disambiguator(cnum).to_fingerprint(); self.push_disambiguator(fingerprint.to_smaller_hash()); @@ -612,7 +594,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { print_prefix, ns, disambiguated_data.disambiguator as u64, - name.as_ref().map_or("", |s| &s[..]) + name.as_ref().map_or("", |s| &s[..]), ) } fn path_generic_args( @@ -621,17 +603,13 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { args: &[GenericArg<'tcx>], ) -> Result<Self::Path, Self::Error> { // Don't print any regions if they're all erased. 
- let print_regions = args.iter().any(|arg| { - match arg.unpack() { - GenericArgKind::Lifetime(r) => *r != ty::ReErased, - _ => false, - } + let print_regions = args.iter().any(|arg| match arg.unpack() { + GenericArgKind::Lifetime(r) => *r != ty::ReErased, + _ => false, }); - let args = args.iter().cloned().filter(|arg| { - match arg.unpack() { - GenericArgKind::Lifetime(_) => print_regions, - _ => true, - } + let args = args.iter().cloned().filter(|arg| match arg.unpack() { + GenericArgKind::Lifetime(_) => print_regions, + _ => true, }); if args.clone().next().is_none() { diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 45191375748..fdac4390b26 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -9,26 +9,25 @@ use Destination::*; -use syntax_pos::{SourceFile, Span, MultiSpan}; use syntax_pos::source_map::SourceMap; +use syntax_pos::{MultiSpan, SourceFile, Span}; +use crate::snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, Style, StyledString}; +use crate::styled_buffer::StyledBuffer; +use crate::Level::Error; use crate::{ - Level, CodeSuggestion, Diagnostic, SubDiagnostic, pluralize, - SuggestionStyle, DiagnosticId, + pluralize, CodeSuggestion, Diagnostic, DiagnosticId, Level, SubDiagnostic, SuggestionStyle, }; -use crate::Level::Error; -use crate::snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style}; -use crate::styled_buffer::StyledBuffer; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lrc; use std::borrow::Cow; -use std::io::prelude::*; +use std::cmp::{max, min, Reverse}; use std::io; -use std::cmp::{min, max, Reverse}; +use std::io::prelude::*; use std::path::Path; -use termcolor::{StandardStream, ColorChoice, ColorSpec, BufferWriter, Ansi}; -use termcolor::{WriteColor, Color, Buffer}; +use termcolor::{Ansi, BufferWriter, ColorChoice, ColorSpec, StandardStream}; +use termcolor::{Buffer, Color, WriteColor}; /// Describes the way the content of the `rendered` field of the json output is generated #[derive(Clone, Copy, Debug, PartialEq, Eq)] @@ -57,8 +56,15 @@ impl HumanReadableErrorType { ) -> EmitterWriter { let (short, color_config) = self.unzip(); let color = color_config.suggests_using_colors(); - EmitterWriter::new(dst, source_map, short, teach, color, terminal_width, - external_macro_backtrace) + EmitterWriter::new( + dst, + source_map, + short, + teach, + color, + terminal_width, + external_macro_backtrace, + ) } } @@ -117,15 +123,14 @@ impl Margin { } fn was_cut_right(&self, line_len: usize) -> bool { - let right = if self.computed_right == self.span_right || - self.computed_right == self.label_right - { - // Account for the "..." padding given above. Otherwise we end up with code lines that - // do fit but end in "..." as if they were trimmed. - self.computed_right - 6 - } else { - self.computed_right - }; + let right = + if self.computed_right == self.span_right || self.computed_right == self.label_right { + // Account for the "..." padding given above. Otherwise we end up with code lines that + // do fit but end in "..." as if they were trimmed. 
+ self.computed_right - 6 + } else { + self.computed_right + }; right < line_len && self.computed_left + self.column_width < line_len } @@ -156,7 +161,8 @@ impl Margin { let padding_left = (self.column_width - (self.span_right - self.span_left)) / 5 * 2; self.computed_left = self.span_left.saturating_sub(padding_left); self.computed_right = self.computed_left + self.column_width; - } else { // Mostly give up but still don't show the full line. + } else { + // Mostly give up but still don't show the full line. self.computed_left = self.span_left; self.computed_right = self.span_right; } @@ -240,11 +246,15 @@ pub trait Emitter { format!( "help: {}{}: `{}`", sugg.msg, - if self.source_map().map(|sm| is_case_difference( - &**sm, - substitution, - sugg.substitutions[0].parts[0].span, - )).unwrap_or(false) { + if self + .source_map() + .map(|sm| is_case_difference( + &**sm, + substitution, + sugg.substitutions[0].parts[0].span, + )) + .unwrap_or(false) + { " (notice the capitalization)" } else { "" @@ -271,37 +281,35 @@ pub trait Emitter { // This does a small "fix" for multispans by looking to see if it can find any that // point directly at <*macros>. Since these are often difficult to read, this // will change the span to point at the use site. - fn fix_multispans_in_std_macros(&self, - source_map: &Option<Lrc<SourceMap>>, - span: &mut MultiSpan, - children: &mut Vec<SubDiagnostic>, - level: &Level, - backtrace: bool) { + fn fix_multispans_in_std_macros( + &self, + source_map: &Option<Lrc<SourceMap>>, + span: &mut MultiSpan, + children: &mut Vec<SubDiagnostic>, + level: &Level, + backtrace: bool, + ) { let mut spans_updated = self.fix_multispan_in_std_macros(source_map, span, backtrace); for child in children.iter_mut() { - spans_updated |= self.fix_multispan_in_std_macros( - source_map, - &mut child.span, - backtrace - ); + spans_updated |= + self.fix_multispan_in_std_macros(source_map, &mut child.span, backtrace); } let msg = if level == &Error { "this error originates in a macro outside of the current crate \ (in Nightly builds, run with -Z external-macro-backtrace \ - for more info)".to_string() + for more info)" + .to_string() } else { "this warning originates in a macro outside of the current crate \ (in Nightly builds, run with -Z external-macro-backtrace \ - for more info)".to_string() + for more info)" + .to_string() }; if spans_updated { children.push(SubDiagnostic { level: Level::Note, - message: vec![ - (msg, - Style::NoStyle), - ], + message: vec![(msg, Style::NoStyle)], span: MultiSpan::new(), render_span: None, }); @@ -311,10 +319,12 @@ pub trait Emitter { // This "fixes" MultiSpans that contain Spans that are pointing to locations inside of // <*macros>. Since these locations are often difficult to read, we move these Spans from // <*macros> to their corresponding use site. 
- fn fix_multispan_in_std_macros(&self, - source_map: &Option<Lrc<SourceMap>>, - span: &mut MultiSpan, - always_backtrace: bool) -> bool { + fn fix_multispan_in_std_macros( + &self, + source_map: &Option<Lrc<SourceMap>>, + span: &mut MultiSpan, + always_backtrace: bool, + ) -> bool { let sm = match source_map { Some(ref sm) => sm, None => return false, @@ -340,31 +350,40 @@ pub trait Emitter { continue; } if always_backtrace { - new_labels.push((trace.def_site_span, - format!("in this expansion of `{}`{}", - trace.macro_decl_name, - if backtrace_len > 2 { - // if backtrace_len == 1 it'll be pointed - // at by "in this macro invocation" - format!(" (#{})", i + 1) - } else { - String::new() - }))); + new_labels.push(( + trace.def_site_span, + format!( + "in this expansion of `{}`{}", + trace.macro_decl_name, + if backtrace_len > 2 { + // if backtrace_len == 1 it'll be pointed + // at by "in this macro invocation" + format!(" (#{})", i + 1) + } else { + String::new() + } + ), + )); } // Check to make sure we're not in any <*macros> - if !sm.span_to_filename(trace.def_site_span).is_macros() && - !trace.macro_decl_name.starts_with("desugaring of ") && - !trace.macro_decl_name.starts_with("#[") || - always_backtrace { - new_labels.push((trace.call_site, - format!("in this macro invocation{}", - if backtrace_len > 2 && always_backtrace { - // only specify order when the macro - // backtrace is multiple levels deep - format!(" (#{})", i + 1) - } else { - String::new() - }))); + if !sm.span_to_filename(trace.def_site_span).is_macros() + && !trace.macro_decl_name.starts_with("desugaring of ") + && !trace.macro_decl_name.starts_with("#[") + || always_backtrace + { + new_labels.push(( + trace.call_site, + format!( + "in this macro invocation{}", + if backtrace_len > 2 && always_backtrace { + // only specify order when the macro + // backtrace is multiple levels deep + format!(" (#{})", i + 1) + } else { + String::new() + } + ), + )); if !always_backtrace { break; } @@ -378,9 +397,7 @@ pub trait Emitter { if sp_label.span.is_dummy() { continue; } - if sm.span_to_filename(sp_label.span.clone()).is_macros() && - !always_backtrace - { + if sm.span_to_filename(sp_label.span.clone()).is_macros() && !always_backtrace { let v = sp_label.span.macro_backtrace(); if let Some(use_site) = v.last() { before_after.push((sp_label.span.clone(), use_site.call_site.clone())); @@ -406,18 +423,22 @@ impl Emitter for EmitterWriter { let mut children = diag.children.clone(); let (mut primary_span, suggestions) = self.primary_span_formatted(&diag); - self.fix_multispans_in_std_macros(&self.sm, - &mut primary_span, - &mut children, - &diag.level, - self.external_macro_backtrace); - - self.emit_messages_default(&diag.level, - &diag.styled_message(), - &diag.code, - &primary_span, - &children, - &suggestions); + self.fix_multispans_in_std_macros( + &self.sm, + &mut primary_span, + &mut children, + &diag.level, + self.external_macro_backtrace, + ); + + self.emit_messages_default( + &diag.level, + &diag.styled_message(), + &diag.code, + &primary_span, + &children, + &suggestions, + ); } fn should_show_explain(&self) -> bool { @@ -429,7 +450,9 @@ impl Emitter for EmitterWriter { pub struct SilentEmitter; impl Emitter for SilentEmitter { - fn source_map(&self) -> Option<&Lrc<SourceMap>> { None } + fn source_map(&self) -> Option<&Lrc<SourceMap>> { + None + } fn emit_diagnostic(&mut self, _: &Diagnostic) {} } @@ -458,17 +481,13 @@ impl ColorConfig { } } ColorConfig::Never => ColorChoice::Never, - ColorConfig::Auto if 
atty::is(atty::Stream::Stderr) => { - ColorChoice::Auto - } + ColorConfig::Auto if atty::is(atty::Stream::Stderr) => ColorChoice::Auto, ColorConfig::Auto => ColorChoice::Never, } } fn suggests_using_colors(self) -> bool { match self { - | ColorConfig::Always - | ColorConfig::Auto - => true, + ColorConfig::Always | ColorConfig::Auto => true, ColorConfig::Never => false, } } @@ -540,11 +559,7 @@ impl EmitterWriter { } fn maybe_anonymized(&self, line_num: usize) -> String { - if self.ui_testing { - ANONYMIZED_LINE_NUM.to_string() - } else { - line_num.to_string() - } + if self.ui_testing { ANONYMIZED_LINE_NUM.to_string() } else { line_num.to_string() } } fn draw_line( @@ -563,17 +578,21 @@ impl EmitterWriter { let right = margin.right(line_len); // On long lines, we strip the source line, accounting for unicode. let mut taken = 0; - let code: String = source_string.chars().skip(left).take_while(|ch| { - // Make sure that the trimming on the right will fall within the terminal width. - // FIXME: `unicode_width` sometimes disagrees with terminals on how wide a `char` is. - // For now, just accept that sometimes the code line will be longer than desired. - let next = unicode_width::UnicodeWidthChar::width(*ch).unwrap_or(1); - if taken + next > right - left { - return false; - } - taken += next; - true - }).collect(); + let code: String = source_string + .chars() + .skip(left) + .take_while(|ch| { + // Make sure that the trimming on the right will fall within the terminal width. + // FIXME: `unicode_width` sometimes disagrees with terminals on how wide a `char` is. + // For now, just accept that sometimes the code line will be longer than desired. + let next = unicode_width::UnicodeWidthChar::width(*ch).unwrap_or(1); + if taken + next > right - left { + return false; + } + taken += next; + true + }) + .collect(); buffer.puts(line_offset, code_offset, &code, Style::Quotation); if margin.was_cut_left() { // We have stripped some code/whitespace from the beginning, make it clear. @@ -624,7 +643,9 @@ impl EmitterWriter { let left = margin.left(source_string.len()); // Left trim // Account for unicode characters of width !=0 that were removed. - let left = source_string.chars().take(left) + let left = source_string + .chars() + .take(left) .map(|ch| unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1)) .sum(); @@ -773,13 +794,14 @@ impl EmitterWriter { if overlaps(next, annotation, 0) // This label overlaps with another one and both && annotation.has_label() // take space (they have text and are not && j > i // multiline lines). - && p == 0 // We're currently on the first line, move the label one line down + && p == 0 + // We're currently on the first line, move the label one line down { // If we're overlapping with an un-labelled annotation with the same span // we can just merge them in the output if next.start_col == annotation.start_col - && next.end_col == annotation.end_col - && !next.has_label() + && next.end_col == annotation.end_col + && !next.has_label() { continue; } @@ -791,7 +813,7 @@ impl EmitterWriter { } annotations_position.push((p, annotation)); for (j, next) in annotations.iter().enumerate() { - if j > i { + if j > i { let l = next.label.as_ref().map_or(0, |label| label.len() + 2); if (overlaps(next, annotation, l) // Do not allow two labels to be in the same // line if they overlap including padding, to @@ -814,7 +836,8 @@ impl EmitterWriter { || (overlaps(next, annotation, l) && next.end_col <= annotation.end_col && next.has_label() - && p == 0) // Avoid #42595. 
+ && p == 0) + // Avoid #42595. { // This annotation needs a new line in the output. p += 1; @@ -848,10 +871,7 @@ impl EmitterWriter { // | for pos in 0..=line_len { draw_col_separator(buffer, line_offset + pos + 1, width_offset - 2); - buffer.putc(line_offset + pos + 1, - width_offset - 2, - '|', - Style::LineNumber); + buffer.putc(line_offset + pos + 1, width_offset - 2, '|', Style::LineNumber); } // Write the horizontal lines for multiline annotations @@ -874,8 +894,7 @@ impl EmitterWriter { }; let pos = pos + 1; match annotation.annotation_type { - AnnotationType::MultilineStart(depth) | - AnnotationType::MultilineEnd(depth) => { + AnnotationType::MultilineStart(depth) | AnnotationType::MultilineEnd(depth) => { draw_range( buffer, '_', @@ -919,27 +938,23 @@ impl EmitterWriter { if pos > 1 && (annotation.has_label() || annotation.takes_space()) { for p in line_offset + 1..=line_offset + pos { - buffer.putc(p, - (code_offset + annotation.start_col).saturating_sub(left), - '|', - style); + buffer.putc( + p, + (code_offset + annotation.start_col).saturating_sub(left), + '|', + style, + ); } } match annotation.annotation_type { AnnotationType::MultilineStart(depth) => { for p in line_offset + pos + 1..line_offset + line_len + 2 { - buffer.putc(p, - width_offset + depth - 1, - '|', - style); + buffer.putc(p, width_offset + depth - 1, '|', style); } } AnnotationType::MultilineEnd(depth) => { for p in line_offset..=line_offset + pos { - buffer.putc(p, - width_offset + depth - 1, - '|', - style); + buffer.putc(p, width_offset + depth - 1, '|', style); } } _ => (), @@ -958,11 +973,8 @@ impl EmitterWriter { // 4 | } // | _ test for &(pos, annotation) in &annotations_position { - let style = if annotation.is_primary { - Style::LabelPrimary - } else { - Style::LabelSecondary - }; + let style = + if annotation.is_primary { Style::LabelPrimary } else { Style::LabelSecondary }; let (pos, col) = if pos == 0 { (pos + 1, (annotation.end_col + 1).saturating_sub(left)) } else { @@ -1012,8 +1024,9 @@ impl EmitterWriter { ); } } - annotations_position.iter().filter_map(|&(_, annotation)| { - match annotation.annotation_type { + annotations_position + .iter() + .filter_map(|&(_, annotation)| match annotation.annotation_type { AnnotationType::MultilineStart(p) | AnnotationType::MultilineEnd(p) => { let style = if annotation.is_primary { Style::LabelPrimary @@ -1022,10 +1035,9 @@ impl EmitterWriter { }; Some((p, style)) } - _ => None - } - - }).collect::<Vec<_>>() + _ => None, + }) + .collect::<Vec<_>>() } fn get_multispan_max_line_num(&mut self, msp: &MultiSpan) -> usize { @@ -1055,7 +1067,8 @@ impl EmitterWriter { fn get_max_line_num(&mut self, span: &MultiSpan, children: &[SubDiagnostic]) -> usize { let primary = self.get_multispan_max_line_num(span); - children.iter() + children + .iter() .map(|sub| self.get_multispan_max_line_num(&sub.span)) .max() .unwrap_or(0) @@ -1064,13 +1077,14 @@ impl EmitterWriter { /// Adds a left margin to every line but the first, given a padding length and the label being /// displayed, keeping the provided highlighting. 
- fn msg_to_buffer(&self, - buffer: &mut StyledBuffer, - msg: &[(String, Style)], - padding: usize, - label: &str, - override_style: Option<Style>) { - + fn msg_to_buffer( + &self, + buffer: &mut StyledBuffer, + msg: &[(String, Style)], + padding: usize, + label: &str, + override_style: Option<Style>, + ) { // The extra 5 ` ` is padding that's always needed to align to the `note: `: // // error: message @@ -1144,14 +1158,10 @@ impl EmitterWriter { is_secondary: bool, ) -> io::Result<()> { let mut buffer = StyledBuffer::new(); - let header_style = if is_secondary { - Style::HeaderMsg - } else { - Style::MainHeaderMsg - }; + let header_style = if is_secondary { Style::HeaderMsg } else { Style::MainHeaderMsg }; - if !msp.has_primary_spans() && !msp.has_span_labels() && is_secondary - && !self.short_message { + if !msp.has_primary_spans() && !msp.has_span_labels() && is_secondary && !self.short_message + { // This is a secondary message with no span info for _ in 0..max_line_num_len { buffer.prepend(0, " ", Style::NoStyle); @@ -1189,7 +1199,8 @@ impl EmitterWriter { // Make sure our primary file comes first let (primary_lo, sm) = if let (Some(sm), Some(ref primary_span)) = - (self.sm.as_ref(), msp.primary_span().as_ref()) { + (self.sm.as_ref(), msp.primary_span().as_ref()) + { if !primary_span.is_dummy() { (sm.lookup_char_pos(primary_span.lo()), sm) } else { @@ -1202,7 +1213,8 @@ impl EmitterWriter { return Ok(()); }; if let Ok(pos) = - annotated_files.binary_search_by(|x| x.file.name.cmp(&primary_lo.file.name)) { + annotated_files.binary_search_by(|x| x.file.name.cmp(&primary_lo.file.name)) + { annotated_files.swap(0, pos); } @@ -1263,17 +1275,16 @@ impl EmitterWriter { } else { String::new() }; - format!("{}:{}{}", - annotated_file.file.name, - sm.doctest_offset_line( - &annotated_file.file.name, first_line.line_index), - col) + format!( + "{}:{}{}", + annotated_file.file.name, + sm.doctest_offset_line(&annotated_file.file.name, first_line.line_index), + col + ) } else { annotated_file.file.name.to_string() }; - buffer.append(buffer_msg_line_offset + 1, - &loc, - Style::LineAndColumn); + buffer.append(buffer_msg_line_offset + 1, &loc, Style::LineAndColumn); for _ in 0..max_line_num_len { buffer.prepend(buffer_msg_line_offset + 1, " ", Style::NoStyle); } @@ -1282,9 +1293,11 @@ impl EmitterWriter { if !self.short_message { // Put in the spacer between the location and annotated source let buffer_msg_line_offset = buffer.num_lines(); - draw_col_separator_no_space(&mut buffer, - buffer_msg_line_offset, - max_line_num_len + 1); + draw_col_separator_no_space( + &mut buffer, + buffer_msg_line_offset, + max_line_num_len + 1, + ); // Contains the vertical lines' positions for active multiline annotations let mut multilines = FxHashMap::default(); @@ -1295,15 +1308,10 @@ impl EmitterWriter { let file = annotated_file.file.clone(); let line = &annotated_file.lines[line_idx]; if let Some(source_string) = file.get_line(line.line_index - 1) { - let leading_whitespace = source_string - .chars() - .take_while(|c| c.is_whitespace()) - .count(); + let leading_whitespace = + source_string.chars().take_while(|c| c.is_whitespace()).count(); if source_string.chars().any(|c| !c.is_whitespace()) { - whitespace_margin = min( - whitespace_margin, - leading_whitespace, - ); + whitespace_margin = min(whitespace_margin, leading_whitespace); } } } @@ -1328,9 +1336,10 @@ impl EmitterWriter { let mut label_right_margin = 0; let mut max_line_len = 0; for line in &annotated_file.lines { - max_line_len = max(max_line_len, 
annotated_file.file - .get_line(line.line_index - 1) - .map_or(0, |s| s.len())); + max_line_len = max( + max_line_len, + annotated_file.file.get_line(line.line_index - 1).map_or(0, |s| s.len()), + ); for ann in &line.annotations { span_right_margin = max(span_right_margin, ann.start_col); span_right_margin = max(span_right_margin, ann.end_col); @@ -1393,32 +1402,31 @@ impl EmitterWriter { // the code in this line. for (depth, style) in &multilines { for line in previous_buffer_line..buffer.num_lines() { - draw_multiline_line(&mut buffer, - line, - width_offset, - *depth, - *style); + draw_multiline_line(&mut buffer, line, width_offset, *depth, *style); } } // check to see if we need to print out or elide lines that come between // this annotated line and the next one. if line_idx < (annotated_file.lines.len() - 1) { - let line_idx_delta = annotated_file.lines[line_idx + 1].line_index - - annotated_file.lines[line_idx].line_index; + let line_idx_delta = annotated_file.lines[line_idx + 1].line_index + - annotated_file.lines[line_idx].line_index; if line_idx_delta > 2 { let last_buffer_line_num = buffer.num_lines(); buffer.puts(last_buffer_line_num, 0, "...", Style::LineNumber); // Set the multiline annotation vertical lines on `...` bridging line. for (depth, style) in &multilines { - draw_multiline_line(&mut buffer, - last_buffer_line_num, - width_offset, - *depth, - *style); + draw_multiline_line( + &mut buffer, + last_buffer_line_num, + width_offset, + *depth, + *style, + ); } } else if line_idx_delta == 2 { - let unannotated_line = annotated_file.file + let unannotated_line = annotated_file + .file .get_line(annotated_file.lines[line_idx].line_index) .unwrap_or_else(|| Cow::from("")); @@ -1455,7 +1463,6 @@ impl EmitterWriter { emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?; Ok(()) - } fn emit_suggestion_default( @@ -1466,7 +1473,7 @@ impl EmitterWriter { ) -> io::Result<()> { let sm = match self.sm { Some(ref sm) => sm, - None => return Ok(()) + None => return Ok(()), }; let mut buffer = StyledBuffer::new(); @@ -1495,8 +1502,7 @@ impl EmitterWriter { // Only show underline if the suggestion spans a single line and doesn't cover the // entirety of the code output. If you have multiple replacements in the same line // of code, show the underline. - let show_underline = !(parts.len() == 1 - && parts[0].snippet.trim() == complete.trim()) + let show_underline = !(parts.len() == 1 && parts[0].snippet.trim() == complete.trim()) && complete.lines().count() == 1; let lines = sm.span_to_lines(parts[0].span).unwrap(); @@ -1509,10 +1515,12 @@ impl EmitterWriter { let mut lines = complete.lines(); for line in lines.by_ref().take(MAX_HIGHLIGHT_LINES) { // Print the span column to avoid confusion - buffer.puts(row_num, - 0, - &self.maybe_anonymized(line_start + line_pos), - Style::LineNumber); + buffer.puts( + row_num, + 0, + &self.maybe_anonymized(line_start + line_pos), + Style::LineNumber, + ); // print the suggestion draw_col_separator(&mut buffer, row_num, max_line_num_len + 1); buffer.append(row_num, line, Style::NoStyle); @@ -1532,34 +1540,42 @@ impl EmitterWriter { let span_end_pos = sm.lookup_char_pos(part.span.hi()).col_display; // Do not underline the leading... - let start = part.snippet.len() - .saturating_sub(part.snippet.trim_start().len()); + let start = part.snippet.len().saturating_sub(part.snippet.trim_start().len()); // ...or trailing spaces. Account for substitutions containing unicode // characters. 
- let sub_len: usize = part.snippet.trim().chars() + let sub_len: usize = part + .snippet + .trim() + .chars() .map(|ch| unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1)) .sum(); let underline_start = (span_start_pos + start) as isize + offset; let underline_end = (span_start_pos + start + sub_len) as isize + offset; for p in underline_start..underline_end { - buffer.putc(row_num, - max_line_num_len + 3 + p as usize, - '^', - Style::UnderlinePrimary); + buffer.putc( + row_num, + max_line_num_len + 3 + p as usize, + '^', + Style::UnderlinePrimary, + ); } // underline removals too if underline_start == underline_end { - for p in underline_start-1..underline_start+1 { - buffer.putc(row_num, - max_line_num_len + 3 + p as usize, - '-', - Style::UnderlineSecondary); + for p in underline_start - 1..underline_start + 1 { + buffer.putc( + row_num, + max_line_num_len + 3 + p as usize, + '-', + Style::UnderlineSecondary, + ); } } // length of the code after substitution - let full_sub_len = part.snippet.chars() + let full_sub_len = part + .snippet + .chars() .map(|ch| unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1)) .sum::<usize>() as isize; @@ -1609,17 +1625,21 @@ impl EmitterWriter { match self.emit_message_default(span, message, code, level, max_line_num_len, false) { Ok(()) => { - if !children.is_empty() || suggestions.iter().any(|s| { - s.style != SuggestionStyle::CompletelyHidden - }) { + if !children.is_empty() + || suggestions.iter().any(|s| s.style != SuggestionStyle::CompletelyHidden) + { let mut buffer = StyledBuffer::new(); if !self.short_message { draw_col_separator_no_space(&mut buffer, 0, max_line_num_len + 1); } - match emit_to_destination(&buffer.render(), level, &mut self.dst, - self.short_message) { + match emit_to_destination( + &buffer.render(), + level, + &mut self.dst, + self.short_message, + ) { Ok(()) => (), - Err(e) => panic!("failed to emit error: {}", e) + Err(e) => panic!("failed to emit error: {}", e), } } if !self.short_message { @@ -1634,7 +1654,7 @@ impl EmitterWriter { true, ) { Err(e) => panic!("failed to emit error: {}", e), - _ => () + _ => (), } } for sugg in suggestions { @@ -1650,16 +1670,13 @@ impl EmitterWriter { true, ) { Err(e) => panic!("failed to emit error: {}", e), - _ => () + _ => (), } } else { - match self.emit_suggestion_default( - sugg, - &Level::Help, - max_line_num_len, - ) { + match self.emit_suggestion_default(sugg, &Level::Help, max_line_num_len) + { Err(e) => panic!("failed to emit error: {}", e), - _ => () + _ => (), } } } @@ -1671,12 +1688,10 @@ impl EmitterWriter { let mut dst = self.dst.writable(); match writeln!(dst) { Err(e) => panic!("failed to emit error: {}", e), - _ => { - match dst.flush() { - Err(e) => panic!("failed to emit error: {}", e), - _ => (), - } - } + _ => match dst.flush() { + Err(e) => panic!("failed to emit error: {}", e), + _ => (), + }, } } } @@ -1686,13 +1701,14 @@ impl FileWithAnnotatedLines { /// This helps us quickly iterate over the whole message (including secondary file spans) pub fn collect_annotations( msp: &MultiSpan, - source_map: &Option<Lrc<SourceMap>> + source_map: &Option<Lrc<SourceMap>>, ) -> Vec<FileWithAnnotatedLines> { - fn add_annotation_to_file(file_vec: &mut Vec<FileWithAnnotatedLines>, - file: Lrc<SourceFile>, - line_index: usize, - ann: Annotation) { - + fn add_annotation_to_file( + file_vec: &mut Vec<FileWithAnnotatedLines>, + file: Lrc<SourceFile>, + line_index: usize, + ann: Annotation, + ) { for slot in file_vec.iter_mut() { // Look through each of our files for the one 
we're adding to if slot.file.name == file.name { @@ -1704,10 +1720,7 @@ impl FileWithAnnotatedLines { } } // We don't have a line yet, create one - slot.lines.push(Line { - line_index, - annotations: vec![ann], - }); + slot.lines.push(Line { line_index, annotations: vec![ann] }); slot.lines.sort(); return; } @@ -1715,10 +1728,7 @@ impl FileWithAnnotatedLines { // This is the first time we're seeing the file file_vec.push(FileWithAnnotatedLines { file, - lines: vec![Line { - line_index, - annotations: vec![ann], - }], + lines: vec![Line { line_index, annotations: vec![ann] }], multiline_depth: 0, }); } @@ -1776,8 +1786,8 @@ impl FileWithAnnotatedLines { for (_, a) in multiline_annotations.iter_mut() { // Move all other multiline annotations overlapping with this one // one level to the right. - if !(ann.same_span(a)) && - num_overlap(ann.line_start, ann.line_end, a.line_start, a.line_end, true) + if !(ann.same_span(a)) + && num_overlap(ann.line_start, ann.line_end, a.line_start, a.line_end, true) { a.increase_depth(); } else if ann.same_span(a) && &ann != a { @@ -1788,7 +1798,7 @@ impl FileWithAnnotatedLines { } } - let mut max_depth = 0; // max overlapping multiline spans + let mut max_depth = 0; // max overlapping multiline spans for (file, ann) in multiline_annotations { max_depth = max(max_depth, ann.depth); let mut end_ann = ann.as_end(); @@ -1849,15 +1859,23 @@ fn draw_col_separator_no_space(buffer: &mut StyledBuffer, line: usize, col: usiz draw_col_separator_no_space_with_style(buffer, line, col, Style::LineNumber); } -fn draw_col_separator_no_space_with_style(buffer: &mut StyledBuffer, - line: usize, - col: usize, - style: Style) { +fn draw_col_separator_no_space_with_style( + buffer: &mut StyledBuffer, + line: usize, + col: usize, + style: Style, +) { buffer.putc(line, col, '|', style); } -fn draw_range(buffer: &mut StyledBuffer, symbol: char, line: usize, - col_from: usize, col_to: usize, style: Style) { +fn draw_range( + buffer: &mut StyledBuffer, + symbol: char, + line: usize, + col_from: usize, + col_to: usize, + style: Style, +) { for col in col_from..col_to { buffer.putc(line, col, symbol, style); } @@ -1867,33 +1885,36 @@ fn draw_note_separator(buffer: &mut StyledBuffer, line: usize, col: usize) { buffer.puts(line, col, "= ", Style::LineNumber); } -fn draw_multiline_line(buffer: &mut StyledBuffer, - line: usize, - offset: usize, - depth: usize, - style: Style) -{ +fn draw_multiline_line( + buffer: &mut StyledBuffer, + line: usize, + offset: usize, + depth: usize, + style: Style, +) { buffer.putc(line, offset + depth - 1, '|', style); } -fn num_overlap(a_start: usize, a_end: usize, b_start: usize, b_end:usize, inclusive: bool) -> bool { - let extra = if inclusive { - 1 - } else { - 0 - }; - (b_start..b_end + extra).contains(&a_start) || - (a_start..a_end + extra).contains(&b_start) +fn num_overlap( + a_start: usize, + a_end: usize, + b_start: usize, + b_end: usize, + inclusive: bool, +) -> bool { + let extra = if inclusive { 1 } else { 0 }; + (b_start..b_end + extra).contains(&a_start) || (a_start..a_end + extra).contains(&b_start) } fn overlaps(a1: &Annotation, a2: &Annotation, padding: usize) -> bool { num_overlap(a1.start_col, a1.end_col + padding, a2.start_col, a2.end_col, false) } -fn emit_to_destination(rendered_buffer: &[Vec<StyledString>], - lvl: &Level, - dst: &mut Destination, - short_message: bool) - -> io::Result<()> { +fn emit_to_destination( + rendered_buffer: &[Vec<StyledString>], + lvl: &Level, + dst: &mut Destination, + short_message: bool, +) -> 
io::Result<()> { use crate::lock; let mut dst = dst.writable(); @@ -1986,26 +2007,22 @@ impl<'a> WritableDst<'a> { Style::MainHeaderMsg => { spec.set_bold(true); if cfg!(windows) { - spec.set_intense(true) - .set_fg(Some(Color::White)); + spec.set_intense(true).set_fg(Some(Color::White)); } } Style::UnderlinePrimary | Style::LabelPrimary => { spec = lvl.color(); spec.set_bold(true); } - Style::UnderlineSecondary | - Style::LabelSecondary => { - spec.set_bold(true) - .set_intense(true); + Style::UnderlineSecondary | Style::LabelSecondary => { + spec.set_bold(true).set_intense(true); if cfg!(windows) { spec.set_fg(Some(Color::Cyan)); } else { spec.set_fg(Some(Color::Blue)); } } - Style::HeaderMsg | - Style::NoStyle => {} + Style::HeaderMsg | Style::NoStyle => {} Style::Level(lvl) => { spec = lvl.color(); spec.set_bold(true); @@ -2022,7 +2039,7 @@ impl<'a> WritableDst<'a> { WritableDst::Terminal(ref mut t) => t.set_color(color), WritableDst::Buffered(_, ref mut t) => t.set_color(color), WritableDst::ColoredRaw(ref mut t) => t.set_color(color), - WritableDst::Raw(_) => Ok(()) + WritableDst::Raw(_) => Ok(()), } } @@ -2073,9 +2090,11 @@ pub fn is_case_difference(sm: &SourceMap, suggested: &str, sp: Span) -> bool { let found = sm.span_to_snippet(sp).unwrap(); let ascii_confusables = &['c', 'f', 'i', 'k', 'o', 's', 'u', 'v', 'w', 'x', 'y', 'z']; // All the chars that differ in capitalization are confusable (above): - let confusable = found.chars().zip(suggested.chars()).filter(|(f, s)| f != s).all(|(f, s)| { - (ascii_confusables.contains(&f) || ascii_confusables.contains(&s)) - }); + let confusable = found + .chars() + .zip(suggested.chars()) + .filter(|(f, s)| f != s) + .all(|(f, s)| (ascii_confusables.contains(&f) || ascii_confusables.contains(&s))); confusable && found.to_lowercase() == suggested.to_lowercase() // FIXME: We sometimes suggest the same thing we already have, which is a // bug, but be defensive against that here. 
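The hunks above (librustc_errors/emitter.rs) and below are representative of the purely mechanical rewrites this `x.py fmt` run applies: `use` lines are merged and sorted, short `if`/`else` expressions are collapsed onto a single line, and calls or signatures that exceed the line width are broken with one argument per line and a trailing comma. The sketch below is illustrative only; the function names and values are hypothetical and do not appear anywhere in this diff.

use std::cmp::{max, min}; // imports merged into one line and sorted alphabetically

fn clamp_width(width: usize, limit: usize) -> usize {
    // A short conditional expression fits on a single line after formatting.
    if width > limit { limit } else { width }
}

fn check_width(width: usize, limit: usize) {
    // A call that would overflow the line width is broken with one argument
    // per line and a trailing comma after the last argument.
    assert!(
        width <= limit,
        "width `{}` exceeds the configured limit `{}`",
        width,
        limit,
    );
}

fn main() {
    check_width(clamp_width(80, 100), max(100, min(100, 120)));
}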
diff --git a/src/librustc_mir/borrow_check/diagnostics/mutability_errors.rs b/src/librustc_mir/borrow_check/diagnostics/mutability_errors.rs index 38101c35dcc..b46b26444f6 100644 --- a/src/librustc_mir/borrow_check/diagnostics/mutability_errors.rs +++ b/src/librustc_mir/borrow_check/diagnostics/mutability_errors.rs @@ -1,14 +1,14 @@ use rustc::hir; use rustc::hir::Node; use rustc::mir::{self, ClearCrossCrate, Local, LocalInfo, Location, ReadOnlyBodyAndCache}; -use rustc::mir::{Mutability, Place, PlaceRef, PlaceBase, ProjectionElem}; +use rustc::mir::{Mutability, Place, PlaceBase, PlaceRef, ProjectionElem}; use rustc::ty::{self, Ty, TyCtxt}; use rustc_index::vec::Idx; -use syntax_pos::Span; use syntax_pos::symbol::kw; +use syntax_pos::Span; -use crate::borrow_check::MirBorrowckCtxt; use crate::borrow_check::diagnostics::BorrowedContentSource; +use crate::borrow_check::MirBorrowckCtxt; use crate::util::collect_writes::FindAssignments; use rustc_errors::Applicability; @@ -42,16 +42,12 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { debug!("report_mutability_error: access_place_desc={:?}", access_place_desc); match the_place_err { - PlaceRef { - base: PlaceBase::Local(local), - projection: [], - } => { + PlaceRef { base: PlaceBase::Local(local), projection: [] } => { item_msg = format!("`{}`", access_place_desc.unwrap()); if access_place.as_local().is_some() { reason = ", as it is not declared as mutable".to_string(); } else { - let name = self.local_names[*local] - .expect("immutable unnamed local"); + let name = self.local_names[*local].expect("immutable unnamed local"); reason = format!(", as `{}` is not declared as mutable", name); } } @@ -61,12 +57,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { projection: [proj_base @ .., ProjectionElem::Field(upvar_index, _)], } => { debug_assert!(is_closure_or_generator( - Place::ty_from( - &the_place_err.base, - proj_base, - *self.body, - self.infcx.tcx - ).ty)); + Place::ty_from(&the_place_err.base, proj_base, *self.body, self.infcx.tcx).ty + )); item_msg = format!("`{}`", access_place_desc.unwrap()); if self.is_upvar_field_projection(access_place.as_ref()).is_some() { @@ -77,17 +69,15 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { } } - PlaceRef { - base: &PlaceBase::Local(local), - projection: [ProjectionElem::Deref], - } if self.body.local_decls[local].is_ref_for_guard() => { + PlaceRef { base: &PlaceBase::Local(local), projection: [ProjectionElem::Deref] } + if self.body.local_decls[local].is_ref_for_guard() => + { item_msg = format!("`{}`", access_place_desc.unwrap()); reason = ", as it is immutable for the pattern guard".to_string(); } - PlaceRef { - base: &PlaceBase::Local(local), - projection: [ProjectionElem::Deref], - } if self.body.local_decls[local].is_ref_to_static() => { + PlaceRef { base: &PlaceBase::Local(local), projection: [ProjectionElem::Deref] } + if self.body.local_decls[local].is_ref_to_static() => + { if access_place.projection.len() == 1 { item_msg = format!("immutable static item `{}`", access_place_desc.unwrap()); reason = String::new(); @@ -102,13 +92,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { } } } - PlaceRef { - base: _, - projection: [proj_base @ .., ProjectionElem::Deref], - } => { - if the_place_err.base == &PlaceBase::Local(Local::new(1)) && - proj_base.is_empty() && - !self.upvars.is_empty() { + PlaceRef { base: _, projection: [proj_base @ .., ProjectionElem::Deref] } => { + if the_place_err.base == &PlaceBase::Local(Local::new(1)) + && proj_base.is_empty() + && !self.upvars.is_empty() + { item_msg = 
format!("`{}`", access_place_desc.unwrap()); debug_assert!(self.body.local_decls[Local::new(1)].ty.is_region_ptr()); debug_assert!(is_closure_or_generator( @@ -121,12 +109,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { .ty )); - reason = - if self.is_upvar_field_projection(access_place.as_ref()).is_some() { - ", as it is a captured variable in a `Fn` closure".to_string() - } else { - ", as `Fn` closures cannot mutate their captured variables".to_string() - } + reason = if self.is_upvar_field_projection(access_place.as_ref()).is_some() { + ", as it is a captured variable in a `Fn` closure".to_string() + } else { + ", as `Fn` closures cannot mutate their captured variables".to_string() + } } else { let source = self.borrowed_content_source(PlaceRef { base: the_place_err.base, @@ -149,26 +136,13 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { } } - PlaceRef { - base: PlaceBase::Static(_), - .. - } - | PlaceRef { - base: _, - projection: [.., ProjectionElem::Index(_)], - } - | PlaceRef { - base: _, - projection: [.., ProjectionElem::ConstantIndex { .. }], - } - | PlaceRef { - base: _, - projection: [.., ProjectionElem::Subslice { .. }], + PlaceRef { base: PlaceBase::Static(_), .. } + | PlaceRef { base: _, projection: [.., ProjectionElem::Index(_)] } + | PlaceRef { base: _, projection: [.., ProjectionElem::ConstantIndex { .. }] } + | PlaceRef { base: _, projection: [.., ProjectionElem::Subslice { .. }] } + | PlaceRef { base: _, projection: [.., ProjectionElem::Downcast(..)] } => { + bug!("Unexpected immutable place.") } - | PlaceRef { - base: _, - projection: [.., ProjectionElem::Downcast(..)], - } => bug!("Unexpected immutable place."), } debug!("report_mutability_error: item_msg={:?}, reason={:?}", item_msg, reason); @@ -191,18 +165,14 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { let borrow_spans = self.borrow_spans(span, location); let borrow_span = borrow_spans.args_or_use(); - err = self.cannot_borrow_path_as_mutable_because( - borrow_span, - &item_msg, - &reason, - ); + err = self.cannot_borrow_path_as_mutable_because(borrow_span, &item_msg, &reason); borrow_spans.var_span_label( &mut err, format!( "mutable borrow occurs due to use of `{}` in closure", // always Some() if the message is printed. self.describe_place(access_place.as_ref()).unwrap_or_default(), - ) + ), ); borrow_span } @@ -219,11 +189,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { // after the field access). PlaceRef { base, - projection: [proj_base @ .., - ProjectionElem::Deref, - ProjectionElem::Field(field, _), - ProjectionElem::Deref, - ], + projection: + [proj_base @ .., ProjectionElem::Deref, ProjectionElem::Field(field, _), ProjectionElem::Deref], } => { err.span_label(span, format!("cannot {ACT}", ACT = act)); @@ -239,49 +206,50 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { Applicability::MaybeIncorrect, ); } - }, + } // Suggest removing a `&mut` from the use of a mutable reference. - PlaceRef { - base: PlaceBase::Local(local), - projection: [], - } if { - self.body.local_decls.get(*local).map(|local_decl| { - if let LocalInfo::User(ClearCrossCrate::Set( - mir::BindingForm::ImplicitSelf(kind) - )) = local_decl.local_info { - // Check if the user variable is a `&mut self` and we can therefore - // suggest removing the `&mut`. - // - // Deliberately fall into this case for all implicit self types, - // so that we don't fall in to the next case with them. 
- kind == mir::ImplicitSelfKind::MutRef - } else if Some(kw::SelfLower) == self.local_names[*local] { - // Otherwise, check if the name is the self kewyord - in which case - // we have an explicit self. Do the same thing in this case and check - // for a `self: &mut Self` to suggest removing the `&mut`. - if let ty::Ref( - _, _, hir::Mutability::Mut - ) = local_decl.ty.kind { - true - } else { - false - } - } else { - false - } - }).unwrap_or(false) - } => { + PlaceRef { base: PlaceBase::Local(local), projection: [] } + if { + self.body + .local_decls + .get(*local) + .map(|local_decl| { + if let LocalInfo::User(ClearCrossCrate::Set( + mir::BindingForm::ImplicitSelf(kind), + )) = local_decl.local_info + { + // Check if the user variable is a `&mut self` and we can therefore + // suggest removing the `&mut`. + // + // Deliberately fall into this case for all implicit self types, + // so that we don't fall in to the next case with them. + kind == mir::ImplicitSelfKind::MutRef + } else if Some(kw::SelfLower) == self.local_names[*local] { + // Otherwise, check if the name is the self kewyord - in which case + // we have an explicit self. Do the same thing in this case and check + // for a `self: &mut Self` to suggest removing the `&mut`. + if let ty::Ref(_, _, hir::Mutability::Mut) = local_decl.ty.kind { + true + } else { + false + } + } else { + false + } + }) + .unwrap_or(false) + } => + { err.span_label(span, format!("cannot {ACT}", ACT = act)); err.span_label(span, "try removing `&mut` here"); - }, + } // We want to suggest users use `let mut` for local (user // variable) mutations... - PlaceRef { - base: PlaceBase::Local(local), - projection: [], - } if self.body.local_decls[*local].can_be_made_mutable() => { + PlaceRef { base: PlaceBase::Local(local), projection: [] } + if self.body.local_decls[*local].can_be_made_mutable() => + { // ... but it doesn't make sense to suggest it on // variables that are `ref x`, `ref mut x`, `&self`, // or `&mut self` (such variables are simply not @@ -310,8 +278,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { err.span_label(span, format!("cannot {ACT}", ACT = act)); let upvar_hir_id = self.upvars[upvar_index.index()].var_hir_id; - if let Some(Node::Binding(pat)) = self.infcx.tcx.hir().find(upvar_hir_id) - { + if let Some(Node::Binding(pat)) = self.infcx.tcx.hir().find(upvar_hir_id) { if let hir::PatKind::Binding( hir::BindingAnnotation::Unannotated, _, @@ -332,10 +299,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { // complete hack to approximate old AST-borrowck // diagnostic: if the span starts with a mutable borrow of // a local variable, then just suggest the user remove it. 
- PlaceRef { - base: PlaceBase::Local(_), - projection: [], - } if { + PlaceRef { base: PlaceBase::Local(_), projection: [] } + if { if let Ok(snippet) = self.infcx.tcx.sess.source_map().span_to_snippet(span) { snippet.starts_with("&mut ") } else { @@ -347,10 +312,9 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { err.span_label(span, "try removing `&mut` here"); } - PlaceRef { - base: PlaceBase::Local(local), - projection: [ProjectionElem::Deref], - } if self.body.local_decls[*local].is_ref_for_guard() => { + PlaceRef { base: PlaceBase::Local(local), projection: [ProjectionElem::Deref] } + if self.body.local_decls[*local].is_ref_for_guard() => + { err.span_label(span, format!("cannot {ACT}", ACT = act)); err.note( "variables bound in patterns are immutable until the end of the pattern guard", @@ -362,10 +326,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { // // FIXME: can this case be generalized to work for an // arbitrary base for the projection? - PlaceRef { - base: PlaceBase::Local(local), - projection: [ProjectionElem::Deref], - } if self.body.local_decls[*local].is_user_variable() => + PlaceRef { base: PlaceBase::Local(local), projection: [ProjectionElem::Deref] } + if self.body.local_decls[*local].is_user_variable() => { let local_decl = &self.body.local_decls[*local]; let suggestion = match local_decl.local_info { @@ -449,41 +411,32 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { PlaceRef { base, projection: [ProjectionElem::Deref], - // FIXME document what is this 1 magic number about - } if *base == PlaceBase::Local(Local::new(1)) && - !self.upvars.is_empty() => - { + // FIXME document what is this 1 magic number about + } if *base == PlaceBase::Local(Local::new(1)) && !self.upvars.is_empty() => { err.span_label(span, format!("cannot {ACT}", ACT = act)); err.span_help( self.body.span, - "consider changing this to accept closures that implement `FnMut`" + "consider changing this to accept closures that implement `FnMut`", ); } - PlaceRef { - base: _, - projection: [.., ProjectionElem::Deref], - } => { + PlaceRef { base: _, projection: [.., ProjectionElem::Deref] } => { err.span_label(span, format!("cannot {ACT}", ACT = act)); match opt_source { Some(BorrowedContentSource::OverloadedDeref(ty)) => { - err.help( - &format!( - "trait `DerefMut` is required to modify through a dereference, \ + err.help(&format!( + "trait `DerefMut` is required to modify through a dereference, \ but it is not implemented for `{}`", - ty, - ), - ); - }, + ty, + )); + } Some(BorrowedContentSource::OverloadedIndex(ty)) => { - err.help( - &format!( - "trait `IndexMut` is required to modify indexed content, \ + err.help(&format!( + "trait `IndexMut` is required to modify indexed content, \ but it is not implemented for `{}`", - ty, - ), - ); + ty, + )); } _ => (), } @@ -503,17 +456,20 @@ fn suggest_ampmut_self<'tcx>( local_decl: &mir::LocalDecl<'tcx>, ) -> (Span, String) { let sp = local_decl.source_info.span; - (sp, match tcx.sess.source_map().span_to_snippet(sp) { - Ok(snippet) => { - let lt_pos = snippet.find('\''); - if let Some(lt_pos) = lt_pos { - format!("&{}mut self", &snippet[lt_pos..snippet.len() - 4]) - } else { - "&mut self".to_string() + ( + sp, + match tcx.sess.source_map().span_to_snippet(sp) { + Ok(snippet) => { + let lt_pos = snippet.find('\''); + if let Some(lt_pos) = lt_pos { + format!("&{}mut self", &snippet[lt_pos..snippet.len() - 4]) + } else { + "&mut self".to_string() + } } - } - _ => "&mut self".to_string() - }) + _ => "&mut self".to_string(), + }, + ) } // When we want to suggest a user 
change a local variable to be a `&mut`, there @@ -542,10 +498,9 @@ fn suggest_ampmut<'tcx>( if !locations.is_empty() { let assignment_rhs_span = body.source_info(locations[0]).span; if let Ok(src) = tcx.sess.source_map().span_to_snippet(assignment_rhs_span) { - if let (true, Some(ws_pos)) = ( - src.starts_with("&'"), - src.find(|c: char| -> bool { c.is_whitespace() }), - ) { + if let (true, Some(ws_pos)) = + (src.starts_with("&'"), src.find(|c: char| -> bool { c.is_whitespace() })) + { let lt_name = &src[1..ws_pos]; let ty = &src[ws_pos..]; return (assignment_rhs_span, format!("&{} mut {}", lt_name, ty)); @@ -567,10 +522,9 @@ fn suggest_ampmut<'tcx>( }; if let Ok(src) = tcx.sess.source_map().span_to_snippet(highlight_span) { - if let (true, Some(ws_pos)) = ( - src.starts_with("&'"), - src.find(|c: char| -> bool { c.is_whitespace() }), - ) { + if let (true, Some(ws_pos)) = + (src.starts_with("&'"), src.find(|c: char| -> bool { c.is_whitespace() })) + { let lt_name = &src[1..ws_pos]; let ty = &src[ws_pos..]; return (highlight_span, format!("&{} mut{}", lt_name, ty)); @@ -579,12 +533,14 @@ fn suggest_ampmut<'tcx>( let ty_mut = local_decl.ty.builtin_deref(true).unwrap(); assert_eq!(ty_mut.mutbl, hir::Mutability::Not); - (highlight_span, - if local_decl.ty.is_region_ptr() { - format!("&mut {}", ty_mut.ty) - } else { - format!("*mut {}", ty_mut.ty) - }) + ( + highlight_span, + if local_decl.ty.is_region_ptr() { + format!("&mut {}", ty_mut.ty) + } else { + format!("*mut {}", ty_mut.ty) + }, + ) } fn is_closure_or_generator(ty: Ty<'_>) -> bool { @@ -613,10 +569,11 @@ fn annotate_struct_field( // Now we're dealing with the actual struct that we're going to suggest a change to, // we can expect a field that is an immutable reference to a type. if let hir::Node::Field(field) = node { - if let hir::TyKind::Rptr(lifetime, hir::MutTy { - mutbl: hir::Mutability::Not, - ref ty - }) = field.ty.kind { + if let hir::TyKind::Rptr( + lifetime, + hir::MutTy { mutbl: hir::Mutability::Not, ref ty }, + ) = field.ty.kind + { // Get the snippets in two parts - the named lifetime (if there is one) and // type being referenced, that way we can reconstruct the snippet without loss // of detail. @@ -629,10 +586,7 @@ fn annotate_struct_field( return Some(( field.ty.span, - format!( - "&{}mut {}", - lifetime_snippet, &*type_snippet, - ), + format!("&{}mut {}", lifetime_snippet, &*type_snippet,), )); } } @@ -645,9 +599,7 @@ fn annotate_struct_field( /// If possible, suggest replacing `ref` with `ref mut`. fn suggest_ref_mut(tcx: TyCtxt<'_>, binding_span: Span) -> Option<String> { let hi_src = tcx.sess.source_map().span_to_snippet(binding_span).ok()?; - if hi_src.starts_with("ref") - && hi_src["ref".len()..].starts_with(rustc_lexer::is_whitespace) - { + if hi_src.starts_with("ref") && hi_src["ref".len()..].starts_with(rustc_lexer::is_whitespace) { let replacement = format!("ref mut{}", &hi_src["ref".len()..]); Some(replacement) } else { diff --git a/src/librustc_mir/borrow_check/type_check/mod.rs b/src/librustc_mir/borrow_check/type_check/mod.rs index 5613fd045ea..db7db03bde0 100644 --- a/src/librustc_mir/borrow_check/type_check/mod.rs +++ b/src/librustc_mir/borrow_check/type_check/mod.rs @@ -1,60 +1,57 @@ //! This pass type-checks the MIR to ensure it is not broken. 
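(Illustration only, not part of this patch.) The `suggest_ampmut_self`, `suggest_ampmut`, and `suggest_ref_mut` helpers reformatted in the hunks above build the borrow checker's "make this `&mut`" style suggestions. A minimal sketch of the kind of user code they react to; the exact diagnostic wording is approximate and varies between compiler versions:

    fn main() {
        let x = 5;
        let r = &x;        // suggestion target: change this to `&mut x`
                           // (and `let x` to `let mut x`) if the write below is intended
        // *r += 1;        // error: cannot assign to `*r`, which is behind a `&` reference
        println!("{}", r);
    }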
-use std::{fmt, iter, mem}; use std::rc::Rc; +use std::{fmt, iter, mem}; use either::Either; use rustc::hir; use rustc::hir::def_id::DefId; -use rustc::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime, NLLRegionVariableOrigin}; use rustc::infer::canonical::QueryRegionConstraints; use rustc::infer::outlives::env::RegionBoundPairs; use rustc::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; -use rustc::mir::*; +use rustc::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime, NLLRegionVariableOrigin}; use rustc::mir::interpret::PanicInfo; use rustc::mir::tcx::PlaceTy; use rustc::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor}; -use rustc::traits::{self, ObligationCause, PredicateObligations}; -use rustc::traits::query::{Fallible, NoSolution}; +use rustc::mir::*; use rustc::traits::query::type_op; use rustc::traits::query::type_op::custom::CustomTypeOp; -use rustc::ty::{ - self, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, RegionVid, ToPolyTraitRef, Ty, - TyCtxt, UserType, - UserTypeAnnotationIndex, -}; +use rustc::traits::query::{Fallible, NoSolution}; +use rustc::traits::{self, ObligationCause, PredicateObligations}; use rustc::ty::adjustment::PointerCast; use rustc::ty::cast::CastTy; use rustc::ty::fold::TypeFoldable; use rustc::ty::layout::VariantIdx; use rustc::ty::subst::{GenericArgKind, Subst, SubstsRef, UserSubsts}; +use rustc::ty::{ + self, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, RegionVid, ToPolyTraitRef, Ty, + TyCtxt, UserType, UserTypeAnnotationIndex, +}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_error_codes::*; use rustc_index::vec::{Idx, IndexVec}; -use syntax_pos::{DUMMY_SP, Span}; +use syntax_pos::{Span, DUMMY_SP}; +use crate::dataflow::move_paths::MoveData; use crate::dataflow::FlowAtLocation; use crate::dataflow::MaybeInitializedPlaces; -use crate::dataflow::move_paths::MoveData; use crate::transform::promote_consts::should_suggest_const_in_array_repeat_expressions_attribute; use crate::borrow_check::{ borrow_set::BorrowSet, + constraints::{OutlivesConstraint, OutlivesConstraintSet}, + facts::AllFacts, location::LocationTable, - constraints::{OutlivesConstraintSet, OutlivesConstraint}, member_constraints::MemberConstraintSet, - facts::AllFacts, + nll::ToRegionVid, region_infer::values::{ LivenessValues, PlaceholderIndex, PlaceholderIndices, RegionValueElements, }, region_infer::{ClosureRegionRequirementsExt, TypeTest}, - type_check::free_region_relations::{ - CreateResult, UniversalRegionRelations, - }, - universal_regions::{DefiningTy, UniversalRegions}, - nll::ToRegionVid, renumber, + type_check::free_region_relations::{CreateResult, UniversalRegionRelations}, + universal_regions::{DefiningTy, UniversalRegions}, }; macro_rules! 
span_mirbug { @@ -172,26 +169,14 @@ pub(crate) fn type_check<'tcx>( &mut borrowck_context, &universal_region_relations, |mut cx| { - cx.equate_inputs_and_outputs( - &body, - universal_regions, - &normalized_inputs_and_output); - liveness::generate( - &mut cx, - body, - elements, - flow_inits, - move_data, - location_table); + cx.equate_inputs_and_outputs(&body, universal_regions, &normalized_inputs_and_output); + liveness::generate(&mut cx, body, elements, flow_inits, move_data, location_table); translate_outlives_facts(&mut cx); }, ); - MirTypeckResults { - constraints, - universal_region_relations, - } + MirTypeckResults { constraints, universal_region_relations } } fn type_check_internal<'a, 'tcx, R>( @@ -235,25 +220,23 @@ fn translate_outlives_facts(typeck: &mut TypeChecker<'_, '_>) { if let Some(facts) = cx.all_facts { let _prof_timer = typeck.infcx.tcx.prof.generic_activity("polonius_fact_generation"); let location_table = cx.location_table; - facts - .outlives - .extend(cx.constraints.outlives_constraints.outlives().iter().flat_map( - |constraint: &OutlivesConstraint| { - if let Some(from_location) = constraint.locations.from_location() { - Either::Left(iter::once(( - constraint.sup, - constraint.sub, - location_table.mid_index(from_location), - ))) - } else { - Either::Right( - location_table - .all_points() - .map(move |location| (constraint.sup, constraint.sub, location)), - ) - } - }, - )); + facts.outlives.extend(cx.constraints.outlives_constraints.outlives().iter().flat_map( + |constraint: &OutlivesConstraint| { + if let Some(from_location) = constraint.locations.from_location() { + Either::Left(iter::once(( + constraint.sup, + constraint.sub, + location_table.mid_index(from_location), + ))) + } else { + Either::Right( + location_table + .all_points() + .map(move |location| (constraint.sup, constraint.sub, location)), + ) + } + }, + )); } } @@ -311,7 +294,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { if let Err(terr) = self.cx.relate_type_and_user_type( constant.literal.ty, ty::Variance::Invariant, - &UserTypeProjection { base: annotation_index, projs: vec![], }, + &UserTypeProjection { base: annotation_index, projs: vec![] }, location.to_locations(), ConstraintCategory::Boring, ) { @@ -331,24 +314,18 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { location.to_locations(), ConstraintCategory::Boring, self.cx.param_env.and(type_op::ascribe_user_type::AscribeUserType::new( - constant.literal.ty, def_id, UserSubsts { substs, user_self_ty: None }, + constant.literal.ty, + def_id, + UserSubsts { substs, user_self_ty: None }, )), ) { - span_mirbug!( - self, - constant, - "bad constant type {:?} ({:?})", - constant, - terr - ); + span_mirbug!(self, constant, "bad constant type {:?} ({:?})", constant, terr); } } if let ty::FnDef(def_id, substs) = constant.literal.ty.kind { let tcx = self.tcx(); - let instantiated_predicates = tcx - .predicates_of(def_id) - .instantiate(tcx, substs); + let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, substs); self.cx.normalize_and_prove_instantiated_predicates( instantiated_predicates, location.to_locations(), @@ -451,15 +428,11 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { debug!("sanitize_place: {:?}", place); let mut place_ty = match &place.base { - PlaceBase::Local(index) => - PlaceTy::from_ty(self.body.local_decls[*index].ty), + PlaceBase::Local(index) => PlaceTy::from_ty(self.body.local_decls[*index].ty), PlaceBase::Static(box Static { kind, ty, def_id }) => { let san_ty = 
self.sanitize_type(place, ty); let check_err = - |verifier: &mut TypeVerifier<'a, 'b, 'tcx>, - place: &Place<'tcx>, - ty, - san_ty| { + |verifier: &mut TypeVerifier<'a, 'b, 'tcx>, place: &Place<'tcx>, ty, san_ty| { if let Err(terr) = verifier.cx.eq_types( san_ty, ty, @@ -467,13 +440,13 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { ConstraintCategory::Boring, ) { span_mirbug!( - verifier, - place, - "bad promoted type ({:?}: {:?}): {:?}", - ty, - san_ty, - terr - ); + verifier, + place, + "bad promoted type ({:?}: {:?}): {:?}", + ty, + san_ty, + terr + ); }; }; match kind { @@ -501,10 +474,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { if let PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) = context { let is_promoted = match place.as_ref() { PlaceRef { - base: &PlaceBase::Static(box Static { - kind: StaticKind::Promoted(..), - .. - }), + base: &PlaceBase::Static(box Static { kind: StaticKind::Promoted(..), .. }), projection: &[], } => true, _ => false, @@ -553,7 +523,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { fn sanitize_promoted( &mut self, promoted_body: ReadOnlyBodyAndCache<'b, 'tcx>, - location: Location + location: Location, ) { // Determine the constraints from the promoted MIR by running the type // checker on the promoted MIR, then transfer the constraints back to @@ -566,24 +536,23 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { let all_facts = &mut None; let mut constraints = Default::default(); let mut closure_bounds = Default::default(); - let mut liveness_constraints = LivenessValues::new( - Rc::new(RegionValueElements::new(&promoted_body)), - ); + let mut liveness_constraints = + LivenessValues::new(Rc::new(RegionValueElements::new(&promoted_body))); // Don't try to add borrow_region facts for the promoted MIR let mut swap_constraints = |this: &mut Self| { mem::swap(this.cx.borrowck_context.all_facts, all_facts); mem::swap( &mut this.cx.borrowck_context.constraints.outlives_constraints, - &mut constraints + &mut constraints, ); mem::swap( &mut this.cx.borrowck_context.constraints.closure_bounds_mapping, - &mut closure_bounds + &mut closure_bounds, ); mem::swap( &mut this.cx.borrowck_context.constraints.liveness_constraints, - &mut liveness_constraints + &mut liveness_constraints, ); }; @@ -591,7 +560,6 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { self.visit_body(promoted_body); - if !self.errors_reported { // if verifier failed, don't do further checks to avoid ICEs self.cx.typeck_mir(promoted_body); @@ -616,23 +584,23 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { self.cx.borrowck_context.constraints.outlives_constraints.push(constraint) } for live_region in liveness_constraints.rows() { - self.cx.borrowck_context.constraints.liveness_constraints + self.cx + .borrowck_context + .constraints + .liveness_constraints .add_element(live_region, location); } if !closure_bounds.is_empty() { - let combined_bounds_mapping = closure_bounds - .into_iter() - .flat_map(|(_, value)| value) - .collect(); - let existing = self.cx.borrowck_context + let combined_bounds_mapping = + closure_bounds.into_iter().flat_map(|(_, value)| value).collect(); + let existing = self + .cx + .borrowck_context .constraints .closure_bounds_mapping .insert(location, combined_bounds_mapping); - assert!( - existing.is_none(), - "Multiple promoteds/closures at the same location." 
- ); + assert!(existing.is_none(), "Multiple promoteds/closures at the same location."); } } @@ -649,36 +617,28 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { match *pi { ProjectionElem::Deref => { let deref_ty = base_ty.builtin_deref(true); - PlaceTy::from_ty( - deref_ty.map(|t| t.ty).unwrap_or_else(|| { - span_mirbug_and_err!(self, place, "deref of non-pointer {:?}", base_ty) - }) - ) + PlaceTy::from_ty(deref_ty.map(|t| t.ty).unwrap_or_else(|| { + span_mirbug_and_err!(self, place, "deref of non-pointer {:?}", base_ty) + })) } ProjectionElem::Index(i) => { let index_ty = Place::from(i).ty(self.body, tcx).ty; if index_ty != tcx.types.usize { - PlaceTy::from_ty( - span_mirbug_and_err!(self, i, "index by non-usize {:?}", i), - ) + PlaceTy::from_ty(span_mirbug_and_err!(self, i, "index by non-usize {:?}", i)) } else { - PlaceTy::from_ty( - base_ty.builtin_index().unwrap_or_else(|| { - span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty) - }), - ) + PlaceTy::from_ty(base_ty.builtin_index().unwrap_or_else(|| { + span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty) + })) } } ProjectionElem::ConstantIndex { .. } => { // consider verifying in-bounds - PlaceTy::from_ty( - base_ty.builtin_index().unwrap_or_else(|| { - span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty) - }), - ) + PlaceTy::from_ty(base_ty.builtin_index().unwrap_or_else(|| { + span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty) + })) } - ProjectionElem::Subslice { from, to, from_end } => PlaceTy::from_ty( - match base_ty.kind { + ProjectionElem::Subslice { from, to, from_end } => { + PlaceTy::from_ty(match base_ty.kind { ty::Array(inner, _) => { assert!(!from_end, "array subslices should not use from_end"); tcx.mk_array(inner, (to - from) as u64) @@ -686,27 +646,22 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { ty::Slice(..) 
=> { assert!(from_end, "slice subslices should use from_end"); base_ty - }, + } _ => span_mirbug_and_err!(self, place, "slice of non-array {:?}", base_ty), - }, - ), + }) + } ProjectionElem::Downcast(maybe_name, index) => match base_ty.kind { ty::Adt(adt_def, _substs) if adt_def.is_enum() => { if index.as_usize() >= adt_def.variants.len() { - PlaceTy::from_ty( - span_mirbug_and_err!( - self, - place, - "cast to variant #{:?} but enum only has {:?}", - index, - adt_def.variants.len() - ), - ) + PlaceTy::from_ty(span_mirbug_and_err!( + self, + place, + "cast to variant #{:?} but enum only has {:?}", + index, + adt_def.variants.len() + )) } else { - PlaceTy { - ty: base_ty, - variant_index: Some(index), - } + PlaceTy { ty: base_ty, variant_index: Some(index) } } } // We do not need to handle generators here, because this runs @@ -724,26 +679,28 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { span_mirbug_and_err!(self, place, "can't downcast {:?}", base_ty) }; PlaceTy::from_ty(ty) - }, + } }, ProjectionElem::Field(field, fty) => { let fty = self.sanitize_type(place, fty); match self.field_ty(place, base, field, location) { - Ok(ty) => if let Err(terr) = self.cx.eq_types( - ty, - fty, - location.to_locations(), - ConstraintCategory::Boring, - ) { - span_mirbug!( - self, - place, - "bad field access ({:?}: {:?}): {:?}", + Ok(ty) => { + if let Err(terr) = self.cx.eq_types( ty, fty, - terr - ); - }, + location.to_locations(), + ConstraintCategory::Boring, + ) { + span_mirbug!( + self, + place, + "bad field access ({:?}: {:?}): {:?}", + ty, + fty, + terr + ); + } + } Err(FieldAccessError::OutOfRange { field_count }) => span_mirbug!( self, place, @@ -778,31 +735,30 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { let mut variants = substs.as_generator().state_tys(def_id, tcx); let mut variant = match variants.nth(variant_index.into()) { Some(v) => v, - None => { - bug!("variant_index of generator out of range: {:?}/{:?}", - variant_index, - substs.as_generator().state_tys(def_id, tcx).count()) - } + None => bug!( + "variant_index of generator out of range: {:?}/{:?}", + variant_index, + substs.as_generator().state_tys(def_id, tcx).count() + ), }; return match variant.nth(field.index()) { Some(ty) => Ok(ty), - None => Err(FieldAccessError::OutOfRange { - field_count: variant.count(), - }), - } + None => Err(FieldAccessError::OutOfRange { field_count: variant.count() }), + }; } _ => bug!("can't have downcast of non-adt non-generator type"), - } + }, PlaceTy { ty, variant_index: None } => match ty.kind { - ty::Adt(adt_def, substs) if !adt_def.is_enum() => - (&adt_def.variants[VariantIdx::new(0)], substs), + ty::Adt(adt_def, substs) if !adt_def.is_enum() => { + (&adt_def.variants[VariantIdx::new(0)], substs) + } ty::Closure(def_id, substs) => { return match substs.as_closure().upvar_tys(def_id, tcx).nth(field.index()) { Some(ty) => Ok(ty), None => Err(FieldAccessError::OutOfRange { field_count: substs.as_closure().upvar_tys(def_id, tcx).count(), }), - } + }; } ty::Generator(def_id, substs, _) => { // Only prefix fields (upvars and current state) are @@ -812,15 +768,13 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { None => Err(FieldAccessError::OutOfRange { field_count: substs.as_generator().prefix_tys(def_id, tcx).count(), }), - } + }; } ty::Tuple(tys) => { return match tys.get(field.index()) { Some(&ty) => Ok(ty.expect_ty()), - None => Err(FieldAccessError::OutOfRange { - field_count: tys.len(), - }), - } + None => Err(FieldAccessError::OutOfRange { field_count: tys.len() }), + }; } _ => { 
return Ok(span_mirbug_and_err!( @@ -828,7 +782,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { parent, "can't project out of {:?}", base_ty - )) + )); } }, }; @@ -836,9 +790,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { if let Some(field) = variant.fields.get(field.index()) { Ok(self.cx.normalize(&field.ty(tcx, substs), location)) } else { - Err(FieldAccessError::OutOfRange { - field_count: variant.fields.len(), - }) + Err(FieldAccessError::OutOfRange { field_count: variant.fields.len() }) } } } @@ -1029,13 +981,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { fn check_user_type_annotations(&mut self) { debug!( "check_user_type_annotations: user_type_annotations={:?}", - self.user_type_annotations + self.user_type_annotations ); for user_annotation in self.user_type_annotations { let CanonicalUserTypeAnnotation { span, ref user_ty, inferred_ty } = *user_annotation; - let (annotation, _) = self.infcx.instantiate_canonical_with_fresh_inference_vars( - span, user_ty - ); + let (annotation, _) = + self.infcx.instantiate_canonical_with_fresh_inference_vars(span, user_ty); match annotation { UserType::Ty(mut ty) => { ty = self.normalize(ty, Locations::All(span)); @@ -1061,13 +1012,15 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { Locations::All(span), ConstraintCategory::TypeAnnotation, ); - }, + } UserType::TypeOf(def_id, user_substs) => { if let Err(terr) = self.fully_perform_op( Locations::All(span), ConstraintCategory::BoringNoLocation, self.param_env.and(type_op::ascribe_user_type::AscribeUserType::new( - inferred_ty, def_id, user_substs, + inferred_ty, + def_id, + user_substs, )), ) { span_mirbug!( @@ -1080,7 +1033,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { terr ); } - }, + } } } } @@ -1116,10 +1069,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { category: ConstraintCategory, data: &QueryRegionConstraints<'tcx>, ) { - debug!( - "push_region_constraints: constraints generated at {:?} are {:#?}", - locations, data - ); + debug!("push_region_constraints: constraints generated at {:?} are {:#?}", locations, data); constraint_conversion::ConstraintConversion::new( self.infcx, @@ -1130,7 +1080,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { locations, category, &mut self.borrowck_context.constraints, - ).convert_all(data); + ) + .convert_all(data); } /// Convenient wrapper around `relate_tys::relate_types` -- see @@ -1229,8 +1180,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ); curr_projected_ty = projected_ty; } - debug!("user_ty base: {:?} freshened: {:?} projs: {:?} yields: {:?}", - user_ty.base, annotated_type, user_ty.projs, curr_projected_ty); + debug!( + "user_ty base: {:?} freshened: {:?} projs: {:?} yields: {:?}", + user_ty.base, annotated_type, user_ty.projs, curr_projected_ty + ); let ty = curr_projected_ty.ty; self.relate_types(a, v, ty, locations, category)?; @@ -1280,16 +1233,19 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { revealed_ty={:?}", output_ty, opaque_type_map, revealed_ty ); - obligations.add(infcx - .at(&ObligationCause::dummy(), param_env) - .eq(output_ty, revealed_ty)?); + obligations.add( + infcx + .at(&ObligationCause::dummy(), param_env) + .eq(output_ty, revealed_ty)?, + ); for (&opaque_def_id, opaque_decl) in &opaque_type_map { let opaque_defn_ty = tcx.type_of(opaque_def_id); let opaque_defn_ty = opaque_defn_ty.subst(tcx, opaque_decl.substs); let opaque_defn_ty = renumber::renumber_regions(infcx, &opaque_defn_ty); let concrete_is_opaque = infcx - .resolve_vars_if_possible(&opaque_decl.concrete_ty).is_impl_trait(); + .resolve_vars_if_possible(&opaque_decl.concrete_ty) + 
.is_impl_trait(); debug!( "eq_opaque_type_and_type: concrete_ty={:?}={:?} opaque_defn_ty={:?} \ @@ -1321,9 +1277,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { // gets 'revealed' into if !concrete_is_opaque { - obligations.add(infcx - .at(&ObligationCause::dummy(), param_env) - .eq(opaque_decl.concrete_ty, opaque_defn_ty)?); + obligations.add( + infcx + .at(&ObligationCause::dummy(), param_env) + .eq(opaque_decl.concrete_ty, opaque_defn_ty)?, + ); } } @@ -1356,10 +1314,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { &opaque_decl, universal_region_relations, ); - Ok(InferOk { - value: (), - obligations: vec![], - }) + Ok(InferOk { value: (), obligations: vec![] }) }, || "opaque_type_map".to_string(), ), @@ -1377,33 +1332,33 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { &mut self, body: ReadOnlyBodyAndCache<'_, 'tcx>, stmt: &Statement<'tcx>, - location: Location) - { + location: Location, + ) { debug!("check_stmt: {:?}", stmt); let tcx = self.tcx(); match stmt.kind { - StatementKind::Assign(box(ref place, ref rv)) => { + StatementKind::Assign(box (ref place, ref rv)) => { // Assignments to temporaries are not "interesting"; // they are not caused by the user, but rather artifacts // of lowering. Assignments to other sorts of places *are* interesting // though. let category = match place.as_local() { - Some(RETURN_PLACE) => if let BorrowCheckContext { - universal_regions: - UniversalRegions { - defining_ty: DefiningTy::Const(def_id, _), - .. - }, - .. - } = self.borrowck_context { - if tcx.is_static(*def_id) { - ConstraintCategory::UseAsStatic + Some(RETURN_PLACE) => { + if let BorrowCheckContext { + universal_regions: + UniversalRegions { defining_ty: DefiningTy::Const(def_id, _), .. }, + .. + } = self.borrowck_context + { + if tcx.is_static(*def_id) { + ConstraintCategory::UseAsStatic + } else { + ConstraintCategory::UseAsConst + } } else { - ConstraintCategory::UseAsConst + ConstraintCategory::Return } - } else { - ConstraintCategory::Return - }, + } Some(l) if !body.local_decls[l].is_user_variable() => { ConstraintCategory::Boring } @@ -1431,7 +1386,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { if let Err(terr) = self.relate_type_and_user_type( rv_ty, ty::Variance::Invariant, - &UserTypeProjection { base: annotation_index, projs: vec![], }, + &UserTypeProjection { base: annotation_index, projs: vec![] }, location.to_locations(), ConstraintCategory::Boring, ) { @@ -1460,10 +1415,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ); } } - StatementKind::SetDiscriminant { - ref place, - variant_index, - } => { + StatementKind::SetDiscriminant { ref place, variant_index } => { let place_type = place.ty(*body, tcx).ty; let adt = match place_type.kind { ty::Adt(adt, _) if adt.is_enum() => adt, @@ -1485,7 +1437,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ); }; } - StatementKind::AscribeUserType(box(ref place, ref projection), variance) => { + StatementKind::AscribeUserType(box (ref place, ref projection), variance) => { let place_ty = place.ty(*body, tcx).ty; if let Err(terr) = self.relate_type_and_user_type( place_ty, @@ -1536,12 +1488,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { // no checks needed for these } - TerminatorKind::DropAndReplace { - ref location, - ref value, - target: _, - unwind: _, - } => { + TerminatorKind::DropAndReplace { ref location, ref value, target: _, unwind: _ } => { let place_ty = location.ty(body, tcx).ty; let rv_ty = value.ty(body, tcx); @@ -1559,11 +1506,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ); } } - TerminatorKind::SwitchInt { - ref discr, - switch_ty, - .. 
- } => { + TerminatorKind::SwitchInt { ref discr, switch_ty, .. } => { let discr_ty = discr.ty(body, tcx); if let Err(terr) = self.sub_types( discr_ty, @@ -1585,13 +1528,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } // FIXME: check the values } - TerminatorKind::Call { - ref func, - ref args, - ref destination, - from_hir_call, - .. - } => { + TerminatorKind::Call { ref func, ref args, ref destination, from_hir_call, .. } => { let func_ty = func.ty(body, tcx); debug!("check_terminator: call, func_ty={:?}", func_ty); let sig = match func_ty.kind { @@ -1623,9 +1560,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { // output) types in the signature must be live, since // all the inputs that fed into it were live. for &late_bound_region in map.values() { - let region_vid = self.borrowck_context - .universal_regions - .to_region_vid(late_bound_region); + let region_vid = + self.borrowck_context.universal_regions.to_region_vid(late_bound_region); self.borrowck_context .constraints .liveness_constraints @@ -1634,9 +1570,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.check_call_inputs(body, term, &sig, args, term_location, from_hir_call); } - TerminatorKind::Assert { - ref cond, ref msg, .. - } => { + TerminatorKind::Assert { ref cond, ref msg, .. } => { let cond_ty = cond.ty(body, tcx); if cond_ty != tcx.types.bool { span_mirbug!(self, term, "bad Assert ({:?}, not bool", cond_ty); @@ -1694,10 +1628,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { Some(RETURN_PLACE) => { if let BorrowCheckContext { universal_regions: - UniversalRegions { - defining_ty: DefiningTy::Const(def_id, _), - .. - }, + UniversalRegions { defining_ty: DefiningTy::Const(def_id, _), .. }, .. } = self.borrowck_context { @@ -1789,21 +1720,31 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { TerminatorKind::Goto { target } => { self.assert_iscleanup(body, block_data, target, is_cleanup) } - TerminatorKind::SwitchInt { ref targets, .. } => for target in targets { - self.assert_iscleanup(body, block_data, *target, is_cleanup); - }, - TerminatorKind::Resume => if !is_cleanup { - span_mirbug!(self, block_data, "resume on non-cleanup block!") - }, - TerminatorKind::Abort => if !is_cleanup { - span_mirbug!(self, block_data, "abort on non-cleanup block!") - }, - TerminatorKind::Return => if is_cleanup { - span_mirbug!(self, block_data, "return on cleanup block") - }, - TerminatorKind::GeneratorDrop { .. } => if is_cleanup { - span_mirbug!(self, block_data, "generator_drop in cleanup block") - }, + TerminatorKind::SwitchInt { ref targets, .. } => { + for target in targets { + self.assert_iscleanup(body, block_data, *target, is_cleanup); + } + } + TerminatorKind::Resume => { + if !is_cleanup { + span_mirbug!(self, block_data, "resume on non-cleanup block!") + } + } + TerminatorKind::Abort => { + if !is_cleanup { + span_mirbug!(self, block_data, "abort on non-cleanup block!") + } + } + TerminatorKind::Return => { + if is_cleanup { + span_mirbug!(self, block_data, "return on cleanup block") + } + } + TerminatorKind::GeneratorDrop { .. } => { + if is_cleanup { + span_mirbug!(self, block_data, "generator_drop in cleanup block") + } + } TerminatorKind::Yield { resume, drop, .. } => { if is_cleanup { span_mirbug!(self, block_data, "yield in cleanup block") @@ -1816,11 +1757,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { TerminatorKind::Unreachable => {} TerminatorKind::Drop { target, unwind, .. } | TerminatorKind::DropAndReplace { target, unwind, .. } - | TerminatorKind::Assert { - target, - cleanup: unwind, - .. 
- } => { + | TerminatorKind::Assert { target, cleanup: unwind, .. } => { self.assert_iscleanup(body, block_data, target, is_cleanup); if let Some(unwind) = unwind { if is_cleanup { @@ -1829,11 +1766,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.assert_iscleanup(body, block_data, unwind, true); } } - TerminatorKind::Call { - ref destination, - cleanup, - .. - } => { + TerminatorKind::Call { ref destination, cleanup, .. } => { if let &Some((_, target)) = destination { self.assert_iscleanup(body, block_data, target, is_cleanup); } @@ -1844,25 +1777,15 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.assert_iscleanup(body, block_data, cleanup, true); } } - TerminatorKind::FalseEdges { - real_target, - imaginary_target, - } => { + TerminatorKind::FalseEdges { real_target, imaginary_target } => { self.assert_iscleanup(body, block_data, real_target, is_cleanup); self.assert_iscleanup(body, block_data, imaginary_target, is_cleanup); } - TerminatorKind::FalseUnwind { - real_target, - unwind, - } => { + TerminatorKind::FalseUnwind { real_target, unwind } => { self.assert_iscleanup(body, block_data, real_target, is_cleanup); if let Some(unwind) = unwind { if is_cleanup { - span_mirbug!( - self, - block_data, - "cleanup in cleanup block via false unwind" - ); + span_mirbug!(self, block_data, "cleanup in cleanup block via false unwind"); } self.assert_iscleanup(body, block_data, unwind, true); } @@ -1878,13 +1801,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { iscleanuppad: bool, ) { if body[bb].is_cleanup != iscleanuppad { - span_mirbug!( - self, - ctxt, - "cleanuppad mismatch: {:?} should be {:?}", - bb, - iscleanuppad - ); + span_mirbug!(self, ctxt, "cleanuppad mismatch: {:?} should be {:?}", bb, iscleanuppad); } } @@ -1958,9 +1875,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { if let Some(field) = variant.fields.get(adj_field_index) { Ok(self.normalize(field.ty(tcx, substs), location)) } else { - Err(FieldAccessError::OutOfRange { - field_count: variant.fields.len(), - }) + Err(FieldAccessError::OutOfRange { field_count: variant.fields.len() }) } } AggregateKind::Closure(def_id, substs) => { @@ -1993,8 +1908,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { &mut self, body: ReadOnlyBodyAndCache<'_, 'tcx>, rvalue: &Rvalue<'tcx>, - location: Location) - { + location: Location, + ) { let tcx = self.tcx(); match rvalue { @@ -2002,42 +1917,48 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.check_aggregate_rvalue(&body, rvalue, ak, ops, location) } - Rvalue::Repeat(operand, len) => if *len > 1 { - if let Operand::Move(_) = operand { - // While this is located in `nll::typeck` this error is not an NLL error, it's - // a required check to make sure that repeated elements implement `Copy`. - let span = body.source_info(location).span; - let ty = operand.ty(*body, tcx); - if !self.infcx.type_is_copy_modulo_regions(self.param_env, ty, span) { - // To determine if `const_in_array_repeat_expressions` feature gate should - // be mentioned, need to check if the rvalue is promotable. 
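As the comment in the hunk above says, the `Rvalue::Repeat` arm checks that a moved repeat element implements `Copy` whenever the repeat count is greater than 1, raising the `RepeatVec` obligation otherwise. A tiny example of the rule being enforced, not taken from this patch:

    fn main() {
        let zeros = [0u8; 4];              // fine: `u8` is `Copy`
        println!("{:?}", zeros);
        // let names = [String::new(); 4];
        // ^ rejected: `String` is not `Copy`, so the element cannot be
        //   duplicated to fill the array (the `RepeatVec` case above).
    }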
- let should_suggest = - should_suggest_const_in_array_repeat_expressions_attribute( - tcx, self.mir_def_id, body, operand); - debug!("check_rvalue: should_suggest={:?}", should_suggest); - - self.infcx.report_selection_error( - &traits::Obligation::new( - ObligationCause::new( - span, - self.tcx().hir().def_index_to_hir_id(self.mir_def_id.index), - traits::ObligationCauseCode::RepeatVec(should_suggest), - ), - self.param_env, - ty::Predicate::Trait(ty::Binder::bind(ty::TraitPredicate { - trait_ref: ty::TraitRef::new( - self.tcx().lang_items().copy_trait().unwrap(), - tcx.mk_substs_trait(ty, &[]), + Rvalue::Repeat(operand, len) => { + if *len > 1 { + if let Operand::Move(_) = operand { + // While this is located in `nll::typeck` this error is not an NLL error, it's + // a required check to make sure that repeated elements implement `Copy`. + let span = body.source_info(location).span; + let ty = operand.ty(*body, tcx); + if !self.infcx.type_is_copy_modulo_regions(self.param_env, ty, span) { + // To determine if `const_in_array_repeat_expressions` feature gate should + // be mentioned, need to check if the rvalue is promotable. + let should_suggest = + should_suggest_const_in_array_repeat_expressions_attribute( + tcx, + self.mir_def_id, + body, + operand, + ); + debug!("check_rvalue: should_suggest={:?}", should_suggest); + + self.infcx.report_selection_error( + &traits::Obligation::new( + ObligationCause::new( + span, + self.tcx().hir().def_index_to_hir_id(self.mir_def_id.index), + traits::ObligationCauseCode::RepeatVec(should_suggest), ), - })), - ), - &traits::SelectionError::Unimplemented, - false, - false, - ); + self.param_env, + ty::Predicate::Trait(ty::Binder::bind(ty::TraitPredicate { + trait_ref: ty::TraitRef::new( + self.tcx().lang_items().copy_trait().unwrap(), + tcx.mk_substs_trait(ty, &[]), + ), + })), + ), + &traits::SelectionError::Unimplemented, + false, + false, + ); + } } } - }, + } Rvalue::NullaryOp(_, ty) => { // Even with unsized locals cannot box an unsized value. 
@@ -2206,18 +2127,17 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } } - CastKind::Pointer(PointerCast::ArrayToPointer) => { + CastKind::Pointer(PointerCast::ArrayToPointer) => { let ty_from = op.ty(*body, tcx); let opt_ty_elem = match ty_from.kind { - ty::RawPtr( - ty::TypeAndMut { mutbl: hir::Mutability::Not, ty: array_ty } - ) => { - match array_ty.kind { - ty::Array(ty_elem, _) => Some(ty_elem), - _ => None, - } - } + ty::RawPtr(ty::TypeAndMut { + mutbl: hir::Mutability::Not, + ty: array_ty, + }) => match array_ty.kind { + ty::Array(ty_elem, _) => Some(ty_elem), + _ => None, + }, _ => None, }; @@ -2235,11 +2155,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { }; let ty_to = match ty.kind { - ty::RawPtr( - ty::TypeAndMut { mutbl: hir::Mutability::Not, ty: ty_to } - ) => { - ty_to - } + ty::RawPtr(ty::TypeAndMut { + mutbl: hir::Mutability::Not, + ty: ty_to, + }) => ty_to, _ => { span_mirbug!( self, @@ -2278,13 +2197,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { | (_, Some(CastTy::FnPtr)) | (Some(CastTy::Float), Some(CastTy::Ptr(_))) | (Some(CastTy::Ptr(_)), Some(CastTy::Float)) - | (Some(CastTy::FnPtr), Some(CastTy::Float)) => span_mirbug!( - self, - rvalue, - "Invalid cast {:?} -> {:?}", - ty_from, - ty, - ), + | (Some(CastTy::FnPtr), Some(CastTy::Float)) => { + span_mirbug!(self, rvalue, "Invalid cast {:?} -> {:?}", ty_from, ty,) + } (Some(CastTy::Int(_)), Some(CastTy::Int(_))) | (Some(CastTy::Float), Some(CastTy::Int(_))) | (Some(CastTy::Int(_)), Some(CastTy::Float)) @@ -2312,25 +2227,24 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let ty_left = left.ty(*body, tcx); if let ty::RawPtr(_) | ty::FnPtr(_) = ty_left.kind { let ty_right = right.ty(*body, tcx); - let common_ty = self.infcx.next_ty_var( - TypeVariableOrigin { - kind: TypeVariableOriginKind::MiscVariable, - span: body.source_info(location).span, - } - ); + let common_ty = self.infcx.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::MiscVariable, + span: body.source_info(location).span, + }); self.sub_types( common_ty, ty_left, location.to_locations(), - ConstraintCategory::Boring - ).unwrap_or_else(|err| { + ConstraintCategory::Boring, + ) + .unwrap_or_else(|err| { bug!("Could not equate type variable with {:?}: {:?}", ty_left, err) }); if let Err(terr) = self.sub_types( common_ty, ty_right, location.to_locations(), - ConstraintCategory::Boring + ConstraintCategory::Boring, ) { span_mirbug!( self, @@ -2447,13 +2361,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { borrowed_place: &Place<'tcx>, ) { // These constraints are only meaningful during borrowck: - let BorrowCheckContext { - borrow_set, - location_table, - all_facts, - constraints, - .. - } = self.borrowck_context; + let BorrowCheckContext { borrow_set, location_table, all_facts, constraints, .. 
} = + self.borrowck_context; // In Polonius mode, we also push a `borrow_region` fact // linking the loan to the region (in some cases, though, @@ -2646,10 +2555,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { &closure_region_requirements.outlives_requirements[idx]; Some(( (r1_vid, r2_vid), - ( - outlives_requirements.category, - outlives_requirements.blame_span, - ), + (outlives_requirements.category, outlives_requirements.blame_span), )) } GenericArgKind::Type(_) | GenericArgKind::Const(_) => None, @@ -2657,14 +2563,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { }) .collect(); - let existing = self.borrowck_context + let existing = self + .borrowck_context .constraints .closure_bounds_mapping .insert(location, bounds_mapping); - assert!( - existing.is_none(), - "Multiple closures at the same location." - ); + assert!(existing.is_none(), "Multiple closures at the same location."); self.push_region_constraints( location.to_locations(), @@ -2683,9 +2587,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { category: ConstraintCategory, ) { self.prove_predicates( - Some(ty::Predicate::Trait( - trait_ref.to_poly_trait_ref().to_poly_trait_predicate(), - )), + Some(ty::Predicate::Trait(trait_ref.to_poly_trait_ref().to_poly_trait_predicate())), locations, category, ); @@ -2709,10 +2611,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { category: ConstraintCategory, ) { for predicate in predicates { - debug!( - "prove_predicates(predicate={:?}, locations={:?})", - predicate, locations, - ); + debug!("prove_predicates(predicate={:?}, locations={:?})", predicate, locations,); self.prove_predicate(predicate, locations, category); } @@ -2724,17 +2623,15 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { locations: Locations, category: ConstraintCategory, ) { - debug!( - "prove_predicate(predicate={:?}, location={:?})", - predicate, locations, - ); + debug!("prove_predicate(predicate={:?}, location={:?})", predicate, locations,); let param_env = self.param_env; self.fully_perform_op( locations, category, param_env.and(type_op::prove_predicate::ProvePredicate::new(predicate)), - ).unwrap_or_else(|NoSolution| { + ) + .unwrap_or_else(|NoSolution| { span_mirbug!(self, NoSolution, "could not prove {:?}", predicate); }) } @@ -2748,10 +2645,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } for (block, block_data) in body.basic_blocks().iter_enumerated() { - let mut location = Location { - block, - statement_index: 0, - }; + let mut location = Location { block, statement_index: 0 }; for stmt in &block_data.statements { if !stmt.source_info.span.is_dummy() { self.last_span = stmt.source_info.span; @@ -2775,7 +2669,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { location.to_locations(), ConstraintCategory::Boring, param_env.and(type_op::normalize::Normalize::new(value)), - ).unwrap_or_else(|NoSolution| { + ) + .unwrap_or_else(|NoSolution| { span_mirbug!(self, NoSolution, "failed to normalize `{:?}`", value); value }) diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index 24282a6617a..ee218d84cd8 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -46,25 +46,16 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { scope: Option<region::Scope>, expr: Expr<'tcx>, ) -> BlockAnd<Rvalue<'tcx>> { - debug!( - "expr_as_rvalue(block={:?}, scope={:?}, expr={:?})", - block, scope, expr - ); + debug!("expr_as_rvalue(block={:?}, scope={:?}, expr={:?})", block, scope, expr); let this = self; let expr_span = expr.span; let source_info = this.source_info(expr_span); match expr.kind { 
- ExprKind::Scope { - region_scope, - lint_level, - value, - } => { + ExprKind::Scope { region_scope, lint_level, value } => { let region_scope = (region_scope, source_info); - this.in_scope(region_scope, lint_level, |this| { - this.as_rvalue(block, scope, value) - }) + this.in_scope(region_scope, lint_level, |this| this.as_rvalue(block, scope, value)) } ExprKind::Repeat { value, count } => { let value_operand = unpack!(block = this.as_operand(block, scope, value)); @@ -106,35 +97,26 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // The `Box<T>` temporary created here is not a part of the HIR, // and therefore is not considered during generator OIBIT // determination. See the comment about `box` at `yield_in_scope`. - let result = this - .local_decls - .push(LocalDecl::new_internal(expr.ty, expr_span)); + let result = this.local_decls.push(LocalDecl::new_internal(expr.ty, expr_span)); this.cfg.push( block, - Statement { - source_info, - kind: StatementKind::StorageLive(result), - }, + Statement { source_info, kind: StatementKind::StorageLive(result) }, ); if let Some(scope) = scope { // schedule a shallow free of that memory, lest we unwind: - this.schedule_drop_storage_and_value( - expr_span, - scope, - result, - ); + this.schedule_drop_storage_and_value(expr_span, scope, result); } // malloc some memory of suitable type (thus far, uninitialized): let box_ = Rvalue::NullaryOp(NullOp::Box, value.ty); - this.cfg - .push_assign(block, source_info, &Place::from(result), box_); + this.cfg.push_assign(block, source_info, &Place::from(result), box_); // initialize the box contents: unpack!( block = this.into( &this.hir.tcx().mk_place_deref(Place::from(result)), - block, value + block, + value ) ); block.and(Rvalue::Use(Operand::Move(Place::from(result)))) @@ -193,12 +175,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { block.and(Rvalue::Aggregate(box AggregateKind::Tuple, fields)) } - ExprKind::Closure { - closure_id, - substs, - upvars, - movability, - } => { + ExprKind::Closure { closure_id, substs, upvars, movability } => { // see (*) above let operands: Vec<_> = upvars .into_iter() @@ -225,9 +202,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { match upvar.kind { ExprKind::Borrow { borrow_kind: - BorrowKind::Mut { - allow_two_phase_borrow: false, - }, + BorrowKind::Mut { allow_two_phase_borrow: false }, arg, } => unpack!( block = this.limit_capture_mutability( @@ -238,7 +213,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } } - }).collect(); + }) + .collect(); let result = match substs { UpvarSubsts::Generator(substs) => { // We implicitly set the discriminant to 0. 
See @@ -261,11 +237,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.cfg.terminate( block, source_info, - TerminatorKind::Yield { - value: value, - resume: resume, - drop: cleanup, - }, + TerminatorKind::Yield { value: value, resume: resume, drop: cleanup }, ); resume.and(this.unit_rvalue()) } @@ -414,29 +386,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let this = self; let source_info = this.source_info(upvar_span); - let temp = this - .local_decls - .push(LocalDecl::new_temp(upvar_ty, upvar_span)); + let temp = this.local_decls.push(LocalDecl::new_temp(upvar_ty, upvar_span)); - this.cfg.push( - block, - Statement { - source_info, - kind: StatementKind::StorageLive(temp), - }, - ); + this.cfg.push(block, Statement { source_info, kind: StatementKind::StorageLive(temp) }); let arg_place = unpack!(block = this.as_place(block, arg)); let mutability = match arg_place.as_ref() { - PlaceRef { - base: &PlaceBase::Local(local), - projection: &[], - } => this.local_decls[local].mutability, - PlaceRef { - base: &PlaceBase::Local(local), - projection: &[ProjectionElem::Deref], - } => { + PlaceRef { base: &PlaceBase::Local(local), projection: &[] } => { + this.local_decls[local].mutability + } + PlaceRef { base: &PlaceBase::Local(local), projection: &[ProjectionElem::Deref] } => { debug_assert!( this.local_decls[local].is_ref_for_guard(), "Unexpected capture place", @@ -449,16 +409,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } | PlaceRef { ref base, - projection: &[ - ref proj_base @ .., - ProjectionElem::Field(upvar_index, _), - ProjectionElem::Deref - ], + projection: + &[ref proj_base @ .., ProjectionElem::Field(upvar_index, _), ProjectionElem::Deref], } => { - let place = PlaceRef { - base, - projection: proj_base, - }; + let place = PlaceRef { base, projection: proj_base }; // Not projected from the implicit `self` in a closure. debug_assert!( @@ -480,9 +434,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let borrow_kind = match mutability { Mutability::Not => BorrowKind::Unique, - Mutability::Mut => BorrowKind::Mut { - allow_two_phase_borrow: false, - }, + Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false }, }; this.cfg.push_assign( @@ -496,11 +448,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // anything because no values with a destructor can be created in // a constant at this time, even if the type may need dropping. 
if let Some(temp_lifetime) = temp_lifetime { - this.schedule_drop_storage_and_value( - upvar_span, - temp_lifetime, - temp, - ); + this.schedule_drop_storage_and_value(upvar_span, temp_lifetime, temp); } block.and(Operand::Move(Place::from(temp))) diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index 6869930509c..e1a51e2fb35 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -11,13 +11,13 @@ use crate::build::{BlockAnd, BlockAndExtension, Builder}; use crate::build::{GuardFrame, GuardFrameLocal, LocalsForNode}; use crate::hair::{self, *}; use rustc::hir::HirId; -use rustc::mir::*; use rustc::middle::region; -use rustc::ty::{self, CanonicalUserTypeAnnotation, Ty}; +use rustc::mir::*; use rustc::ty::layout::VariantIdx; -use rustc_index::bit_set::BitSet; +use rustc::ty::{self, CanonicalUserTypeAnnotation, Ty}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use smallvec::{SmallVec, smallvec}; +use rustc_index::bit_set::BitSet; +use smallvec::{smallvec, SmallVec}; use syntax::ast::Name; use syntax_pos::Span; @@ -139,9 +139,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // create binding start block for link them by false edges let candidate_count = arms.iter().map(|c| c.top_pats_hack().len()).sum::<usize>(); - let pre_binding_blocks: Vec<_> = (0..candidate_count) - .map(|_| self.cfg.start_new_block()) - .collect(); + let pre_binding_blocks: Vec<_> = + (0..candidate_count).map(|_| self.cfg.start_new_block()).collect(); let mut match_has_guard = false; @@ -155,29 +154,25 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .map(|arm| { let arm_has_guard = arm.guard.is_some(); match_has_guard |= arm_has_guard; - let arm_candidates: Vec<_> = arm.top_pats_hack() + let arm_candidates: Vec<_> = arm + .top_pats_hack() .iter() .zip(candidate_pre_binding_blocks.by_ref()) - .map( - |(pattern, pre_binding_block)| { - Candidate { - span: pattern.span, - match_pairs: smallvec![ - MatchPair::new(scrutinee_place.clone(), pattern), - ], - bindings: vec![], - ascriptions: vec![], - otherwise_block: if arm_has_guard { - Some(self.cfg.start_new_block()) - } else { - None - }, - pre_binding_block: *pre_binding_block, - next_candidate_pre_binding_block: - next_candidate_pre_binding_blocks.next().copied(), - } + .map(|(pattern, pre_binding_block)| Candidate { + span: pattern.span, + match_pairs: smallvec![MatchPair::new(scrutinee_place.clone(), pattern),], + bindings: vec![], + ascriptions: vec![], + otherwise_block: if arm_has_guard { + Some(self.cfg.start_new_block()) + } else { + None }, - ) + pre_binding_block: *pre_binding_block, + next_candidate_pre_binding_block: next_candidate_pre_binding_blocks + .next() + .copied(), + }) .collect(); (arm, arm_candidates) }) @@ -226,50 +221,53 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Step 5. Create everything else: the guards and the arms. 
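For orientation only (this example is not from the patch): the `match_expr` lowering being reformatted here builds per-arm candidates, then in "Step 5" wires up guards and arm bodies via `bind_and_guard_matched_candidate`, for source-level matches of roughly this shape:

    fn main() {
        let v: Result<u32, &str> = Ok(3);
        let msg = match v {
            Ok(0) => String::from("zero"),
            Ok(n) if n % 2 == 0 => format!("even: {}", n),   // arm with a guard
            Ok(n) => format!("odd: {}", n),
            Err(e) => format!("err: {}", e),
        };
        println!("{}", msg);
    }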
let match_scope = self.scopes.topmost(); - let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| { - let arm_source_info = self.source_info(arm.span); - let arm_scope = (arm.scope, arm_source_info); - self.in_scope(arm_scope, arm.lint_level, |this| { - let body = this.hir.mirror(arm.body.clone()); - let scope = this.declare_bindings( - None, - arm.span, - &arm.top_pats_hack()[0], - ArmHasGuard(arm.guard.is_some()), - Some((Some(&scrutinee_place), scrutinee_span)), - ); - - let arm_block; - if candidates.len() == 1 { - arm_block = this.bind_and_guard_matched_candidate( - candidates.pop().unwrap(), - arm.guard.clone(), - &fake_borrow_temps, - scrutinee_span, - match_scope, + let arm_end_blocks: Vec<_> = arm_candidates + .into_iter() + .map(|(arm, mut candidates)| { + let arm_source_info = self.source_info(arm.span); + let arm_scope = (arm.scope, arm_source_info); + self.in_scope(arm_scope, arm.lint_level, |this| { + let body = this.hir.mirror(arm.body.clone()); + let scope = this.declare_bindings( + None, + arm.span, + &arm.top_pats_hack()[0], + ArmHasGuard(arm.guard.is_some()), + Some((Some(&scrutinee_place), scrutinee_span)), ); - } else { - arm_block = this.cfg.start_new_block(); - for candidate in candidates { - this.clear_top_scope(arm.scope); - let binding_end = this.bind_and_guard_matched_candidate( - candidate, + + let arm_block; + if candidates.len() == 1 { + arm_block = this.bind_and_guard_matched_candidate( + candidates.pop().unwrap(), arm.guard.clone(), &fake_borrow_temps, scrutinee_span, match_scope, ); - this.cfg.goto(binding_end, source_info, arm_block); + } else { + arm_block = this.cfg.start_new_block(); + for candidate in candidates { + this.clear_top_scope(arm.scope); + let binding_end = this.bind_and_guard_matched_candidate( + candidate, + arm.guard.clone(), + &fake_borrow_temps, + scrutinee_span, + match_scope, + ); + this.cfg.goto(binding_end, source_info, arm_block); + } } - } - if let Some(source_scope) = scope { - this.source_scope = source_scope; - } + if let Some(source_scope) = scope { + this.source_scope = source_scope; + } - this.into(destination, arm_block, body) + this.into(destination, arm_block, body) + }) }) - }).collect(); + .collect(); // all the arm blocks will rejoin here let end_block = self.cfg.start_new_block(); @@ -291,12 +289,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ) -> BlockAnd<()> { match *irrefutable_pat.kind { // Optimize the case of `let x = ...` to write directly into `x` - PatKind::Binding { - mode: BindingMode::ByValue, - var, - subpattern: None, - .. - } => { + PatKind::Binding { mode: BindingMode::ByValue, var, subpattern: None, .. } => { let place = self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard); unpack!(block = self.into(&place, block, initializer)); @@ -318,20 +311,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // dubious way, so it may be that the test is kind of // broken. PatKind::AscribeUserType { - subpattern: Pat { - kind: box PatKind::Binding { - mode: BindingMode::ByValue, - var, - subpattern: None, + subpattern: + Pat { + kind: + box PatKind::Binding { + mode: BindingMode::ByValue, + var, + subpattern: None, + .. + }, .. }, - .. 
- }, - ascription: hair::pattern::Ascription { - user_ty: pat_ascription_ty, - variance: _, - user_ty_span, - }, + ascription: + hair::pattern::Ascription { user_ty: pat_ascription_ty, variance: _, user_ty_span }, } => { let place = self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard); @@ -353,10 +345,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Statement { source_info: ty_source_info, kind: StatementKind::AscribeUserType( - box( - place, - user_ty, - ), + box (place, user_ty), // We always use invariant as the variance here. This is because the // variance field from the ascription refers to the variance to use // when applying the type to the value being matched, but this @@ -504,13 +493,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ) -> Place<'tcx> { let local_id = self.var_local_id(var, for_guard); let source_info = self.source_info(span); - self.cfg.push( - block, - Statement { - source_info, - kind: StatementKind::StorageLive(local_id), - }, - ); + self.cfg.push(block, Statement { source_info, kind: StatementKind::StorageLive(local_id) }); let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); self.schedule_drop(span, region_scope, local_id, DropKind::Storage); Place::from(local_id) @@ -519,12 +502,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub fn schedule_drop_for_binding(&mut self, var: HirId, span: Span, for_guard: ForGuard) { let local_id = self.var_local_id(var, for_guard); let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); - self.schedule_drop( - span, - region_scope, - local_id, - DropKind::Value, - ); + self.schedule_drop(span, region_scope, local_id, DropKind::Value); } pub(super) fn visit_bindings( @@ -544,31 +522,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ) { debug!("visit_bindings: pattern={:?} pattern_user_ty={:?}", pattern, pattern_user_ty); match *pattern.kind { - PatKind::Binding { - mutability, - name, - mode, - var, - ty, - ref subpattern, - .. - } => { + PatKind::Binding { mutability, name, mode, var, ty, ref subpattern, .. 
} => { f(self, mutability, name, mode, var, pattern.span, ty, pattern_user_ty.clone()); if let Some(subpattern) = subpattern.as_ref() { self.visit_bindings(subpattern, pattern_user_ty, f); } } - PatKind::Array { - ref prefix, - ref slice, - ref suffix, - } - | PatKind::Slice { - ref prefix, - ref slice, - ref suffix, - } => { + PatKind::Array { ref prefix, ref slice, ref suffix } + | PatKind::Slice { ref prefix, ref slice, ref suffix } => { let from = u32::try_from(prefix.len()).unwrap(); let to = u32::try_from(suffix.len()).unwrap(); for subpattern in prefix { @@ -590,11 +552,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { PatKind::AscribeUserType { ref subpattern, - ascription: hair::pattern::Ascription { - ref user_ty, - user_ty_span, - variance: _, - }, + ascription: hair::pattern::Ascription { ref user_ty, user_ty_span, variance: _ }, } => { // This corresponds to something like // @@ -627,8 +585,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { PatKind::Variant { adt_def, substs: _, variant_index, ref subpatterns } => { for subpattern in subpatterns { - let subpattern_user_ty = pattern_user_ty.clone().variant( - adt_def, variant_index, subpattern.field); + let subpattern_user_ty = + pattern_user_ty.clone().variant(adt_def, variant_index, subpattern.field); self.visit_bindings(&subpattern.pattern, subpattern_user_ty, f); } } @@ -736,10 +694,7 @@ enum TestKind<'tcx> { Range(PatRange<'tcx>), /// Test length of the slice is equal to len - Len { - len: u64, - op: BinOp, - }, + Len { len: u64, op: BinOp }, } #[derive(Debug)] @@ -789,10 +744,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ) { debug!( "matched_candidate(span={:?}, candidates={:?}, start_block={:?}, otherwise_block={:?})", - span, - candidates, - start_block, - otherwise_block, + span, candidates, start_block, otherwise_block, ); // Start by simplifying candidates. Once this process is complete, all @@ -805,22 +757,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // The candidates are sorted by priority. Check to see whether the // higher priority candidates (and hence at the front of the slice) // have satisfied all their match pairs. - let fully_matched = candidates - .iter() - .take_while(|c| c.match_pairs.is_empty()) - .count(); - debug!( - "match_candidates: {:?} candidates fully matched", - fully_matched - ); + let fully_matched = candidates.iter().take_while(|c| c.match_pairs.is_empty()).count(); + debug!("match_candidates: {:?} candidates fully matched", fully_matched); let (matched_candidates, unmatched_candidates) = candidates.split_at_mut(fully_matched); let block: BasicBlock = if !matched_candidates.is_empty() { - let otherwise_block = self.select_matched_candidates( - matched_candidates, - start_block, - fake_borrows, - ); + let otherwise_block = + self.select_matched_candidates(matched_candidates, start_block, fake_borrows); if let Some(last_otherwise_block) = otherwise_block { last_otherwise_block @@ -848,13 +791,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } // Test for the remaining candidates. - self.test_candidates( - span, - unmatched_candidates, - block, - otherwise_block, - fake_borrows, - ); + self.test_candidates(span, unmatched_candidates, block, otherwise_block, fake_borrows); } /// Link up matched candidates. For example, if we have something like @@ -903,8 +840,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // _ => 3, // } if let Some(fake_borrows) = fake_borrows { - for Binding { source, .. } - in matched_candidates.iter().flat_map(|candidate| &candidate.bindings) + for Binding { source, .. 
} in + matched_candidates.iter().flat_map(|candidate| &candidate.bindings) { if let Some(i) = source.projection.iter().rposition(|elem| *elem == ProjectionElem::Deref) @@ -924,8 +861,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .position(|c| c.otherwise_block.is_none()) .unwrap_or(matched_candidates.len() - 1); - let (reachable_candidates, unreachable_candidates) - = matched_candidates.split_at_mut(fully_matched_with_guard + 1); + let (reachable_candidates, unreachable_candidates) = + matched_candidates.split_at_mut(fully_matched_with_guard + 1); let first_candidate = &reachable_candidates[0]; let first_prebinding_block = first_candidate.pre_binding_block; @@ -967,7 +904,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - let last_candidate = reachable_candidates.last().unwrap(); if let Some(otherwise) = last_candidate.otherwise_block { let source_info = self.source_info(last_candidate.span); @@ -1114,11 +1050,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // may want to add cases based on the candidates that are // available match test.kind { - TestKind::SwitchInt { - switch_ty, - ref mut options, - ref mut indices, - } => { + TestKind::SwitchInt { switch_ty, ref mut options, ref mut indices } => { for candidate in candidates.iter() { if !self.add_cases_to_switch( &match_place, @@ -1131,10 +1063,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } } - TestKind::Switch { - adt_def: _, - ref mut variants, - } => { + TestKind::Switch { adt_def: _, ref mut variants } => { for candidate in candidates.iter() { if !self.add_variants_to_switch(&match_place, candidate, variants) { break; @@ -1145,18 +1074,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } // Insert a Shallow borrow of any places that is switched on. - fake_borrows.as_mut().map(|fb| { - fb.insert(match_place.clone()) - }); + fake_borrows.as_mut().map(|fb| fb.insert(match_place.clone())); // perform the test, branching to one of N blocks. For each of // those N possible outcomes, create a (initially empty) // vector of candidates. Those are the candidates that still // apply if the test has that particular outcome. 
- debug!( - "match_candidates: test={:?} match_pair={:?}", - test, match_pair - ); + debug!("match_candidates: test={:?} match_pair={:?}", test, match_pair); let mut target_candidates: Vec<Vec<&mut Candidate<'pat, 'tcx>>> = vec![]; target_candidates.resize_with(test.targets(), Default::default); @@ -1201,38 +1125,36 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { otherwise_block = Some(remainder_start.unwrap()); }; - target_candidates.into_iter().map(|mut candidates| { - if candidates.len() != 0 { - let candidate_start = &mut None; - this.match_candidates( - span, - candidate_start, - otherwise_block, - &mut *candidates, - fake_borrows, - ); - candidate_start.unwrap() - } else { - *otherwise_block.get_or_insert_with(|| { - let unreachable = this.cfg.start_new_block(); - let source_info = this.source_info(span); - this.cfg.terminate( - unreachable, - source_info, - TerminatorKind::Unreachable, + target_candidates + .into_iter() + .map(|mut candidates| { + if candidates.len() != 0 { + let candidate_start = &mut None; + this.match_candidates( + span, + candidate_start, + otherwise_block, + &mut *candidates, + fake_borrows, ); - unreachable - }) - } - }).collect() + candidate_start.unwrap() + } else { + *otherwise_block.get_or_insert_with(|| { + let unreachable = this.cfg.start_new_block(); + let source_info = this.source_info(span); + this.cfg.terminate( + unreachable, + source_info, + TerminatorKind::Unreachable, + ); + unreachable + }) + } + }) + .collect() }; - self.perform_test( - block, - &match_place, - &test, - make_target_blocks, - ); + self.perform_test(block, &match_place, &test, make_target_blocks); } // Determine the fake borrows that are needed to ensure that the place @@ -1249,8 +1171,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let mut all_fake_borrows = Vec::with_capacity(fake_borrows.len()); // Insert a Shallow borrow of the prefixes of any fake borrows. - for place in fake_borrows - { + for place in fake_borrows { let mut cursor = place.projection.as_ref(); while let [proj_base @ .., elem] = cursor { cursor = proj_base; @@ -1259,10 +1180,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Insert a shallow borrow after a deref. For other // projections the borrow of prefix_cursor will // conflict with any mutation of base. 
- all_fake_borrows.push(PlaceRef { - base: &place.base, - projection: proj_base, - }); + all_fake_borrows.push(PlaceRef { base: &place.base, projection: proj_base }); } } @@ -1275,21 +1193,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { debug!("add_fake_borrows all_fake_borrows = {:?}", all_fake_borrows); - all_fake_borrows.into_iter().map(|matched_place| { - let fake_borrow_deref_ty = Place::ty_from( - matched_place.base, - matched_place.projection, - &self.local_decls, - tcx, - ) - .ty; - let fake_borrow_ty = tcx.mk_imm_ref(tcx.lifetimes.re_erased, fake_borrow_deref_ty); - let fake_borrow_temp = self.local_decls.push( - LocalDecl::new_temp(fake_borrow_ty, temp_span) - ); - - (matched_place, fake_borrow_temp) - }).collect() + all_fake_borrows + .into_iter() + .map(|matched_place| { + let fake_borrow_deref_ty = Place::ty_from( + matched_place.base, + matched_place.projection, + &self.local_decls, + tcx, + ) + .ty; + let fake_borrow_ty = tcx.mk_imm_ref(tcx.lifetimes.re_erased, fake_borrow_deref_ty); + let fake_borrow_temp = + self.local_decls.push(LocalDecl::new_temp(fake_borrow_ty, temp_span)); + + (matched_place, fake_borrow_temp) + }) + .collect() } } @@ -1424,10 +1344,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { if let Some(guard) = guard { let tcx = self.hir.tcx(); - self.bind_matched_candidate_for_guard( - block, - &candidate.bindings, - ); + self.bind_matched_candidate_for_guard(block, &candidate.bindings); let guard_frame = GuardFrame { locals: candidate .bindings @@ -1449,12 +1366,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { projection: tcx.intern_place_elems(place.projection), }, ); - self.cfg.push_assign( - block, - scrutinee_source_info, - &Place::from(*temp), - borrow, - ); + self.cfg.push_assign(block, scrutinee_source_info, &Place::from(*temp), borrow); } // the block to branch to if the guard fails; if there is no @@ -1464,13 +1376,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }; let source_info = self.source_info(guard.span); let guard_end = self.source_info(tcx.sess.source_map().end_point(guard.span)); - let (post_guard_block, otherwise_post_guard_block) - = self.test_bool(block, guard, source_info); + let (post_guard_block, otherwise_post_guard_block) = + self.test_bool(block, guard, source_info); let guard_frame = self.guard_context.pop().unwrap(); - debug!( - "Exiting guard building context with locals: {:?}", - guard_frame - ); + debug!("Exiting guard building context with locals: {:?}", guard_frame); for &(_, temp) in fake_borrows { let cause = FakeReadCause::ForMatchGuard; @@ -1520,10 +1429,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let cause = FakeReadCause::ForGuardBinding; self.cfg.push_fake_read(post_guard_block, guard_end, cause, Place::from(local_id)); } - self.bind_matched_candidate_for_arm_body( - post_guard_block, - by_value_bindings, - ); + self.bind_matched_candidate_for_arm_body(post_guard_block, by_value_bindings); post_guard_block } else { @@ -1544,25 +1450,20 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { debug!( "adding user ascription at span {:?} of place {:?} and {:?}", - source_info.span, - ascription.source, - ascription.user_ty, + source_info.span, ascription.source, ascription.user_ty, ); let user_ty = ascription.user_ty.clone().user_ty( &mut self.canonical_user_type_annotations, ascription.source.ty(&self.local_decls, self.hir.tcx()).ty, - source_info.span + source_info.span, ); self.cfg.push( block, Statement { source_info, kind: StatementKind::AscribeUserType( - box( - ascription.source.clone(), - user_ty, - ), + box (ascription.source.clone(), user_ty), ascription.variance, 
), }, @@ -1570,11 +1471,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - fn bind_matched_candidate_for_guard( - &mut self, - block: BasicBlock, - bindings: &[Binding<'tcx>], - ) { + fn bind_matched_candidate_for_guard(&mut self, block: BasicBlock, bindings: &[Binding<'tcx>]) { debug!("bind_matched_candidate_for_guard(block={:?}, bindings={:?})", block, bindings); // Assign each of the bindings. Since we are binding for a @@ -1593,8 +1490,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { match binding.binding_mode { BindingMode::ByValue => { let rvalue = Rvalue::Ref(re_erased, BorrowKind::Shared, binding.source.clone()); - self.cfg - .push_assign(block, source_info, &ref_for_guard, rvalue); + self.cfg.push_assign(block, source_info, &ref_for_guard, rvalue); } BindingMode::ByRef(borrow_kind) => { let value_for_arm = self.storage_live_binding( @@ -1605,11 +1501,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ); let rvalue = Rvalue::Ref(re_erased, borrow_kind, binding.source.clone()); - self.cfg - .push_assign(block, source_info, &value_for_arm, rvalue); + self.cfg.push_assign(block, source_info, &value_for_arm, rvalue); let rvalue = Rvalue::Ref(re_erased, BorrowKind::Shared, value_for_arm); - self.cfg - .push_assign(block, source_info, &ref_for_guard, rvalue); + self.cfg.push_assign(block, source_info, &ref_for_guard, rvalue); } } } @@ -1619,7 +1513,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { &mut self, block: BasicBlock, bindings: impl IntoIterator<Item = &'b Binding<'tcx>>, - ) where 'tcx: 'b { + ) where + 'tcx: 'b, + { debug!("bind_matched_candidate_for_arm_body(block={:?})", block); let re_erased = self.hir.tcx().lifetimes.re_erased; @@ -1667,10 +1563,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ); let tcx = self.hir.tcx(); - let debug_source_info = SourceInfo { - span: source_info.span, - scope: visibility_scope, - }; + let debug_source_info = SourceInfo { span: source_info.span, scope: visibility_scope }; let binding_mode = match mode { BindingMode::ByValue => ty::BindingMode::BindByValue(mutability.into()), BindingMode::ByRef(_) => ty::BindingMode::BindByReference(mutability.into()), @@ -1683,18 +1576,16 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { source_info, internal: false, is_block_tail: None, - local_info: LocalInfo::User(ClearCrossCrate::Set(BindingForm::Var( - VarBindingForm { - binding_mode, - // hypothetically, `visit_bindings` could try to unzip - // an outermost hir::Ty as we descend, matching up - // idents in pat; but complex w/ unclear UI payoff. - // Instead, just abandon providing diagnostic info. - opt_ty_info: None, - opt_match_place, - pat_span, - }, - ))), + local_info: LocalInfo::User(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm { + binding_mode, + // hypothetically, `visit_bindings` could try to unzip + // an outermost hir::Ty as we descend, matching up + // idents in pat; but complex w/ unclear UI payoff. + // Instead, just abandon providing diagnostic info. 
+ opt_ty_info: None, + opt_match_place, + pat_span, + }))), }; let for_arm_body = self.local_decls.push(local); self.var_debug_info.push(VarDebugInfo { @@ -1719,10 +1610,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { source_info: debug_source_info, place: ref_for_guard.into(), }); - LocalsForNode::ForGuard { - ref_for_guard, - for_arm_body, - } + LocalsForNode::ForGuard { ref_for_guard, for_arm_body } } else { LocalsForNode::One(for_arm_body) }; diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 3a2a2dc412e..327372beb47 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -1,22 +1,22 @@ use crate::build; use crate::build::scope::DropKind; use crate::hair::cx::Cx; -use crate::hair::{LintLevel, BindingMode, PatKind}; +use crate::hair::{BindingMode, LintLevel, PatKind}; use crate::transform::MirSource; use crate::util as mir_util; use rustc::hir; -use rustc::hir::{Node, GeneratorKind}; use rustc::hir::def_id::DefId; +use rustc::hir::{GeneratorKind, Node}; use rustc::middle::lang_items; use rustc::middle::region; use rustc::mir::*; -use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::subst::Subst; +use rustc::ty::{self, Ty, TyCtxt}; use rustc::util::nodemap::HirIdMap; +use rustc_index::vec::{Idx, IndexVec}; +use rustc_target::spec::abi::Abi; use rustc_target::spec::PanicStrategy; -use rustc_index::vec::{IndexVec, Idx}; use std::u32; -use rustc_target::spec::abi::Abi; use syntax::attr::{self, UnwindAttr}; use syntax::symbol::kw; use syntax_pos::Span; @@ -30,40 +30,27 @@ pub fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> BodyAndCache<'_> { // Figure out what primary body this item has. let (body_id, return_ty_span) = match tcx.hir().get(id) { Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(_, decl, body_id, _, _), .. }) - | Node::Item( - hir::Item { - kind: hir::ItemKind::Fn(hir::FnSig { decl, .. }, _, body_id), - .. - } - ) - | Node::ImplItem( - hir::ImplItem { - kind: hir::ImplItemKind::Method(hir::FnSig { decl, .. }, body_id), - .. - } - ) - | Node::TraitItem( - hir::TraitItem { - kind: hir::TraitItemKind::Method( - hir::FnSig { decl, .. }, - hir::TraitMethod::Provided(body_id), - ), - .. - } - ) => { - (*body_id, decl.output.span()) - } + | Node::Item(hir::Item { + kind: hir::ItemKind::Fn(hir::FnSig { decl, .. }, _, body_id), + .. + }) + | Node::ImplItem(hir::ImplItem { + kind: hir::ImplItemKind::Method(hir::FnSig { decl, .. }, body_id), + .. + }) + | Node::TraitItem(hir::TraitItem { + kind: + hir::TraitItemKind::Method(hir::FnSig { decl, .. }, hir::TraitMethod::Provided(body_id)), + .. + }) => (*body_id, decl.output.span()), Node::Item(hir::Item { kind: hir::ItemKind::Static(ty, _, body_id), .. }) | Node::Item(hir::Item { kind: hir::ItemKind::Const(ty, body_id), .. }) | Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Const(ty, body_id), .. }) - | Node::TraitItem( - hir::TraitItem { kind: hir::TraitItemKind::Const(ty, Some(body_id)), .. } - ) => { - (*body_id, ty.span) - } - Node::AnonConst(hir::AnonConst { body, hir_id, .. }) => { - (*body, tcx.hir().span(*hir_id)) - } + | Node::TraitItem(hir::TraitItem { + kind: hir::TraitItemKind::Const(ty, Some(body_id)), + .. + }) => (*body_id, ty.span), + Node::AnonConst(hir::AnonConst { body, hir_id, .. 
}) => (*body, tcx.hir().span(*hir_id)), _ => span_bug!(tcx.hir().span(id), "can't build MIR for {:?}", def_id), }; @@ -100,61 +87,54 @@ pub fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> BodyAndCache<'_> { }; let body = tcx.hir().body(body_id); - let explicit_arguments = - body.params - .iter() - .enumerate() - .map(|(index, arg)| { - let owner_id = tcx.hir().body_owner(body_id); - let opt_ty_info; - let self_arg; - if let Some(ref fn_decl) = tcx.hir().fn_decl_by_hir_id(owner_id) { - opt_ty_info = fn_decl.inputs.get(index).map(|ty| ty.span); - self_arg = if index == 0 && fn_decl.implicit_self.has_implicit_self() { - match fn_decl.implicit_self { - hir::ImplicitSelfKind::Imm => Some(ImplicitSelfKind::Imm), - hir::ImplicitSelfKind::Mut => Some(ImplicitSelfKind::Mut), - hir::ImplicitSelfKind::ImmRef => Some(ImplicitSelfKind::ImmRef), - hir::ImplicitSelfKind::MutRef => Some(ImplicitSelfKind::MutRef), - _ => None, - } - } else { - None - }; - } else { - opt_ty_info = None; - self_arg = None; + let explicit_arguments = body.params.iter().enumerate().map(|(index, arg)| { + let owner_id = tcx.hir().body_owner(body_id); + let opt_ty_info; + let self_arg; + if let Some(ref fn_decl) = tcx.hir().fn_decl_by_hir_id(owner_id) { + opt_ty_info = fn_decl.inputs.get(index).map(|ty| ty.span); + self_arg = if index == 0 && fn_decl.implicit_self.has_implicit_self() { + match fn_decl.implicit_self { + hir::ImplicitSelfKind::Imm => Some(ImplicitSelfKind::Imm), + hir::ImplicitSelfKind::Mut => Some(ImplicitSelfKind::Mut), + hir::ImplicitSelfKind::ImmRef => Some(ImplicitSelfKind::ImmRef), + hir::ImplicitSelfKind::MutRef => Some(ImplicitSelfKind::MutRef), + _ => None, } + } else { + None + }; + } else { + opt_ty_info = None; + self_arg = None; + } - // C-variadic fns also have a `VaList` input that's not listed in `fn_sig` - // (as it's created inside the body itself, not passed in from outside). - let ty = if fn_sig.c_variadic && index == fn_sig.inputs().len() { - let va_list_did = tcx.require_lang_item( - lang_items::VaListTypeLangItem, - Some(arg.span), - ); - let region = tcx.mk_region(ty::ReScope(region::Scope { - id: body.value.hir_id.local_id, - data: region::ScopeData::CallSite - })); - - tcx.type_of(va_list_did).subst(tcx, &[region.into()]) - } else { - fn_sig.inputs()[index] - }; + // C-variadic fns also have a `VaList` input that's not listed in `fn_sig` + // (as it's created inside the body itself, not passed in from outside). + let ty = if fn_sig.c_variadic && index == fn_sig.inputs().len() { + let va_list_did = + tcx.require_lang_item(lang_items::VaListTypeLangItem, Some(arg.span)); + let region = tcx.mk_region(ty::ReScope(region::Scope { + id: body.value.hir_id.local_id, + data: region::ScopeData::CallSite, + })); + + tcx.type_of(va_list_did).subst(tcx, &[region.into()]) + } else { + fn_sig.inputs()[index] + }; - ArgInfo(ty, opt_ty_info, Some(&arg), self_arg) - }); + ArgInfo(ty, opt_ty_info, Some(&arg), self_arg) + }); let arguments = implicit_argument.into_iter().chain(explicit_arguments); let (yield_ty, return_ty) = if body.generator_kind.is_some() { let gen_sig = match ty.kind { - ty::Generator(gen_def_id, gen_substs, ..) => - gen_substs.as_generator().sig(gen_def_id, tcx), - _ => - span_bug!(tcx.hir().span(id), - "generator w/o generator type: {:?}", ty), + ty::Generator(gen_def_id, gen_substs, ..) 
=> { + gen_substs.as_generator().sig(gen_def_id, tcx) + } + _ => span_bug!(tcx.hir().span(id), "generator w/o generator type: {:?}", ty), }; (Some(gen_sig.yield_ty), gen_sig.return_ty) } else { @@ -191,8 +171,7 @@ pub fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> BodyAndCache<'_> { build::construct_const(cx, body_id, return_ty, return_ty_span) }; - mir_util::dump_mir(tcx, None, "mir_map", &0, - MirSource::item(def_id), &body, |_, _| Ok(()) ); + mir_util::dump_mir(tcx, None, "mir_map", &0, MirSource::item(def_id), &body, |_, _| Ok(())); lints::check(tcx, &body, def_id); @@ -214,7 +193,7 @@ fn liberated_closure_env_ty( let (closure_def_id, closure_substs) = match closure_ty.kind { ty::Closure(closure_def_id, closure_substs) => (closure_def_id, closure_substs), - _ => bug!("closure expr does not have closure type: {:?}", closure_ty) + _ => bug!("closure expr does not have closure type: {:?}", closure_ty), }; let closure_env_ty = tcx.closure_env_ty(closure_def_id, closure_substs).unwrap(); @@ -232,7 +211,7 @@ pub enum BlockFrame { Statement { /// If true, then statement discards result from evaluating /// the expression (such as examples 1 and 2 above). - ignores_expr_result: bool + ignores_expr_result: bool, }, /// Evaluation is currently within the tail expression of a block. @@ -243,7 +222,7 @@ pub enum BlockFrame { /// the result of evaluating the block's tail expression. /// /// Example: `let _ = { STMT_1; EXPR };` - tail_result_is_ignored: bool + tail_result_is_ignored: bool, }, /// Generic mark meaning that the block occurred as a subexpression @@ -258,19 +237,17 @@ impl BlockFrame { match *self { BlockFrame::TailExpr { .. } => true, - BlockFrame::Statement { .. } | - BlockFrame::SubExpr => false, + BlockFrame::Statement { .. } | BlockFrame::SubExpr => false, } } fn is_statement(&self) -> bool { match *self { BlockFrame::Statement { .. } => true, - BlockFrame::TailExpr { .. } | - BlockFrame::SubExpr => false, + BlockFrame::TailExpr { .. } | BlockFrame::SubExpr => false, } } - } +} #[derive(Debug)] struct BlockContext(Vec<BlockFrame>); @@ -348,9 +325,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } impl BlockContext { - fn new() -> Self { BlockContext(vec![]) } - fn push(&mut self, bf: BlockFrame) { self.0.push(bf); } - fn pop(&mut self) -> Option<BlockFrame> { self.0.pop() } + fn new() -> Self { + BlockContext(vec![]) + } + fn push(&mut self, bf: BlockFrame) { + self.0.push(bf); + } + fn pop(&mut self) -> Option<BlockFrame> { + self.0.pop() + } /// Traverses the frames on the `BlockContext`, searching for either /// the first block-tail expression frame with no intervening @@ -367,8 +350,9 @@ impl BlockContext { match bf { BlockFrame::SubExpr => continue, BlockFrame::Statement { .. } => break, - &BlockFrame::TailExpr { tail_result_is_ignored } => - return Some(BlockTailInfo { tail_result_is_ignored }) + &BlockFrame::TailExpr { tail_result_is_ignored } => { + return Some(BlockTailInfo { tail_result_is_ignored }); + } } } @@ -390,8 +374,8 @@ impl BlockContext { Some(BlockFrame::SubExpr) => false, // otherwise: use accumulated is_ignored state. 
- Some(BlockFrame::TailExpr { tail_result_is_ignored: ignored }) | - Some(BlockFrame::Statement { ignores_expr_result: ignored }) => *ignored, + Some(BlockFrame::TailExpr { tail_result_is_ignored: ignored }) + | Some(BlockFrame::Statement { ignores_expr_result: ignored }) => *ignored, } } } @@ -422,9 +406,7 @@ struct GuardFrameLocal { impl GuardFrameLocal { fn new(id: hir::HirId, _binding_mode: BindingMode) -> Self { - GuardFrameLocal { - id: id, - } + GuardFrameLocal { id: id } } } @@ -457,13 +439,18 @@ enum ForGuard { impl LocalsForNode { fn local_id(&self, for_guard: ForGuard) -> Local { match (self, for_guard) { - (&LocalsForNode::One(local_id), ForGuard::OutsideGuard) | - (&LocalsForNode::ForGuard { ref_for_guard: local_id, .. }, ForGuard::RefWithinGuard) | - (&LocalsForNode::ForGuard { for_arm_body: local_id, .. }, ForGuard::OutsideGuard) => - local_id, + (&LocalsForNode::One(local_id), ForGuard::OutsideGuard) + | ( + &LocalsForNode::ForGuard { ref_for_guard: local_id, .. }, + ForGuard::RefWithinGuard, + ) + | (&LocalsForNode::ForGuard { for_arm_body: local_id, .. }, ForGuard::OutsideGuard) => { + local_id + } - (&LocalsForNode::One(_), ForGuard::RefWithinGuard) => - bug!("anything with one local should never be within a guard."), + (&LocalsForNode::One(_), ForGuard::RefWithinGuard) => { + bug!("anything with one local should never be within a guard.") + } } } } @@ -503,20 +490,16 @@ impl BlockAndExtension for BasicBlock { /// Update a block pointer and return the value. /// Use it like `let x = unpack!(block = self.foo(block, foo))`. macro_rules! unpack { - ($x:ident = $c:expr) => { - { - let BlockAnd(b, v) = $c; - $x = b; - v - } - }; - - ($c:expr) => { - { - let BlockAnd(b, ()) = $c; - b - } - }; + ($x:ident = $c:expr) => {{ + let BlockAnd(b, v) = $c; + $x = b; + v + }}; + + ($c:expr) => {{ + let BlockAnd(b, ()) = $c; + b + }}; } fn should_abort_on_panic(tcx: TyCtxt<'_>, fn_def_id: DefId, _abi: Abi) -> bool { @@ -525,10 +508,14 @@ fn should_abort_on_panic(tcx: TyCtxt<'_>, fn_def_id: DefId, _abi: Abi) -> bool { let unwind_attr = attr::find_unwind_attr(Some(tcx.sess.diagnostic()), attrs); // We never unwind, so it's not relevant to stop an unwind. - if tcx.sess.panic_strategy() != PanicStrategy::Unwind { return false; } + if tcx.sess.panic_strategy() != PanicStrategy::Unwind { + return false; + } // We cannot add landing pads, so don't add one. - if tcx.sess.no_landing_pads() { return false; } + if tcx.sess.no_landing_pads() { + return false; + } // This is a special case: some functions have a C abi but are meant to // unwind anyway. Don't stop them. 
@@ -555,7 +542,7 @@ fn construct_fn<'a, 'tcx, A>( body: &'tcx hir::Body<'tcx>, ) -> Body<'tcx> where - A: Iterator<Item=ArgInfo<'tcx>> + A: Iterator<Item = ArgInfo<'tcx>>, { let arguments: Vec<_> = arguments.collect(); @@ -565,55 +552,63 @@ where let fn_def_id = tcx_hir.local_def_id(fn_id); - let mut builder = Builder::new(hir, + let mut builder = Builder::new( + hir, span, arguments.len(), safety, return_ty, return_ty_span, - body.generator_kind); + body.generator_kind, + ); - let call_site_scope = region::Scope { - id: body.value.hir_id.local_id, - data: region::ScopeData::CallSite - }; - let arg_scope = region::Scope { - id: body.value.hir_id.local_id, - data: region::ScopeData::Arguments - }; + let call_site_scope = + region::Scope { id: body.value.hir_id.local_id, data: region::ScopeData::CallSite }; + let arg_scope = + region::Scope { id: body.value.hir_id.local_id, data: region::ScopeData::Arguments }; let mut block = START_BLOCK; let source_info = builder.source_info(span); let call_site_s = (call_site_scope, source_info); - unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| { - if should_abort_on_panic(tcx, fn_def_id, abi) { - builder.schedule_abort(); - } + unpack!( + block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| { + if should_abort_on_panic(tcx, fn_def_id, abi) { + builder.schedule_abort(); + } - let arg_scope_s = (arg_scope, source_info); - // `return_block` is called when we evaluate a `return` expression, so - // we just use `START_BLOCK` here. - unpack!(block = builder.in_breakable_scope( - None, - START_BLOCK, - Place::return_place(), - |builder| { - builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| { - builder.args_and_body(block, fn_def_id, &arguments, arg_scope, &body.value) - }) - }, - )); - // Attribute epilogue to function's closing brace - let fn_end = span.shrink_to_hi(); - let source_info = builder.source_info(fn_end); - let return_block = builder.return_block(); - builder.cfg.goto(block, source_info, return_block); - builder.cfg.terminate(return_block, source_info, TerminatorKind::Return); - // Attribute any unreachable codepaths to the function's closing brace - if let Some(unreachable_block) = builder.cached_unreachable_block { - builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); - } - return_block.unit() - })); + let arg_scope_s = (arg_scope, source_info); + // `return_block` is called when we evaluate a `return` expression, so + // we just use `START_BLOCK` here. + unpack!( + block = builder.in_breakable_scope( + None, + START_BLOCK, + Place::return_place(), + |builder| { + builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| { + builder.args_and_body( + block, + fn_def_id, + &arguments, + arg_scope, + &body.value, + ) + }) + }, + ) + ); + // Attribute epilogue to function's closing brace + let fn_end = span.shrink_to_hi(); + let source_info = builder.source_info(fn_end); + let return_block = builder.return_block(); + builder.cfg.goto(block, source_info, return_block); + builder.cfg.terminate(return_block, source_info, TerminatorKind::Return); + // Attribute any unreachable codepaths to the function's closing brace + if let Some(unreachable_block) = builder.cached_unreachable_block { + builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); + } + return_block.unit() + }) + ); assert_eq!(block, builder.return_block()); let mut spread_arg = None; @@ -621,8 +616,7 @@ where // RustCall pseudo-ABI untuples the last argument. 
spread_arg = Some(Local::new(arguments.len())); } - info!("fn_id {:?} has attrs {:?}", fn_def_id, - tcx.get_attrs(fn_def_id)); + info!("fn_id {:?} has attrs {:?}", fn_def_id, tcx.get_attrs(fn_def_id)); let mut body = builder.finish(); body.spread_arg = spread_arg; @@ -638,15 +632,7 @@ fn construct_const<'a, 'tcx>( let tcx = hir.tcx(); let owner_id = tcx.hir().body_owner(body_id); let span = tcx.hir().span(owner_id); - let mut builder = Builder::new( - hir, - span, - 0, - Safety::Safe, - const_ty, - const_ty_span, - None, - ); + let mut builder = Builder::new(hir, span, 0, Safety::Safe, const_ty, const_ty_span, None); let mut block = START_BLOCK; let ast_expr = &tcx.hir().body(body_id).value; @@ -662,17 +648,13 @@ fn construct_const<'a, 'tcx>( // Constants may be match expressions in which case an unreachable block may // be created, so terminate it properly. if let Some(unreachable_block) = builder.cached_unreachable_block { - builder.cfg.terminate(unreachable_block, source_info, - TerminatorKind::Unreachable); + builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); } builder.finish() } -fn construct_error<'a, 'tcx>( - hir: Cx<'a, 'tcx>, - body_id: hir::BodyId -) -> Body<'tcx> { +fn construct_error<'a, 'tcx>(hir: Cx<'a, 'tcx>, body_id: hir::BodyId) -> Body<'tcx> { let owner_id = hir.tcx().hir().body_owner(body_id); let span = hir.tcx().hir().span(owner_id); let ty = hir.tcx().types.err; @@ -683,14 +665,15 @@ fn construct_error<'a, 'tcx>( } impl<'a, 'tcx> Builder<'a, 'tcx> { - fn new(hir: Cx<'a, 'tcx>, - span: Span, - arg_count: usize, - safety: Safety, - return_ty: Ty<'tcx>, - return_span: Span, - generator_kind: Option<GeneratorKind>) - -> Builder<'a, 'tcx> { + fn new( + hir: Cx<'a, 'tcx>, + span: Span, + arg_count: usize, + safety: Safety, + return_ty: Ty<'tcx>, + return_span: Span, + generator_kind: Option<GeneratorKind>, + ) -> Builder<'a, 'tcx> { let lint_level = LintLevel::Explicit(hir.root_lint_level); let mut builder = Builder { hir, @@ -722,7 +705,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { assert_eq!(builder.cfg.start_new_block(), START_BLOCK); assert_eq!( builder.new_source_scope(span, lint_level, Some(safety)), - OUTERMOST_SOURCE_SCOPE); + OUTERMOST_SOURCE_SCOPE + ); builder.source_scopes[OUTERMOST_SOURCE_SCOPE].parent_scope = None; builder @@ -744,23 +728,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.var_debug_info, self.fn_span, self.hir.control_flow_destroyed(), - self.generator_kind + self.generator_kind, ) } - fn args_and_body(&mut self, - mut block: BasicBlock, - fn_def_id: DefId, - arguments: &[ArgInfo<'tcx>], - argument_scope: region::Scope, - ast_body: &'tcx hir::Expr) - -> BlockAnd<()> - { + fn args_and_body( + &mut self, + mut block: BasicBlock, + fn_def_id: DefId, + arguments: &[ArgInfo<'tcx>], + argument_scope: region::Scope, + ast_body: &'tcx hir::Expr, + ) -> BlockAnd<()> { // Allocate locals for the function arguments for &ArgInfo(ty, _, arg_opt, _) in arguments.iter() { let source_info = SourceInfo { scope: OUTERMOST_SOURCE_SCOPE, - span: arg_opt.map_or(self.fn_span, |arg| arg.pat.span) + span: arg_opt.map_or(self.fn_span, |arg| arg.pat.span), }; let arg_local = self.local_decls.push(LocalDecl { mutability: Mutability::Mut, @@ -804,51 +788,54 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let (def_id, upvar_substs) = match closure_ty.kind { ty::Closure(def_id, substs) => (def_id, ty::UpvarSubsts::Closure(substs)), ty::Generator(def_id, substs, _) => (def_id, ty::UpvarSubsts::Generator(substs)), - _ => span_bug!(self.fn_span, "upvars with 
non-closure env ty {:?}", closure_ty) + _ => span_bug!(self.fn_span, "upvars with non-closure env ty {:?}", closure_ty), }; let upvar_tys = upvar_substs.upvar_tys(def_id, tcx); let upvars_with_tys = upvars.iter().zip(upvar_tys); - self.upvar_mutbls = upvars_with_tys.enumerate().map(|(i, ((&var_id, &upvar_id), ty))| { - let capture = hir_tables.upvar_capture(upvar_id); - - let mut mutability = Mutability::Not; - let mut name = kw::Invalid; - if let Some(Node::Binding(pat)) = tcx_hir.find(var_id) { - if let hir::PatKind::Binding(_, _, ident, _) = pat.kind { - name = ident.name; - match hir_tables.extract_binding_mode(tcx.sess, pat.hir_id, pat.span) { - Some(ty::BindByValue(hir::Mutability::Mut)) => { - mutability = Mutability::Mut; + self.upvar_mutbls = upvars_with_tys + .enumerate() + .map(|(i, ((&var_id, &upvar_id), ty))| { + let capture = hir_tables.upvar_capture(upvar_id); + + let mut mutability = Mutability::Not; + let mut name = kw::Invalid; + if let Some(Node::Binding(pat)) = tcx_hir.find(var_id) { + if let hir::PatKind::Binding(_, _, ident, _) = pat.kind { + name = ident.name; + match hir_tables.extract_binding_mode(tcx.sess, pat.hir_id, pat.span) { + Some(ty::BindByValue(hir::Mutability::Mut)) => { + mutability = Mutability::Mut; + } + Some(_) => mutability = Mutability::Not, + _ => {} } - Some(_) => mutability = Mutability::Not, - _ => {} } } - } - let mut projs = closure_env_projs.clone(); - projs.push(ProjectionElem::Field(Field::new(i), ty)); - match capture { - ty::UpvarCapture::ByValue => {} - ty::UpvarCapture::ByRef(..) => { - projs.push(ProjectionElem::Deref); - } - }; + let mut projs = closure_env_projs.clone(); + projs.push(ProjectionElem::Field(Field::new(i), ty)); + match capture { + ty::UpvarCapture::ByValue => {} + ty::UpvarCapture::ByRef(..) => { + projs.push(ProjectionElem::Deref); + } + }; - self.var_debug_info.push(VarDebugInfo { - name, - source_info: SourceInfo { - scope: OUTERMOST_SOURCE_SCOPE, - span: tcx_hir.span(var_id), - }, - place: Place { - base: closure_env_arg.into(), - projection: tcx.intern_place_elems(&projs), - }, - }); + self.var_debug_info.push(VarDebugInfo { + name, + source_info: SourceInfo { + scope: OUTERMOST_SOURCE_SCOPE, + span: tcx_hir.span(var_id), + }, + place: Place { + base: closure_env_arg.into(), + projection: tcx.intern_place_elems(&projs), + }, + }); - mutability - }).collect(); + mutability + }) + .collect(); } let mut scope = None; @@ -862,7 +849,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Make sure we drop (parts of) the argument even when not matched on. 
self.schedule_drop( arg_opt.as_ref().map_or(ast_body.span, |arg| arg.pat.span), - argument_scope, local, DropKind::Value, + argument_scope, + local, + DropKind::Value, ); if let Some(arg) = arg_opt { @@ -881,22 +870,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } => { self.local_decls[local].mutability = mutability; self.local_decls[local].source_info.scope = self.source_scope; - self.local_decls[local].local_info = - if let Some(kind) = self_binding { - LocalInfo::User(ClearCrossCrate::Set( - BindingForm::ImplicitSelf(*kind), - )) - } else { - let binding_mode = ty::BindingMode::BindByValue(mutability.into()); - LocalInfo::User(ClearCrossCrate::Set(BindingForm::Var( - VarBindingForm { - binding_mode, - opt_ty_info, - opt_match_place: Some((Some(place.clone()), span)), - pat_span: span, - }, - ))) - }; + self.local_decls[local].local_info = if let Some(kind) = self_binding { + LocalInfo::User(ClearCrossCrate::Set(BindingForm::ImplicitSelf(*kind))) + } else { + let binding_mode = ty::BindingMode::BindByValue(mutability.into()); + LocalInfo::User(ClearCrossCrate::Set(BindingForm::Var( + VarBindingForm { + binding_mode, + opt_ty_info, + opt_match_place: Some((Some(place.clone()), span)), + pat_span: span, + }, + ))) + }; self.var_indices.insert(var, LocalsForNode::One(local)); } _ => { @@ -927,13 +913,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { &mut self, arg_hir_id: hir::HirId, original_source_scope: SourceScope, - pattern_span: Span + pattern_span: Span, ) { let tcx = self.hir.tcx(); - let current_root = tcx.maybe_lint_level_root_bounded( - arg_hir_id, - self.hir.root_lint_level - ); + let current_root = tcx.maybe_lint_level_root_bounded(arg_hir_id, self.hir.root_lint_level); let parent_root = tcx.maybe_lint_level_root_bounded( self.source_scopes[original_source_scope] .local_data @@ -943,11 +926,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.hir.root_lint_level, ); if current_root != parent_root { - self.source_scope = self.new_source_scope( - pattern_span, - LintLevel::Explicit(current_root), - None - ); + self.source_scope = + self.new_source_scope(pattern_span, LintLevel::Explicit(current_root), None); } } diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index 4fa4e87e0ff..6cb73e0c8d7 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -1,30 +1,31 @@ // Not in interpret to make sure we do not use private implementation details -use std::fmt; -use std::error::Error; use std::borrow::{Borrow, Cow}; -use std::hash::Hash; use std::collections::hash_map::Entry; use std::convert::TryInto; +use std::error::Error; +use std::fmt; +use std::hash::Hash; +use crate::interpret::eval_nullary_intrinsic; use rustc::hir::def::DefKind; use rustc::hir::def_id::DefId; -use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef}; use rustc::mir; -use rustc::ty::{self, Ty, TyCtxt, subst::Subst}; -use rustc::ty::layout::{self, HasTyCtxt, LayoutOf, VariantIdx}; +use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef}; use rustc::traits::Reveal; +use rustc::ty::layout::{self, HasTyCtxt, LayoutOf, VariantIdx}; +use rustc::ty::{self, subst::Subst, Ty, TyCtxt}; use rustc_data_structures::fx::FxHashMap; -use crate::interpret::eval_nullary_intrinsic; -use syntax::{source_map::{Span, DUMMY_SP}, symbol::Symbol}; +use syntax::{ + source_map::{Span, DUMMY_SP}, + symbol::Symbol, +}; -use crate::interpret::{self, - PlaceTy, MPlaceTy, OpTy, ImmTy, Immediate, Scalar, Pointer, - RawConst, ConstValue, Machine, - InterpResult, InterpErrorInfo, 
GlobalId, InterpCx, StackPopCleanup, AssertMessage, - Allocation, AllocId, MemoryKind, Memory, - snapshot, RefTracking, intern_const_alloc_recursive, +use crate::interpret::{ + self, intern_const_alloc_recursive, snapshot, AllocId, Allocation, AssertMessage, ConstValue, + GlobalId, ImmTy, Immediate, InterpCx, InterpErrorInfo, InterpResult, MPlaceTy, Machine, Memory, + MemoryKind, OpTy, PlaceTy, Pointer, RawConst, RefTracking, Scalar, StackPopCleanup, }; /// Number of steps until the detector even starts doing anything. @@ -94,7 +95,7 @@ fn op_to_const<'tcx>( let ptr = mplace.ptr.to_ptr().unwrap(); let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id); ConstValue::ByRef { alloc, offset: ptr.offset } - }, + } // see comment on `let try_as_immediate` above Err(ImmTy { imm: Immediate::Scalar(x), .. }) => match x { ScalarMaybeUndef::Scalar(s) => ConstValue::Scalar(s), @@ -108,30 +109,23 @@ fn op_to_const<'tcx>( let ptr = mplace.ptr.to_ptr().unwrap(); let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id); ConstValue::ByRef { alloc, offset: ptr.offset } - }, + } }, Err(ImmTy { imm: Immediate::ScalarPair(a, b), .. }) => { let (data, start) = match a.not_undef().unwrap() { - Scalar::Ptr(ptr) => ( - ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id), - ptr.offset.bytes(), - ), + Scalar::Ptr(ptr) => { + (ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id), ptr.offset.bytes()) + } Scalar::Raw { .. } => ( - ecx.tcx.intern_const_alloc(Allocation::from_byte_aligned_bytes( - b"" as &[u8], - )), + ecx.tcx.intern_const_alloc(Allocation::from_byte_aligned_bytes(b"" as &[u8])), 0, ), }; let len = b.to_machine_usize(&ecx.tcx.tcx).unwrap(); let start = start.try_into().unwrap(); let len: usize = len.try_into().unwrap(); - ConstValue::Slice { - data, - start, - end: start + len, - } - }, + ConstValue::Slice { data, start, end: start + len } + } }; ecx.tcx.mk_const(ty::Const { val: ty::ConstKind::Value(val), ty: op.layout.ty }) } @@ -159,7 +153,7 @@ fn eval_body_using_ecx<'mir, 'tcx>( let decl = body.local_decls.get(arg).expect("arg missing from local_decls"); let layout = ecx.layout_of(decl.ty.subst(tcx, cid.instance.substs))?; assert!(layout.is_zst()) - }; + } ecx.push_stack_frame( cid.instance, @@ -196,11 +190,7 @@ impl fmt::Display for ConstEvalError { use self::ConstEvalError::*; match *self { NeedsRfc(ref msg) => { - write!( - f, - "\"{}\" needs an rfc before being allowed inside constants", - msg - ) + write!(f, "\"{}\" needs an rfc before being allowed inside constants", msg) } ConstAccessesStatic => write!(f, "constant accesses static"), } @@ -251,38 +241,32 @@ impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> { impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> { #[inline(always)] fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool - where K: Borrow<Q> + where + K: Borrow<Q>, { FxHashMap::contains_key(self, k) } #[inline(always)] - fn insert(&mut self, k: K, v: V) -> Option<V> - { + fn insert(&mut self, k: K, v: V) -> Option<V> { FxHashMap::insert(self, k, v) } #[inline(always)] fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V> - where K: Borrow<Q> + where + K: Borrow<Q>, { FxHashMap::remove(self, k) } #[inline(always)] fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> { - self.iter() - .filter_map(move |(k, v)| f(k, &*v)) - .collect() + self.iter().filter_map(move |(k, v)| f(k, &*v)).collect() } #[inline(always)] - fn get_or<E>( - &self, - k: K, - vacant: impl FnOnce() -> Result<V, E> - ) -> 
Result<&V, E> - { + fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> { match self.get(&k) { Some(v) => Ok(v), None => { @@ -293,12 +277,7 @@ impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> { } #[inline(always)] - fn get_mut_or<E>( - &mut self, - k: K, - vacant: impl FnOnce() -> Result<V, E> - ) -> Result<&mut V, E> - { + fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> { match self.entry(k) { Entry::Occupied(e) => Ok(e.into_mut()), Entry::Vacant(e) => { @@ -347,7 +326,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, instance: ty::Instance<'tcx>, args: &[OpTy<'tcx>], ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, - _unwind: Option<mir::BasicBlock> // unwinding is not supported in consts + _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> { debug!("find_mir_or_eval_fn: {:?}", instance); @@ -387,10 +366,11 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, Ok(body) => *body, Err(err) => { if let err_unsup!(NoMirFor(ref path)) = err.kind { - return Err( - ConstEvalError::NeedsRfc(format!("calling extern function `{}`", path)) - .into(), - ); + return Err(ConstEvalError::NeedsRfc(format!( + "calling extern function `{}`", + path + )) + .into()); } return Err(err); } @@ -402,7 +382,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, fn_val: !, _args: &[OpTy<'tcx>], _ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, - _unwind: Option<mir::BasicBlock> + _unwind: Option<mir::BasicBlock>, ) -> InterpResult<'tcx> { match fn_val {} } @@ -413,16 +393,14 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, instance: ty::Instance<'tcx>, args: &[OpTy<'tcx>], ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, - _unwind: Option<mir::BasicBlock> + _unwind: Option<mir::BasicBlock>, ) -> InterpResult<'tcx> { if ecx.emulate_intrinsic(span, instance, args, ret)? { return Ok(()); } // An intrinsic that we do not support let intrinsic_name = ecx.tcx.item_name(instance.def_id()); - Err( - ConstEvalError::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into() - ) + Err(ConstEvalError::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into()) } fn assert_panic( @@ -450,22 +428,15 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, OverflowNeg => err_panic!(OverflowNeg), DivisionByZero => err_panic!(DivisionByZero), RemainderByZero => err_panic!(RemainderByZero), - ResumedAfterReturn(generator_kind) - => err_panic!(ResumedAfterReturn(*generator_kind)), - ResumedAfterPanic(generator_kind) - => err_panic!(ResumedAfterPanic(*generator_kind)), + ResumedAfterReturn(generator_kind) => err_panic!(ResumedAfterReturn(*generator_kind)), + ResumedAfterPanic(generator_kind) => err_panic!(ResumedAfterPanic(*generator_kind)), Panic { .. 
} => bug!("`Panic` variant cannot occur in MIR"), } .into()) } - fn ptr_to_int( - _mem: &Memory<'mir, 'tcx, Self>, - _ptr: Pointer, - ) -> InterpResult<'tcx, u64> { - Err( - ConstEvalError::NeedsRfc("pointer-to-integer cast".to_string()).into(), - ) + fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> { + Err(ConstEvalError::NeedsRfc("pointer-to-integer cast".to_string()).into()) } fn binary_ptr_op( @@ -474,9 +445,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, _left: ImmTy<'tcx>, _right: ImmTy<'tcx>, ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> { - Err( - ConstEvalError::NeedsRfc("pointer arithmetic or comparison".to_string()).into(), - ) + Err(ConstEvalError::NeedsRfc("pointer arithmetic or comparison".to_string()).into()) } fn find_foreign_static( @@ -498,10 +467,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, } #[inline(always)] - fn tag_static_base_pointer( - _memory_extra: &MemoryExtra, - _id: AllocId, - ) -> Self::PointerTag { + fn tag_static_base_pointer(_memory_extra: &MemoryExtra, _id: AllocId) -> Self::PointerTag { () } @@ -509,9 +475,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, _ecx: &mut InterpCx<'mir, 'tcx, Self>, _dest: PlaceTy<'tcx>, ) -> InterpResult<'tcx> { - Err( - ConstEvalError::NeedsRfc("heap allocations via `box` keyword".to_string()).into(), - ) + Err(ConstEvalError::NeedsRfc("heap allocations via `box` keyword".to_string()).into()) } fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { @@ -530,12 +494,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, } let span = ecx.frame().span; - ecx.machine.loop_detector.observe_and_analyze( - *ecx.tcx, - span, - &ecx.memory, - &ecx.stack[..], - ) + ecx.machine.loop_detector.observe_and_analyze(*ecx.tcx, span, &ecx.memory, &ecx.stack[..]) } #[inline(always)] @@ -643,11 +602,7 @@ fn validate_and_turn_into_const<'tcx>( let mplace = ecx.raw_const_to_mplace(constant)?; let mut ref_tracking = RefTracking::new(mplace); while let Some((mplace, path)) = ref_tracking.todo.pop() { - ecx.validate_operand( - mplace.into(), - path, - Some(&mut ref_tracking), - )?; + ecx.validate_operand(mplace.into(), path, Some(&mut ref_tracking))?; } // Now that we validated, turn this into a proper constant. // Statics/promoteds are always `ByRef`, for the rest `op_to_const` decides @@ -693,7 +648,7 @@ pub fn const_eval_validated_provider<'tcx>( // Promoteds should never be "too generic" when getting evaluated. 
// They either don't get evaluated, or we are in a monomorphic context assert!(key.value.promoted.is_none()); - }, + } // dedupliate calls other => return other, } @@ -707,17 +662,14 @@ pub fn const_eval_validated_provider<'tcx>( ty::FnDef(_, substs) => substs, _ => bug!("intrinsic with type {:?}", ty), }; - return eval_nullary_intrinsic(tcx, key.param_env, def_id, substs) - .map_err(|error| { - let span = tcx.def_span(def_id); - let error = ConstEvalErr { error: error.kind, stacktrace: vec![], span }; - error.report_as_error(tcx.at(span), "could not evaluate nullary intrinsic") - }) + return eval_nullary_intrinsic(tcx, key.param_env, def_id, substs).map_err(|error| { + let span = tcx.def_span(def_id); + let error = ConstEvalErr { error: error.kind, stacktrace: vec![], span }; + error.report_as_error(tcx.at(span), "could not evaluate nullary intrinsic") + }); } - tcx.const_eval_raw(key).and_then(|val| { - validate_and_turn_into_const(tcx, val, key) - }) + tcx.const_eval_raw(key).and_then(|val| validate_and_turn_into_const(tcx, val, key)) } pub fn const_eval_raw_provider<'tcx>( @@ -737,7 +689,7 @@ pub fn const_eval_raw_provider<'tcx>( key.param_env.reveal = Reveal::UserFacing; match tcx.const_eval_raw(key) { // try again with reveal all as requested - Err(ErrorHandled::TooGeneric) => {}, + Err(ErrorHandled::TooGeneric) => {} // dedupliate calls other => return other, } @@ -770,72 +722,68 @@ pub fn const_eval_raw_provider<'tcx>( ); let res = ecx.load_mir(cid.instance.def, cid.promoted); - res.and_then( - |body| eval_body_using_ecx(&mut ecx, cid, *body) - ).and_then(|place| { - Ok(RawConst { - alloc_id: place.ptr.assert_ptr().alloc_id, - ty: place.layout.ty + res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, *body)) + .and_then(|place| { + Ok(RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty }) }) - }).map_err(|error| { - let err = error_to_const_error(&ecx, error); - // errors in statics are always emitted as fatal errors - if is_static { - // Ensure that if the above error was either `TooGeneric` or `Reported` - // an error must be reported. - let v = err.report_as_error(ecx.tcx, "could not evaluate static initializer"); - tcx.sess.delay_span_bug( - err.span, - &format!("static eval failure did not emit an error: {:#?}", v) - ); - v - } else if def_id.is_local() { - // constant defined in this crate, we can figure out a lint level! - match tcx.def_kind(def_id) { - // constants never produce a hard error at the definition site. 
Anything else is - // a backwards compatibility hazard (and will break old versions of winapi for sure) - // - // note that validation may still cause a hard error on this very same constant, - // because any code that existed before validation could not have failed validation - // thus preventing such a hard error from being a backwards compatibility hazard - Some(DefKind::Const) | Some(DefKind::AssocConst) => { - let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap(); - err.report_as_lint( - tcx.at(tcx.def_span(def_id)), - "any use of this value will cause an error", - hir_id, - Some(err.span), - ) - }, - // promoting runtime code is only allowed to error if it references broken constants - // any other kind of error will be reported to the user as a deny-by-default lint - _ => if let Some(p) = cid.promoted { - let span = tcx.promoted_mir(def_id)[p].span; - if let err_inval!(ReferencedConstant) = err.error { - err.report_as_error( - tcx.at(span), - "evaluation of constant expression failed", - ) - } else { + .map_err(|error| { + let err = error_to_const_error(&ecx, error); + // errors in statics are always emitted as fatal errors + if is_static { + // Ensure that if the above error was either `TooGeneric` or `Reported` + // an error must be reported. + let v = err.report_as_error(ecx.tcx, "could not evaluate static initializer"); + tcx.sess.delay_span_bug( + err.span, + &format!("static eval failure did not emit an error: {:#?}", v), + ); + v + } else if def_id.is_local() { + // constant defined in this crate, we can figure out a lint level! + match tcx.def_kind(def_id) { + // constants never produce a hard error at the definition site. Anything else is + // a backwards compatibility hazard (and will break old versions of winapi for sure) + // + // note that validation may still cause a hard error on this very same constant, + // because any code that existed before validation could not have failed validation + // thus preventing such a hard error from being a backwards compatibility hazard + Some(DefKind::Const) | Some(DefKind::AssocConst) => { + let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap(); err.report_as_lint( - tcx.at(span), - "reaching this expression at runtime will panic or abort", - tcx.hir().as_local_hir_id(def_id).unwrap(), + tcx.at(tcx.def_span(def_id)), + "any use of this value will cause an error", + hir_id, Some(err.span), ) } - // anything else (array lengths, enum initializers, constant patterns) are reported - // as hard errors - } else { - err.report_as_error( - ecx.tcx, - "evaluation of constant value failed", - ) - }, + // promoting runtime code is only allowed to error if it references broken constants + // any other kind of error will be reported to the user as a deny-by-default lint + _ => { + if let Some(p) = cid.promoted { + let span = tcx.promoted_mir(def_id)[p].span; + if let err_inval!(ReferencedConstant) = err.error { + err.report_as_error( + tcx.at(span), + "evaluation of constant expression failed", + ) + } else { + err.report_as_lint( + tcx.at(span), + "reaching this expression at runtime will panic or abort", + tcx.hir().as_local_hir_id(def_id).unwrap(), + Some(err.span), + ) + } + // anything else (array lengths, enum initializers, constant patterns) are reported + // as hard errors + } else { + err.report_as_error(ecx.tcx, "evaluation of constant value failed") + } + } + } + } else { + // use of broken constant from other crate + err.report_as_error(ecx.tcx, "could not evaluate constant") } - } else { - // use of broken constant from other 
crate - err.report_as_error(ecx.tcx, "could not evaluate constant") - } - }) + }) } diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs index 1141239e49a..a558f0671e1 100644 --- a/src/librustc_mir/interpret/place.rs +++ b/src/librustc_mir/interpret/place.rs @@ -7,21 +7,21 @@ use std::hash::Hash; use rustc::mir; use rustc::mir::interpret::truncate; -use rustc::ty::{self, Ty}; use rustc::ty::layout::{ - self, Size, Align, LayoutOf, TyLayout, HasDataLayout, VariantIdx, PrimitiveExt + self, Align, HasDataLayout, LayoutOf, PrimitiveExt, Size, TyLayout, VariantIdx, }; use rustc::ty::TypeFoldable; +use rustc::ty::{self, Ty}; use rustc_macros::HashStable; use super::{ - GlobalId, AllocId, Allocation, Scalar, InterpResult, Pointer, PointerArithmetic, - InterpCx, Machine, AllocMap, AllocationExtra, - RawConst, Immediate, ImmTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind, LocalValue, + AllocId, AllocMap, Allocation, AllocationExtra, GlobalId, ImmTy, Immediate, InterpCx, + InterpResult, LocalValue, Machine, MemoryKind, OpTy, Operand, Pointer, PointerArithmetic, + RawConst, Scalar, ScalarMaybeUndef, }; #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, HashStable)] -pub struct MemPlace<Tag=(), Id=AllocId> { +pub struct MemPlace<Tag = (), Id = AllocId> { /// A place may have an integral pointer for ZSTs, and since it might /// be turned back into a reference before ever being dereferenced. /// However, it may never be undef. @@ -34,20 +34,17 @@ pub struct MemPlace<Tag=(), Id=AllocId> { } #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, HashStable)] -pub enum Place<Tag=(), Id=AllocId> { +pub enum Place<Tag = (), Id = AllocId> { /// A place referring to a value allocated in the `Memory` system. Ptr(MemPlace<Tag, Id>), /// To support alloc-free locals, we are able to write directly to a local. /// (Without that optimization, we'd just always be a `MemPlace`.) - Local { - frame: usize, - local: mir::Local, - }, + Local { frame: usize, local: mir::Local }, } #[derive(Copy, Clone, Debug)] -pub struct PlaceTy<'tcx, Tag=()> { +pub struct PlaceTy<'tcx, Tag = ()> { place: Place<Tag>, // Keep this private; it helps enforce invariants. pub layout: TyLayout<'tcx>, } @@ -62,7 +59,7 @@ impl<'tcx, Tag> ::std::ops::Deref for PlaceTy<'tcx, Tag> { /// A MemPlace with its layout. Constructing it is only possible in this module. 
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)] -pub struct MPlaceTy<'tcx, Tag=()> { +pub struct MPlaceTy<'tcx, Tag = ()> { mplace: MemPlace<Tag>, pub layout: TyLayout<'tcx>, } @@ -78,10 +75,7 @@ impl<'tcx, Tag> ::std::ops::Deref for MPlaceTy<'tcx, Tag> { impl<'tcx, Tag> From<MPlaceTy<'tcx, Tag>> for PlaceTy<'tcx, Tag> { #[inline(always)] fn from(mplace: MPlaceTy<'tcx, Tag>) -> Self { - PlaceTy { - place: Place::Ptr(mplace.mplace), - layout: mplace.layout - } + PlaceTy { place: Place::Ptr(mplace.mplace), layout: mplace.layout } } } @@ -89,11 +83,7 @@ impl<Tag> MemPlace<Tag> { /// Replace ptr tag, maintain vtable tag (if any) #[inline] pub fn replace_tag(self, new_tag: Tag) -> Self { - MemPlace { - ptr: self.ptr.erase_tag().with_tag(new_tag), - align: self.align, - meta: self.meta, - } + MemPlace { ptr: self.ptr.erase_tag().with_tag(new_tag), align: self.align, meta: self.meta } } #[inline] @@ -107,11 +97,7 @@ impl<Tag> MemPlace<Tag> { #[inline(always)] pub fn from_scalar_ptr(ptr: Scalar<Tag>, align: Align) -> Self { - MemPlace { - ptr, - align, - meta: None, - } + MemPlace { ptr, align, meta: None } } /// Produces a Place that will error if attempted to be read from or written to @@ -156,19 +142,16 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> { MPlaceTy { mplace: MemPlace::from_scalar_ptr( Scalar::from_uint(layout.align.abi.bytes(), cx.pointer_size()), - layout.align.abi + layout.align.abi, ), - layout + layout, } } /// Replace ptr tag, maintain vtable tag (if any) #[inline] pub fn replace_tag(self, new_tag: Tag) -> Self { - MPlaceTy { - mplace: self.mplace.replace_tag(new_tag), - layout: self.layout, - } + MPlaceTy { mplace: self.mplace.replace_tag(new_tag), layout: self.layout } } #[inline] @@ -179,10 +162,7 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> { layout: TyLayout<'tcx>, cx: &impl HasDataLayout, ) -> InterpResult<'tcx, Self> { - Ok(MPlaceTy { - mplace: self.mplace.offset(offset, meta, cx)?, - layout, - }) + Ok(MPlaceTy { mplace: self.mplace.offset(offset, meta, cx)?, layout }) } #[inline] @@ -195,8 +175,7 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> { if self.layout.is_unsized() { // We need to consult `meta` metadata match self.layout.ty.kind { - ty::Slice(..) | ty::Str => - return self.mplace.meta.unwrap().to_machine_usize(cx), + ty::Slice(..) | ty::Str => return self.mplace.meta.unwrap().to_machine_usize(cx), _ => bug!("len not supported on unsized type {:?}", self.layout.ty), } } else { @@ -256,7 +235,6 @@ impl<Tag: ::std::fmt::Debug> Place<Tag> { match self { Place::Ptr(mplace) => mplace, _ => bug!("assert_mem_place: expected Place::Ptr, got {:?}", self), - } } } @@ -288,9 +266,8 @@ where &self, val: ImmTy<'tcx, M::PointerTag>, ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> { - let pointee_type = val.layout.ty.builtin_deref(true) - .expect("`ref_to_mplace` called on non-ptr type") - .ty; + let pointee_type = + val.layout.ty.builtin_deref(true).expect("`ref_to_mplace` called on non-ptr type").ty; let layout = self.layout_of(pointee_type)?; let (ptr, meta) = match *val { Immediate::Scalar(ptr) => (ptr.not_undef()?, None), @@ -347,7 +324,8 @@ where &self, mut place: MPlaceTy<'tcx, M::PointerTag>, ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> { - let (size, align) = self.size_and_align_of_mplace(place)? + let (size, align) = self + .size_and_align_of_mplace(place)? 
.unwrap_or((place.layout.size, place.layout.align.abi)); assert!(place.mplace.align <= align, "dynamic alignment less strict than static one?"); place.mplace.align = align; // maximally strict checking @@ -379,8 +357,9 @@ where ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> { // Not using the layout method because we want to compute on u64 let offset = match base.layout.fields { - layout::FieldPlacement::Arbitrary { ref offsets, .. } => - offsets[usize::try_from(field).unwrap()], + layout::FieldPlacement::Arbitrary { ref offsets, .. } => { + offsets[usize::try_from(field).unwrap()] + } layout::FieldPlacement::Array { stride, .. } => { let len = base.len(self)?; if field >= len { @@ -390,9 +369,13 @@ where stride * field } layout::FieldPlacement::Union(count) => { - assert!(field < count as u64, - "Tried to access field {} of union {:#?} with {} fields", - field, base.layout, count); + assert!( + field < count as u64, + "Tried to access field {} of union {:#?} with {} fields", + field, + base.layout, + count + ); // Offset is always 0 Size::from_bytes(0) } @@ -409,13 +392,14 @@ where let align = match self.size_and_align_of(base.meta, field_layout)? { Some((_, align)) => align, None if offset == Size::ZERO => - // An extern type at offset 0, we fall back to its static alignment. - // FIXME: Once we have made decisions for how to handle size and alignment - // of `extern type`, this should be adapted. It is just a temporary hack - // to get some code to work that probably ought to work. - field_layout.align.abi, - None => - bug!("Cannot compute offset for extern type field at non-0 offset"), + // An extern type at offset 0, we fall back to its static alignment. + // FIXME: Once we have made decisions for how to handle size and alignment + // of `extern type`, this should be adapted. It is just a temporary hack + // to get some code to work that probably ought to work. + { + field_layout.align.abi + } + None => bug!("Cannot compute offset for extern type field at non-0 offset"), }; (base.meta, offset.align_to(align)) } else { @@ -467,8 +451,7 @@ where // Not using layout method because that works with usize, and does not work with slices // (that have count 0 in their layout). let from_offset = match base.layout.fields { - layout::FieldPlacement::Array { stride, .. } => - stride * from, + layout::FieldPlacement::Array { stride, .. } => stride * from, _ => bug!("Unexpected layout of index access: {:#?}", base.layout), }; @@ -477,14 +460,12 @@ where let (meta, ty) = match base.layout.ty.kind { // It is not nice to match on the type, but that seems to be the only way to // implement this. - ty::Array(inner, _) => - (None, self.tcx.mk_array(inner, inner_len)), + ty::Array(inner, _) => (None, self.tcx.mk_array(inner, inner_len)), ty::Slice(..) => { let len = Scalar::from_uint(inner_len, self.pointer_size()); (Some(len), base.layout.ty) } - _ => - bug!("cannot subslice non-array type: `{:?}`", base.layout.ty), + _ => bug!("cannot subslice non-array type: `{:?}`", base.layout.ty), }; let layout = self.layout_of(ty)?; base.offset(from_offset, meta, layout, self) @@ -520,11 +501,7 @@ where self.mplace_field(base, u64::try_from(n).unwrap())? } - ConstantIndex { - offset, - min_length, - from_end, - } => { + ConstantIndex { offset, min_length, from_end } => { let n = base.len(self)?; if n < min_length as u64 { // This can only be reached in ConstProp and non-rustc-MIR. @@ -542,8 +519,9 @@ where self.mplace_field(base, index)? 
} - Subslice { from, to, from_end } => - self.mplace_subslice(base, u64::from(from), u64::from(to), from_end)?, + Subslice { from, to, from_end } => { + self.mplace_subslice(base, u64::from(from), u64::from(to), from_end)? + } }) } @@ -569,8 +547,9 @@ where ) -> InterpResult<'tcx, PlaceTy<'tcx, M::PointerTag>> { // Downcast just changes the layout Ok(match base.place { - Place::Ptr(mplace) => - self.mplace_downcast(MPlaceTy { mplace, layout: base.layout }, variant)?.into(), + Place::Ptr(mplace) => { + self.mplace_downcast(MPlaceTy { mplace, layout: base.layout }, variant)?.into() + } Place::Local { .. } => { let layout = base.layout.for_variant(self, variant); PlaceTy { layout, ..base } @@ -586,7 +565,7 @@ where ) -> InterpResult<'tcx, PlaceTy<'tcx, M::PointerTag>> { use rustc::mir::ProjectionElem::*; Ok(match *proj_elem { - Field(field, _) => self.place_field(base, field.index() as u64)?, + Field(field, _) => self.place_field(base, field.index() as u64)?, Downcast(_, variant) => self.place_downcast(base, variant)?, Deref => self.deref_operand(self.place_to_op(base)?)?.into(), // For the other variants, we have to force an allocation. @@ -602,7 +581,7 @@ where /// `eval_place` and `eval_place_to_op`. pub(super) fn eval_static_to_mplace( &self, - place_static: &mir::Static<'tcx> + place_static: &mir::Static<'tcx>, ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> { use rustc::mir::StaticKind; @@ -617,10 +596,7 @@ where throw_inval!(TooGeneric); } - self.const_eval_raw(GlobalId { - instance, - promoted: Some(promoted), - })? + self.const_eval_raw(GlobalId { instance, promoted: Some(promoted) })? } StaticKind::Static => { @@ -674,19 +650,14 @@ where // bail out. None => Place::null(&*self), }, - layout: self.layout_of( - self.subst_from_frame_and_normalize_erasing_regions( - self.frame().body.return_ty() - ) - )?, + layout: self.layout_of(self.subst_from_frame_and_normalize_erasing_regions( + self.frame().body.return_ty(), + ))?, } - }, + } PlaceBase::Local(local) => PlaceTy { // This works even for dead/uninitialized locals; we check further when writing - place: Place::Local { - frame: self.cur_frame(), - local: *local, - }, + place: Place::Local { frame: self.cur_frame(), local: *local }, layout: self.layout_of_local(self.frame(), *local, None)?, }, PlaceBase::Static(place_static) => self.eval_static_to_mplace(&place_static)?.into(), @@ -756,13 +727,19 @@ where // This is a very common path, avoid some checks in release mode assert!(!dest.layout.is_unsized(), "Cannot write unsized data"); match src { - Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Ptr(_))) => - assert_eq!(self.pointer_size(), dest.layout.size, - "Size mismatch when writing pointer"), - Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Raw { size, .. })) => - assert_eq!(Size::from_bytes(size.into()), dest.layout.size, - "Size mismatch when writing bits"), - Immediate::Scalar(ScalarMaybeUndef::Undef) => {}, // undef can have any size + Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Ptr(_))) => assert_eq!( + self.pointer_size(), + dest.layout.size, + "Size mismatch when writing pointer" + ), + Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Raw { size, .. })) => { + assert_eq!( + Size::from_bytes(size.into()), + dest.layout.size, + "Size mismatch when writing bits" + ) + } + Immediate::Scalar(ScalarMaybeUndef::Undef) => {} // undef can have any size Immediate::ScalarPair(_, _) => { // FIXME: Can we check anything here? 
} @@ -785,7 +762,7 @@ where mplace } } - }, + } Place::Ptr(mplace) => mplace, // already referring to memory }; let dest = MPlaceTy { mplace, layout: dest.layout }; @@ -808,8 +785,7 @@ where // wrong type. // Invalid places are a thing: the return place of a diverging function - let ptr = match self.check_mplace_access(dest, None)? - { + let ptr = match self.check_mplace_access(dest, None)? { Some(ptr) => ptr, None => return Ok(()), // zero-sized access }; @@ -821,12 +797,16 @@ where match value { Immediate::Scalar(scalar) => { match dest.layout.abi { - layout::Abi::Scalar(_) => {}, // fine - _ => bug!("write_immediate_to_mplace: invalid Scalar layout: {:#?}", - dest.layout) + layout::Abi::Scalar(_) => {} // fine + _ => { + bug!("write_immediate_to_mplace: invalid Scalar layout: {:#?}", dest.layout) + } } self.memory.get_raw_mut(ptr.alloc_id)?.write_scalar( - tcx, ptr, scalar, dest.layout.size + tcx, + ptr, + scalar, + dest.layout.size, ) } Immediate::ScalarPair(a_val, b_val) => { @@ -835,8 +815,10 @@ where // which `ptr.offset(b_offset)` cannot possibly fail to satisfy. let (a, b) = match dest.layout.abi { layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value), - _ => bug!("write_immediate_to_mplace: invalid ScalarPair layout: {:#?}", - dest.layout) + _ => bug!( + "write_immediate_to_mplace: invalid ScalarPair layout: {:#?}", + dest.layout + ), }; let (a_size, b_size) = (a.size(self), b.size(self)); let b_offset = a_size.align_to(b.align(self).abi); @@ -846,12 +828,8 @@ where // but that does not work: We could be a newtype around a pair, then the // fields do not match the `ScalarPair` components. - self.memory - .get_raw_mut(ptr.alloc_id)? - .write_scalar(tcx, ptr, a_val, a_size)?; - self.memory - .get_raw_mut(b_ptr.alloc_id)? - .write_scalar(tcx, b_ptr, b_val, b_size) + self.memory.get_raw_mut(ptr.alloc_id)?.write_scalar(tcx, ptr, a_val, a_size)?; + self.memory.get_raw_mut(b_ptr.alloc_id)?.write_scalar(tcx, b_ptr, b_val, b_size) } } } @@ -885,8 +863,12 @@ where ) -> InterpResult<'tcx> { // We do NOT compare the types for equality, because well-typed code can // actually "transmute" `&mut T` to `&T` in an assignment without a cast. - assert!(src.layout.details == dest.layout.details, - "Layout mismatch when copying!\nsrc: {:#?}\ndest: {:#?}", src, dest); + assert!( + src.layout.details == dest.layout.details, + "Layout mismatch when copying!\nsrc: {:#?}\ndest: {:#?}", + src, + dest + ); // Let us see if the layout is simple so we take a shortcut, avoid force_allocation. let src = match self.try_read_immediate(src)? { @@ -906,15 +888,19 @@ where // is being initialized! 
let (dest, size) = self.force_allocation_maybe_sized(dest, src.meta)?; let size = size.unwrap_or_else(|| { - assert!(!dest.layout.is_unsized(), - "Cannot copy into already initialized unsized place"); + assert!( + !dest.layout.is_unsized(), + "Cannot copy into already initialized unsized place" + ); dest.layout.size }); assert_eq!(src.meta, dest.meta, "Can only copy between equally-sized instances"); - let src = self.check_mplace_access(src, Some(size)) + let src = self + .check_mplace_access(src, Some(size)) .expect("places should be checked on creation"); - let dest = self.check_mplace_access(dest, Some(size)) + let dest = self + .check_mplace_access(dest, Some(size)) .expect("places should be checked on creation"); let (src_ptr, dest_ptr) = match (src, dest) { (Some(src_ptr), Some(dest_ptr)) => (src_ptr, dest_ptr), @@ -922,12 +908,7 @@ where _ => bug!("The pointers should both be Some or both None"), }; - self.memory.copy( - src_ptr, - dest_ptr, - size, - /*nonoverlapping*/ true, - ) + self.memory.copy(src_ptr, dest_ptr, size, /*nonoverlapping*/ true) } /// Copies the data from an operand to a place. The layouts may disagree, but they must @@ -942,12 +923,18 @@ where return self.copy_op(src, dest); } // We still require the sizes to match. - assert!(src.layout.size == dest.layout.size, - "Size mismatch when transmuting!\nsrc: {:#?}\ndest: {:#?}", src, dest); + assert!( + src.layout.size == dest.layout.size, + "Size mismatch when transmuting!\nsrc: {:#?}\ndest: {:#?}", + src, + dest + ); // Unsized copies rely on interpreting `src.meta` with `dest.layout`, we want // to avoid that here. - assert!(!src.layout.is_unsized() && !dest.layout.is_unsized(), - "Cannot transmute unsized data"); + assert!( + !src.layout.is_unsized() && !dest.layout.is_unsized(), + "Cannot transmute unsized data" + ); // The hard case is `ScalarPair`. `src` is already read from memory in this case, // using `src.layout` to figure out which bytes to use for the 1st and 2nd field. @@ -1006,7 +993,8 @@ where // that has different alignment than the outer field. // We also need to support unsized types, and hence cannot use `allocate`. let local_layout = self.layout_of_local(&self.stack[frame], local, None)?; - let (size, align) = self.size_and_align_of(meta, local_layout)? + let (size, align) = self + .size_and_align_of(meta, local_layout)? 
.expect("Cannot allocate for non-dyn-sized type"); let ptr = self.memory.allocate(size, align, MemoryKind::Stack); let mplace = MemPlace { ptr: ptr.into(), align, meta }; @@ -1026,7 +1014,7 @@ where Err(mplace) => (mplace, None), // this already was an indirect local } } - Place::Ptr(mplace) => (mplace, None) + Place::Ptr(mplace) => (mplace, None), }; // Return with the original layout, so that the caller can go on Ok((MPlaceTy { mplace, layout: place.layout }, size)) @@ -1057,11 +1045,8 @@ where ) -> MPlaceTy<'tcx, M::PointerTag> { let ptr = self.memory.allocate_static_bytes(str.as_bytes(), kind); let meta = Scalar::from_uint(str.len() as u128, self.pointer_size()); - let mplace = MemPlace { - ptr: ptr.into(), - align: Align::from_bytes(1).unwrap(), - meta: Some(meta), - }; + let mplace = + MemPlace { ptr: ptr.into(), align: Align::from_bytes(1).unwrap(), meta: Some(meta) }; let layout = self.layout_of(self.tcx.mk_static_str()).unwrap(); MPlaceTy { mplace, layout } @@ -1072,7 +1057,6 @@ where variant_index: VariantIdx, dest: PlaceTy<'tcx, M::PointerTag>, ) -> InterpResult<'tcx> { - // Layout computation excludes uninhabited variants from consideration // therefore there's no way to represent those variants in the given layout. if dest.layout.for_variant(self, variant_index).abi.is_uninhabited() { @@ -1105,11 +1089,8 @@ where self.write_scalar(Scalar::from_uint(discr_val, size), discr_dest)?; } layout::Variants::Multiple { - discr_kind: layout::DiscriminantKind::Niche { - dataful_variant, - ref niche_variants, - niche_start, - }, + discr_kind: + layout::DiscriminantKind::Niche { dataful_variant, ref niche_variants, niche_start }, discr: ref discr_layout, discr_index, .. @@ -1119,7 +1100,8 @@ where if variant_index != dataful_variant { let variants_start = niche_variants.start().as_u32(); - let variant_index_relative = variant_index.as_u32() + let variant_index_relative = variant_index + .as_u32() .checked_sub(variants_start) .expect("overflow computing relative variant idx"); // We need to use machine arithmetic when taking into account `niche_start`: @@ -1156,8 +1138,10 @@ where /// Turn a place with a `dyn Trait` type into a place with the actual dynamic type. /// Also return some more information so drop doesn't have to run the same code twice. 
- pub(super) fn unpack_dyn_trait(&self, mplace: MPlaceTy<'tcx, M::PointerTag>) - -> InterpResult<'tcx, (ty::Instance<'tcx>, MPlaceTy<'tcx, M::PointerTag>)> { + pub(super) fn unpack_dyn_trait( + &self, + mplace: MPlaceTy<'tcx, M::PointerTag>, + ) -> InterpResult<'tcx, (ty::Instance<'tcx>, MPlaceTy<'tcx, M::PointerTag>)> { let vtable = mplace.vtable(); // also sanity checks the type let (instance, ty) = self.read_drop_type_from_vtable(vtable)?; let layout = self.layout_of(ty)?; @@ -1170,10 +1154,7 @@ where assert_eq!(align, layout.align.abi); } - let mplace = MPlaceTy { - mplace: MemPlace { meta: None, ..*mplace }, - layout - }; + let mplace = MPlaceTy { mplace: MemPlace { meta: None, ..*mplace }, layout }; Ok((instance, mplace)) } } diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index ac627d32392..0a783337ad1 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -176,32 +176,32 @@ use crate::monomorphize; -use rustc::hir::{self, CodegenFnAttrFlags}; -use rustc::hir::itemlikevisit::ItemLikeVisitor; use rustc::hir::def_id::{DefId, LOCAL_CRATE}; -use rustc::mir::interpret::{AllocId, ConstValue}; +use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::{self, CodegenFnAttrFlags}; use rustc::middle::lang_items::{ExchangeMallocFnLangItem, StartFnLangItem}; -use rustc::ty::subst::{InternalSubsts, Subst, SubstsRef}; -use rustc::ty::{self, TypeFoldable, Ty, TyCtxt, GenericParamDefKind, Instance}; -use rustc::ty::print::obsolete::DefPathBasedNames; -use rustc::ty::adjustment::{CustomCoerceUnsized, PointerCast}; -use rustc::session::config::EntryFnType; -use rustc::mir::{self, Location, PlaceBase, Static, StaticKind}; +use rustc::mir::interpret::{AllocId, ConstValue}; +use rustc::mir::interpret::{ErrorHandled, GlobalAlloc, Scalar}; +use rustc::mir::mono::{InstantiationMode, MonoItem}; use rustc::mir::visit::Visitor as MirVisitor; -use rustc::mir::mono::{MonoItem, InstantiationMode}; -use rustc::mir::interpret::{Scalar, GlobalAlloc, ErrorHandled}; -use rustc::util::nodemap::{FxHashSet, FxHashMap, DefIdMap}; +use rustc::mir::{self, Location, PlaceBase, Static, StaticKind}; +use rustc::session::config::EntryFnType; +use rustc::ty::adjustment::{CustomCoerceUnsized, PointerCast}; +use rustc::ty::print::obsolete::DefPathBasedNames; +use rustc::ty::subst::{InternalSubsts, Subst, SubstsRef}; +use rustc::ty::{self, GenericParamDefKind, Instance, Ty, TyCtxt, TypeFoldable}; use rustc::util::common::time; +use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet}; +use rustc_data_structures::sync::{par_iter, MTLock, MTRef, ParallelIterator}; use rustc_index::bit_set::GrowableBitSet; -use rustc_data_structures::sync::{MTRef, MTLock, ParallelIterator, par_iter}; use std::iter; #[derive(PartialEq)] pub enum MonoItemCollectionMode { Eager, - Lazy + Lazy, } /// Maps every mono item to all mono items it references in its @@ -220,7 +220,6 @@ pub struct InliningMap<'tcx> { } impl<'tcx> InliningMap<'tcx> { - fn new() -> InliningMap<'tcx> { InliningMap { index: FxHashMap::default(), @@ -229,10 +228,9 @@ impl<'tcx> InliningMap<'tcx> { } } - fn record_accesses<I>(&mut self, - source: MonoItem<'tcx>, - new_targets: I) - where I: Iterator<Item=(MonoItem<'tcx>, bool)> + ExactSizeIterator + fn record_accesses<I>(&mut self, source: MonoItem<'tcx>, new_targets: I) + where + I: Iterator<Item = (MonoItem<'tcx>, bool)> + ExactSizeIterator, { assert!(!self.index.contains_key(&source)); @@ -257,12 +255,11 @@ impl<'tcx> 
InliningMap<'tcx> { // Internally iterate over all items referenced by `source` which will be // made available for inlining. pub fn with_inlining_candidates<F>(&self, source: MonoItem<'tcx>, mut f: F) - where F: FnMut(MonoItem<'tcx>) + where + F: FnMut(MonoItem<'tcx>), { if let Some(&(start_index, end_index)) = self.index.get(&source) { - for (i, candidate) in self.targets[start_index .. end_index] - .iter() - .enumerate() { + for (i, candidate) in self.targets[start_index..end_index].iter().enumerate() { if self.inlines.contains(start_index + i) { f(*candidate); } @@ -272,10 +269,11 @@ impl<'tcx> InliningMap<'tcx> { // Internally iterate over all items and the things each accesses. pub fn iter_accesses<F>(&self, mut f: F) - where F: FnMut(MonoItem<'tcx>, &[MonoItem<'tcx>]) + where + F: FnMut(MonoItem<'tcx>, &[MonoItem<'tcx>]), { for (&accessor, &(start_index, end_index)) in &self.index { - f(accessor, &self.targets[start_index .. end_index]) + f(accessor, &self.targets[start_index..end_index]) } } } @@ -287,8 +285,7 @@ pub fn collect_crate_mono_items( let _prof_timer = tcx.prof.generic_activity("monomorphization_collector"); let roots = time(tcx.sess, "collecting roots", || { - let _prof_timer = tcx.prof - .generic_activity("monomorphization_collector_root_collections"); + let _prof_timer = tcx.prof.generic_activity("monomorphization_collector_root_collections"); collect_roots(tcx, mode) }); @@ -298,8 +295,7 @@ pub fn collect_crate_mono_items( let mut inlining_map = MTLock::new(InliningMap::new()); { - let _prof_timer = tcx.prof - .generic_activity("monomorphization_collector_graph_walk"); + let _prof_timer = tcx.prof.generic_activity("monomorphization_collector_graph_walk"); let visited: MTRef<'_, _> = &mut visited; let inlining_map: MTRef<'_, _> = &mut inlining_map; @@ -307,11 +303,7 @@ pub fn collect_crate_mono_items( time(tcx.sess, "collecting mono items", || { par_iter(roots).for_each(|root| { let mut recursion_depths = DefIdMap::default(); - collect_items_rec(tcx, - root, - visited, - &mut recursion_depths, - inlining_map); + collect_items_rec(tcx, root, visited, &mut recursion_depths, inlining_map); }); }); } @@ -330,12 +322,7 @@ fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionMode) -> Vec<MonoItem< debug!("collect_roots: entry_fn = {:?}", entry_fn); - let mut visitor = RootCollector { - tcx, - mode, - entry_fn, - output: &mut roots, - }; + let mut visitor = RootCollector { tcx, mode, entry_fn, output: &mut roots }; tcx.hir().krate().visit_all_item_likes(&mut visitor); @@ -388,9 +375,7 @@ fn collect_items_rec<'tcx>( debug_assert!(should_monomorphize_locally(tcx, &instance)); // Keep track of the monomorphization recursion depth - recursion_depth_reset = Some(check_recursion_limit(tcx, - instance, - recursion_depths)); + recursion_depth_reset = Some(check_recursion_limit(tcx, instance, recursion_depths)); check_type_length_limit(tcx, instance); collect_neighbours(tcx, instance, &mut neighbors); @@ -423,10 +408,8 @@ fn record_accesses<'tcx>( mono_item.instantiation_mode(tcx) == InstantiationMode::LocalCopy }; - let accesses = callees.into_iter() - .map(|mono_item| { - (*mono_item, is_inlining_candidate(mono_item)) - }); + let accesses = + callees.into_iter().map(|mono_item| (*mono_item, is_inlining_candidate(mono_item))); inlining_map.lock_mut().record_accesses(caller, accesses); } @@ -452,8 +435,7 @@ fn check_recursion_limit<'tcx>( // more than the recursion limit is assumed to be causing an // infinite expansion. 
if recursion_depth > *tcx.sess.recursion_limit.get() { - let error = format!("reached the recursion limit while instantiating `{}`", - instance); + let error = format!("reached the recursion limit while instantiating `{}`", instance); if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) { tcx.sess.span_fatal(tcx.hir().span(hir_id), &error); } else { @@ -498,18 +480,17 @@ fn check_type_length_limit<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) { // Only use the shrunk version if it's really shorter. // This also avoids the case where before and after slices overlap. - if shrunk.len() < s.len() { - shrunk - } else { - s - } + if shrunk.len() < s.len() { shrunk } else { s } }; - let msg = format!("reached the type-length limit while instantiating `{}`", - shrink(instance.to_string(), 32, 32)); + let msg = format!( + "reached the type-length limit while instantiating `{}`", + shrink(instance.to_string(), 32, 32) + ); let mut diag = tcx.sess.struct_span_fatal(tcx.def_span(instance.def_id()), &msg); diag.note(&format!( "consider adding a `#![type_length_limit=\"{}\"]` attribute to your crate", - type_length)); + type_length + )); diag.emit(); tcx.sess.abort_if_errors(); } @@ -531,7 +512,9 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { // have to instantiate all methods of the trait being cast to, so we // can build the appropriate vtable. mir::Rvalue::Cast( - mir::CastKind::Pointer(PointerCast::Unsize), ref operand, target_ty + mir::CastKind::Pointer(PointerCast::Unsize), + ref operand, + target_ty, ) => { let target_ty = self.tcx.subst_and_normalize_erasing_regions( self.param_substs, @@ -544,21 +527,24 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { ty::ParamEnv::reveal_all(), &source_ty, ); - let (source_ty, target_ty) = find_vtable_types_for_unsizing(self.tcx, - source_ty, - target_ty); + let (source_ty, target_ty) = + find_vtable_types_for_unsizing(self.tcx, source_ty, target_ty); // This could also be a different Unsize instruction, like // from a fixed sized array to a slice. But we are only // interested in things that produce a vtable. 
if target_ty.is_trait() && !source_ty.is_trait() { - create_mono_items_for_vtable_methods(self.tcx, - target_ty, - source_ty, - self.output); + create_mono_items_for_vtable_methods( + self.tcx, + target_ty, + source_ty, + self.output, + ); } } mir::Rvalue::Cast( - mir::CastKind::Pointer(PointerCast::ReifyFnPointer), ref operand, _ + mir::CastKind::Pointer(PointerCast::ReifyFnPointer), + ref operand, + _, ) => { let fn_ty = operand.ty(self.body, self.tcx); let fn_ty = self.tcx.subst_and_normalize_erasing_regions( @@ -569,7 +555,9 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { visit_fn_use(self.tcx, fn_ty, false, &mut self.output); } mir::Rvalue::Cast( - mir::CastKind::Pointer(PointerCast::ClosureFnPointer(_)), ref operand, _ + mir::CastKind::Pointer(PointerCast::ClosureFnPointer(_)), + ref operand, + _, ) => { let source_ty = operand.ty(self.body, self.tcx); let source_ty = self.tcx.subst_and_normalize_erasing_regions( @@ -580,8 +568,11 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { match source_ty.kind { ty::Closure(def_id, substs) => { let instance = Instance::resolve_closure( - self.tcx, def_id, - substs, ty::ClosureKind::FnOnce); + self.tcx, + def_id, + substs, + ty::ClosureKind::FnOnce, + ); if should_monomorphize_locally(self.tcx, &instance) { self.output.push(create_fn_mono_item(instance)); } @@ -614,9 +605,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { self.super_const(constant); } - fn visit_terminator_kind(&mut self, - kind: &mir::TerminatorKind<'tcx>, - location: Location) { + fn visit_terminator_kind(&mut self, kind: &mir::TerminatorKind<'tcx>, location: Location) { debug!("visiting terminator {:?} @ {:?}", kind, location); let tcx = self.tcx; @@ -630,8 +619,8 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { ); visit_fn_use(self.tcx, callee_ty, true, &mut self.output); } - mir::TerminatorKind::Drop { ref location, .. } | - mir::TerminatorKind::DropAndReplace { ref location, .. } => { + mir::TerminatorKind::Drop { ref location, .. } + | mir::TerminatorKind::DropAndReplace { ref location, .. } => { let ty = location.ty(self.body, self.tcx).ty; let ty = tcx.subst_and_normalize_erasing_regions( self.param_substs, @@ -640,26 +629,28 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { ); visit_drop_use(self.tcx, ty, true, self.output); } - mir::TerminatorKind::Goto { .. } | - mir::TerminatorKind::SwitchInt { .. } | - mir::TerminatorKind::Resume | - mir::TerminatorKind::Abort | - mir::TerminatorKind::Return | - mir::TerminatorKind::Unreachable | - mir::TerminatorKind::Assert { .. } => {} - mir::TerminatorKind::GeneratorDrop | - mir::TerminatorKind::Yield { .. } | - mir::TerminatorKind::FalseEdges { .. } | - mir::TerminatorKind::FalseUnwind { .. } => bug!(), + mir::TerminatorKind::Goto { .. } + | mir::TerminatorKind::SwitchInt { .. } + | mir::TerminatorKind::Resume + | mir::TerminatorKind::Abort + | mir::TerminatorKind::Return + | mir::TerminatorKind::Unreachable + | mir::TerminatorKind::Assert { .. } => {} + mir::TerminatorKind::GeneratorDrop + | mir::TerminatorKind::Yield { .. } + | mir::TerminatorKind::FalseEdges { .. } + | mir::TerminatorKind::FalseUnwind { .. 
} => bug!(), } self.super_terminator_kind(kind, location); } - fn visit_place_base(&mut self, - place_base: &mir::PlaceBase<'tcx>, - _context: mir::visit::PlaceContext, - location: Location) { + fn visit_place_base( + &mut self, + place_base: &mir::PlaceBase<'tcx>, + _context: mir::visit::PlaceContext, + location: Location, + ) { match place_base { PlaceBase::Static(box Static { kind: StaticKind::Static, def_id, .. }) => { debug!("visiting static {:?} @ {:?}", def_id, location); @@ -678,11 +669,11 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { let instance = Instance::new(*def_id, substs.subst(self.tcx, self.param_substs)); match self.tcx.const_eval_promoted(instance, *promoted) { Ok(val) => collect_const(self.tcx, val, substs, self.output), - Err(ErrorHandled::Reported) => {}, + Err(ErrorHandled::Reported) => {} Err(ErrorHandled::TooGeneric) => { let span = self.tcx.promoted_mir(*def_id)[*promoted].span; span_bug!(span, "collection encountered polymorphic constant") - }, + } } } PlaceBase::Local(_) => { @@ -709,11 +700,8 @@ fn visit_fn_use<'tcx>( output: &mut Vec<MonoItem<'tcx>>, ) { if let ty::FnDef(def_id, substs) = ty.kind { - let resolver = if is_direct_call { - ty::Instance::resolve - } else { - ty::Instance::resolve_for_fn_ptr - }; + let resolver = + if is_direct_call { ty::Instance::resolve } else { ty::Instance::resolve_for_fn_ptr }; let instance = resolver(tcx, ty::ParamEnv::reveal_all(), def_id, substs).unwrap(); visit_instance_use(tcx, instance, is_direct_call, output); } @@ -727,12 +715,11 @@ fn visit_instance_use<'tcx>( ) { debug!("visit_item_use({:?}, is_direct_call={:?})", instance, is_direct_call); if !should_monomorphize_locally(tcx, &instance) { - return + return; } match instance.def { - ty::InstanceDef::Virtual(..) | - ty::InstanceDef::Intrinsic(_) => { + ty::InstanceDef::Virtual(..) | ty::InstanceDef::Intrinsic(_) => { if !is_direct_call { bug!("{:?} being reified", instance); } @@ -743,13 +730,13 @@ fn visit_instance_use<'tcx>( output.push(create_fn_mono_item(instance)); } } - ty::InstanceDef::DropGlue(_, Some(_)) | - ty::InstanceDef::VtableShim(..) | - ty::InstanceDef::ReifyShim(..) | - ty::InstanceDef::ClosureOnceShim { .. } | - ty::InstanceDef::Item(..) | - ty::InstanceDef::FnPtrShim(..) | - ty::InstanceDef::CloneShim(..) => { + ty::InstanceDef::DropGlue(_, Some(_)) + | ty::InstanceDef::VtableShim(..) + | ty::InstanceDef::ReifyShim(..) + | ty::InstanceDef::ClosureOnceShim { .. } + | ty::InstanceDef::Item(..) + | ty::InstanceDef::FnPtrShim(..) + | ty::InstanceDef::CloneShim(..) => { output.push(create_fn_mono_item(instance)); } } @@ -761,14 +748,14 @@ fn visit_instance_use<'tcx>( fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx>) -> bool { let def_id = match instance.def { ty::InstanceDef::Item(def_id) => def_id, - ty::InstanceDef::VtableShim(..) | - ty::InstanceDef::ReifyShim(..) | - ty::InstanceDef::ClosureOnceShim { .. } | - ty::InstanceDef::Virtual(..) | - ty::InstanceDef::FnPtrShim(..) | - ty::InstanceDef::DropGlue(..) | - ty::InstanceDef::Intrinsic(_) | - ty::InstanceDef::CloneShim(..) => return true + ty::InstanceDef::VtableShim(..) + | ty::InstanceDef::ReifyShim(..) + | ty::InstanceDef::ClosureOnceShim { .. } + | ty::InstanceDef::Virtual(..) + | ty::InstanceDef::FnPtrShim(..) + | ty::InstanceDef::DropGlue(..) + | ty::InstanceDef::Intrinsic(_) + | ty::InstanceDef::CloneShim(..) 
=> return true, }; if tcx.is_foreign_item(def_id) { @@ -781,8 +768,9 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx return true; } - if tcx.is_reachable_non_generic(def_id) || - is_available_upstream_generic(tcx, def_id, instance.substs) { + if tcx.is_reachable_non_generic(def_id) + || is_available_upstream_generic(tcx, def_id, instance.substs) + { // We can link to the item in question, no instance needed // in this crate. return false; @@ -804,21 +792,21 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx // monomorphizations but always instantiate our own internal versions // instead. if !tcx.sess.opts.share_generics() { - return false + return false; } // If this instance has non-erasable parameters, it cannot be a shared // monomorphization. Non-generic instances are already handled above // by `is_reachable_non_generic()`. if substs.non_erasable_generics().next().is_none() { - return false + return false; } // Take a look at the available monomorphizations listed in the metadata // of upstream crates. tcx.upstream_monomorphizations_for(def_id) - .map(|set| set.contains_key(substs)) - .unwrap_or(false) + .map(|set| set.contains_key(substs)) + .unwrap_or(false) } } @@ -886,43 +874,42 @@ fn find_vtable_types_for_unsizing<'tcx>( }; match (&source_ty.kind, &target_ty.kind) { - (&ty::Ref(_, a, _), - &ty::Ref(_, b, _)) | - (&ty::Ref(_, a, _), - &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) | - (&ty::RawPtr(ty::TypeAndMut { ty: a, .. }), - &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) => { + (&ty::Ref(_, a, _), &ty::Ref(_, b, _)) + | (&ty::Ref(_, a, _), &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) + | (&ty::RawPtr(ty::TypeAndMut { ty: a, .. }), &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) => { ptr_vtable(a, b) } (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) if def_a.is_box() && def_b.is_box() => { ptr_vtable(source_ty.boxed_ty(), target_ty.boxed_ty()) } - (&ty::Adt(source_adt_def, source_substs), - &ty::Adt(target_adt_def, target_substs)) => { + (&ty::Adt(source_adt_def, source_substs), &ty::Adt(target_adt_def, target_substs)) => { assert_eq!(source_adt_def, target_adt_def); - let kind = - monomorphize::custom_coerce_unsize_info(tcx, source_ty, target_ty); + let kind = monomorphize::custom_coerce_unsize_info(tcx, source_ty, target_ty); let coerce_index = match kind { - CustomCoerceUnsized::Struct(i) => i + CustomCoerceUnsized::Struct(i) => i, }; let source_fields = &source_adt_def.non_enum_variant().fields; let target_fields = &target_adt_def.non_enum_variant().fields; - assert!(coerce_index < source_fields.len() && - source_fields.len() == target_fields.len()); + assert!( + coerce_index < source_fields.len() && source_fields.len() == target_fields.len() + ); - find_vtable_types_for_unsizing(tcx, + find_vtable_types_for_unsizing( + tcx, source_fields[coerce_index].ty(tcx, source_substs), - target_fields[coerce_index].ty(tcx, target_substs) + target_fields[coerce_index].ty(tcx, target_substs), ) } - _ => bug!("find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}", - source_ty, - target_ty) + _ => bug!( + "find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}", + source_ty, + target_ty + ), } } @@ -939,8 +926,12 @@ fn create_mono_items_for_vtable_methods<'tcx>( impl_ty: Ty<'tcx>, output: &mut Vec<MonoItem<'tcx>>, ) { - assert!(!trait_ty.needs_subst() && !trait_ty.has_escaping_bound_vars() && - !impl_ty.needs_subst() && !impl_ty.has_escaping_bound_vars()); + assert!( + !trait_ty.needs_subst() + && 
!trait_ty.has_escaping_bound_vars() + && !impl_ty.needs_subst() + && !impl_ty.has_escaping_bound_vars() + ); if let ty::Dynamic(ref trait_ty, ..) = trait_ty.kind { if let Some(principal) = trait_ty.principal() { @@ -949,12 +940,19 @@ fn create_mono_items_for_vtable_methods<'tcx>( // Walk all methods of the trait, including those of its supertraits let methods = tcx.vtable_methods(poly_trait_ref); - let methods = methods.iter().cloned().filter_map(|method| method) - .map(|(def_id, substs)| ty::Instance::resolve_for_vtable( - tcx, - ty::ParamEnv::reveal_all(), - def_id, - substs).unwrap()) + let methods = methods + .iter() + .cloned() + .filter_map(|method| method) + .map(|(def_id, substs)| { + ty::Instance::resolve_for_vtable( + tcx, + ty::ParamEnv::reveal_all(), + def_id, + substs, + ) + .unwrap() + }) .filter(|&instance| should_monomorphize_locally(tcx, &instance)) .map(|instance| create_fn_mono_item(instance)); output.extend(methods); @@ -979,33 +977,33 @@ struct RootCollector<'a, 'tcx> { impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> { fn visit_item(&mut self, item: &'v hir::Item<'v>) { match item.kind { - hir::ItemKind::ExternCrate(..) | - hir::ItemKind::Use(..) | - hir::ItemKind::ForeignMod(..) | - hir::ItemKind::TyAlias(..) | - hir::ItemKind::Trait(..) | - hir::ItemKind::TraitAlias(..) | - hir::ItemKind::OpaqueTy(..) | - hir::ItemKind::Mod(..) => { + hir::ItemKind::ExternCrate(..) + | hir::ItemKind::Use(..) + | hir::ItemKind::ForeignMod(..) + | hir::ItemKind::TyAlias(..) + | hir::ItemKind::Trait(..) + | hir::ItemKind::TraitAlias(..) + | hir::ItemKind::OpaqueTy(..) + | hir::ItemKind::Mod(..) => { // Nothing to do, just keep recursing. } hir::ItemKind::Impl(..) => { if self.mode == MonoItemCollectionMode::Eager { - create_mono_items_for_default_impls(self.tcx, - item, - self.output); + create_mono_items_for_default_impls(self.tcx, item, self.output); } } - hir::ItemKind::Enum(_, ref generics) | - hir::ItemKind::Struct(_, ref generics) | - hir::ItemKind::Union(_, ref generics) => { + hir::ItemKind::Enum(_, ref generics) + | hir::ItemKind::Struct(_, ref generics) + | hir::ItemKind::Union(_, ref generics) => { if generics.params.is_empty() { if self.mode == MonoItemCollectionMode::Eager { let def_id = self.tcx.hir().local_def_id(item.hir_id); - debug!("RootCollector: ADT drop-glue for {}", - def_id_to_string(self.tcx, def_id)); + debug!( + "RootCollector: ADT drop-glue for {}", + def_id_to_string(self.tcx, def_id) + ); let ty = Instance::new(def_id, InternalSubsts::empty()).ty(self.tcx); visit_drop_use(self.tcx, ty, true, self.output); @@ -1013,15 +1011,15 @@ impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> { } } hir::ItemKind::GlobalAsm(..) => { - debug!("RootCollector: ItemKind::GlobalAsm({})", - def_id_to_string(self.tcx, - self.tcx.hir().local_def_id(item.hir_id))); + debug!( + "RootCollector: ItemKind::GlobalAsm({})", + def_id_to_string(self.tcx, self.tcx.hir().local_def_id(item.hir_id)) + ); self.output.push(MonoItem::GlobalAsm(item.hir_id)); } hir::ItemKind::Static(..) => { let def_id = self.tcx.hir().local_def_id(item.hir_id); - debug!("RootCollector: ItemKind::Static({})", - def_id_to_string(self.tcx, def_id)); + debug!("RootCollector: ItemKind::Static({})", def_id_to_string(self.tcx, def_id)); self.output.push(MonoItem::Static(def_id)); } hir::ItemKind::Const(..) 
=> { @@ -1060,17 +1058,19 @@ impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> { impl RootCollector<'_, 'v> { fn is_root(&self, def_id: DefId) -> bool { - !item_requires_monomorphization(self.tcx, def_id) && match self.mode { - MonoItemCollectionMode::Eager => { - true - } - MonoItemCollectionMode::Lazy => { - self.entry_fn.map(|(id, _)| id) == Some(def_id) || - self.tcx.is_reachable_non_generic(def_id) || - self.tcx.codegen_fn_attrs(def_id).flags.contains( - CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) + !item_requires_monomorphization(self.tcx, def_id) + && match self.mode { + MonoItemCollectionMode::Eager => true, + MonoItemCollectionMode::Lazy => { + self.entry_fn.map(|(id, _)| id) == Some(def_id) + || self.tcx.is_reachable_non_generic(def_id) + || self + .tcx + .codegen_fn_attrs(def_id) + .flags + .contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) + } } - } } /// If `def_id` represents a root, pushes it onto the list of @@ -1106,16 +1106,15 @@ impl RootCollector<'_, 'v> { // late-bound regions, since late-bound // regions must appear in the argument // listing. - let main_ret_ty = self.tcx.erase_regions( - &main_ret_ty.no_bound_vars().unwrap(), - ); + let main_ret_ty = self.tcx.erase_regions(&main_ret_ty.no_bound_vars().unwrap()); let start_instance = Instance::resolve( self.tcx, ty::ParamEnv::reveal_all(), start_def_id, - self.tcx.intern_substs(&[main_ret_ty.into()]) - ).unwrap(); + self.tcx.intern_substs(&[main_ret_ty.into()]), + ) + .unwrap(); self.output.push(create_fn_mono_item(start_instance)); } @@ -1136,28 +1135,24 @@ fn create_mono_items_for_default_impls<'tcx>( for param in &generics.params { match param.kind { hir::GenericParamKind::Lifetime { .. } => {} - hir::GenericParamKind::Type { .. } | - hir::GenericParamKind::Const { .. } => { - return + hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => { + return; } } } let impl_def_id = tcx.hir().local_def_id(item.hir_id); - debug!("create_mono_items_for_default_impls(item={})", - def_id_to_string(tcx, impl_def_id)); + debug!( + "create_mono_items_for_default_impls(item={})", + def_id_to_string(tcx, impl_def_id) + ); if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id) { let param_env = ty::ParamEnv::reveal_all(); - let trait_ref = tcx.normalize_erasing_regions( - param_env, - trait_ref, - ); + let trait_ref = tcx.normalize_erasing_regions(param_env, trait_ref); let overridden_methods: FxHashSet<_> = - impl_item_refs.iter() - .map(|iiref| iiref.ident.modern()) - .collect(); + impl_item_refs.iter().map(|iiref| iiref.ident.modern()).collect(); for method in tcx.provided_trait_methods(trait_ref.def_id) { if overridden_methods.contains(&method.ident.modern()) { continue; @@ -1167,31 +1162,25 @@ fn create_mono_items_for_default_impls<'tcx>( continue; } - let substs = InternalSubsts::for_item(tcx, method.def_id, |param, _| { - match param.kind { + let substs = + InternalSubsts::for_item(tcx, method.def_id, |param, _| match param.kind { GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(), - GenericParamDefKind::Type { .. } | - GenericParamDefKind::Const => { + GenericParamDefKind::Type { .. 
} | GenericParamDefKind::Const => { trait_ref.substs[param.index as usize] } - } - }); - let instance = ty::Instance::resolve(tcx, - param_env, - method.def_id, - substs).unwrap(); + }); + let instance = + ty::Instance::resolve(tcx, param_env, method.def_id, substs).unwrap(); let mono_item = create_fn_mono_item(instance); - if mono_item.is_instantiable(tcx) - && should_monomorphize_locally(tcx, &instance) { + if mono_item.is_instantiable(tcx) && should_monomorphize_locally(tcx, &instance) + { output.push(mono_item); } } } } - _ => { - bug!() - } + _ => bug!(), } } @@ -1211,7 +1200,7 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec<Mon for &((), inner) in alloc.relocations().values() { collect_miri(tcx, inner, output); } - }, + } Some(GlobalAlloc::Function(fn_instance)) => { if should_monomorphize_locally(tcx, &fn_instance) { trace!("collecting {:?} with {:#?}", alloc_id, fn_instance); @@ -1231,12 +1220,8 @@ fn collect_neighbours<'tcx>( debug!("collect_neighbours: {:?}", instance.def_id()); let body = tcx.instance_mir(instance.def); - MirNeighborCollector { - tcx, - body: &body, - output, - param_substs: instance.substs, - }.visit_body(body); + MirNeighborCollector { tcx, body: &body, output, param_substs: instance.substs } + .visit_body(body); } fn def_id_to_string(tcx: TyCtxt<'_>, def_id: DefId) -> String { @@ -1255,17 +1240,15 @@ fn collect_const<'tcx>( debug!("visiting const {:?}", constant); let param_env = ty::ParamEnv::reveal_all(); - let substituted_constant = tcx.subst_and_normalize_erasing_regions( - param_substs, - param_env, - &constant, - ); + let substituted_constant = + tcx.subst_and_normalize_erasing_regions(param_substs, param_env, &constant); match substituted_constant.val { - ty::ConstKind::Value(ConstValue::Scalar(Scalar::Ptr(ptr))) => - collect_miri(tcx, ptr.alloc_id, output), - ty::ConstKind::Value(ConstValue::Slice { data: alloc, start: _, end: _ }) | - ty::ConstKind::Value(ConstValue::ByRef { alloc, .. }) => { + ty::ConstKind::Value(ConstValue::Scalar(Scalar::Ptr(ptr))) => { + collect_miri(tcx, ptr.alloc_id, output) + } + ty::ConstKind::Value(ConstValue::Slice { data: alloc, start: _, end: _ }) + | ty::ConstKind::Value(ConstValue::ByRef { alloc, .. }) => { for &((), id) in alloc.relocations().values() { collect_miri(tcx, id, output); } @@ -1273,12 +1256,12 @@ fn collect_const<'tcx>( ty::ConstKind::Unevaluated(def_id, substs) => { match tcx.const_eval_resolve(param_env, def_id, substs, None) { Ok(val) => collect_const(tcx, val, param_substs, output), - Err(ErrorHandled::Reported) => {}, - Err(ErrorHandled::TooGeneric) => span_bug!( - tcx.def_span(def_id), "collection encountered polymorphic constant", - ), + Err(ErrorHandled::Reported) => {} + Err(ErrorHandled::TooGeneric) => { + span_bug!(tcx.def_span(def_id), "collection encountered polymorphic constant",) + } } - }, - _ => {}, + } + _ => {} } } diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 1d5e65c6d27..c3a5175abac 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -6,11 +6,12 @@ // This pass is supposed to perform only simple checks not requiring name resolution // or type checking or some other kind of complex analysis. 
-use std::mem; +use errors::{Applicability, FatalError}; use rustc::lint; use rustc::session::Session; use rustc_data_structures::fx::FxHashMap; use rustc_parse::validate_attr; +use std::mem; use syntax::ast::*; use syntax::attr; use syntax::expand::is_proc_macro_attr; @@ -20,7 +21,6 @@ use syntax::symbol::{kw, sym}; use syntax::visit::{self, Visitor}; use syntax::{span_err, struct_span_err, walk_list}; use syntax_pos::Span; -use errors::{Applicability, FatalError}; use rustc_error_codes::*; @@ -68,8 +68,9 @@ impl<'a> AstValidator<'a> { AssocTyConstraintKind::Equality { .. } => {} AssocTyConstraintKind::Bound { .. } => { if self.is_assoc_ty_bound_banned { - self.err_handler().span_err(constraint.span, - "associated type bounds are not allowed within structs, enums, or unions" + self.err_handler().span_err( + constraint.span, + "associated type bounds are not allowed within structs, enums, or unions", ); } } @@ -125,9 +126,7 @@ impl<'a> AstValidator<'a> { } fn check_lifetime(&self, ident: Ident) { - let valid_names = [kw::UnderscoreLifetime, - kw::StaticLifetime, - kw::Invalid]; + let valid_names = [kw::UnderscoreLifetime, kw::StaticLifetime, kw::Invalid]; if !valid_names.contains(&ident.name) && ident.without_first_quote().is_reserved() { self.err_handler().span_err(ident.span, "lifetimes cannot use keyword names"); } @@ -142,13 +141,11 @@ impl<'a> AstValidator<'a> { fn invalid_visibility(&self, vis: &Visibility, note: Option<&str>) { if let VisibilityKind::Inherited = vis.node { - return + return; } - let mut err = struct_span_err!(self.session, - vis.span, - E0449, - "unnecessary visibility qualifier"); + let mut err = + struct_span_err!(self.session, vis.span, E0449, "unnecessary visibility qualifier"); if vis.node.is_pub() { err.span_label(vis.span, "`pub` not permitted here because it's implied"); } @@ -161,10 +158,10 @@ impl<'a> AstValidator<'a> { fn check_decl_no_pat(decl: &FnDecl, mut report_err: impl FnMut(Span, bool)) { for Param { pat, .. 
} in &decl.inputs { match pat.kind { - PatKind::Ident(BindingMode::ByValue(Mutability::Not), _, None) | - PatKind::Wild => {} - PatKind::Ident(BindingMode::ByValue(Mutability::Mut), _, None) => - report_err(pat.span, true), + PatKind::Ident(BindingMode::ByValue(Mutability::Not), _, None) | PatKind::Wild => {} + PatKind::Ident(BindingMode::ByValue(Mutability::Mut), _, None) => { + report_err(pat.span, true) + } _ => report_err(pat.span, false), } } @@ -174,26 +171,34 @@ impl<'a> AstValidator<'a> { if asyncness.is_async() { struct_span_err!(self.session, span, E0706, "trait fns cannot be declared `async`") .note("`async` trait functions are not currently supported") - .note("consider using the `async-trait` crate: \ - https://crates.io/crates/async-trait") + .note( + "consider using the `async-trait` crate: \ + https://crates.io/crates/async-trait", + ) .emit(); } } fn check_trait_fn_not_const(&self, constness: Spanned<Constness>) { if constness.node == Constness::Const { - struct_span_err!(self.session, constness.span, E0379, - "trait fns cannot be declared const") - .span_label(constness.span, "trait fns cannot be const") - .emit(); + struct_span_err!( + self.session, + constness.span, + E0379, + "trait fns cannot be declared const" + ) + .span_label(constness.span, "trait fns cannot be const") + .emit(); } } fn no_questions_in_bounds(&self, bounds: &GenericBounds, where_: &str, is_trait: bool) { for bound in bounds { if let GenericBound::Trait(ref poly, TraitBoundModifier::Maybe) = *bound { - let mut err = self.err_handler().struct_span_err(poly.span, - &format!("`?Trait` is not permitted in {}", where_)); + let mut err = self.err_handler().struct_span_err( + poly.span, + &format!("`?Trait` is not permitted in {}", where_), + ); if is_trait { let path_str = pprust::path_to_string(&poly.trait_ref.path); err.note(&format!("traits are `?{}` by default", path_str)); @@ -223,48 +228,61 @@ impl<'a> AstValidator<'a> { ExprKind::Lit(..) | ExprKind::Err => {} ExprKind::Path(..) if allow_paths => {} ExprKind::Unary(UnOp::Neg, ref inner) - if match inner.kind { ExprKind::Lit(_) => true, _ => false } => {} - _ => self.err_handler().span_err(expr.span, "arbitrary expressions aren't allowed \ - in patterns") + if match inner.kind { + ExprKind::Lit(_) => true, + _ => false, + } => {} + _ => self.err_handler().span_err( + expr.span, + "arbitrary expressions aren't allowed \ + in patterns", + ), } } fn check_late_bound_lifetime_defs(&self, params: &[GenericParam]) { // Check only lifetime parameters are present and that the lifetime // parameters that are present have no bounds. - let non_lt_param_spans: Vec<_> = params.iter().filter_map(|param| match param.kind { - GenericParamKind::Lifetime { .. } => { - if !param.bounds.is_empty() { - let spans: Vec<_> = param.bounds.iter().map(|b| b.span()).collect(); - self.err_handler() - .span_err(spans, "lifetime bounds cannot be used in this context"); + let non_lt_param_spans: Vec<_> = params + .iter() + .filter_map(|param| match param.kind { + GenericParamKind::Lifetime { .. 
} => { + if !param.bounds.is_empty() { + let spans: Vec<_> = param.bounds.iter().map(|b| b.span()).collect(); + self.err_handler() + .span_err(spans, "lifetime bounds cannot be used in this context"); + } + None } - None - } - _ => Some(param.ident.span), - }).collect(); + _ => Some(param.ident.span), + }) + .collect(); if !non_lt_param_spans.is_empty() { - self.err_handler().span_err(non_lt_param_spans, - "only lifetime parameters can be used in this context"); + self.err_handler().span_err( + non_lt_param_spans, + "only lifetime parameters can be used in this context", + ); } } fn check_fn_decl(&self, fn_decl: &FnDecl) { match &*fn_decl.inputs { - [Param { ty, span, .. }] => if let TyKind::CVarArgs = ty.kind { - self.err_handler() - .span_err( + [Param { ty, span, .. }] => { + if let TyKind::CVarArgs = ty.kind { + self.err_handler().span_err( *span, "C-variadic function must be declared with at least one named argument", ); - }, - [ps @ .., _] => for Param { ty, span, .. } in ps { - if let TyKind::CVarArgs = ty.kind { - self.err_handler() - .span_err( + } + } + [ps @ .., _] => { + for Param { ty, span, .. } in ps { + if let TyKind::CVarArgs = ty.kind { + self.err_handler().span_err( *span, "`...` must be the last argument of a C-variadic function", ); + } } } _ => {} @@ -278,16 +296,22 @@ impl<'a> AstValidator<'a> { let arr = [sym::allow, sym::cfg, sym::cfg_attr, sym::deny, sym::forbid, sym::warn]; !arr.contains(&attr.name_or_empty()) && attr::is_builtin_attr(attr) }) - .for_each(|attr| if attr.is_doc_comment() { - self.err_handler().struct_span_err( - attr.span, - "documentation comments cannot be applied to function parameters" - ) - .span_label(attr.span, "doc comments are not allowed here") - .emit(); - } else { - self.err_handler().span_err(attr.span, "allow, cfg, cfg_attr, deny, \ - forbid, and warn are the only allowed built-in attributes in function parameters") + .for_each(|attr| { + if attr.is_doc_comment() { + self.err_handler() + .struct_span_err( + attr.span, + "documentation comments cannot be applied to function parameters", + ) + .span_label(attr.span, "doc comments are not allowed here") + .emit(); + } else { + self.err_handler().span_err( + attr.span, + "allow, cfg, cfg_attr, deny, \ + forbid, and warn are the only allowed built-in attributes in function parameters", + ) + } }); } @@ -348,14 +372,7 @@ enum GenericPosition { fn validate_generics_order<'a>( sess: &Session, handler: &errors::Handler, - generics: impl Iterator< - Item = ( - ParamKindOrd, - Option<&'a [GenericBound]>, - Span, - Option<String> - ), - >, + generics: impl Iterator<Item = (ParamKindOrd, Option<&'a [GenericBound]>, Span, Option<String>)>, pos: GenericPosition, span: Span, ) { @@ -410,13 +427,15 @@ fn validate_generics_order<'a>( }; for (param_ord, (max_param, spans)) in &out_of_order { - let mut err = handler.struct_span_err(spans.clone(), + let mut err = handler.struct_span_err( + spans.clone(), &format!( "{} {pos}s must be declared prior to {} {pos}s", param_ord, max_param, pos = pos_str, - )); + ), + ); if let GenericPosition::Param = pos { err.span_suggestion( span, @@ -464,8 +483,13 @@ impl<'a> Visitor<'a> for AstValidator<'a> { TyKind::BareFn(ref bfty) => { self.check_fn_decl(&bfty.decl); Self::check_decl_no_pat(&bfty.decl, |span, _| { - struct_span_err!(self.session, span, E0561, - "patterns aren't allowed in function pointer types").emit(); + struct_span_err!( + self.session, + span, + E0561, + "patterns aren't allowed in function pointer types" + ) + .emit(); }); 
self.check_late_bound_lifetime_defs(&bfty.generic_params); } @@ -474,8 +498,12 @@ impl<'a> Visitor<'a> for AstValidator<'a> { for bound in bounds { if let GenericBound::Outlives(ref lifetime) = *bound { if any_lifetime_bounds { - span_err!(self.session, lifetime.ident.span, E0226, - "only a single explicit lifetime bound is permitted"); + span_err!( + self.session, + lifetime.ident.span, + E0226, + "only a single explicit lifetime bound is permitted" + ); break; } any_lifetime_bounds = true; @@ -486,7 +514,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> { TyKind::ImplTrait(_, ref bounds) => { if self.is_impl_trait_banned { struct_span_err!( - self.session, ty.span, E0667, + self.session, + ty.span, + E0667, "`impl Trait` is not allowed in path parameters" ) .emit(); @@ -494,7 +524,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> { if let Some(outer_impl_trait_sp) = self.outer_impl_trait { struct_span_err!( - self.session, ty.span, E0666, + self.session, + ty.span, + E0666, "nested `impl Trait` is not allowed" ) .span_label(outer_impl_trait_sp, "outer `impl Trait`") @@ -502,8 +534,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> { .emit(); } - if !bounds.iter() - .any(|b| if let GenericBound::Trait(..) = *b { true } else { false }) { + if !bounds + .iter() + .any(|b| if let GenericBound::Trait(..) = *b { true } else { false }) + { self.err_handler().span_err(ty.span, "at least one trait must be specified"); } @@ -527,7 +561,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } fn visit_item(&mut self, item: &'a Item) { - if item.attrs.iter().any(|attr| is_proc_macro_attr(attr) ) { + if item.attrs.iter().any(|attr| is_proc_macro_attr(attr)) { self.has_proc_macro_decls = true; } @@ -537,7 +571,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> { if let TyKind::Err = ty.kind { self.err_handler() .struct_span_err(item.span, "`impl Trait for .. {}` is an obsolete syntax") - .help("use `auto trait Trait {}` instead").emit(); + .help("use `auto trait Trait {}` instead") + .emit(); } if unsafety == Unsafety::Unsafe && polarity == ImplPolarity::Negative { span_err!(self.session, item.span, E0198, "negative impls cannot be unsafe"); @@ -551,8 +586,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } } ItemKind::Impl(unsafety, polarity, defaultness, _, None, _, _) => { - self.invalid_visibility(&item.vis, - Some("place qualifiers on individual impl items instead")); + self.invalid_visibility( + &item.vis, + Some("place qualifiers on individual impl items instead"), + ); if unsafety == Unsafety::Unsafe { span_err!(self.session, item.span, E0197, "inherent impls cannot be unsafe"); } @@ -562,7 +599,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> { if defaultness == Defaultness::Default { self.err_handler() .struct_span_err(item.span, "inherent impls cannot be default") - .note("only trait implementations may be annotated with default").emit(); + .note("only trait implementations may be annotated with default") + .emit(); } } ItemKind::Fn(ref sig, ref generics, _) => { @@ -588,8 +626,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } // Reject C-varadic type unless the function is `unsafe extern "C"` semantically. match sig.header.ext { - Extern::Explicit(StrLit { symbol_unescaped: sym::C, .. }) | - Extern::Implicit if sig.header.unsafety == Unsafety::Unsafe => {} + Extern::Explicit(StrLit { symbol_unescaped: sym::C, .. 
}) + | Extern::Implicit + if sig.header.unsafety == Unsafety::Unsafe => {} _ => self.check_c_varadic_type(&sig.decl), } } @@ -611,19 +650,31 @@ impl<'a> Visitor<'a> for AstValidator<'a> { if is_auto == IsAuto::Yes { // Auto traits cannot have generics, super traits nor contain items. if !generics.params.is_empty() { - struct_span_err!(self.session, item.span, E0567, + struct_span_err!( + self.session, + item.span, + E0567, "auto traits cannot have generic parameters" - ).emit(); + ) + .emit(); } if !bounds.is_empty() { - struct_span_err!(self.session, item.span, E0568, + struct_span_err!( + self.session, + item.span, + E0568, "auto traits cannot have super traits" - ).emit(); + ) + .emit(); } if !trait_items.is_empty() { - struct_span_err!(self.session, item.span, E0380, + struct_span_err!( + self.session, + item.span, + E0380, "auto traits cannot have methods or associated items" - ).emit(); + ) + .emit(); } } self.no_questions_in_bounds(bounds, "supertraits", true); @@ -634,12 +685,11 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } ItemKind::Union(ref vdata, _) => { if let VariantData::Tuple(..) | VariantData::Unit(..) = vdata { - self.err_handler().span_err(item.span, - "tuple and unit unions are not permitted"); + self.err_handler() + .span_err(item.span, "tuple and unit unions are not permitted"); } if vdata.fields().is_empty() { - self.err_handler().span_err(item.span, - "unions cannot have zero fields"); + self.err_handler().span_err(item.span, "unions cannot have zero fields"); } } _ => {} @@ -653,9 +703,14 @@ impl<'a> Visitor<'a> for AstValidator<'a> { ForeignItemKind::Fn(ref decl, _) => { self.check_fn_decl(decl); Self::check_decl_no_pat(decl, |span, _| { - struct_span_err!(self.session, span, E0130, - "patterns aren't allowed in foreign function declarations") - .span_label(span, "pattern not allowed in foreign function").emit(); + struct_span_err!( + self.session, + span, + E0130, + "patterns aren't allowed in foreign function declarations" + ) + .span_label(span, "pattern not allowed in foreign function") + .emit(); }); } ForeignItemKind::Static(..) | ForeignItemKind::Ty | ForeignItemKind::Macro(..) => {} @@ -673,11 +728,16 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self.session, self.err_handler(), data.args.iter().map(|arg| { - (match arg { - GenericArg::Lifetime(..) => ParamKindOrd::Lifetime, - GenericArg::Type(..) => ParamKindOrd::Type, - GenericArg::Const(..) => ParamKindOrd::Const, - }, None, arg.span(), None) + ( + match arg { + GenericArg::Lifetime(..) => ParamKindOrd::Lifetime, + GenericArg::Type(..) => ParamKindOrd::Type, + GenericArg::Const(..) => ParamKindOrd::Const, + }, + None, + arg.span(), + None, + ) }), GenericPosition::Arg, generic_args.span(), @@ -686,8 +746,11 @@ impl<'a> Visitor<'a> for AstValidator<'a> { // Type bindings such as `Item = impl Debug` in `Iterator<Item = Debug>` // are allowed to contain nested `impl Trait`. 
self.with_impl_trait(None, |this| { - walk_list!(this, visit_assoc_ty_constraint_from_generic_args, - &data.constraints); + walk_list!( + this, + visit_assoc_ty_constraint_from_generic_args, + &data.constraints + ); }); } GenericArgs::Parenthesized(ref data) => { @@ -790,10 +853,16 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self.with_banned_assoc_ty_bound(|this| visit::walk_struct_def(this, s)) } - fn visit_enum_def(&mut self, enum_definition: &'a EnumDef, - generics: &'a Generics, item_id: NodeId, _: Span) { - self.with_banned_assoc_ty_bound( - |this| visit::walk_enum_def(this, enum_definition, generics, item_id)) + fn visit_enum_def( + &mut self, + enum_definition: &'a EnumDef, + generics: &'a Generics, + item_id: NodeId, + _: Span, + ) { + self.with_banned_assoc_ty_bound(|this| { + visit::walk_enum_def(this, enum_definition, generics, item_id) + }) } fn visit_impl_item(&mut self, ii: &'a AssocItem) { @@ -827,14 +896,18 @@ impl<'a> Visitor<'a> for AstValidator<'a> { if mut_ident { self.lint_buffer.buffer_lint( lint::builtin::PATTERNS_IN_FNS_WITHOUT_BODY, - ti.id, span, - "patterns aren't allowed in methods without bodies" + ti.id, + span, + "patterns aren't allowed in methods without bodies", ); } else { struct_span_err!( - self.session, span, E0642, + self.session, + span, + E0642, "patterns aren't allowed in methods without bodies" - ).emit(); + ) + .emit(); } }); } diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index d3656248b1b..979489319a5 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -7,73 +7,73 @@ //! Type-relative name resolution (methods, fields, associated items) happens in `librustc_typeck`. //! Lifetime names are resolved in `librustc/middle/resolve_lifetime.rs`. -#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] +// ignore-tidy-filelength +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(bool_to_option)] #![feature(crate_visibility_modifier)] #![feature(label_break_value)] #![feature(nll)] - -#![recursion_limit="256"] +#![recursion_limit = "256"] pub use rustc::hir::def::{Namespace, PerNS}; use Determinacy::*; +use rustc::hir::def::Namespace::*; +use rustc::hir::def::{self, CtorKind, CtorOf, DefKind, ExportMap, NonMacroAttrKind, PartialRes}; +use rustc::hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc::hir::map::Definitions; -use rustc::hir::{self, PrimTy, Bool, Char, Float, Int, Uint, Str}; +use rustc::hir::{self, Bool, Char, Float, Int, PrimTy, Str, Uint}; +use rustc::hir::{GlobMap, TraitMap}; +use rustc::lint; use rustc::middle::cstore::{CrateStore, MetadataLoaderDyn}; use rustc::session::Session; -use rustc::lint; -use rustc::hir::def::{self, DefKind, PartialRes, CtorKind, CtorOf, NonMacroAttrKind, ExportMap}; -use rustc::hir::def::Namespace::*; -use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefId}; -use rustc::hir::{TraitMap, GlobMap}; -use rustc::ty::{self, DefIdTree, ResolverOutputs}; -use rustc::util::nodemap::{NodeMap, NodeSet, FxHashMap, FxHashSet, DefIdMap}; use rustc::span_bug; +use rustc::ty::{self, DefIdTree, ResolverOutputs}; +use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet, NodeMap, NodeSet}; -use rustc_metadata::creader::{CrateLoader, CStore}; +use rustc_metadata::creader::{CStore, CrateLoader}; -use syntax::{struct_span_err, unwrap_or}; -use syntax::ast::{self, Name, NodeId, Ident, FloatTy, IntTy, UintTy}; -use syntax::ast::{CRATE_NODE_ID, Crate}; +use errors::{Applicability, DiagnosticBuilder}; +use 
syntax::ast::{self, FloatTy, Ident, IntTy, Name, NodeId, UintTy}; +use syntax::ast::{Crate, CRATE_NODE_ID}; use syntax::ast::{ItemKind, Path}; use syntax::attr; use syntax::print::pprust; -use syntax::symbol::{kw, sym}; use syntax::source_map::Spanned; +use syntax::symbol::{kw, sym}; use syntax::visit::{self, Visitor}; +use syntax::{struct_span_err, unwrap_or}; use syntax_expand::base::SyntaxExtension; -use syntax_pos::hygiene::{MacroKind, ExpnId, ExpnKind, Transparency, SyntaxContext}; +use syntax_pos::hygiene::{ExpnId, ExpnKind, MacroKind, SyntaxContext, Transparency}; use syntax_pos::{Span, DUMMY_SP}; -use errors::{Applicability, DiagnosticBuilder}; use log::debug; -use std::cell::{Cell, RefCell}; -use std::{cmp, fmt, iter, ptr}; -use std::collections::BTreeSet; +use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::ptr_key::PtrKey; use rustc_data_structures::sync::Lrc; -use rustc_data_structures::fx::FxIndexMap; +use std::cell::{Cell, RefCell}; +use std::collections::BTreeSet; +use std::{cmp, fmt, iter, ptr}; -use diagnostics::{Suggestion, ImportSuggestion}; -use diagnostics::{find_span_of_binding_until_next_binding, extend_span_to_previous_binding}; +use diagnostics::{extend_span_to_previous_binding, find_span_of_binding_until_next_binding}; +use diagnostics::{ImportSuggestion, Suggestion}; use late::{HasGenericParams, PathSource, Rib, RibKind::*}; -use resolve_imports::{ImportDirective, ImportDirectiveSubclass, NameResolution, ImportResolver}; use macros::{LegacyBinding, LegacyScope}; +use resolve_imports::{ImportDirective, ImportDirectiveSubclass, ImportResolver, NameResolution}; use rustc_error_codes::*; type Res = def::Res<NodeId>; +mod build_reduced_graph; +mod check_unused; mod def_collector; mod diagnostics; mod late; mod macros; -mod check_unused; -mod build_reduced_graph; mod resolve_imports; enum Weak { @@ -141,12 +141,7 @@ impl<'a> ParentScope<'a> { /// Creates a parent scope with the passed argument used as the module scope component, /// and other scope components set to default empty values. 
pub fn module(module: Module<'a>) -> ParentScope<'a> { - ParentScope { - module, - expansion: ExpnId::root(), - legacy: LegacyScope::Empty, - derives: &[], - } + ParentScope { module, expansion: ExpnId::root(), legacy: LegacyScope::Empty, derives: &[] } } } @@ -155,7 +150,7 @@ struct BindingError { name: Name, origin: BTreeSet<Span>, target: BTreeSet<Span>, - could_be_path: bool + could_be_path: bool, } impl PartialOrd for BindingError { @@ -242,25 +237,17 @@ impl Segment { } fn from_ident(ident: Ident) -> Segment { - Segment { - ident, - id: None, - } + Segment { ident, id: None } } fn names_to_string(segments: &[Segment]) -> String { - names_to_string(&segments.iter() - .map(|seg| seg.ident.name) - .collect::<Vec<_>>()) + names_to_string(&segments.iter().map(|seg| seg.ident.name).collect::<Vec<_>>()) } } impl<'a> From<&'a ast::PathSegment> for Segment { fn from(seg: &'a ast::PathSegment) -> Segment { - Segment { - ident: seg.ident, - id: Some(seg.id), - } + Segment { ident: seg.ident, id: Some(seg.id) } } } @@ -272,11 +259,7 @@ struct UsePlacementFinder { impl UsePlacementFinder { fn check(krate: &Crate, target_module: NodeId) -> (Option<Span>, bool) { - let mut finder = UsePlacementFinder { - target_module, - span: None, - found_use: false, - }; + let mut finder = UsePlacementFinder { target_module, span: None, found_use: false }; visit::walk_crate(&mut finder, krate); (finder.span, finder.found_use) } @@ -308,25 +291,27 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder { self.found_use = true; return; } - }, + } // don't place use before extern crate ItemKind::ExternCrate(_) => {} // but place them before the first other item - _ => if self.span.map_or(true, |span| item.span < span ) { - if !item.span.from_expansion() { - // don't insert between attributes and an item - if item.attrs.is_empty() { - self.span = Some(item.span.shrink_to_lo()); - } else { - // find the first attribute on the item - for attr in &item.attrs { - if self.span.map_or(true, |span| attr.span < span) { - self.span = Some(attr.span.shrink_to_lo()); + _ => { + if self.span.map_or(true, |span| item.span < span) { + if !item.span.from_expansion() { + // don't insert between attributes and an item + if item.attrs.is_empty() { + self.span = Some(item.span.shrink_to_lo()); + } else { + // find the first attribute on the item + for attr in &item.attrs { + if self.span.map_or(true, |span| attr.span < span) { + self.span = Some(attr.span.shrink_to_lo()); + } } } } } - }, + } } } } @@ -380,12 +365,15 @@ enum ModuleOrUniformRoot<'a> { impl ModuleOrUniformRoot<'_> { fn same_def(lhs: Self, rhs: Self) -> bool { match (lhs, rhs) { - (ModuleOrUniformRoot::Module(lhs), - ModuleOrUniformRoot::Module(rhs)) => lhs.def_id() == rhs.def_id(), - (ModuleOrUniformRoot::CrateRootAndExternPrelude, - ModuleOrUniformRoot::CrateRootAndExternPrelude) | - (ModuleOrUniformRoot::ExternPrelude, ModuleOrUniformRoot::ExternPrelude) | - (ModuleOrUniformRoot::CurrentScope, ModuleOrUniformRoot::CurrentScope) => true, + (ModuleOrUniformRoot::Module(lhs), ModuleOrUniformRoot::Module(rhs)) => { + lhs.def_id() == rhs.def_id() + } + ( + ModuleOrUniformRoot::CrateRootAndExternPrelude, + ModuleOrUniformRoot::CrateRootAndExternPrelude, + ) + | (ModuleOrUniformRoot::ExternPrelude, ModuleOrUniformRoot::ExternPrelude) + | (ModuleOrUniformRoot::CurrentScope, ModuleOrUniformRoot::CurrentScope) => true, _ => false, } } @@ -489,11 +477,13 @@ pub struct ModuleData<'a> { type Module<'a> = &'a ModuleData<'a>; impl<'a> ModuleData<'a> { - fn new(parent: Option<Module<'a>>, 
- kind: ModuleKind, - normal_ancestor_id: DefId, - expansion: ExpnId, - span: Span) -> Self { + fn new( + parent: Option<Module<'a>>, + kind: ModuleKind, + normal_ancestor_id: DefId, + expansion: ExpnId, + span: Span, + ) -> Self { ModuleData { parent, kind, @@ -511,7 +501,9 @@ impl<'a> ModuleData<'a> { } fn for_each_child<R, F>(&'a self, resolver: &mut R, mut f: F) - where R: AsMut<Resolver<'a>>, F: FnMut(&mut R, Ident, Namespace, &'a NameBinding<'a>) + where + R: AsMut<Resolver<'a>>, + F: FnMut(&mut R, Ident, Namespace, &'a NameBinding<'a>), { for (key, name_resolution) in resolver.as_mut().resolutions(self).borrow().iter() { name_resolution.borrow().binding.map(|binding| f(resolver, key.ident, key.ns, binding)); @@ -549,8 +541,9 @@ impl<'a> ModuleData<'a> { fn nearest_item_scope(&'a self) -> Module<'a> { match self.kind { - ModuleKind::Def(DefKind::Enum, ..) | ModuleKind::Def(DefKind::Trait, ..) => - self.parent.expect("enum or trait module without a parent"), + ModuleKind::Def(DefKind::Enum, ..) | ModuleKind::Def(DefKind::Trait, ..) => { + self.parent.expect("enum or trait module without a parent") + } _ => self, } } @@ -597,11 +590,7 @@ impl<'a> ToNameBinding<'a> for &'a NameBinding<'a> { enum NameBindingKind<'a> { Res(Res, /* is_macro_export */ bool), Module(Module<'a>), - Import { - binding: &'a NameBinding<'a>, - directive: &'a ImportDirective<'a>, - used: Cell<bool>, - }, + Import { binding: &'a NameBinding<'a>, directive: &'a ImportDirective<'a>, used: Cell<bool> }, } impl<'a> NameBindingKind<'a> { @@ -641,24 +630,22 @@ enum AmbiguityKind { impl AmbiguityKind { fn descr(self) -> &'static str { match self { - AmbiguityKind::Import => - "name vs any other name during import resolution", - AmbiguityKind::BuiltinAttr => - "built-in attribute vs any other name", - AmbiguityKind::DeriveHelper => - "derive helper attribute vs any other name", - AmbiguityKind::LegacyVsModern => - "`macro_rules` vs non-`macro_rules` from other module", - AmbiguityKind::GlobVsOuter => - "glob import vs any other name from outer scope during import/macro resolution", - AmbiguityKind::GlobVsGlob => - "glob import vs glob import in the same module", - AmbiguityKind::GlobVsExpanded => + AmbiguityKind::Import => "name vs any other name during import resolution", + AmbiguityKind::BuiltinAttr => "built-in attribute vs any other name", + AmbiguityKind::DeriveHelper => "derive helper attribute vs any other name", + AmbiguityKind::LegacyVsModern => "`macro_rules` vs non-`macro_rules` from other module", + AmbiguityKind::GlobVsOuter => { + "glob import vs any other name from outer scope during import/macro resolution" + } + AmbiguityKind::GlobVsGlob => "glob import vs glob import in the same module", + AmbiguityKind::GlobVsExpanded => { "glob import vs macro-expanded name in the same \ - module during import/macro resolution", - AmbiguityKind::MoreExpandedVsOuter => + module during import/macro resolution" + } + AmbiguityKind::MoreExpandedVsOuter => { "macro-expanded name vs less macro-expanded name \ - from outer scope during import/macro resolution", + from outer scope during import/macro resolution" + } } } } @@ -699,10 +686,11 @@ impl<'a> NameBinding<'a> { } fn is_ambiguity(&self) -> bool { - self.ambiguity.is_some() || match self.kind { - NameBindingKind::Import { binding, .. } => binding.is_ambiguity(), - _ => false, - } + self.ambiguity.is_some() + || match self.kind { + NameBindingKind::Import { binding, .. 
} => binding.is_ambiguity(), + _ => false, + } } // We sometimes need to treat variants as `pub` for backwards compatibility. @@ -716,8 +704,8 @@ impl<'a> NameBinding<'a> { fn is_variant(&self) -> bool { match self.kind { - NameBindingKind::Res(Res::Def(DefKind::Variant, _), _) | - NameBindingKind::Res(Res::Def(DefKind::Ctor(CtorOf::Variant, ..), _), _) => true, + NameBindingKind::Res(Res::Def(DefKind::Variant, _), _) + | NameBindingKind::Res(Res::Def(DefKind::Ctor(CtorOf::Variant, ..), _), _) => true, _ => false, } } @@ -725,13 +713,14 @@ impl<'a> NameBinding<'a> { fn is_extern_crate(&self) -> bool { match self.kind { NameBindingKind::Import { - directive: &ImportDirective { - subclass: ImportDirectiveSubclass::ExternCrate { .. }, .. - }, .. + directive: + &ImportDirective { subclass: ImportDirectiveSubclass::ExternCrate { .. }, .. }, + .. } => true, - NameBindingKind::Module( - &ModuleData { kind: ModuleKind::Def(DefKind::Mod, def_id, _), .. } - ) => def_id.index == CRATE_DEF_INDEX, + NameBindingKind::Module(&ModuleData { + kind: ModuleKind::Def(DefKind::Mod, def_id, _), + .. + }) => def_id.index == CRATE_DEF_INDEX, _ => false, } } @@ -937,10 +926,10 @@ pub struct Resolver<'a> { unused_macros: NodeMap<Span>, proc_macro_stubs: NodeSet, /// Traces collected during macro resolution and validated when it's complete. - single_segment_macro_resolutions: Vec<(Ident, MacroKind, ParentScope<'a>, - Option<&'a NameBinding<'a>>)>, - multi_segment_macro_resolutions: Vec<(Vec<Segment>, Span, MacroKind, ParentScope<'a>, - Option<Res>)>, + single_segment_macro_resolutions: + Vec<(Ident, MacroKind, ParentScope<'a>, Option<&'a NameBinding<'a>>)>, + multi_segment_macro_resolutions: + Vec<(Vec<Segment>, Span, MacroKind, ParentScope<'a>, Option<Res>)>, builtin_attrs: Vec<(Ident, ParentScope<'a>)>, /// `derive(Copy)` marks items they are applied to so they are treated specially later. 
/// Derive macros cannot modify the item themselves and have to store the markers in the global @@ -1002,8 +991,10 @@ impl<'a> ResolverArenas<'a> { fn alloc_name_binding(&'a self, name_binding: NameBinding<'a>) -> &'a NameBinding<'a> { self.name_bindings.alloc(name_binding) } - fn alloc_import_directive(&'a self, import_directive: ImportDirective<'a>) - -> &'a ImportDirective<'_> { + fn alloc_import_directive( + &'a self, + import_directive: ImportDirective<'a>, + ) -> &'a ImportDirective<'_> { self.import_directives.alloc(import_directive) } fn alloc_name_resolution(&'a self) -> &'a RefCell<NameResolution<'a>> { @@ -1018,7 +1009,9 @@ impl<'a> ResolverArenas<'a> { } impl<'a> AsMut<Resolver<'a>> for Resolver<'a> { - fn as_mut(&mut self) -> &mut Resolver<'a> { self } + fn as_mut(&mut self) -> &mut Resolver<'a> { + self + } } impl<'a, 'b> DefIdTree for &'a Resolver<'b> { @@ -1026,7 +1019,8 @@ impl<'a, 'b> DefIdTree for &'a Resolver<'b> { match id.krate { LOCAL_CRATE => self.definitions.def_key(id.index).parent, _ => self.cstore().def_key(id).parent, - }.map(|index| DefId { index, ..id }) + } + .map(|index| DefId { index, ..id }) } } @@ -1044,22 +1038,18 @@ impl<'a> hir::lowering::Resolver for Resolver<'a> { components: &[Name], ns: Namespace, ) -> (ast::Path, Res) { - let root = if crate_root.is_some() { - kw::PathRoot - } else { - kw::Crate - }; + let root = if crate_root.is_some() { kw::PathRoot } else { kw::Crate }; let segments = iter::once(Ident::with_dummy_span(root)) .chain( - crate_root.into_iter() + crate_root + .into_iter() .chain(components.iter().cloned()) - .map(Ident::with_dummy_span) - ).map(|i| self.new_ast_path_segment(i)).collect::<Vec<_>>(); + .map(Ident::with_dummy_span), + ) + .map(|i| self.new_ast_path_segment(i)) + .collect::<Vec<_>>(); - let path = ast::Path { - span, - segments, - }; + let path = ast::Path { span, segments }; let parent_scope = &ParentScope::module(self.graph_root); let res = match self.resolve_ast_path(&path, ns, parent_scope) { @@ -1098,27 +1088,20 @@ impl<'a> hir::lowering::Resolver for Resolver<'a> { } impl<'a> Resolver<'a> { - pub fn new(session: &'a Session, - krate: &Crate, - crate_name: &str, - metadata_loader: &'a MetadataLoaderDyn, - arenas: &'a ResolverArenas<'a>) - -> Resolver<'a> { + pub fn new( + session: &'a Session, + krate: &Crate, + crate_name: &str, + metadata_loader: &'a MetadataLoaderDyn, + arenas: &'a ResolverArenas<'a>, + ) -> Resolver<'a> { let root_def_id = DefId::local(CRATE_DEF_INDEX); - let root_module_kind = ModuleKind::Def( - DefKind::Mod, - root_def_id, - kw::Invalid, - ); + let root_module_kind = ModuleKind::Def(DefKind::Mod, root_def_id, kw::Invalid); let graph_root = arenas.alloc_module(ModuleData { no_implicit_prelude: attr::contains_name(&krate.attrs, sym::no_implicit_prelude), ..ModuleData::new(None, root_module_kind, root_def_id, ExpnId::root(), krate.span) }); - let empty_module_kind = ModuleKind::Def( - DefKind::Mod, - root_def_id, - kw::Invalid, - ); + let empty_module_kind = ModuleKind::Def(DefKind::Mod, root_def_id, kw::Invalid); let empty_module = arenas.alloc_module(ModuleData { no_implicit_prelude: true, ..ModuleData::new( @@ -1135,11 +1118,13 @@ impl<'a> Resolver<'a> { let mut definitions = Definitions::default(); definitions.create_root_def(crate_name, session.local_crate_disambiguator()); - let mut extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'_>> = - session.opts.externs.iter() - .filter(|(_, entry)| entry.add_prelude) - .map(|(name, _)| (Ident::from_str(name), Default::default())) - .collect(); 
+ let mut extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'_>> = session + .opts + .externs + .iter() + .filter(|(_, entry)| entry.add_prelude) + .map(|(name, _)| (Ident::from_str(name), Default::default())) + .collect(); if !attr::contains_name(&krate.attrs, sym::no_core) { extern_prelude.insert(Ident::with_dummy_span(sym::core), Default::default()); @@ -1245,10 +1230,12 @@ impl<'a> Resolver<'a> { multi_segment_macro_resolutions: Default::default(), builtin_attrs: Default::default(), containers_deriving_copy: Default::default(), - active_features: - features.declared_lib_features.iter().map(|(feat, ..)| *feat) - .chain(features.declared_lang_features.iter().map(|(feat, ..)| *feat)) - .collect(), + active_features: features + .declared_lib_features + .iter() + .map(|(feat, ..)| *feat) + .chain(features.declared_lang_features.iter().map(|(feat, ..)| *feat)) + .collect(), variant_vis: Default::default(), lint_buffer: lint::LintBuffer::default(), next_node_id: NodeId::from_u32(1), @@ -1256,7 +1243,9 @@ impl<'a> Resolver<'a> { } pub fn next_node_id(&mut self) -> NodeId { - let next = self.next_node_id.as_usize() + let next = self + .next_node_id + .as_usize() .checked_add(1) .expect("input too large; ran out of NodeIds"); self.next_node_id = ast::NodeId::from_usize(next); @@ -1281,9 +1270,11 @@ impl<'a> Resolver<'a> { glob_map: self.glob_map, maybe_unused_trait_imports: self.maybe_unused_trait_imports, maybe_unused_extern_crates: self.maybe_unused_extern_crates, - extern_prelude: self.extern_prelude.iter().map(|(ident, entry)| { - (ident.name, entry.introduced_by_item) - }).collect(), + extern_prelude: self + .extern_prelude + .iter() + .map(|(ident, entry)| (ident.name, entry.introduced_by_item)) + .collect(), } } @@ -1297,9 +1288,11 @@ impl<'a> Resolver<'a> { glob_map: self.glob_map.clone(), maybe_unused_trait_imports: self.maybe_unused_trait_imports.clone(), maybe_unused_extern_crates: self.maybe_unused_extern_crates.clone(), - extern_prelude: self.extern_prelude.iter().map(|(ident, entry)| { - (ident.name, entry.introduced_by_item) - }).collect(), + extern_prelude: self + .extern_prelude + .iter() + .map(|(ident, entry)| (ident.name, entry.introduced_by_item)) + .collect(), } } @@ -1341,8 +1334,7 @@ impl<'a> Resolver<'a> { /// Entry point to crate resolution. 
pub fn resolve_crate(&mut self, krate: &Crate) { - let _prof_timer = - self.session.prof.generic_activity("resolve_crate"); + let _prof_timer = self.session.prof.generic_activity("resolve_crate"); ImportResolver { r: self }.finalize_imports(); self.finalize_macro_resolutions(); @@ -1385,17 +1377,31 @@ impl<'a> Resolver<'a> { &module.lazy_resolutions } - fn resolution(&mut self, module: Module<'a>, key: BindingKey) - -> &'a RefCell<NameResolution<'a>> { - *self.resolutions(module).borrow_mut().entry(key) - .or_insert_with(|| self.arenas.alloc_name_resolution()) + fn resolution( + &mut self, + module: Module<'a>, + key: BindingKey, + ) -> &'a RefCell<NameResolution<'a>> { + *self + .resolutions(module) + .borrow_mut() + .entry(key) + .or_insert_with(|| self.arenas.alloc_name_resolution()) } - fn record_use(&mut self, ident: Ident, ns: Namespace, - used_binding: &'a NameBinding<'a>, is_lexical_scope: bool) { + fn record_use( + &mut self, + ident: Ident, + ns: Namespace, + used_binding: &'a NameBinding<'a>, + is_lexical_scope: bool, + ) { if let Some((b2, kind)) = used_binding.ambiguity { self.ambiguity_errors.push(AmbiguityError { - kind, ident, b1: used_binding, b2, + kind, + ident, + b1: used_binding, + b2, misc1: AmbiguityErrorMisc::None, misc2: AmbiguityErrorMisc::None, }); @@ -1497,8 +1503,9 @@ impl<'a> Resolver<'a> { loop { let visit = match scope { // Derive helpers are not in scope when resolving derives in the same container. - Scope::DeriveHelpers(expn_id) => - !(expn_id == parent_scope.expansion && macro_kind == Some(MacroKind::Derive)), + Scope::DeriveHelpers(expn_id) => { + !(expn_id == parent_scope.expansion && macro_kind == Some(MacroKind::Derive)) + } Scope::DeriveHelpersCompat => true, Scope::MacroRules(..) => true, Scope::CrateRoot => true, @@ -1523,32 +1530,31 @@ impl<'a> Resolver<'a> { // Derive helpers are not visible to code generated by bang or derive macros. let expn_data = expn_id.expn_data(); match expn_data.kind { - ExpnKind::Root | - ExpnKind::Macro(MacroKind::Bang, _) | - ExpnKind::Macro(MacroKind::Derive, _) => Scope::DeriveHelpersCompat, + ExpnKind::Root + | ExpnKind::Macro(MacroKind::Bang, _) + | ExpnKind::Macro(MacroKind::Derive, _) => Scope::DeriveHelpersCompat, _ => Scope::DeriveHelpers(expn_data.parent), } } Scope::DeriveHelpers(..) 
=> Scope::DeriveHelpersCompat, - Scope::DeriveHelpersCompat => - Scope::MacroRules(parent_scope.legacy), + Scope::DeriveHelpersCompat => Scope::MacroRules(parent_scope.legacy), Scope::MacroRules(legacy_scope) => match legacy_scope { - LegacyScope::Binding(binding) => Scope::MacroRules( - binding.parent_legacy_scope - ), + LegacyScope::Binding(binding) => Scope::MacroRules(binding.parent_legacy_scope), LegacyScope::Invocation(invoc_id) => Scope::MacroRules( - self.output_legacy_scopes.get(&invoc_id).cloned() - .unwrap_or(self.invocation_parent_scopes[&invoc_id].legacy) + self.output_legacy_scopes + .get(&invoc_id) + .cloned() + .unwrap_or(self.invocation_parent_scopes[&invoc_id].legacy), ), LegacyScope::Empty => Scope::Module(module), - } + }, Scope::CrateRoot => match ns { TypeNS => { ident.span.adjust(ExpnId::root()); Scope::ExternPrelude } ValueNS | MacroNS => break, - } + }, Scope::Module(module) => { use_prelude = !module.no_implicit_prelude; match self.hygienic_lexical_parent(module, &mut ident.span) { @@ -1573,7 +1579,7 @@ impl<'a> Resolver<'a> { TypeNS => Scope::BuiltinTypes, ValueNS => break, // nowhere else to search MacroNS => Scope::BuiltinAttrs, - } + }, Scope::BuiltinTypes => break, // nowhere else to search }; } @@ -1598,14 +1604,15 @@ impl<'a> Resolver<'a> { /// /// Invariant: This must only be called during main resolution, not during /// import resolution. - fn resolve_ident_in_lexical_scope(&mut self, - mut ident: Ident, - ns: Namespace, - parent_scope: &ParentScope<'a>, - record_used_id: Option<NodeId>, - path_span: Span, - ribs: &[Rib<'a>]) - -> Option<LexicalScopeBinding<'a>> { + fn resolve_ident_in_lexical_scope( + &mut self, + mut ident: Ident, + ns: Namespace, + parent_scope: &ParentScope<'a>, + record_used_id: Option<NodeId>, + path_span: Span, + ribs: &[Rib<'a>], + ) -> Option<LexicalScopeBinding<'a>> { assert!(ns == TypeNS || ns == ValueNS); if ident.name == kw::Invalid { return Some(LexicalScopeBinding::Res(Res::Err)); @@ -1626,20 +1633,21 @@ impl<'a> Resolver<'a> { // Walk backwards up the ribs in scope. let record_used = record_used_id.is_some(); let mut module = self.graph_root; - for i in (0 .. ribs.len()).rev() { + for i in (0..ribs.len()).rev() { debug!("walk rib\n{:?}", ribs[i].bindings); // Use the rib kind to determine whether we are resolving parameters // (modern hygiene) or local variables (legacy hygiene). - let rib_ident = if ribs[i].kind.contains_params() { - modern_ident - } else { - ident - }; + let rib_ident = if ribs[i].kind.contains_params() { modern_ident } else { ident }; if let Some(res) = ribs[i].bindings.get(&rib_ident).cloned() { // The ident resolves to a type parameter or local variable. - return Some(LexicalScopeBinding::Res( - self.validate_res_from_ribs(i, rib_ident, res, record_used, path_span, ribs), - )); + return Some(LexicalScopeBinding::Res(self.validate_res_from_ribs( + i, + rib_ident, + res, + record_used, + path_span, + ribs, + ))); } module = match ribs[i].kind { @@ -1648,12 +1656,11 @@ impl<'a> Resolver<'a> { // If an invocation of this macro created `ident`, give up on `ident` // and switch to `ident`'s source from the macro definition. ident.span.remove_mark(); - continue + continue; } _ => continue, }; - let item = self.resolve_ident_in_module_unadjusted( ModuleOrUniformRoot::Module(module), ident, @@ -1668,7 +1675,7 @@ impl<'a> Resolver<'a> { } match module.kind { - ModuleKind::Block(..) => {}, // We can see through blocks + ModuleKind::Block(..) 
=> {} // We can see through blocks _ => break, } } @@ -1677,8 +1684,12 @@ impl<'a> Resolver<'a> { let mut poisoned = None; loop { let opt_module = if let Some(node_id) = record_used_id { - self.hygienic_lexical_parent_with_compatibility_fallback(module, &mut ident.span, - node_id, &mut poisoned) + self.hygienic_lexical_parent_with_compatibility_fallback( + module, + &mut ident.span, + node_id, + &mut poisoned, + ) } else { self.hygienic_lexical_parent(module, &mut ident.span) }; @@ -1704,11 +1715,12 @@ impl<'a> Resolver<'a> { ProcMacroDeriveResolutionFallback(ident.span), ); } - return Some(LexicalScopeBinding::Item(binding)) + return Some(LexicalScopeBinding::Item(binding)); } Err(Determined) => continue, - Err(Undetermined) => - span_bug!(ident.span, "undetermined resolution during main resolution pass"), + Err(Undetermined) => { + span_bug!(ident.span, "undetermined resolution during main resolution pass") + } } } @@ -1719,8 +1731,9 @@ impl<'a> Resolver<'a> { return Some(LexicalScopeBinding::Item(binding)); } if let Some(ident) = self.registered_tools.get(&ident) { - let binding = (Res::ToolMod, ty::Visibility::Public, - ident.span, ExpnId::root()).to_name_binding(self.arenas); + let binding = + (Res::ToolMod, ty::Visibility::Public, ident.span, ExpnId::root()) + .to_name_binding(self.arenas); return Some(LexicalScopeBinding::Item(binding)); } } @@ -1740,8 +1753,9 @@ impl<'a> Resolver<'a> { if ns == TypeNS { if let Some(prim_ty) = self.primitive_type_table.primitive_types.get(&ident.name) { - let binding = (Res::PrimTy(*prim_ty), ty::Visibility::Public, - DUMMY_SP, ExpnId::root()).to_name_binding(self.arenas); + let binding = + (Res::PrimTy(*prim_ty), ty::Visibility::Public, DUMMY_SP, ExpnId::root()) + .to_name_binding(self.arenas); return Some(LexicalScopeBinding::Item(binding)); } } @@ -1749,8 +1763,11 @@ impl<'a> Resolver<'a> { None } - fn hygienic_lexical_parent(&mut self, module: Module<'a>, span: &mut Span) - -> Option<Module<'a>> { + fn hygienic_lexical_parent( + &mut self, + module: Module<'a>, + span: &mut Span, + ) -> Option<Module<'a>> { if !module.expansion.outer_expn_is_descendant_of(span.ctxt()) { return Some(self.macro_def_scope(span.remove_mark())); } @@ -1762,10 +1779,13 @@ impl<'a> Resolver<'a> { None } - fn hygienic_lexical_parent_with_compatibility_fallback(&mut self, module: Module<'a>, - span: &mut Span, node_id: NodeId, - poisoned: &mut Option<NodeId>) - -> Option<Module<'a>> { + fn hygienic_lexical_parent_with_compatibility_fallback( + &mut self, + module: Module<'a>, + span: &mut Span, + node_id: NodeId, + poisoned: &mut Option<NodeId>, + ) -> Option<Module<'a>> { if let module @ Some(..) = self.hygienic_lexical_parent(module, span) { return module; } @@ -1783,8 +1803,9 @@ impl<'a> Resolver<'a> { // So we have to fall back to the module's parent during lexical resolution in this case. if let Some(parent) = module.parent { // Inner module is inside the macro, parent module is outside of the macro. 
- if module.expansion != parent.expansion && - module.expansion.is_descendant_of(parent.expansion) { + if module.expansion != parent.expansion + && module.expansion.is_descendant_of(parent.expansion) + { // The macro is a proc macro derive if let Some(&def_id) = self.macro_defs.get(&module.expansion) { if let Some(ext) = self.get_macro_by_def_id(def_id) { @@ -1809,11 +1830,10 @@ impl<'a> Resolver<'a> { ns: Namespace, parent_scope: &ParentScope<'a>, record_used: bool, - path_span: Span + path_span: Span, ) -> Result<&'a NameBinding<'a>, Determinacy> { - self.resolve_ident_in_module_ext( - module, ident, ns, parent_scope, record_used, path_span - ).map_err(|(determinacy, _)| determinacy) + self.resolve_ident_in_module_ext(module, ident, ns, parent_scope, record_used, path_span) + .map_err(|(determinacy, _)| determinacy) } fn resolve_ident_in_module_ext( @@ -1823,7 +1843,7 @@ impl<'a> Resolver<'a> { ns: Namespace, parent_scope: &ParentScope<'a>, record_used: bool, - path_span: Span + path_span: Span, ) -> Result<&'a NameBinding<'a>, (Determinacy, Weak)> { let tmp_parent_scope; let mut adjusted_parent_scope = parent_scope; @@ -1838,13 +1858,18 @@ impl<'a> Resolver<'a> { ModuleOrUniformRoot::ExternPrelude => { ident.span.modernize_and_adjust(ExpnId::root()); } - ModuleOrUniformRoot::CrateRootAndExternPrelude | - ModuleOrUniformRoot::CurrentScope => { + ModuleOrUniformRoot::CrateRootAndExternPrelude | ModuleOrUniformRoot::CurrentScope => { // No adjustments } } let result = self.resolve_ident_in_module_unadjusted_ext( - module, ident, ns, adjusted_parent_scope, false, record_used, path_span, + module, + ident, + ns, + adjusted_parent_scope, + false, + record_used, + path_span, ); result } @@ -1909,7 +1934,13 @@ impl<'a> Resolver<'a> { crate_lint: CrateLint, ) -> PathResult<'a> { self.resolve_path_with_ribs( - path, opt_ns, parent_scope, record_used, path_span, crate_lint, None + path, + opt_ns, + parent_scope, + record_used, + path_span, + crate_lint, + None, ) } @@ -1930,11 +1961,7 @@ impl<'a> Resolver<'a> { debug!( "resolve_path(path={:?}, opt_ns={:?}, record_used={:?}, \ path_span={:?}, crate_lint={:?})", - path, - opt_ns, - record_used, - path_span, - crate_lint, + path, opt_ns, record_used, path_span, crate_lint, ); for (i, &Segment { ident, id }) in path.iter().enumerate() { @@ -1954,9 +1981,7 @@ impl<'a> Resolver<'a> { let ns = if is_last { opt_ns.unwrap_or(TypeNS) } else { TypeNS }; let name = ident.name; - allow_super &= ns == TypeNS && - (name == kw::SelfLower || - name == kw::Super); + allow_super &= ns == TypeNS && (name == kw::SelfLower || name == kw::Super); if ns == TypeNS { if allow_super && name == kw::Super { @@ -1971,7 +1996,8 @@ impl<'a> Resolver<'a> { if let Some(self_module) = self_module { if let Some(parent) = self_module.parent { module = Some(ModuleOrUniformRoot::Module( - self.resolve_self(&mut ctxt, parent))); + self.resolve_self(&mut ctxt, parent), + )); continue; } } @@ -1987,25 +2013,22 @@ impl<'a> Resolver<'a> { if name == kw::SelfLower { let mut ctxt = ident.span.ctxt().modern(); module = Some(ModuleOrUniformRoot::Module( - self.resolve_self(&mut ctxt, parent_scope.module))); + self.resolve_self(&mut ctxt, parent_scope.module), + )); continue; } if name == kw::PathRoot && ident.span.rust_2018() { module = Some(ModuleOrUniformRoot::ExternPrelude); continue; } - if name == kw::PathRoot && - ident.span.rust_2015() && self.session.rust_2018() { + if name == kw::PathRoot && ident.span.rust_2015() && self.session.rust_2018() { // `::a::b` from 2015 macro on 2018 global 
edition module = Some(ModuleOrUniformRoot::CrateRootAndExternPrelude); continue; } - if name == kw::PathRoot || - name == kw::Crate || - name == kw::DollarCrate { + if name == kw::PathRoot || name == kw::Crate || name == kw::DollarCrate { // `::a::b`, `crate::a::b` or `$crate::a::b` - module = Some(ModuleOrUniformRoot::Module( - self.resolve_crate_root(ident))); + module = Some(ModuleOrUniformRoot::Module(self.resolve_crate_root(ident))); continue; } } @@ -2033,26 +2056,44 @@ impl<'a> Resolver<'a> { let binding = if let Some(module) = module { self.resolve_ident_in_module( - module, ident, ns, parent_scope, record_used, path_span + module, + ident, + ns, + parent_scope, + record_used, + path_span, ) } else if ribs.is_none() || opt_ns.is_none() || opt_ns == Some(MacroNS) { let scopes = ScopeSet::All(ns, opt_ns.is_none()); - self.early_resolve_ident_in_lexical_scope(ident, scopes, parent_scope, record_used, - record_used, path_span) + self.early_resolve_ident_in_lexical_scope( + ident, + scopes, + parent_scope, + record_used, + record_used, + path_span, + ) } else { let record_used_id = if record_used { crate_lint.node_id().or(Some(CRATE_NODE_ID)) } else { None }; match self.resolve_ident_in_lexical_scope( - ident, ns, parent_scope, record_used_id, path_span, &ribs.unwrap()[ns] + ident, + ns, + parent_scope, + record_used_id, + path_span, + &ribs.unwrap()[ns], ) { // we found a locally-imported or available item/module Some(LexicalScopeBinding::Item(binding)) => Ok(binding), // we found a local variable or type param Some(LexicalScopeBinding::Res(res)) - if opt_ns == Some(TypeNS) || opt_ns == Some(ValueNS) => { + if opt_ns == Some(TypeNS) || opt_ns == Some(ValueNS) => + { record_segment_res(self, res); return PathResult::NonModule(PartialRes::with_unresolved_segments( - res, path.len() - 1 + res, + path.len() - 1, )); } _ => Err(Determinacy::determined(record_used)), @@ -2071,11 +2112,13 @@ impl<'a> Resolver<'a> { record_segment_res(self, res); } else if res == Res::ToolMod && i + 1 != path.len() { if binding.is_import() { - self.session.struct_span_err( - ident.span, "cannot use a tool module through an import" - ).span_note( - binding.span, "the tool module imported here" - ).emit(); + self.session + .struct_span_err( + ident.span, + "cannot use a tool module through an import", + ) + .span_note(binding.span, "the tool module imported here") + .emit(); } let res = Res::NonMacroAttr(NonMacroAttrKind::Tool); return PathResult::NonModule(PartialRes::new(res)); @@ -2089,7 +2132,8 @@ impl<'a> Resolver<'a> { second_binding, ); return PathResult::NonModule(PartialRes::with_unresolved_segments( - res, path.len() - i - 1 + res, + path.len() - i - 1, )); } else { let label = format!( @@ -2112,7 +2156,8 @@ impl<'a> Resolver<'a> { if let Some(ModuleOrUniformRoot::Module(module)) = module { if opt_ns.is_some() && !module.is_normal() { return PathResult::NonModule(PartialRes::with_unresolved_segments( - module.res().unwrap(), path.len() - i + module.res().unwrap(), + path.len() - i, )); } } @@ -2121,11 +2166,11 @@ impl<'a> Resolver<'a> { _ => None, }; let (label, suggestion) = if module_res == self.graph_root.res() { - let is_mod = |res| { - match res { Res::Def(DefKind::Mod, _) => true, _ => false } + let is_mod = |res| match res { + Res::Def(DefKind::Mod, _) => true, + _ => false, }; - let mut candidates = - self.lookup_import_candidates(ident, TypeNS, is_mod); + let mut candidates = self.lookup_import_candidates(ident, TypeNS, is_mod); candidates.sort_by_cached_key(|c| { (c.path.segments.len(), 
pprust::path_to_string(&c.path)) }); @@ -2191,7 +2236,7 @@ impl<'a> Resolver<'a> { // We're only interested in `use` paths which should start with // `{{root}}` currently. if first_name != kw::PathRoot { - return + return; } match path.get(1) { @@ -2213,19 +2258,20 @@ impl<'a> Resolver<'a> { // Careful: we still want to rewrite paths from // renamed extern crates. if let ImportDirectiveSubclass::ExternCrate { source: None, .. } = d.subclass { - return + return; } } } - let diag = lint::builtin::BuiltinLintDiagnostics - ::AbsPathWithModule(diag_span); + let diag = lint::builtin::BuiltinLintDiagnostics::AbsPathWithModule(diag_span); self.lint_buffer.buffer_lint_with_diagnostic( lint::builtin::ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, - diag_id, diag_span, + diag_id, + diag_span, "absolute paths must start with `self`, `super`, \ - `crate`, or an external crate name in the 2018 edition", - diag); + `crate`, or an external crate name in the 2018 edition", + diag, + ); } // Validate a local resolution (from ribs). @@ -2262,8 +2308,10 @@ impl<'a> Resolver<'a> { for rib in ribs { match rib.kind { - NormalRibKind | ModuleRibKind(..) | MacroDefinition(..) | - ForwardTyParamBanRibKind => { + NormalRibKind + | ModuleRibKind(..) + | MacroDefinition(..) + | ForwardTyParamBanRibKind => { // Nothing to do. Continue. } ItemRibKind(_) | FnItemRibKind | AssocItemRibKind => { @@ -2288,16 +2336,19 @@ impl<'a> Resolver<'a> { } } if let Some(res_err) = res_err { - self.report_error(span, res_err); - return Res::Err; + self.report_error(span, res_err); + return Res::Err; } } Res::Def(DefKind::TyParam, _) | Res::SelfTy(..) => { for rib in ribs { let has_generic_params = match rib.kind { - NormalRibKind | AssocItemRibKind | - ModuleRibKind(..) | MacroDefinition(..) | ForwardTyParamBanRibKind | - ConstantItemRibKind => { + NormalRibKind + | AssocItemRibKind + | ModuleRibKind(..) + | MacroDefinition(..) + | ForwardTyParamBanRibKind + | ConstantItemRibKind => { // Nothing to do. Continue. continue; } @@ -2307,8 +2358,13 @@ impl<'a> Resolver<'a> { }; if record_used { - self.report_error(span, ResolutionError::GenericParamsFromOuterFunction( - res, has_generic_params)); + self.report_error( + span, + ResolutionError::GenericParamsFromOuterFunction( + res, + has_generic_params, + ), + ); } return Res::Err; } @@ -2330,8 +2386,13 @@ impl<'a> Resolver<'a> { // This was an attempt to use a const parameter outside its scope. if record_used { - self.report_error(span, ResolutionError::GenericParamsFromOuterFunction( - res, has_generic_params)); + self.report_error( + span, + ResolutionError::GenericParamsFromOuterFunction( + res, + has_generic_params, + ), + ); } return Res::Err; } @@ -2368,11 +2429,14 @@ impl<'a> Resolver<'a> { // Some non-controversial subset of ambiguities "modern macro name" vs "macro_rules" // is disambiguated to mitigate regressions from macro modularization. // Scoping for `macro_rules` behaves like scoping for `let` at module level, in general. 
- match (self.binding_parent_modules.get(&PtrKey(legacy)), - self.binding_parent_modules.get(&PtrKey(modern))) { - (Some(legacy), Some(modern)) => - legacy.normal_ancestor_id == modern.normal_ancestor_id && - modern.is_ancestor_of(legacy), + match ( + self.binding_parent_modules.get(&PtrKey(legacy)), + self.binding_parent_modules.get(&PtrKey(modern)), + ) { + (Some(legacy), Some(modern)) => { + legacy.normal_ancestor_id == modern.normal_ancestor_id + && modern.is_ancestor_of(legacy) + } _ => false, } } @@ -2387,8 +2451,10 @@ impl<'a> Resolver<'a> { }; let (built_in, from) = if from_prelude { ("", " from prelude") - } else if b.is_extern_crate() && !b.is_import() && - self.session.opts.externs.get(&ident.as_str()).is_some() { + } else if b.is_extern_crate() + && !b.is_import() + && self.session.opts.externs.get(&ident.as_str()).is_some() + { ("", " passed with `--extern`") } else if add_built_in { (" built-in", "") @@ -2397,12 +2463,16 @@ impl<'a> Resolver<'a> { }; let article = if built_in.is_empty() { res.article() } else { "a" }; - format!("{a}{built_in} {thing}{from}", - a = article, thing = res.descr(), built_in = built_in, from = from) + format!( + "{a}{built_in} {thing}{from}", + a = article, + thing = res.descr(), + built_in = built_in, + from = from + ) } else { let introduced = if b.is_import() { "imported" } else { "defined" }; - format!("the {thing} {introduced} here", - thing = res.descr(), introduced = introduced) + format!("the {thing} {introduced} here", thing = res.descr(), introduced = introduced) } } @@ -2415,40 +2485,56 @@ impl<'a> Resolver<'a> { (b1, b2, misc1, misc2, false) }; - let mut err = struct_span_err!(self.session, ident.span, E0659, - "`{ident}` is ambiguous ({why})", - ident = ident, why = kind.descr()); + let mut err = struct_span_err!( + self.session, + ident.span, + E0659, + "`{ident}` is ambiguous ({why})", + ident = ident, + why = kind.descr() + ); err.span_label(ident.span, "ambiguous name"); let mut could_refer_to = |b: &NameBinding<'_>, misc: AmbiguityErrorMisc, also: &str| { let what = self.binding_description(b, ident, misc == AmbiguityErrorMisc::FromPrelude); - let note_msg = format!("`{ident}` could{also} refer to {what}", - ident = ident, also = also, what = what); + let note_msg = format!( + "`{ident}` could{also} refer to {what}", + ident = ident, + also = also, + what = what + ); let thing = b.res().descr(); let mut help_msgs = Vec::new(); - if b.is_glob_import() && (kind == AmbiguityKind::GlobVsGlob || - kind == AmbiguityKind::GlobVsExpanded || - kind == AmbiguityKind::GlobVsOuter && - swapped != also.is_empty()) { - help_msgs.push(format!("consider adding an explicit import of \ - `{ident}` to disambiguate", ident = ident)) + if b.is_glob_import() + && (kind == AmbiguityKind::GlobVsGlob + || kind == AmbiguityKind::GlobVsExpanded + || kind == AmbiguityKind::GlobVsOuter && swapped != also.is_empty()) + { + help_msgs.push(format!( + "consider adding an explicit import of \ + `{ident}` to disambiguate", + ident = ident + )) } if b.is_extern_crate() && ident.span.rust_2018() { help_msgs.push(format!( "use `::{ident}` to refer to this {thing} unambiguously", - ident = ident, thing = thing, + ident = ident, + thing = thing, )) } if misc == AmbiguityErrorMisc::SuggestCrate { help_msgs.push(format!( "use `crate::{ident}` to refer to this {thing} unambiguously", - ident = ident, thing = thing, + ident = ident, + thing = thing, )) } else if misc == AmbiguityErrorMisc::SuggestSelf { help_msgs.push(format!( "use `self::{ident}` to refer to this {thing} 
unambiguously", - ident = ident, thing = thing, + ident = ident, + thing = thing, )) } @@ -2493,14 +2579,16 @@ impl<'a> Resolver<'a> { E0603, "{}{} `{}` is private", binding.res().descr(), - if is_constructor { " constructor"} else { "" }, + if is_constructor { " constructor" } else { "" }, ident.name, ) }; let mut err = if let NameBindingKind::Res( - Res::Def(DefKind::Ctor(CtorOf::Struct, CtorKind::Fn), ctor_def_id), _ - ) = binding.kind { + Res::Def(DefKind::Ctor(CtorOf::Struct, CtorKind::Fn), ctor_def_id), + _, + ) = binding.kind + { let def_id = (&*self).parent(ctor_def_id).expect("no parent for a constructor"); if let Some(fields) = self.field_names.get(&def_id) { let mut err = mk_struct_span_error(true); @@ -2532,12 +2620,14 @@ impl<'a> Resolver<'a> { } } - fn report_conflict<'b>(&mut self, - parent: Module<'_>, - ident: Ident, - ns: Namespace, - new_binding: &NameBinding<'b>, - old_binding: &NameBinding<'b>) { + fn report_conflict<'b>( + &mut self, + parent: Module<'_>, + ident: Ident, + ns: Namespace, + new_binding: &NameBinding<'b>, + old_binding: &NameBinding<'b>, + ) { // Error on the second of two conflicting names if old_binding.span.lo() > new_binding.span.lo() { return self.report_conflict(parent, ident, ns, old_binding, new_binding); @@ -2592,10 +2682,12 @@ impl<'a> Resolver<'a> { }, }; - err.note(&format!("`{}` must be defined only once in the {} namespace of this {}", - name, - ns.descr(), - container)); + err.note(&format!( + "`{}` must be defined only once in the {} namespace of this {}", + name, + ns.descr(), + container + )); err.span_label(span, format!("`{}` re{} here", name, new_participle)); err.span_label( @@ -2608,39 +2700,45 @@ impl<'a> Resolver<'a> { let directive = match (&new_binding.kind, &old_binding.kind) { // If there are two imports where one or both have attributes then prefer removing the // import without attributes. - (Import { directive: new, .. }, Import { directive: old, .. }) if { - !new_binding.span.is_dummy() && !old_binding.span.is_dummy() && - (new.has_attributes || old.has_attributes) - } => { + (Import { directive: new, .. }, Import { directive: old, .. }) + if { + !new_binding.span.is_dummy() + && !old_binding.span.is_dummy() + && (new.has_attributes || old.has_attributes) + } => + { if old.has_attributes { Some((new, new_binding.span, true)) } else { Some((old, old_binding.span, true)) } - }, + } // Otherwise prioritize the new binding. - (Import { directive, .. }, other) if !new_binding.span.is_dummy() => - Some((directive, new_binding.span, other.is_import())), - (other, Import { directive, .. }) if !old_binding.span.is_dummy() => - Some((directive, old_binding.span, other.is_import())), + (Import { directive, .. }, other) if !new_binding.span.is_dummy() => { + Some((directive, new_binding.span, other.is_import())) + } + (other, Import { directive, .. }) if !old_binding.span.is_dummy() => { + Some((directive, old_binding.span, other.is_import())) + } _ => None, }; // Check if the target of the use for both bindings is the same. let duplicate = new_binding.res().opt_def_id() == old_binding.res().opt_def_id(); let has_dummy_span = new_binding.span.is_dummy() || old_binding.span.is_dummy(); - let from_item = self.extern_prelude.get(&ident) - .map(|entry| entry.introduced_by_item) - .unwrap_or(true); + let from_item = + self.extern_prelude.get(&ident).map(|entry| entry.introduced_by_item).unwrap_or(true); // Only suggest removing an import if both bindings are to the same def, if both spans // aren't dummy spans. 
Further, if both bindings are imports, then the ident must have // been introduced by a item. - let should_remove_import = duplicate && !has_dummy_span && - ((new_binding.is_extern_crate() || old_binding.is_extern_crate()) || from_item); + let should_remove_import = duplicate + && !has_dummy_span + && ((new_binding.is_extern_crate() || old_binding.is_extern_crate()) || from_item); match directive { - Some((directive, span, true)) if should_remove_import && directive.is_nested() => - self.add_suggestion_for_duplicate_nested_use(&mut err, directive, span), + Some((directive, span, true)) if should_remove_import && directive.is_nested() => { + self.add_suggestion_for_duplicate_nested_use(&mut err, directive, span) + } Some((directive, _, true)) if should_remove_import && !directive.is_glob() => { // Simple case - remove the entire import. Due to the above match arm, this can // only be a single use so just remove it entirely. @@ -2650,10 +2748,11 @@ impl<'a> Resolver<'a> { String::new(), Applicability::MaybeIncorrect, ); - }, - Some((directive, span, _)) => - self.add_suggestion_for_rename_of_use(&mut err, name, directive, span), - _ => {}, + } + Some((directive, span, _)) => { + self.add_suggestion_for_rename_of_use(&mut err, name, directive, span) + } + _ => {} } err.emit(); @@ -2684,13 +2783,14 @@ impl<'a> Resolver<'a> { let mut suggestion = None; match directive.subclass { - ImportDirectiveSubclass::SingleImport { type_ns_only: true, .. } => - suggestion = Some(format!("self as {}", suggested_name)), + ImportDirectiveSubclass::SingleImport { type_ns_only: true, .. } => { + suggestion = Some(format!("self as {}", suggested_name)) + } ImportDirectiveSubclass::SingleImport { source, .. } => { - if let Some(pos) = source.span.hi().0.checked_sub(binding_span.lo().0) - .map(|pos| pos as usize) { - if let Ok(snippet) = self.session.source_map() - .span_to_snippet(binding_span) { + if let Some(pos) = + source.span.hi().0.checked_sub(binding_span.lo().0).map(|pos| pos as usize) + { + if let Ok(snippet) = self.session.source_map().span_to_snippet(binding_span) { if pos <= snippet.len() { suggestion = Some(format!( "{} as {}{}", @@ -2702,12 +2802,13 @@ impl<'a> Resolver<'a> { } } } - ImportDirectiveSubclass::ExternCrate { source, target, .. } => + ImportDirectiveSubclass::ExternCrate { source, target, .. } => { suggestion = Some(format!( "extern crate {} as {};", source.unwrap_or(target.name), suggested_name, - )), + )) + } _ => unreachable!(), } @@ -2762,21 +2863,28 @@ impl<'a> Resolver<'a> { // - Given `use issue_52891::{d, e, a};` where `a` is a duplicate then `binding_span` is // `a` and `directive.use_span` is `issue_52891::{d, e, a};`. - let (found_closing_brace, span) = find_span_of_binding_until_next_binding( - self.session, binding_span, directive.use_span, - ); + let (found_closing_brace, span) = + find_span_of_binding_until_next_binding(self.session, binding_span, directive.use_span); // If there was a closing brace then identify the span to remove any trailing commas from // previous imports. if found_closing_brace { if let Some(span) = extend_span_to_previous_binding(self.session, span) { - err.tool_only_span_suggestion(span, message, String::new(), - Applicability::MaybeIncorrect); + err.tool_only_span_suggestion( + span, + message, + String::new(), + Applicability::MaybeIncorrect, + ); } else { // Remove the entire line if we cannot extend the span back, this indicates a // `issue_52891::{self}` case. 
- err.span_suggestion(directive.use_span_with_attributes, message, String::new(), - Applicability::MaybeIncorrect); + err.span_suggestion( + directive.use_span_with_attributes, + message, + String::new(), + Applicability::MaybeIncorrect, + ); } return; @@ -2785,8 +2893,11 @@ impl<'a> Resolver<'a> { err.span_suggestion(span, message, String::new(), Applicability::MachineApplicable); } - fn extern_prelude_get(&mut self, ident: Ident, speculative: bool) - -> Option<&'a NameBinding<'a>> { + fn extern_prelude_get( + &mut self, + ident: Ident, + speculative: bool, + ) -> Option<&'a NameBinding<'a>> { if ident.is_path_segment_keyword() { // Make sure `self`, `super` etc produce an error when passed to here. return None; @@ -2801,14 +2912,17 @@ impl<'a> Resolver<'a> { let crate_id = if !speculative { self.crate_loader.process_path_extern(ident.name, ident.span) } else if let Some(crate_id) = - self.crate_loader.maybe_process_path_extern(ident.name, ident.span) { + self.crate_loader.maybe_process_path_extern(ident.name, ident.span) + { crate_id } else { return None; }; let crate_root = self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX }); - Some((crate_root, ty::Visibility::Public, DUMMY_SP, ExpnId::root()) - .to_name_binding(self.arenas)) + Some( + (crate_root, ty::Visibility::Public, DUMMY_SP, ExpnId::root()) + .to_name_binding(self.arenas), + ) } }) } @@ -2819,15 +2933,17 @@ impl<'a> Resolver<'a> { /// just that an error occurred. // FIXME(Manishearth): intra-doc links won't get warned of epoch changes. pub fn resolve_str_path_error( - &mut self, span: Span, path_str: &str, ns: Namespace, module_id: NodeId + &mut self, + span: Span, + path_str: &str, + ns: Namespace, + module_id: NodeId, ) -> Result<(ast::Path, Res), ()> { let path = if path_str.starts_with("::") { ast::Path { span, segments: iter::once(Ident::with_dummy_span(kw::PathRoot)) - .chain({ - path_str.split("::").skip(1).map(Ident::from_str) - }) + .chain({ path_str.split("::").skip(1).map(Ident::from_str) }) .map(|i| self.new_ast_path_segment(i)) .collect(), } @@ -2858,24 +2974,27 @@ impl<'a> Resolver<'a> { parent_scope: &ParentScope<'a>, ) -> Result<Res, (Span, ResolutionError<'a>)> { match self.resolve_path( - &Segment::from_path(path), Some(ns), parent_scope, true, path.span, CrateLint::No + &Segment::from_path(path), + Some(ns), + parent_scope, + true, + path.span, + CrateLint::No, ) { - PathResult::Module(ModuleOrUniformRoot::Module(module)) => - Ok(module.res().unwrap()), - PathResult::NonModule(path_res) if path_res.unresolved_segments() == 0 => - Ok(path_res.base_res()), - PathResult::NonModule(..) => { - Err((path.span, ResolutionError::FailedToResolve { + PathResult::Module(ModuleOrUniformRoot::Module(module)) => Ok(module.res().unwrap()), + PathResult::NonModule(path_res) if path_res.unresolved_segments() == 0 => { + Ok(path_res.base_res()) + } + PathResult::NonModule(..) => Err(( + path.span, + ResolutionError::FailedToResolve { label: String::from("type-relative paths are not supported in this context"), suggestion: None, - })) - } + }, + )), PathResult::Module(..) | PathResult::Indeterminate => unreachable!(), PathResult::Failed { span, label, suggestion, .. 
} => { - Err((span, ResolutionError::FailedToResolve { - label, - suggestion, - })) + Err((span, ResolutionError::FailedToResolve { label, suggestion })) } } } @@ -2899,9 +3018,7 @@ impl<'a> Resolver<'a> { fn names_to_string(names: &[Name]) -> String { let mut result = String::new(); - for (i, name) in names.iter() - .filter(|name| **name != kw::PathRoot) - .enumerate() { + for (i, name) in names.iter().filter(|name| **name != kw::PathRoot).enumerate() { if i > 0 { result.push_str("::"); } @@ -2966,9 +3083,9 @@ impl CrateLint { fn node_id(&self) -> Option<NodeId> { match *self { CrateLint::No => None, - CrateLint::SimplePath(id) | - CrateLint::UsePath { root_id: id, .. } | - CrateLint::QPathTrait { qpath_id: id, .. } => Some(id), + CrateLint::SimplePath(id) + | CrateLint::UsePath { root_id: id, .. } + | CrateLint::QPathTrait { qpath_id: id, .. } => Some(id), } } } diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index 3ad53737f49..800c40ffdb1 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -2,26 +2,26 @@ use ImportDirectiveSubclass::*; -use crate::{AmbiguityError, AmbiguityKind, AmbiguityErrorMisc}; -use crate::{CrateLint, Module, ModuleOrUniformRoot, PerNS, ScopeSet, ParentScope, Weak}; -use crate::Determinacy::{self, *}; -use crate::Namespace::{self, TypeNS, MacroNS}; -use crate::{NameBinding, NameBindingKind, ToNameBinding, PathResult, PrivacyError}; -use crate::{Resolver, ResolutionError, BindingKey, Segment, ModuleKind}; -use crate::{names_to_string, module_to_string}; use crate::diagnostics::Suggestion; +use crate::Determinacy::{self, *}; +use crate::Namespace::{self, MacroNS, TypeNS}; +use crate::{module_to_string, names_to_string}; +use crate::{AmbiguityError, AmbiguityErrorMisc, AmbiguityKind}; +use crate::{BindingKey, ModuleKind, ResolutionError, Resolver, Segment}; +use crate::{CrateLint, Module, ModuleOrUniformRoot, ParentScope, PerNS, ScopeSet, Weak}; +use crate::{NameBinding, NameBindingKind, PathResult, PrivacyError, ToNameBinding}; -use errors::{Applicability, pluralize}; +use errors::{pluralize, Applicability}; -use rustc_data_structures::ptr_key::PtrKey; -use rustc::ty; +use rustc::hir::def::{self, Export, PartialRes}; +use rustc::hir::def_id::DefId; use rustc::lint::builtin::BuiltinLintDiagnostics; use rustc::lint::builtin::{PUB_USE_OF_PRIVATE_EXTERN_CRATE, UNUSED_IMPORTS}; -use rustc::hir::def_id::DefId; -use rustc::hir::def::{self, PartialRes, Export}; use rustc::session::DiagnosticMessageId; +use rustc::ty; use rustc::util::nodemap::FxHashSet; use rustc::{bug, span_bug}; +use rustc_data_structures::ptr_key::PtrKey; use syntax::ast::{Ident, Name, NodeId}; use syntax::symbol::kw; @@ -59,7 +59,7 @@ pub enum ImportDirectiveSubclass<'a> { GlobImport { is_prelude: bool, max_vis: Cell<ty::Visibility>, // The visibility of the greatest re-export. - // n.b. `max_vis` is only used in `finalize_import` to check for re-export errors. + // n.b. `max_vis` is only used in `finalize_import` to check for re-export errors. }, ExternCrate { source: Option<Name>, @@ -117,13 +117,16 @@ crate struct ImportDirective<'a> { impl<'a> ImportDirective<'a> { pub fn is_glob(&self) -> bool { - match self.subclass { ImportDirectiveSubclass::GlobImport { .. } => true, _ => false } + match self.subclass { + ImportDirectiveSubclass::GlobImport { .. } => true, + _ => false, + } } pub fn is_nested(&self) -> bool { match self.subclass { ImportDirectiveSubclass::SingleImport { nested, .. 
} => nested, - _ => false + _ => false, } } @@ -147,8 +150,11 @@ impl<'a> NameResolution<'a> { // Returns the binding for the name if it is known or None if it not known. pub(crate) fn binding(&self) -> Option<&'a NameBinding<'a>> { self.binding.and_then(|binding| { - if !binding.is_glob_import() || - self.single_imports.is_empty() { Some(binding) } else { None } + if !binding.is_glob_import() || self.single_imports.is_empty() { + Some(binding) + } else { + None + } }) } @@ -168,8 +174,15 @@ impl<'a> Resolver<'a> { path_span: Span, ) -> Result<&'a NameBinding<'a>, Determinacy> { self.resolve_ident_in_module_unadjusted_ext( - module, ident, ns, parent_scope, false, record_used, path_span - ).map_err(|(determinacy, _)| determinacy) + module, + ident, + ns, + parent_scope, + false, + record_used, + path_span, + ) + .map_err(|(determinacy, _)| determinacy) } /// Attempts to resolve `ident` in namespaces `ns` of `module`. @@ -189,8 +202,12 @@ impl<'a> Resolver<'a> { ModuleOrUniformRoot::CrateRootAndExternPrelude => { assert!(!restricted_shadowing); let binding = self.early_resolve_ident_in_lexical_scope( - ident, ScopeSet::AbsolutePath(ns), parent_scope, - record_used, record_used, path_span, + ident, + ScopeSet::AbsolutePath(ns), + parent_scope, + record_used, + record_used, + path_span, ); return binding.map_err(|determinacy| (determinacy, Weak::No)); } @@ -205,20 +222,17 @@ impl<'a> Resolver<'a> { Err((Undetermined, Weak::No)) } else { Err((Determined, Weak::No)) - } + }; } ModuleOrUniformRoot::CurrentScope => { assert!(!restricted_shadowing); if ns == TypeNS { - if ident.name == kw::Crate || - ident.name == kw::DollarCrate { + if ident.name == kw::Crate || ident.name == kw::DollarCrate { let module = self.resolve_crate_root(ident); - let binding = (module, ty::Visibility::Public, - module.span, ExpnId::root()) - .to_name_binding(self.arenas); + let binding = (module, ty::Visibility::Public, module.span, ExpnId::root()) + .to_name_binding(self.arenas); return Ok(binding); - } else if ident.name == kw::Super || - ident.name == kw::SelfLower { + } else if ident.name == kw::Super || ident.name == kw::SelfLower { // FIXME: Implement these with renaming requirements so that e.g. // `use super;` doesn't work, but `use super as name;` does. // Fall through here to get an error from `early_resolve_...`. @@ -227,16 +241,20 @@ impl<'a> Resolver<'a> { let scopes = ScopeSet::All(ns, true); let binding = self.early_resolve_ident_in_lexical_scope( - ident, scopes, parent_scope, record_used, record_used, path_span + ident, + scopes, + parent_scope, + record_used, + record_used, + path_span, ); return binding.map_err(|determinacy| (determinacy, Weak::No)); } }; let key = self.new_key(ident, ns); - let resolution = self.resolution(module, key) - .try_borrow_mut() - .map_err(|_| (Determined, Weak::No))?; // This happens when there is a cycle of imports. + let resolution = + self.resolution(module, key).try_borrow_mut().map_err(|_| (Determined, Weak::No))?; // This happens when there is a cycle of imports. if let Some(binding) = resolution.binding { if !restricted_shadowing && binding.expansion != ExpnId::root() { @@ -254,54 +272,60 @@ impl<'a> Resolver<'a> { } // `extern crate` are always usable for backwards compatibility, see issue #37020, // remove this together with `PUB_USE_OF_PRIVATE_EXTERN_CRATE`. 
- let usable = this.is_accessible_from(binding.vis, parent_scope.module) || - binding.is_extern_crate(); + let usable = this.is_accessible_from(binding.vis, parent_scope.module) + || binding.is_extern_crate(); if usable { Ok(binding) } else { Err((Determined, Weak::No)) } }; if record_used { - return resolution.binding.and_then(|binding| { - // If the primary binding is blacklisted, search further and return the shadowed - // glob binding if it exists. What we really want here is having two separate - // scopes in a module - one for non-globs and one for globs, but until that's done - // use this hack to avoid inconsistent resolution ICEs during import validation. - if let Some(blacklisted_binding) = self.blacklisted_binding { - if ptr::eq(binding, blacklisted_binding) { - return resolution.shadowed_glob; - } - } - Some(binding) - }).ok_or((Determined, Weak::No)).and_then(|binding| { - if self.last_import_segment && check_usable(self, binding).is_err() { - Err((Determined, Weak::No)) - } else { - self.record_use(ident, ns, binding, restricted_shadowing); - - if let Some(shadowed_glob) = resolution.shadowed_glob { - // Forbid expanded shadowing to avoid time travel. - if restricted_shadowing && - binding.expansion != ExpnId::root() && - binding.res() != shadowed_glob.res() { - self.ambiguity_errors.push(AmbiguityError { - kind: AmbiguityKind::GlobVsExpanded, - ident, - b1: binding, - b2: shadowed_glob, - misc1: AmbiguityErrorMisc::None, - misc2: AmbiguityErrorMisc::None, - }); + return resolution + .binding + .and_then(|binding| { + // If the primary binding is blacklisted, search further and return the shadowed + // glob binding if it exists. What we really want here is having two separate + // scopes in a module - one for non-globs and one for globs, but until that's done + // use this hack to avoid inconsistent resolution ICEs during import validation. + if let Some(blacklisted_binding) = self.blacklisted_binding { + if ptr::eq(binding, blacklisted_binding) { + return resolution.shadowed_glob; } } + Some(binding) + }) + .ok_or((Determined, Weak::No)) + .and_then(|binding| { + if self.last_import_segment && check_usable(self, binding).is_err() { + Err((Determined, Weak::No)) + } else { + self.record_use(ident, ns, binding, restricted_shadowing); + + if let Some(shadowed_glob) = resolution.shadowed_glob { + // Forbid expanded shadowing to avoid time travel. + if restricted_shadowing + && binding.expansion != ExpnId::root() + && binding.res() != shadowed_glob.res() + { + self.ambiguity_errors.push(AmbiguityError { + kind: AmbiguityKind::GlobVsExpanded, + ident, + b1: binding, + b2: shadowed_glob, + misc1: AmbiguityErrorMisc::None, + misc2: AmbiguityErrorMisc::None, + }); + } + } - if !self.is_accessible_from(binding.vis, parent_scope.module) && + if !self.is_accessible_from(binding.vis, parent_scope.module) && // Remove this together with `PUB_USE_OF_PRIVATE_EXTERN_CRATE` - !(self.last_import_segment && binding.is_extern_crate()) { - self.privacy_errors.push(PrivacyError(path_span, ident, binding)); - } + !(self.last_import_segment && binding.is_extern_crate()) + { + self.privacy_errors.push(PrivacyError(path_span, ident, binding)); + } - Ok(binding) - } - }) + Ok(binding) + } + }); } // Items and single imports are not shadowable, if we have one, then it's determined. 
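The hunks in this file are mechanical `x.py fmt` (rustfmt) output: binary operators such as `||` and `&&` move to the start of continuation lines, long method chains are broken with one `.call()` per line, one-line `match` expressions and closure bodies are expanded across multiple lines, and multi-line argument lists gain trailing commas. As a minimal sketch of that layout on a hypothetical function (illustrative only, not part of this patch):

fn pick_label(labels: &[&str], query: &str, allow_empty: bool) -> Option<String> {
    // Long chains get one `.call()` per line once they no longer fit on a single line.
    labels
        .iter()
        .find(|label| {
            // The binary operator leads the continuation line instead of trailing the previous one.
            label.eq_ignore_ascii_case(query)
                || (allow_empty && label.is_empty())
        })
        .map(|label| label.to_string())
}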
@@ -319,18 +343,28 @@ impl<'a> Resolver<'a> { if !self.is_accessible_from(single_import.vis.get(), parent_scope.module) { continue; } - let module = unwrap_or!(single_import.imported_module.get(), - return Err((Undetermined, Weak::No))); + let module = unwrap_or!( + single_import.imported_module.get(), + return Err((Undetermined, Weak::No)) + ); let ident = match single_import.subclass { SingleImport { source, .. } => source, _ => unreachable!(), }; - match self.resolve_ident_in_module(module, ident, ns, &single_import.parent_scope, - false, path_span) { + match self.resolve_ident_in_module( + module, + ident, + ns, + &single_import.parent_scope, + false, + path_span, + ) { Err(Determined) => continue, - Ok(binding) if !self.is_accessible_from( - binding.vis, single_import.parent_scope.module - ) => continue, + Ok(binding) + if !self.is_accessible_from(binding.vis, single_import.parent_scope.module) => + { + continue; + } Ok(_) | Err(Undetermined) => return Err((Undetermined, Weak::No)), } } @@ -374,7 +408,7 @@ impl<'a> Resolver<'a> { // if it can then our "no resolution" result is not determined and can be invalidated. for glob_import in module.globs.borrow().iter() { if !self.is_accessible_from(glob_import.vis.get(), parent_scope.module) { - continue + continue; } let module = match glob_import.imported_module.get() { Some(ModuleOrUniformRoot::Module(module)) => module, @@ -403,9 +437,11 @@ impl<'a> Resolver<'a> { match result { Err(Determined) => continue, - Ok(binding) if !self.is_accessible_from( - binding.vis, glob_import.parent_scope.module - ) => continue, + Ok(binding) + if !self.is_accessible_from(binding.vis, glob_import.parent_scope.module) => + { + continue; + } Ok(_) | Err(Undetermined) => return Err((Undetermined, Weak::Yes)), } } @@ -416,11 +452,15 @@ impl<'a> Resolver<'a> { // Given a binding and an import directive that resolves to it, // return the corresponding binding defined by the import directive. - crate fn import(&self, binding: &'a NameBinding<'a>, directive: &'a ImportDirective<'a>) - -> &'a NameBinding<'a> { + crate fn import( + &self, + binding: &'a NameBinding<'a>, + directive: &'a ImportDirective<'a>, + ) -> &'a NameBinding<'a> { let vis = if binding.pseudo_vis().is_at_least(directive.vis.get(), self) || // cf. `PUB_USE_OF_PRIVATE_EXTERN_CRATE` - !directive.is_glob() && binding.is_extern_crate() { + !directive.is_glob() && binding.is_extern_crate() + { directive.vis.get() } else { binding.pseudo_vis() @@ -433,11 +473,7 @@ impl<'a> Resolver<'a> { } self.arenas.alloc_name_binding(NameBinding { - kind: NameBindingKind::Import { - binding, - directive, - used: Cell::new(false), - }, + kind: NameBindingKind::Import { binding, directive, used: Cell::new(false) }, ambiguity: None, span: directive.span, vis, @@ -464,21 +500,22 @@ impl<'a> Resolver<'a> { match (old_binding.is_glob_import(), binding.is_glob_import()) { (true, true) => { if res != old_binding.res() { - resolution.binding = Some(this.ambiguity(AmbiguityKind::GlobVsGlob, - old_binding, binding)); + resolution.binding = Some(this.ambiguity( + AmbiguityKind::GlobVsGlob, + old_binding, + binding, + )); } else if !old_binding.vis.is_at_least(binding.vis, &*this) { // We are glob-importing the same item but with greater visibility. 
resolution.binding = Some(binding); } } (old_glob @ true, false) | (old_glob @ false, true) => { - let (glob_binding, nonglob_binding) = if old_glob { - (old_binding, binding) - } else { - (binding, old_binding) - }; + let (glob_binding, nonglob_binding) = + if old_glob { (old_binding, binding) } else { (binding, old_binding) }; if glob_binding.res() != nonglob_binding.res() - && key.ns == MacroNS && nonglob_binding.expansion != ExpnId::root() + && key.ns == MacroNS + && nonglob_binding.expansion != ExpnId::root() { resolution.binding = Some(this.ambiguity( AmbiguityKind::GlobVsExpanded, @@ -492,15 +529,25 @@ impl<'a> Resolver<'a> { } (false, false) => { if let (&NameBindingKind::Res(_, true), &NameBindingKind::Res(_, true)) = - (&old_binding.kind, &binding.kind) { - - this.session.struct_span_err( - binding.span, - &format!("a macro named `{}` has already been exported", key.ident), - ) - .span_label(binding.span, format!("`{}` already exported", key.ident)) - .span_note(old_binding.span, "previous macro export is now shadowed") - .emit(); + (&old_binding.kind, &binding.kind) + { + this.session + .struct_span_err( + binding.span, + &format!( + "a macro named `{}` has already been exported", + key.ident + ), + ) + .span_label( + binding.span, + format!("`{}` already exported", key.ident), + ) + .span_note( + old_binding.span, + "previous macro export is now shadowed", + ) + .emit(); resolution.binding = Some(binding); } else { @@ -517,7 +564,8 @@ impl<'a> Resolver<'a> { } fn ambiguity( - &self, kind: AmbiguityKind, + &self, + kind: AmbiguityKind, primary_binding: &'a NameBinding<'a>, secondary_binding: &'a NameBinding<'a>, ) -> &'a NameBinding<'a> { @@ -529,13 +577,9 @@ impl<'a> Resolver<'a> { // Use `f` to mutate the resolution of the name in the module. // If the resolution becomes a success, define it in the module's glob importers. - fn update_resolution<T, F>( - &mut self, - module: Module<'a>, - key: BindingKey, - f: F, - ) -> T - where F: FnOnce(&mut Resolver<'a>, &mut NameResolution<'a>) -> T + fn update_resolution<T, F>(&mut self, module: Module<'a>, key: BindingKey, f: F) -> T + where + F: FnOnce(&mut Resolver<'a>, &mut NameResolution<'a>) -> T, { // Ensure that `resolution` isn't borrowed when defining in the module's glob importers, // during which the resolution might end up getting re-defined via a glob cycle. @@ -551,7 +595,7 @@ impl<'a> Resolver<'a> { Some(binding) => match old_binding { Some(old_binding) if ptr::eq(old_binding, binding) => return t, _ => (binding, t), - } + }, } }; @@ -650,7 +694,6 @@ impl<'a, 'b> ImportResolver<'a, 'b> { .chain(indeterminate_imports.into_iter().map(|i| (true, i))) { if let Some(err) = self.finalize_import(import) { - if let SingleImport { source, ref source_bindings, .. } = import.subclass { if source.name == kw::SelfLower { // Silence `unresolved import` error if E0429 is already emitted @@ -664,8 +707,9 @@ impl<'a, 'b> ImportResolver<'a, 'b> { // resolution for it so that later resolve stages won't complain. self.r.import_dummy_binding(import); if prev_root_id.as_u32() != 0 - && prev_root_id.as_u32() != import.root_id.as_u32() - && !errors.is_empty() { + && prev_root_id.as_u32() != import.root_id.as_u32() + && !errors.is_empty() + { // In the case of a new import line, throw a diagnostic message // for the previous line. 
self.throw_unresolved_import_error(errors, None); @@ -714,23 +758,11 @@ impl<'a, 'b> ImportResolver<'a, 'b> { let (span, msg) = if errors.is_empty() { (span.unwrap(), "unresolved import".to_string()) } else { - let span = MultiSpan::from_spans( - errors - .iter() - .map(|(_, err)| err.span) - .collect(), - ); + let span = MultiSpan::from_spans(errors.iter().map(|(_, err)| err.span).collect()); - let paths = errors - .iter() - .map(|(path, _)| format!("`{}`", path)) - .collect::<Vec<_>>(); + let paths = errors.iter().map(|(path, _)| format!("`{}`", path)).collect::<Vec<_>>(); - let msg = format!( - "unresolved import{} {}", - pluralize!(paths.len()), - paths.join(", "), - ); + let msg = format!("unresolved import{} {}", pluralize!(paths.len()), paths.join(", "),); (span, msg) }; @@ -790,57 +822,69 @@ impl<'a, 'b> ImportResolver<'a, 'b> { directive.imported_module.set(Some(module)); let (source, target, source_bindings, target_bindings, type_ns_only) = - match directive.subclass { - SingleImport { source, target, ref source_bindings, - ref target_bindings, type_ns_only, .. } => - (source, target, source_bindings, target_bindings, type_ns_only), - GlobImport { .. } => { - self.resolve_glob_import(directive); - return true; - } - _ => unreachable!(), - }; + match directive.subclass { + SingleImport { + source, + target, + ref source_bindings, + ref target_bindings, + type_ns_only, + .. + } => (source, target, source_bindings, target_bindings, type_ns_only), + GlobImport { .. } => { + self.resolve_glob_import(directive); + return true; + } + _ => unreachable!(), + }; let mut indeterminate = false; - self.r.per_ns(|this, ns| if !type_ns_only || ns == TypeNS { - if let Err(Undetermined) = source_bindings[ns].get() { - // For better failure detection, pretend that the import will - // not define any names while resolving its module path. - let orig_vis = directive.vis.replace(ty::Visibility::Invisible); - let binding = this.resolve_ident_in_module( - module, source, ns, &directive.parent_scope, false, directive.span - ); - directive.vis.set(orig_vis); + self.r.per_ns(|this, ns| { + if !type_ns_only || ns == TypeNS { + if let Err(Undetermined) = source_bindings[ns].get() { + // For better failure detection, pretend that the import will + // not define any names while resolving its module path. + let orig_vis = directive.vis.replace(ty::Visibility::Invisible); + let binding = this.resolve_ident_in_module( + module, + source, + ns, + &directive.parent_scope, + false, + directive.span, + ); + directive.vis.set(orig_vis); - source_bindings[ns].set(binding); - } else { - return - }; + source_bindings[ns].set(binding); + } else { + return; + }; - let parent = directive.parent_scope.module; - match source_bindings[ns].get() { - Err(Undetermined) => indeterminate = true, - // Don't update the resolution, because it was never added. - Err(Determined) if target.name == kw::Underscore => {} - Err(Determined) => { - let key = this.new_key(target, ns); - this.update_resolution(parent, key, |_, resolution| { - resolution.single_imports.remove(&PtrKey(directive)); - }); - } - Ok(binding) if !binding.is_importable() => { - let msg = format!("`{}` is not directly importable", target); - struct_span_err!(this.session, directive.span, E0253, "{}", &msg) - .span_label(directive.span, "cannot be imported directly") - .emit(); - // Do not import this illegal binding. 
Import a dummy binding and pretend - // everything is fine - this.import_dummy_binding(directive); - } - Ok(binding) => { - let imported_binding = this.import(binding, directive); - target_bindings[ns].set(Some(imported_binding)); - this.define(parent, target, ns, imported_binding); + let parent = directive.parent_scope.module; + match source_bindings[ns].get() { + Err(Undetermined) => indeterminate = true, + // Don't update the resolution, because it was never added. + Err(Determined) if target.name == kw::Underscore => {} + Err(Determined) => { + let key = this.new_key(target, ns); + this.update_resolution(parent, key, |_, resolution| { + resolution.single_imports.remove(&PtrKey(directive)); + }); + } + Ok(binding) if !binding.is_importable() => { + let msg = format!("`{}` is not directly importable", target); + struct_span_err!(this.session, directive.span, E0253, "{}", &msg) + .span_label(directive.span, "cannot be imported directly") + .emit(); + // Do not import this illegal binding. Import a dummy binding and pretend + // everything is fine + this.import_dummy_binding(directive); + } + Ok(binding) => { + let imported_binding = this.import(binding, directive); + target_bindings[ns].set(Some(imported_binding)); + this.define(parent, target, ns, imported_binding); + } } } }); @@ -854,7 +898,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> { /// consolidate multiple unresolved import errors into a single diagnostic. fn finalize_import( &mut self, - directive: &'b ImportDirective<'b> + directive: &'b ImportDirective<'b>, ) -> Option<UnresolvedImportError> { let orig_vis = directive.vis.replace(ty::Visibility::Invisible); let prev_ambiguity_errors_len = self.r.ambiguity_errors.len(); @@ -892,10 +936,8 @@ impl<'a, 'b> ImportResolver<'a, 'b> { PathResult::Failed { is_error_from_last_segment: false, span, label, suggestion } => { if no_ambiguity { assert!(directive.imported_module.get().is_none()); - self.r.report_error(span, ResolutionError::FailedToResolve { - label, - suggestion, - }); + self.r + .report_error(span, ResolutionError::FailedToResolve { label, suggestion }); } return None; } @@ -907,26 +949,22 @@ impl<'a, 'b> ImportResolver<'a, 'b> { directive.module_path.clone(), &directive.parent_scope, ) { - Some((suggestion, note)) => { - UnresolvedImportError { - span, - label: None, - note, - suggestion: Some(( - vec![(span, Segment::names_to_string(&suggestion))], - String::from("a similar path exists"), - Applicability::MaybeIncorrect, - )), - } - } - None => { - UnresolvedImportError { - span, - label: Some(label), - note: Vec::new(), - suggestion, - } - } + Some((suggestion, note)) => UnresolvedImportError { + span, + label: None, + note, + suggestion: Some(( + vec![(span, Segment::names_to_string(&suggestion))], + String::from("a similar path exists"), + Applicability::MaybeIncorrect, + )), + }, + None => UnresolvedImportError { + span, + label: Some(label), + note: Vec::new(), + suggestion, + }, }; return Some(err); } @@ -942,11 +980,17 @@ impl<'a, 'b> ImportResolver<'a, 'b> { PathResult::Indeterminate | PathResult::NonModule(..) => unreachable!(), }; - let (ident, target, source_bindings, target_bindings, type_ns_only) = - match directive.subclass { - SingleImport { source, target, ref source_bindings, - ref target_bindings, type_ns_only, .. 
} => - (source, target, source_bindings, target_bindings, type_ns_only), + let (ident, target, source_bindings, target_bindings, type_ns_only) = match directive + .subclass + { + SingleImport { + source, + target, + ref source_bindings, + ref target_bindings, + type_ns_only, + .. + } => (source, target, source_bindings, target_bindings, type_ns_only), GlobImport { is_prelude, ref max_vis } => { if directive.module_path.len() <= 1 { // HACK(eddyb) `lint_if_path_starts_with_module` needs at least @@ -974,11 +1018,14 @@ impl<'a, 'b> ImportResolver<'a, 'b> { } if !is_prelude && max_vis.get() != ty::Visibility::Invisible && // Allow empty globs. - !max_vis.get().is_at_least(directive.vis.get(), &*self) { - let msg = - "glob import doesn't reexport anything because no candidate is public enough"; + !max_vis.get().is_at_least(directive.vis.get(), &*self) + { + let msg = "glob import doesn't reexport anything because no candidate is public enough"; self.r.lint_buffer.buffer_lint( - UNUSED_IMPORTS, directive.id, directive.span, msg, + UNUSED_IMPORTS, + directive.id, + directive.span, + msg, ); } return None; @@ -987,79 +1034,107 @@ impl<'a, 'b> ImportResolver<'a, 'b> { }; let mut all_ns_err = true; - self.r.per_ns(|this, ns| if !type_ns_only || ns == TypeNS { - let orig_vis = directive.vis.replace(ty::Visibility::Invisible); - let orig_blacklisted_binding = - mem::replace(&mut this.blacklisted_binding, target_bindings[ns].get()); - let orig_last_import_segment = mem::replace(&mut this.last_import_segment, true); - let binding = this.resolve_ident_in_module( - module, ident, ns, &directive.parent_scope, true, directive.span - ); - this.last_import_segment = orig_last_import_segment; - this.blacklisted_binding = orig_blacklisted_binding; - directive.vis.set(orig_vis); + self.r.per_ns(|this, ns| { + if !type_ns_only || ns == TypeNS { + let orig_vis = directive.vis.replace(ty::Visibility::Invisible); + let orig_blacklisted_binding = + mem::replace(&mut this.blacklisted_binding, target_bindings[ns].get()); + let orig_last_import_segment = mem::replace(&mut this.last_import_segment, true); + let binding = this.resolve_ident_in_module( + module, + ident, + ns, + &directive.parent_scope, + true, + directive.span, + ); + this.last_import_segment = orig_last_import_segment; + this.blacklisted_binding = orig_blacklisted_binding; + directive.vis.set(orig_vis); - match binding { - Ok(binding) => { - // Consistency checks, analogous to `finalize_macro_resolutions`. - let initial_res = source_bindings[ns].get().map(|initial_binding| { - all_ns_err = false; - if let Some(target_binding) = target_bindings[ns].get() { - // Note that as_str() de-gensyms the Symbol - if target.name.as_str() == "_" && - initial_binding.is_extern_crate() && !initial_binding.is_import() { - this.record_use(ident, ns, target_binding, - directive.module_path.is_empty()); + match binding { + Ok(binding) => { + // Consistency checks, analogous to `finalize_macro_resolutions`. 
+ let initial_res = source_bindings[ns].get().map(|initial_binding| { + all_ns_err = false; + if let Some(target_binding) = target_bindings[ns].get() { + // Note that as_str() de-gensyms the Symbol + if target.name.as_str() == "_" + && initial_binding.is_extern_crate() + && !initial_binding.is_import() + { + this.record_use( + ident, + ns, + target_binding, + directive.module_path.is_empty(), + ); + } + } + initial_binding.res() + }); + let res = binding.res(); + if let Ok(initial_res) = initial_res { + if res != initial_res && this.ambiguity_errors.is_empty() { + span_bug!(directive.span, "inconsistent resolution for an import"); + } + } else { + if res != Res::Err + && this.ambiguity_errors.is_empty() + && this.privacy_errors.is_empty() + { + let msg = "cannot determine resolution for the import"; + let msg_note = + "import resolution is stuck, try simplifying other imports"; + this.session + .struct_span_err(directive.span, msg) + .note(msg_note) + .emit(); } - } - initial_binding.res() - }); - let res = binding.res(); - if let Ok(initial_res) = initial_res { - if res != initial_res && this.ambiguity_errors.is_empty() { - span_bug!(directive.span, "inconsistent resolution for an import"); - } - } else { - if res != Res::Err && - this.ambiguity_errors.is_empty() && this.privacy_errors.is_empty() { - let msg = "cannot determine resolution for the import"; - let msg_note = - "import resolution is stuck, try simplifying other imports"; - this.session.struct_span_err(directive.span, msg).note(msg_note).emit(); } } - } - Err(..) => { - // FIXME: This assert may fire if public glob is later shadowed by a private - // single import (see test `issue-55884-2.rs`). In theory single imports should - // always block globs, even if they are not yet resolved, so that this kind of - // self-inconsistent resolution never happens. - // Reenable the assert when the issue is fixed. - // assert!(result[ns].get().is_err()); + Err(..) => { + // FIXME: This assert may fire if public glob is later shadowed by a private + // single import (see test `issue-55884-2.rs`). In theory single imports should + // always block globs, even if they are not yet resolved, so that this kind of + // self-inconsistent resolution never happens. + // Reenable the assert when the issue is fixed. + // assert!(result[ns].get().is_err()); + } } } }); if all_ns_err { let mut all_ns_failed = true; - self.r.per_ns(|this, ns| if !type_ns_only || ns == TypeNS { - let binding = this.resolve_ident_in_module( - module, ident, ns, &directive.parent_scope, true, directive.span - ); - if binding.is_ok() { - all_ns_failed = false; + self.r.per_ns(|this, ns| { + if !type_ns_only || ns == TypeNS { + let binding = this.resolve_ident_in_module( + module, + ident, + ns, + &directive.parent_scope, + true, + directive.span, + ); + if binding.is_ok() { + all_ns_failed = false; + } } }); return if all_ns_failed { let resolutions = match module { - ModuleOrUniformRoot::Module(module) => - Some(self.r.resolutions(module).borrow()), + ModuleOrUniformRoot::Module(module) => { + Some(self.r.resolutions(module).borrow()) + } _ => None, }; let resolutions = resolutions.as_ref().into_iter().flat_map(|r| r.iter()); let names = resolutions.filter_map(|(BindingKey { ident: i, .. }, resolution)| { - if *i == ident { return None; } // Never suggest the same name + if *i == ident { + return None; + } // Never suggest the same name match *resolution.borrow() { NameResolution { binding: Some(name_binding), .. 
} => { match name_binding.kind { @@ -1070,29 +1145,31 @@ impl<'a, 'b> ImportResolver<'a, 'b> { NameBindingKind::Res(Res::Err, _) => return None, _ => Some(&i.name), } - }, + } _ => Some(&i.name), } - }, - NameResolution { ref single_imports, .. } - if single_imports.is_empty() => None, + } + NameResolution { ref single_imports, .. } if single_imports.is_empty() => { + None + } _ => Some(&i.name), } }); - let lev_suggestion = find_best_match_for_name(names, &ident.as_str(), None) - .map(|suggestion| - (vec![(ident.span, suggestion.to_string())], - String::from("a similar name exists in the module"), - Applicability::MaybeIncorrect) - ); + let lev_suggestion = + find_best_match_for_name(names, &ident.as_str(), None).map(|suggestion| { + ( + vec![(ident.span, suggestion.to_string())], + String::from("a similar name exists in the module"), + Applicability::MaybeIncorrect, + ) + }); - let (suggestion, note) = match self.check_for_module_export_macro( - directive, module, ident, - ) { - Some((suggestion, note)) => (suggestion.or(lev_suggestion), note), - _ => (lev_suggestion, Vec::new()), - }; + let (suggestion, note) = + match self.check_for_module_export_macro(directive, module, ident) { + Some((suggestion, note)) => (suggestion.or(lev_suggestion), note), + _ => (lev_suggestion, Vec::new()), + }; let label = match module { ModuleOrUniformRoot::Module(module) => { @@ -1124,7 +1201,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> { // `resolve_ident_in_module` reported a privacy error. self.r.import_dummy_binding(directive); None - } + }; } let mut reexport_error = None; @@ -1144,26 +1221,33 @@ impl<'a, 'b> ImportResolver<'a, 'b> { if !any_successful_reexport { let (ns, binding) = reexport_error.unwrap(); if ns == TypeNS && binding.is_extern_crate() { - let msg = format!("extern crate `{}` is private, and cannot be \ + let msg = format!( + "extern crate `{}` is private, and cannot be \ re-exported (error E0365), consider declaring with \ `pub`", - ident); - self.r.lint_buffer.buffer_lint(PUB_USE_OF_PRIVATE_EXTERN_CRATE, - directive.id, - directive.span, - &msg); + ident + ); + self.r.lint_buffer.buffer_lint( + PUB_USE_OF_PRIVATE_EXTERN_CRATE, + directive.id, + directive.span, + &msg, + ); } else if ns == TypeNS { - struct_span_err!(self.r.session, directive.span, E0365, - "`{}` is private, and cannot be re-exported", ident) - .span_label(directive.span, format!("re-export of private `{}`", ident)) - .note(&format!("consider declaring type or module `{}` with `pub`", ident)) - .emit(); + struct_span_err!( + self.r.session, + directive.span, + E0365, + "`{}` is private, and cannot be re-exported", + ident + ) + .span_label(directive.span, format!("re-export of private `{}`", ident)) + .note(&format!("consider declaring type or module `{}` with `pub`", ident)) + .emit(); } else { let msg = format!("`{}` is private, and cannot be re-exported", ident); - let note_msg = format!( - "consider marking `{}` as `pub` in the imported module", - ident, - ); + let note_msg = + format!("consider marking `{}` as `pub` in the imported module", ident,); struct_span_err!(self.r.session, directive.span, E0364, "{}", &msg) .span_note(directive.span, &note_msg) .emit(); @@ -1190,8 +1274,10 @@ impl<'a, 'b> ImportResolver<'a, 'b> { // Record what this import resolves to for later uses in documentation, // this may resolve to either a value or a type, but for documentation // purposes it's good enough to just favor one over the other.
- self.r.per_ns(|this, ns| if let Some(binding) = source_bindings[ns].get().ok() { - this.import_res_map.entry(directive.id).or_default()[ns] = Some(binding.res()); + self.r.per_ns(|this, ns| { + if let Some(binding) = source_bindings[ns].get().ok() { + this.import_res_map.entry(directive.id).or_default()[ns] = Some(binding.res()); + } }); self.check_for_redundant_imports( @@ -1225,52 +1311,41 @@ impl<'a, 'b> ImportResolver<'a, 'b> { return; } - let mut is_redundant = PerNS { - value_ns: None, - type_ns: None, - macro_ns: None, - }; + let mut is_redundant = PerNS { value_ns: None, type_ns: None, macro_ns: None }; - let mut redundant_span = PerNS { - value_ns: None, - type_ns: None, - macro_ns: None, - }; + let mut redundant_span = PerNS { value_ns: None, type_ns: None, macro_ns: None }; - self.r.per_ns(|this, ns| if let Some(binding) = source_bindings[ns].get().ok() { - if binding.res() == Res::Err { - return; - } + self.r.per_ns(|this, ns| { + if let Some(binding) = source_bindings[ns].get().ok() { + if binding.res() == Res::Err { + return; + } - let orig_blacklisted_binding = mem::replace( - &mut this.blacklisted_binding, - target_bindings[ns].get() - ); + let orig_blacklisted_binding = + mem::replace(&mut this.blacklisted_binding, target_bindings[ns].get()); - match this.early_resolve_ident_in_lexical_scope( - target, - ScopeSet::All(ns, false), - &directive.parent_scope, - false, - false, - directive.span, - ) { - Ok(other_binding) => { - is_redundant[ns] = Some( - binding.res() == other_binding.res() - && !other_binding.is_ambiguity() - ); - redundant_span[ns] = - Some((other_binding.span, other_binding.is_import())); + match this.early_resolve_ident_in_lexical_scope( + target, + ScopeSet::All(ns, false), + &directive.parent_scope, + false, + false, + directive.span, + ) { + Ok(other_binding) => { + is_redundant[ns] = Some( + binding.res() == other_binding.res() && !other_binding.is_ambiguity(), + ); + redundant_span[ns] = Some((other_binding.span, other_binding.is_import())); + } + Err(_) => is_redundant[ns] = Some(false), } - Err(_) => is_redundant[ns] = Some(false) - } - this.blacklisted_binding = orig_blacklisted_binding; + this.blacklisted_binding = orig_blacklisted_binding; + } }); - if !is_redundant.is_empty() && - is_redundant.present_items().all(|is_redundant| is_redundant) + if !is_redundant.is_empty() && is_redundant.present_items().all(|is_redundant| is_redundant) { let mut redundant_spans: Vec<_> = redundant_span.present_items().collect(); redundant_spans.sort(); @@ -1297,7 +1372,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> { if module.is_trait() { self.r.session.span_err(directive.span, "items in traits are not importable."); return; - } else if module.def_id() == directive.parent_scope.module.def_id() { + } else if module.def_id() == directive.parent_scope.module.def_id() { return; } else if let GlobImport { is_prelude: true, .. } = directive.subclass { self.r.prelude = Some(module); @@ -1309,9 +1384,15 @@ impl<'a, 'b> ImportResolver<'a, 'b> { // Ensure that `resolutions` isn't borrowed during `try_define`, // since it might get updated via a glob cycle. 
- let bindings = self.r.resolutions(module).borrow().iter().filter_map(|(key, resolution)| { - resolution.borrow().binding().map(|binding| (*key, binding)) - }).collect::<Vec<_>>(); + let bindings = self + .r + .resolutions(module) + .borrow() + .iter() + .filter_map(|(key, resolution)| { + resolution.borrow().binding().map(|binding| (*key, binding)) + }) + .collect::<Vec<_>>(); for (mut key, binding) in bindings { let scope = match key.ident.span.reverse_glob_adjust(module.expansion, directive.span) { Some(Some(def)) => self.r.macro_def_scope(def), @@ -1340,8 +1421,8 @@ impl<'a, 'b> ImportResolver<'a, 'b> { // Filter away ambiguous imports and anything that has def-site // hygiene. // FIXME: Implement actual cross-crate hygiene. - let is_good_import = binding.is_import() && !binding.is_ambiguity() - && !ident.span.from_expansion(); + let is_good_import = + binding.is_import() && !binding.is_ambiguity() && !ident.span.from_expansion(); if is_good_import || binding.is_macro_def() { let res = binding.res(); if res != Res::Err { @@ -1350,71 +1431,72 @@ impl<'a, 'b> ImportResolver<'a, 'b> { this.cstore().export_macros_untracked(def_id.krate); } } - reexports.push(Export { - ident, - res, - span: binding.span, - vis: binding.vis, - }); + reexports.push(Export { ident, res, span: binding.span, vis: binding.vis }); } } if let NameBindingKind::Import { binding: orig_binding, directive, .. } = binding.kind { - if ns == TypeNS && orig_binding.is_variant() && - !orig_binding.vis.is_at_least(binding.vis, &*this) { - let msg = match directive.subclass { - ImportDirectiveSubclass::SingleImport { .. } => { - format!("variant `{}` is private and cannot be re-exported", - ident) - }, - ImportDirectiveSubclass::GlobImport { .. } => { - let msg = "enum is private and its variants \ - cannot be re-exported".to_owned(); - let error_id = (DiagnosticMessageId::ErrorId(0), // no code?! 
- Some(binding.span), - msg.clone()); - let fresh = this.session.one_time_diagnostics - .borrow_mut().insert(error_id); - if !fresh { - return; - } - msg - }, - ref s @ _ => bug!("unexpected import subclass {:?}", s) - }; - let mut err = this.session.struct_span_err(binding.span, &msg); - - let imported_module = match directive.imported_module.get() { - Some(ModuleOrUniformRoot::Module(module)) => module, - _ => bug!("module should exist"), - }; - let parent_module = imported_module.parent.expect("parent should exist"); - let resolutions = this.resolutions(parent_module).borrow(); - let enum_path_segment_index = directive.module_path.len() - 1; - let enum_ident = directive.module_path[enum_path_segment_index].ident; - - let key = this.new_key(enum_ident, TypeNS); - let enum_resolution = resolutions.get(&key) - .expect("resolution should exist"); - let enum_span = enum_resolution.borrow() - .binding.expect("binding should exist") - .span; - let enum_def_span = this.session.source_map().def_span(enum_span); - let enum_def_snippet = this.session.source_map() - .span_to_snippet(enum_def_span).expect("snippet should exist"); - // potentially need to strip extant `crate`/`pub(path)` for suggestion - let after_vis_index = enum_def_snippet.find("enum") - .expect("`enum` keyword should exist in snippet"); - let suggestion = format!("pub {}", - &enum_def_snippet[after_vis_index..]); - - this.session - .diag_span_suggestion_once(&mut err, - DiagnosticMessageId::ErrorId(0), - enum_def_span, - "consider making the enum public", - suggestion); - err.emit(); + if ns == TypeNS + && orig_binding.is_variant() + && !orig_binding.vis.is_at_least(binding.vis, &*this) + { + let msg = match directive.subclass { + ImportDirectiveSubclass::SingleImport { .. } => { + format!("variant `{}` is private and cannot be re-exported", ident) + } + ImportDirectiveSubclass::GlobImport { .. } => { + let msg = "enum is private and its variants \ + cannot be re-exported" + .to_owned(); + let error_id = ( + DiagnosticMessageId::ErrorId(0), // no code?! 
+ Some(binding.span), + msg.clone(), + ); + let fresh = + this.session.one_time_diagnostics.borrow_mut().insert(error_id); + if !fresh { + return; + } + msg + } + ref s @ _ => bug!("unexpected import subclass {:?}", s), + }; + let mut err = this.session.struct_span_err(binding.span, &msg); + + let imported_module = match directive.imported_module.get() { + Some(ModuleOrUniformRoot::Module(module)) => module, + _ => bug!("module should exist"), + }; + let parent_module = imported_module.parent.expect("parent should exist"); + let resolutions = this.resolutions(parent_module).borrow(); + let enum_path_segment_index = directive.module_path.len() - 1; + let enum_ident = directive.module_path[enum_path_segment_index].ident; + + let key = this.new_key(enum_ident, TypeNS); + let enum_resolution = resolutions.get(&key).expect("resolution should exist"); + let enum_span = + enum_resolution.borrow().binding.expect("binding should exist").span; + let enum_def_span = this.session.source_map().def_span(enum_span); + let enum_def_snippet = this + .session + .source_map() + .span_to_snippet(enum_def_span) + .expect("snippet should exist"); + // potentially need to strip extant `crate`/`pub(path)` for suggestion + let after_vis_index = enum_def_snippet + .find("enum") + .expect("`enum` keyword should exist in snippet"); + let suggestion = format!("pub {}", &enum_def_snippet[after_vis_index..]); + + this.session.diag_span_suggestion_once( + &mut err, + DiagnosticMessageId::ErrorId(0), + enum_def_span, + "consider making the enum public", + suggestion, + ); + err.emit(); } } }); @@ -1427,11 +1509,12 @@ impl<'a, 'b> ImportResolver<'a, 'b> { } } -fn import_path_to_string(names: &[Ident], - subclass: &ImportDirectiveSubclass<'_>, - span: Span) -> String { - let pos = names.iter() - .position(|p| span == p.span && p.name != kw::PathRoot); +fn import_path_to_string( + names: &[Ident], + subclass: &ImportDirectiveSubclass<'_>, + span: Span, +) -> String { + let pos = names.iter().position(|p| span == p.span && p.name != kw::PathRoot); let global = !names.is_empty() && names[0].name == kw::PathRoot; if let Some(pos) = pos { let names = if global { &names[1..pos + 1] } else { &names[..pos + 1] }; diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 9b737428d5a..5f0fbcc3b0f 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -2,33 +2,33 @@ //! The main routine here is `ast_ty_to_ty()`; each use is parameterized by an //! instance of `AstConv`. 
-use errors::{Applicability, DiagnosticId}; -use crate::hir::{self, GenericArg, GenericArgs, ExprKind}; -use crate::hir::def::{CtorOf, Res, DefKind}; +use crate::hir::def::{CtorOf, DefKind, Res}; use crate::hir::def_id::DefId; -use crate::hir::HirVec; use crate::hir::ptr::P; +use crate::hir::HirVec; +use crate::hir::{self, ExprKind, GenericArg, GenericArgs}; use crate::lint; use crate::middle::lang_items::SizedTraitLangItem; use crate::middle::resolve_lifetime as rl; use crate::namespace::Namespace; +use crate::require_c_abi_if_c_variadic; +use crate::util::common::ErrorReported; +use crate::util::nodemap::FxHashMap; +use errors::{Applicability, DiagnosticId}; use rustc::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS; use rustc::traits; -use rustc::ty::{self, DefIdTree, Ty, TyCtxt, Const, ToPredicate, TypeFoldable}; -use rustc::ty::{GenericParamDef, GenericParamDefKind}; -use rustc::ty::subst::{self, Subst, InternalSubsts, SubstsRef}; +use rustc::ty::subst::{self, InternalSubsts, Subst, SubstsRef}; use rustc::ty::wf::object_region_bounds; +use rustc::ty::{self, Const, DefIdTree, ToPredicate, Ty, TyCtxt, TypeFoldable}; +use rustc::ty::{GenericParamDef, GenericParamDefKind}; use rustc_target::spec::abi; -use crate::require_c_abi_if_c_variadic; use smallvec::SmallVec; use syntax::ast; use syntax::errors::pluralize; use syntax::feature_gate::feature_err; -use syntax::util::lev_distance::find_best_match_for_name; use syntax::symbol::sym; -use syntax_pos::{DUMMY_SP, Span, MultiSpan}; -use crate::util::common::ErrorReported; -use crate::util::nodemap::FxHashMap; +use syntax::util::lev_distance::find_best_match_for_name; +use syntax_pos::{MultiSpan, Span, DUMMY_SP}; use std::collections::BTreeSet; use std::iter; @@ -60,12 +60,8 @@ pub trait AstConv<'tcx> { fn get_type_parameter_bounds(&self, span: Span, def_id: DefId) -> ty::GenericPredicates<'tcx>; /// Returns the lifetime to use when a lifetime is omitted (and not elided). - fn re_infer( - &self, - param: Option<&ty::GenericParamDef>, - span: Span, - ) - -> Option<ty::Region<'tcx>>; + fn re_infer(&self, param: Option<&ty::GenericParamDef>, span: Span) + -> Option<ty::Region<'tcx>>; /// Returns the type to use when a type is omitted. fn ty_infer(&self, param: Option<&ty::GenericParamDef>, span: Span) -> Ty<'tcx>; @@ -85,12 +81,13 @@ pub trait AstConv<'tcx> { /// signatures for that reason. In a function body, we can always /// handle it because we can use inference variables to remove the /// late-bound regions. - fn projected_ty_from_poly_trait_ref(&self, - span: Span, - item_def_id: DefId, - item_segment: &hir::PathSegment, - poly_trait_ref: ty::PolyTraitRef<'tcx>) - -> Ty<'tcx>; + fn projected_ty_from_poly_trait_ref( + &self, + span: Span, + item_def_id: DefId, + item_segment: &hir::PathSegment, + poly_trait_ref: ty::PolyTraitRef<'tcx>, + ) -> Ty<'tcx>; /// Normalize an associated type coming from the user. 
fn normalize_ty(&self, span: Span, ty: Ty<'tcx>) -> Ty<'tcx>; @@ -128,25 +125,20 @@ enum GenericArgPosition { } impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { - pub fn ast_region_to_region(&self, + pub fn ast_region_to_region( + &self, lifetime: &hir::Lifetime, - def: Option<&ty::GenericParamDef>) - -> ty::Region<'tcx> - { + def: Option<&ty::GenericParamDef>, + ) -> ty::Region<'tcx> { let tcx = self.tcx(); - let lifetime_name = |def_id| { - tcx.hir().name(tcx.hir().as_local_hir_id(def_id).unwrap()) - }; + let lifetime_name = |def_id| tcx.hir().name(tcx.hir().as_local_hir_id(def_id).unwrap()); let r = match tcx.named_region(lifetime.hir_id) { - Some(rl::Region::Static) => { - tcx.lifetimes.re_static - } + Some(rl::Region::Static) => tcx.lifetimes.re_static, Some(rl::Region::LateBound(debruijn, id, _)) => { let name = lifetime_name(id); - tcx.mk_region(ty::ReLateBound(debruijn, - ty::BrNamed(id, name))) + tcx.mk_region(ty::ReLateBound(debruijn, ty::BrNamed(id, name))) } Some(rl::Region::LateBoundAnon(debruijn, index)) => { @@ -155,54 +147,47 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { Some(rl::Region::EarlyBound(index, id, _)) => { let name = lifetime_name(id); - tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { - def_id: id, - index, - name, - })) + tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { def_id: id, index, name })) } Some(rl::Region::Free(scope, id)) => { let name = lifetime_name(id); tcx.mk_region(ty::ReFree(ty::FreeRegion { scope, - bound_region: ty::BrNamed(id, name) + bound_region: ty::BrNamed(id, name), })) // (*) -- not late-bound, won't change } None => { - self.re_infer(def, lifetime.span) - .unwrap_or_else(|| { - // This indicates an illegal lifetime - // elision. `resolve_lifetime` should have - // reported an error in this case -- but if - // not, let's error out. - tcx.sess.delay_span_bug(lifetime.span, "unelided lifetime in signature"); - - // Supply some dummy value. We don't have an - // `re_error`, annoyingly, so use `'static`. - tcx.lifetimes.re_static - }) + self.re_infer(def, lifetime.span).unwrap_or_else(|| { + // This indicates an illegal lifetime + // elision. `resolve_lifetime` should have + // reported an error in this case -- but if + // not, let's error out. + tcx.sess.delay_span_bug(lifetime.span, "unelided lifetime in signature"); + + // Supply some dummy value. We don't have an + // `re_error`, annoyingly, so use `'static`. + tcx.lifetimes.re_static + }) } }; - debug!("ast_region_to_region(lifetime={:?}) yields {:?}", - lifetime, - r); + debug!("ast_region_to_region(lifetime={:?}) yields {:?}", lifetime, r); r } /// Given a path `path` that refers to an item `I` with the declared generics `decl_generics`, /// returns an appropriate set of substitutions for this particular reference to `I`. - pub fn ast_path_substs_for_ty(&self, + pub fn ast_path_substs_for_ty( + &self, span: Span, def_id: DefId, - item_segment: &hir::PathSegment) - -> SubstsRef<'tcx> - { + item_segment: &hir::PathSegment, + ) -> SubstsRef<'tcx> { let (substs, assoc_bindings, _) = self.create_substs_for_ast_path( span, def_id, @@ -218,29 +203,26 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } /// Report error if there is an explicit type parameter when using `impl Trait`. 
- fn check_impl_trait( - tcx: TyCtxt<'_>, - seg: &hir::PathSegment, - generics: &ty::Generics, - ) -> bool { + fn check_impl_trait(tcx: TyCtxt<'_>, seg: &hir::PathSegment, generics: &ty::Generics) -> bool { let explicit = !seg.infer_args; let impl_trait = generics.params.iter().any(|param| match param.kind { ty::GenericParamDefKind::Type { - synthetic: Some(hir::SyntheticTyParamKind::ImplTrait), .. + synthetic: Some(hir::SyntheticTyParamKind::ImplTrait), + .. } => true, _ => false, }); if explicit && impl_trait { - let spans = - seg.generic_args().args - .iter() - .filter_map(|arg| - match arg { - GenericArg::Type(_) => Some(arg.span()), - _ => None - }) - .collect::<Vec<_>>(); + let spans = seg + .generic_args() + .args + .iter() + .filter_map(|arg| match arg { + GenericArg::Type(_) => Some(arg.span()), + _ => None, + }) + .collect::<Vec<_>>(); let mut err = struct_span_err! { tcx.sess, @@ -270,26 +252,21 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { is_method_call: bool, ) -> bool { let empty_args = P(hir::GenericArgs { - args: HirVec::new(), bindings: HirVec::new(), parenthesized: false, + args: HirVec::new(), + bindings: HirVec::new(), + parenthesized: false, }); let suppress_mismatch = Self::check_impl_trait(tcx, seg, &def); Self::check_generic_arg_count( tcx, span, def, - if let Some(ref args) = seg.args { - args - } else { - &empty_args - }, - if is_method_call { - GenericArgPosition::MethodCall - } else { - GenericArgPosition::Value - }, + if let Some(ref args) = seg.args { args } else { &empty_args }, + if is_method_call { GenericArgPosition::MethodCall } else { GenericArgPosition::Value }, def.parent.is_none() && def.has_self, // `has_self` - seg.infer_args || suppress_mismatch, // `infer_args` - ).0 + seg.infer_args || suppress_mismatch, // `infer_args` + ) + .0 } /// Checks that the correct number of generic arguments have been provided. @@ -336,7 +313,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let note = "the late bound lifetime parameter is introduced here"; let span = args.args[0].span(); if position == GenericArgPosition::Value - && arg_counts.lifetimes != param_counts.lifetimes { + && arg_counts.lifetimes != param_counts.lifetimes + { let mut err = tcx.sess.struct_span_err(span, msg); err.span_note(span_late, note); err.emit(); @@ -344,8 +322,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } else { let mut multispan = MultiSpan::from_span(span); multispan.push_span_label(span_late, note.to_string()); - tcx.lint_hir(lint::builtin::LATE_BOUND_LIFETIME_ARGUMENTS, - args.args[0].id(), multispan, msg); + tcx.lint_hir( + lint::builtin::LATE_BOUND_LIFETIME_ARGUMENTS, + args.args[0].id(), + multispan, + msg, + ); reported_late_bound_region_err = Some(false); } } @@ -354,11 +336,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let check_kind_count = |kind, required, permitted, provided, offset| { debug!( "check_kind_count: kind: {} required: {} permitted: {} provided: {} offset: {}", - kind, - required, - permitted, - provided, - offset + kind, required, permitted, provided, offset ); // We enforce the following: `required` <= `provided` <= `permitted`. // For kinds without defaults (e.g.., lifetimes), `required == permitted`. 
@@ -372,7 +350,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let (bound, quantifier) = if required != permitted { if provided < required { (required, "at least ") - } else { // provided > permitted + } else { + // provided > permitted (permitted, "at most ") } } else { @@ -383,32 +362,32 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let (spans, label) = if required == permitted && provided > permitted { // In the case when the user has provided too many arguments, // we want to point to the unexpected arguments. - let spans: Vec<Span> = args.args[offset+permitted .. offset+provided] - .iter() - .map(|arg| arg.span()) - .collect(); + let spans: Vec<Span> = args.args[offset + permitted..offset + provided] + .iter() + .map(|arg| arg.span()) + .collect(); potential_assoc_types = Some(spans.clone()); - (spans, format!( "unexpected {} argument", kind)) + (spans, format!("unexpected {} argument", kind)) } else { - (vec![span], format!( - "expected {}{} {} argument{}", - quantifier, - bound, - kind, - pluralize!(bound), - )) + ( + vec![span], + format!( + "expected {}{} {} argument{}", + quantifier, + bound, + kind, + pluralize!(bound), + ), + ) }; let mut err = tcx.sess.struct_span_err_with_code( spans.clone(), &format!( "wrong number of {} arguments: expected {}{}, found {}", - kind, - quantifier, - bound, - provided, + kind, quantifier, bound, provided, ), - DiagnosticId::Error("E0107".into()) + DiagnosticId::Error("E0107".into()), ); for span in spans { err.span_label(span, label.as_str()); @@ -422,7 +401,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { }; if reported_late_bound_region_err.is_none() - && (!infer_lifetimes || arg_counts.lifetimes > param_counts.lifetimes) { + && (!infer_lifetimes || arg_counts.lifetimes > param_counts.lifetimes) + { check_kind_count( "lifetime", param_counts.lifetimes, @@ -442,8 +422,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ); } // Note that type errors are currently be emitted *after* const errors. - if !infer_args - || arg_counts.types > param_counts.types - defaults.types - has_self as usize { + if !infer_args || arg_counts.types > param_counts.types - defaults.types - has_self as usize + { check_kind_count( "type", param_counts.types - defaults.types - has_self as usize, @@ -493,8 +473,11 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { self_ty: Option<Ty<'tcx>>, args_for_def_id: impl Fn(DefId) -> (Option<&'b GenericArgs>, bool), provided_kind: impl Fn(&GenericParamDef, &GenericArg) -> subst::GenericArg<'tcx>, - inferred_kind: impl Fn(Option<&[subst::GenericArg<'tcx>]>, &GenericParamDef, bool) - -> subst::GenericArg<'tcx>, + inferred_kind: impl Fn( + Option<&[subst::GenericArg<'tcx>]>, + &GenericParamDef, + bool, + ) -> subst::GenericArg<'tcx>, ) -> SubstsRef<'tcx> { // Collect the segments of the path; we need to substitute arguments // for parameters throughout the entire path (wherever there are @@ -531,8 +514,11 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { if let Some(¶m) = params.peek() { if param.index == 0 { if let GenericParamDefKind::Type { .. } = param.kind { - substs.push(self_ty.map(|ty| ty.into()) - .unwrap_or_else(|| inferred_kind(None, param, true))); + substs.push( + self_ty + .map(|ty| ty.into()) + .unwrap_or_else(|| inferred_kind(None, param, true)), + ); params.next(); } } @@ -542,8 +528,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // Check whether this segment takes generic arguments and the user has provided any. 
let (generic_args, infer_args) = args_for_def_id(def_id); - let mut args = generic_args.iter().flat_map(|generic_args| generic_args.args.iter()) - .peekable(); + let mut args = + generic_args.iter().flat_map(|generic_args| generic_args.args.iter()).peekable(); loop { // We're going to iterate through the generic arguments that the user @@ -628,21 +614,23 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { /// `[Vec<u8>, u8]` and `generic_args` are the arguments for the associated /// type itself: `['a]`. The returned `SubstsRef` concatenates these two /// lists: `[Vec<u8>, u8, 'a]`. - fn create_substs_for_ast_path<'a>(&self, + fn create_substs_for_ast_path<'a>( + &self, span: Span, def_id: DefId, parent_substs: &[subst::GenericArg<'tcx>], generic_args: &'a hir::GenericArgs, infer_args: bool, - self_ty: Option<Ty<'tcx>>) - -> (SubstsRef<'tcx>, Vec<ConvertedBinding<'a, 'tcx>>, Option<Vec<Span>>) - { + self_ty: Option<Ty<'tcx>>, + ) -> (SubstsRef<'tcx>, Vec<ConvertedBinding<'a, 'tcx>>, Option<Vec<Span>>) { // If the type is parameterized by this region, then replace this // region with the current anon region binding (in other words, // whatever & would get replaced with). - debug!("create_substs_for_ast_path(def_id={:?}, self_ty={:?}, \ + debug!( + "create_substs_for_ast_path(def_id={:?}, self_ty={:?}, \ generic_args={:?})", - def_id, self_ty, generic_args); + def_id, self_ty, generic_args + ); let tcx = self.tcx(); let generic_params = tcx.generics_of(def_id); @@ -670,9 +658,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { infer_args, ); - let is_object = self_ty.map_or(false, |ty| { - ty == self.tcx().types.trait_object_dummy_self - }); + let is_object = self_ty.map_or(false, |ty| ty == self.tcx().types.trait_object_dummy_self); let default_needs_object_self = |param: &ty::GenericParamDef| { if let GenericParamDefKind::Type { has_default, .. } = param.kind { if is_object && has_default { @@ -697,19 +683,17 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // Provide the generic args, and whether types should be inferred. |_| (Some(generic_args), infer_args), // Provide substitutions for parameters for which (valid) arguments have been provided. - |param, arg| { - match (&param.kind, arg) { - (GenericParamDefKind::Lifetime, GenericArg::Lifetime(lt)) => { - self.ast_region_to_region(&lt, Some(param)).into() - } - (GenericParamDefKind::Type { .. }, GenericArg::Type(ty)) => { - self.ast_ty_to_ty(&ty).into() - } - (GenericParamDefKind::Const, GenericArg::Const(ct)) => { - self.ast_const_to_const(&ct.value, tcx.type_of(param.def_id)).into() - } - _ => unreachable!(), + |param, arg| match (&param.kind, arg) { + (GenericParamDefKind::Lifetime, GenericArg::Lifetime(lt)) => { + self.ast_region_to_region(&lt, Some(param)).into() + } + (GenericParamDefKind::Type { .. }, GenericArg::Type(ty)) => { + self.ast_ty_to_ty(&ty).into() } + (GenericParamDefKind::Const, GenericArg::Const(ct)) => { + self.ast_const_to_const(&ct.value, tcx.type_of(param.def_id)).into() + } + _ => unreachable!(), }, // Provide substitutions for parameters for which arguments are inferred. |substs, param, infer_args| { @@ -725,32 +709,36 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // defaults. This will lead to an ICE if we are not // careful!
if default_needs_object_self(param) { - struct_span_err!(tcx.sess, span, E0393, + struct_span_err!( + tcx.sess, + span, + E0393, "the type parameter `{}` must be explicitly specified", param.name ) - .span_label(span, format!( - "missing reference to `{}`", param.name)) - .note(&format!( - "because of the default `Self` reference, type parameters \ - must be specified on object types")) - .emit(); + .span_label(span, format!("missing reference to `{}`", param.name)) + .note(&format!( + "because of the default `Self` reference, type parameters \ + must be specified on object types" + )) + .emit(); tcx.types.err.into() } else { // This is a default type parameter. self.normalize_ty( span, - tcx.at(span).type_of(param.def_id) - .subst_spanned(tcx, substs.unwrap(), Some(span)) - ).into() + tcx.at(span).type_of(param.def_id).subst_spanned( + tcx, + substs.unwrap(), + Some(span), + ), + ) + .into() } } else if infer_args { // No type parameters were provided, we can infer all. - let param = if !default_needs_object_self(param) { - Some(param) - } else { - None - }; + let param = + if !default_needs_object_self(param) { Some(param) } else { None }; self.ty_infer(param, span).into() } else { // We've already errored above about the mismatch. @@ -781,24 +769,26 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // not a "type parameter" of the `Iterator` trait, but rather // a restriction on `<T as Iterator>::Item`, so it is passed // back separately. - let assoc_bindings = generic_args.bindings.iter() + let assoc_bindings = generic_args + .bindings + .iter() .map(|binding| { let kind = match binding.kind { - hir::TypeBindingKind::Equality { ref ty } => - ConvertedBindingKind::Equality(self.ast_ty_to_ty(ty)), - hir::TypeBindingKind::Constraint { ref bounds } => - ConvertedBindingKind::Constraint(bounds), + hir::TypeBindingKind::Equality { ref ty } => { + ConvertedBindingKind::Equality(self.ast_ty_to_ty(ty)) + } + hir::TypeBindingKind::Constraint { ref bounds } => { + ConvertedBindingKind::Constraint(bounds) + } }; - ConvertedBinding { - item_name: binding.ident, - kind, - span: binding.span, - } + ConvertedBinding { item_name: binding.ident, kind, span: binding.span } }) .collect(); - debug!("create_substs_for_ast_path(generic_params={:?}, self_ty={:?}) -> {:?}", - generic_params, self_ty, substs); + debug!( + "create_substs_for_ast_path(generic_params={:?}, self_ty={:?}) -> {:?}", + generic_params, self_ty, substs + ); (substs, assoc_bindings, potential_assoc_types) } @@ -823,7 +813,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { item_segment.generic_args(), item_segment.infer_args, None, - ).0 + ) + .0 } } @@ -833,21 +824,24 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { /// /// If the `projections` argument is `None`, then assoc type bindings like `Foo<T = X>` /// are disallowed. Otherwise, they are pushed onto the vector given. - pub fn instantiate_mono_trait_ref(&self, + pub fn instantiate_mono_trait_ref( + &self, trait_ref: &hir::TraitRef, - self_ty: Ty<'tcx> - ) -> ty::TraitRef<'tcx> - { + self_ty: Ty<'tcx>, + ) -> ty::TraitRef<'tcx> { self.prohibit_generics(trait_ref.path.segments.split_last().unwrap().1); - self.ast_path_to_mono_trait_ref(trait_ref.path.span, - trait_ref.trait_def_id(), - self_ty, - trait_ref.path.segments.last().unwrap()) + self.ast_path_to_mono_trait_ref( + trait_ref.path.span, + trait_ref.trait_def_id(), + self_ty, + trait_ref.path.segments.last().unwrap(), + ) } /// The given trait-ref must actually be a trait. 
- pub(super) fn instantiate_poly_trait_ref_inner(&self, + pub(super) fn instantiate_poly_trait_ref_inner( + &self, trait_ref: &hir::TraitRef, span: Span, self_ty: Ty<'tcx>, @@ -873,20 +867,21 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let mut dup_bindings = FxHashMap::default(); for binding in &assoc_bindings { // Specify type to assert that error was already reported in `Err` case. - let _: Result<_, ErrorReported> = - self.add_predicates_for_ast_type_binding( - trait_ref.hir_ref_id, - poly_trait_ref, - binding, - bounds, - speculative, - &mut dup_bindings, - ); + let _: Result<_, ErrorReported> = self.add_predicates_for_ast_type_binding( + trait_ref.hir_ref_id, + poly_trait_ref, + binding, + bounds, + speculative, + &mut dup_bindings, + ); // Okay to ignore `Err` because of `ErrorReported` (see above). } - debug!("instantiate_poly_trait_ref({:?}, bounds={:?}) -> {:?}", - trait_ref, bounds, poly_trait_ref); + debug!( + "instantiate_poly_trait_ref({:?}, bounds={:?}) -> {:?}", + trait_ref, bounds, poly_trait_ref + ); potential_assoc_types } @@ -909,7 +904,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { /// where `'a` is a bound region at depth 0. Similarly, the `poly_trait_ref` would be /// `Bar<'a>`. The returned poly-trait-ref will have this binder instantiated explicitly, /// however. - pub fn instantiate_poly_trait_ref(&self, + pub fn instantiate_poly_trait_ref( + &self, poly_trait_ref: &hir::PolyTraitRef, self_ty: Ty<'tcx>, bounds: &mut Bounds<'tcx>, @@ -923,18 +919,15 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ) } - fn ast_path_to_mono_trait_ref(&self, + fn ast_path_to_mono_trait_ref( + &self, span: Span, trait_def_id: DefId, self_ty: Ty<'tcx>, - trait_segment: &hir::PathSegment - ) -> ty::TraitRef<'tcx> - { + trait_segment: &hir::PathSegment, + ) -> ty::TraitRef<'tcx> { let (substs, assoc_bindings, _) = - self.create_substs_for_ast_trait_ref(span, - trait_def_id, - self_ty, - trait_segment); + self.create_substs_for_ast_trait_ref(span, trait_def_id, self_ty, trait_segment); assoc_bindings.first().map(|b| AstConv::prohibit_assoc_ty_binding(self.tcx(), b.span)); ty::TraitRef::new(trait_def_id, substs) } @@ -946,13 +939,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { self_ty: Ty<'tcx>, trait_segment: &'a hir::PathSegment, ) -> (SubstsRef<'tcx>, Vec<ConvertedBinding<'a, 'tcx>>, Option<Vec<Span>>) { - debug!("create_substs_for_ast_trait_ref(trait_segment={:?})", - trait_segment); + debug!("create_substs_for_ast_trait_ref(trait_segment={:?})", trait_segment); let trait_def = self.tcx().trait_def(trait_def_id); - if !self.tcx().features().unboxed_closures && - trait_segment.generic_args().parenthesized != trait_def.paren_sugar + if !self.tcx().features().unboxed_closures + && trait_segment.generic_args().parenthesized != trait_def.paren_sugar { // For now, require that parenthetical notation be used only with `Fn()` etc. 
let msg = if trait_def.paren_sugar { @@ -964,22 +956,24 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { feature_err(&self.tcx().sess.parse_sess, sym::unboxed_closures, span, msg).emit(); } - self.create_substs_for_ast_path(span, - trait_def_id, - &[], - trait_segment.generic_args(), - trait_segment.infer_args, - Some(self_ty)) + self.create_substs_for_ast_path( + span, + trait_def_id, + &[], + trait_segment.generic_args(), + trait_segment.infer_args, + Some(self_ty), + ) } - fn trait_defines_associated_type_named(&self, - trait_def_id: DefId, - assoc_name: ast::Ident) - -> bool - { + fn trait_defines_associated_type_named( + &self, + trait_def_id: DefId, + assoc_name: ast::Ident, + ) -> bool { self.tcx().associated_items(trait_def_id).any(|item| { - item.kind == ty::AssocKind::Type && - self.tcx().hygienic_eq(assoc_name, item.ident, trait_def_id) + item.kind == ty::AssocKind::Type + && self.tcx().hygienic_eq(assoc_name, item.ident, trait_def_id) }) } @@ -1045,7 +1039,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { /// **A note on binders:** there is an implied binder around /// `param_ty` and `ast_bounds`. See `instantiate_poly_trait_ref` /// for more details. - fn add_bounds(&self, + fn add_bounds( + &self, param_ty: Ty<'tcx>, ast_bounds: &[hir::GenericBound], bounds: &mut Bounds<'tcx>, @@ -1055,25 +1050,20 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { for ast_bound in ast_bounds { match *ast_bound { - hir::GenericBound::Trait(ref b, hir::TraitBoundModifier::None) => - trait_bounds.push(b), + hir::GenericBound::Trait(ref b, hir::TraitBoundModifier::None) => { + trait_bounds.push(b) + } hir::GenericBound::Trait(_, hir::TraitBoundModifier::Maybe) => {} - hir::GenericBound::Outlives(ref l) => - region_bounds.push(l), + hir::GenericBound::Outlives(ref l) => region_bounds.push(l), } } for bound in trait_bounds { - let _ = self.instantiate_poly_trait_ref( - bound, - param_ty, - bounds, - ); + let _ = self.instantiate_poly_trait_ref(bound, param_ty, bounds); } - bounds.region_bounds.extend(region_bounds - .into_iter() - .map(|r| (self.ast_region_to_region(r, None), r.span)) + bounds.region_bounds.extend( + region_bounds.into_iter().map(|r| (self.ast_region_to_region(r, None), r.span)), ); } @@ -1093,7 +1083,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { /// example above, but is not true in supertrait listings like `trait Foo: Bar + Baz`. /// /// `span` should be the declaration size of the parameter. 
- pub fn compute_bounds(&self, + pub fn compute_bounds( + &self, param_ty: Ty<'tcx>, ast_bounds: &[hir::GenericBound], sized_by_default: SizedByDefault, @@ -1105,11 +1096,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { bounds.trait_bounds.sort_by_key(|(t, _)| t.def_id()); bounds.implicitly_sized = if let SizedByDefault::Yes = sized_by_default { - if !self.is_unsized(ast_bounds, span) { - Some(span) - } else { - None - } + if !self.is_unsized(ast_bounds, span) { Some(span) } else { None } } else { None }; @@ -1172,40 +1159,45 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { span_bug!( binding.span, "anonymous bound region {:?} in binding but not trait ref", - br); + br + ); } }; - struct_span_err!(tcx.sess, - binding.span, - E0582, - "binding for associated type `{}` references lifetime `{}`, \ + struct_span_err!( + tcx.sess, + binding.span, + E0582, + "binding for associated type `{}` references lifetime `{}`, \ which does not appear in the trait input types", - binding.item_name, br_name) - .emit(); + binding.item_name, + br_name + ) + .emit(); } } } - let candidate = if self.trait_defines_associated_type_named(trait_ref.def_id(), - binding.item_name) { - // Simple case: X is defined in the current trait. - Ok(trait_ref) - } else { - // Otherwise, we have to walk through the supertraits to find - // those that do. - self.one_bound_for_assoc_type( - || traits::supertraits(tcx, trait_ref), - &trait_ref.print_only_trait_path().to_string(), - binding.item_name, - binding.span - ) - }?; + let candidate = + if self.trait_defines_associated_type_named(trait_ref.def_id(), binding.item_name) { + // Simple case: X is defined in the current trait. + Ok(trait_ref) + } else { + // Otherwise, we have to walk through the supertraits to find + // those that do. + self.one_bound_for_assoc_type( + || traits::supertraits(tcx, trait_ref), + &trait_ref.print_only_trait_path().to_string(), + binding.item_name, + binding.span, + ) + }?; let (assoc_ident, def_scope) = tcx.adjust_ident_and_get_scope(binding.item_name, candidate.def_id(), hir_ref_id); - let assoc_ty = tcx.associated_items(candidate.def_id()).find(|i| { - i.kind == ty::AssocKind::Type && i.ident.modern() == assoc_ident - }).expect("missing associated type"); + let assoc_ty = tcx + .associated_items(candidate.def_id()) + .find(|i| i.kind == ty::AssocKind::Type && i.ident.modern() == assoc_ident) + .expect("missing associated type"); if !assoc_ty.vis.is_accessible_from(def_scope, tcx) { let msg = format!("associated type `{}` is private", binding.item_name); @@ -1214,16 +1206,21 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { tcx.check_stability(assoc_ty.def_id, Some(hir_ref_id), binding.span); if !speculative { - dup_bindings.entry(assoc_ty.def_id) + dup_bindings + .entry(assoc_ty.def_id) .and_modify(|prev_span| { - struct_span_err!(self.tcx().sess, binding.span, E0719, - "the value of the associated type `{}` (from the trait `{}`) \ + struct_span_err!( + self.tcx().sess, + binding.span, + E0719, + "the value of the associated type `{}` (from the trait `{}`) \ is already specified", - binding.item_name, - tcx.def_path_str(assoc_ty.container.id())) - .span_label(binding.span, "re-bound here") - .span_label(*prev_span, format!("`{}` bound here first", binding.item_name)) - .emit(); + binding.item_name, + tcx.def_path_str(assoc_ty.container.id()) + ) + .span_label(binding.span, "re-bound here") + .span_label(*prev_span, format!("`{}` bound here first", binding.item_name)) + .emit(); }) .or_insert(binding.span); } @@ -1234,16 +1231,17 @@ impl<'o, 'tcx> dyn 
AstConv<'tcx> + 'o { // the "projection predicate" for: // // `<T as Iterator>::Item = u32` - bounds.projection_bounds.push((candidate.map_bound(|trait_ref| { - ty::ProjectionPredicate { + bounds.projection_bounds.push(( + candidate.map_bound(|trait_ref| ty::ProjectionPredicate { projection_ty: ty::ProjectionTy::from_ref_and_name( tcx, trait_ref, binding.item_name, ), ty, - } - }), binding.span)); + }), + binding.span, + )); } ConvertedBindingKind::Constraint(ast_bounds) => { // "Desugar" a constraint like `T: Iterator<Item: Debug>` to @@ -1259,36 +1257,25 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { Ok(()) } - fn ast_path_to_ty(&self, - span: Span, - did: DefId, - item_segment: &hir::PathSegment) - -> Ty<'tcx> - { + fn ast_path_to_ty(&self, span: Span, did: DefId, item_segment: &hir::PathSegment) -> Ty<'tcx> { let substs = self.ast_path_substs_for_ty(span, did, item_segment); - self.normalize_ty( - span, - self.tcx().at(span).type_of(did).subst(self.tcx(), substs) - ) + self.normalize_ty(span, self.tcx().at(span).type_of(did).subst(self.tcx(), substs)) } - fn conv_object_ty_poly_trait_ref(&self, + fn conv_object_ty_poly_trait_ref( + &self, span: Span, trait_bounds: &[hir::PolyTraitRef], - lifetime: &hir::Lifetime) - -> Ty<'tcx> - { + lifetime: &hir::Lifetime, + ) -> Ty<'tcx> { let tcx = self.tcx(); let mut bounds = Bounds::default(); let mut potential_assoc_types = Vec::new(); let dummy_self = self.tcx().types.trait_object_dummy_self; for trait_bound in trait_bounds.iter().rev() { - let cur_potential_assoc_types = self.instantiate_poly_trait_ref( - trait_bound, - dummy_self, - &mut bounds, - ); + let cur_potential_assoc_types = + self.instantiate_poly_trait_ref(trait_bound, dummy_self, &mut bounds); potential_assoc_types.extend(cur_potential_assoc_types.into_iter().flatten()); } @@ -1301,19 +1288,23 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { if regular_traits.len() > 1 { let first_trait = ®ular_traits[0]; let additional_trait = ®ular_traits[1]; - let mut err = struct_span_err!(tcx.sess, additional_trait.bottom().1, E0225, + let mut err = struct_span_err!( + tcx.sess, + additional_trait.bottom().1, + E0225, "only auto traits can be used as additional traits in a trait object" ); - additional_trait.label_with_exp_info(&mut err, - "additional non-auto trait", "additional use"); - first_trait.label_with_exp_info(&mut err, - "first non-auto trait", "first use"); + additional_trait.label_with_exp_info( + &mut err, + "additional non-auto trait", + "additional use", + ); + first_trait.label_with_exp_info(&mut err, "first non-auto trait", "first use"); err.emit(); } if regular_traits.is_empty() && auto_traits.is_empty() { - span_err!(tcx.sess, span, E0224, - "at least one trait is required for an object type"); + span_err!(tcx.sess, span, E0224, "at least one trait is required for an object type"); return tcx.types.err; } @@ -1327,8 +1318,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { tcx.report_object_safety_error( span, item.trait_ref().def_id(), - object_safety_violations - ).emit(); + object_safety_violations, + ) + .emit(); return tcx.types.err; } } @@ -1336,7 +1328,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // Use a `BTreeSet` to keep output in a more consistent order. 
let mut associated_types = BTreeSet::default(); - let regular_traits_refs = bounds.trait_bounds + let regular_traits_refs = bounds + .trait_bounds .into_iter() .filter(|(trait_ref, _)| !tcx.trait_is_auto(trait_ref.def_id())) .map(|(trait_ref, _)| trait_ref); @@ -1344,16 +1337,16 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { debug!("conv_object_ty_poly_trait_ref: observing object predicate `{:?}`", trait_ref); match trait_ref { ty::Predicate::Trait(pred) => { - associated_types - .extend(tcx.associated_items(pred.def_id()) - .filter(|item| item.kind == ty::AssocKind::Type) - .map(|item| item.def_id)); + associated_types.extend( + tcx.associated_items(pred.def_id()) + .filter(|item| item.kind == ty::AssocKind::Type) + .map(|item| item.def_id), + ); } ty::Predicate::Projection(pred) => { // A `Self` within the original bound will be substituted with a // `trait_object_dummy_self`, so check for that. - let references_self = - pred.skip_binder().ty.walk().any(|t| t == dummy_self); + let references_self = pred.skip_binder().ty.walk().any(|t| t == dummy_self); // If the projection output contains `Self`, force the user to // elaborate it explicitly to avoid a lot of complexity. @@ -1376,7 +1369,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { bounds.projection_bounds.push((pred, DUMMY_SP)) } } - _ => () + _ => (), } } @@ -1385,15 +1378,19 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } if !associated_types.is_empty() { - let names = associated_types.iter().map(|item_def_id| { - let assoc_item = tcx.associated_item(*item_def_id); - let trait_def_id = assoc_item.container.id(); - format!( - "`{}` (from the trait `{}`)", - assoc_item.ident, - tcx.def_path_str(trait_def_id), - ) - }).collect::<Vec<_>>().join(", "); + let names = associated_types + .iter() + .map(|item_def_id| { + let assoc_item = tcx.associated_item(*item_def_id); + let trait_def_id = assoc_item.container.id(); + format!( + "`{}` (from the trait `{}`)", + assoc_item.ident, + tcx.def_path_str(trait_def_id), + ) + }) + .collect::<Vec<_>>() + .join(", "); let mut err = struct_span_err!( tcx.sess, span, @@ -1424,9 +1421,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { err.span_label(sp, format!("`{}` defined here", assoc_item.ident)); } if suggest { - if let Ok(snippet) = tcx.sess.source_map().span_to_snippet( - potential_assoc_types_spans[i], - ) { + if let Ok(snippet) = + tcx.sess.source_map().span_to_snippet(potential_assoc_types_spans[i]) + { suggestions.push(( potential_assoc_types_spans[i], format!("{} = {}", assoc_item.ident, snippet), @@ -1435,13 +1432,11 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } } if !suggestions.is_empty() { - let msg = format!("if you meant to specify the associated {}, write", - if suggestions.len() == 1 { "type" } else { "types" }); - err.multipart_suggestion( - &msg, - suggestions, - Applicability::MaybeIncorrect, + let msg = format!( + "if you meant to specify the associated {}, write", + if suggestions.len() == 1 { "type" } else { "types" } ); + err.multipart_suggestion(&msg, suggestions, Applicability::MaybeIncorrect); } err.emit(); } @@ -1462,18 +1457,21 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // completely ignores trait aliases, which could be object safety hazards. We // `delay_span_bug` here to avoid an ICE in stable even when the feature is // disabled. 
(#66420) - tcx.sess.delay_span_bug(DUMMY_SP, &format!( - "trait_ref_to_existential called on {:?} with non-dummy Self", - trait_ref, - )); + tcx.sess.delay_span_bug( + DUMMY_SP, + &format!( + "trait_ref_to_existential called on {:?} with non-dummy Self", + trait_ref, + ), + ); } ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref) }; // Erase the `dummy_self` (`trait_object_dummy_self`) used above. - let existential_trait_refs = regular_traits.iter().map(|i| { - i.trait_ref().map_bound(|trait_ref| trait_ref_to_existential(trait_ref)) - }); + let existential_trait_refs = regular_traits + .iter() + .map(|i| i.trait_ref().map_bound(|trait_ref| trait_ref_to_existential(trait_ref))); let existential_projections = bounds.projection_bounds.iter().map(|(bound, _)| { bound.map_bound(|b| { let trait_ref = trait_ref_to_existential(b.projection_ty.trait_ref(tcx)); @@ -1486,15 +1484,17 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { }); // Calling `skip_binder` is okay because the predicates are re-bound. - let regular_trait_predicates = existential_trait_refs.map( - |trait_ref| ty::ExistentialPredicate::Trait(*trait_ref.skip_binder())); - let auto_trait_predicates = auto_traits.into_iter().map( - |trait_ref| ty::ExistentialPredicate::AutoTrait(trait_ref.trait_ref().def_id())); - let mut v = - regular_trait_predicates + let regular_trait_predicates = existential_trait_refs + .map(|trait_ref| ty::ExistentialPredicate::Trait(*trait_ref.skip_binder())); + let auto_trait_predicates = auto_traits + .into_iter() + .map(|trait_ref| ty::ExistentialPredicate::AutoTrait(trait_ref.trait_ref().def_id())); + let mut v = regular_trait_predicates .chain(auto_trait_predicates) - .chain(existential_projections - .map(|x| ty::ExistentialPredicate::Projection(*x.skip_binder()))) + .chain( + existential_projections + .map(|x| ty::ExistentialPredicate::Projection(*x.skip_binder())), + ) .collect::<SmallVec<[_; 8]>>(); v.sort_by(|a, b| a.stable_cmp(tcx, b)); v.dedup(); @@ -1509,9 +1509,13 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { self.ast_region_to_region(lifetime, None) } else { self.re_infer(None, span).unwrap_or_else(|| { - span_err!(tcx.sess, span, E0228, + span_err!( + tcx.sess, + span, + E0228, "the lifetime bound for this object type cannot be deduced \ - from context; please supply an explicit bound"); + from context; please supply an explicit bound" + ); tcx.lifetimes.re_static }) } @@ -1535,7 +1539,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { if let (Some(_), Ok(snippet)) = ( self.tcx().sess.confused_type_with_std_module.borrow().get(&span), self.tcx().sess.source_map().span_to_snippet(span), - ) { + ) { err.span_suggestion( span, "you are looking for the module in `std`, not the primitive type", @@ -1544,10 +1548,10 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ); } else { err.span_suggestion( - span, - "use fully-qualified syntax", - format!("<{} as {}>::{}", type_str, trait_str, name), - Applicability::HasPlaceholders + span, + "use fully-qualified syntax", + format!("<{} as {}>::{}", type_str, trait_str, name), + Applicability::HasPlaceholders, ); } err.emit(); @@ -1557,19 +1561,17 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // given by `assoc_name`. `ty_param_def_id` is the `DefId` of the type parameter // This function will fail if there are no suitable bounds or there is // any ambiguity. 
- fn find_bound_for_assoc_item(&self, - ty_param_def_id: DefId, - assoc_name: ast::Ident, - span: Span) - -> Result<ty::PolyTraitRef<'tcx>, ErrorReported> - { + fn find_bound_for_assoc_item( + &self, + ty_param_def_id: DefId, + assoc_name: ast::Ident, + span: Span, + ) -> Result<ty::PolyTraitRef<'tcx>, ErrorReported> { let tcx = self.tcx(); debug!( "find_bound_for_assoc_item(ty_param_def_id={:?}, assoc_name={:?}, span={:?})", - ty_param_def_id, - assoc_name, - span, + ty_param_def_id, assoc_name, span, ); let predicates = &self.get_type_parameter_bounds(span, ty_param_def_id).predicates; @@ -1579,25 +1581,30 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let param_hir_id = tcx.hir().as_local_hir_id(ty_param_def_id).unwrap(); let param_name = tcx.hir().ty_param_name(param_hir_id); self.one_bound_for_assoc_type( - || traits::transitive_bounds(tcx, predicates - .iter().filter_map(|(p, _)| p.to_opt_poly_trait_ref())), + || { + traits::transitive_bounds( + tcx, + predicates.iter().filter_map(|(p, _)| p.to_opt_poly_trait_ref()), + ) + }, ¶m_name.as_str(), assoc_name, span, ) } - fn one_bound_for_assoc_type<I>(&self, - all_candidates: impl Fn() -> I, - ty_param_name: &str, - assoc_name: ast::Ident, - span: Span) - -> Result<ty::PolyTraitRef<'tcx>, ErrorReported> - where I: Iterator<Item = ty::PolyTraitRef<'tcx>> + fn one_bound_for_assoc_type<I>( + &self, + all_candidates: impl Fn() -> I, + ty_param_name: &str, + assoc_name: ast::Ident, + span: Span, + ) -> Result<ty::PolyTraitRef<'tcx>, ErrorReported> + where + I: Iterator<Item = ty::PolyTraitRef<'tcx>>, { - let mut matching_candidates = all_candidates().filter(|r| { - self.trait_defines_associated_type_named(r.def_id(), assoc_name) - }); + let mut matching_candidates = all_candidates() + .filter(|r| self.trait_defines_associated_type_named(r.def_id(), assoc_name)); let bound = match matching_candidates.next() { Some(bound) => bound, @@ -1606,7 +1613,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { all_candidates, ty_param_name, assoc_name, - span + span, ); return Err(ErrorReported); } @@ -1619,28 +1626,42 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let bounds = iter::once(bound).chain(iter::once(bound2)).chain(matching_candidates); let mut err = struct_span_err!( - self.tcx().sess, span, E0221, + self.tcx().sess, + span, + E0221, "ambiguous associated type `{}` in bounds of `{}`", assoc_name, - ty_param_name); + ty_param_name + ); err.span_label(span, format!("ambiguous associated type `{}`", assoc_name)); for bound in bounds { - let bound_span = self.tcx().associated_items(bound.def_id()).find(|item| { - item.kind == ty::AssocKind::Type && - self.tcx().hygienic_eq(assoc_name, item.ident, bound.def_id()) - }) + let bound_span = self + .tcx() + .associated_items(bound.def_id()) + .find(|item| { + item.kind == ty::AssocKind::Type + && self.tcx().hygienic_eq(assoc_name, item.ident, bound.def_id()) + }) .and_then(|item| self.tcx().hir().span_if_local(item.def_id)); if let Some(span) = bound_span { - err.span_label(span, format!("ambiguous `{}` from `{}`", - assoc_name, - bound.print_only_trait_path())); + err.span_label( + span, + format!( + "ambiguous `{}` from `{}`", + assoc_name, + bound.print_only_trait_path() + ), + ); } else { - span_note!(&mut err, span, - "associated type `{}` could derive from `{}`", - ty_param_name, - bound.print_only_trait_path()); + span_note!( + &mut err, + span, + "associated type `{}` could derive from `{}`", + ty_param_name, + bound.print_only_trait_path() + ); } } err.emit(); @@ -1649,34 +1670,35 @@ impl<'o, 'tcx> dyn 
AstConv<'tcx> + 'o { return Ok(bound); } - fn complain_about_assoc_type_not_found<I>(&self, - all_candidates: impl Fn() -> I, - ty_param_name: &str, - assoc_name: ast::Ident, - span: Span) - where I: Iterator<Item = ty::PolyTraitRef<'tcx>> { - let mut err = struct_span_err!(self.tcx().sess, span, E0220, - "associated type `{}` not found for `{}`", - assoc_name, - ty_param_name); + fn complain_about_assoc_type_not_found<I>( + &self, + all_candidates: impl Fn() -> I, + ty_param_name: &str, + assoc_name: ast::Ident, + span: Span, + ) where + I: Iterator<Item = ty::PolyTraitRef<'tcx>>, + { + let mut err = struct_span_err!( + self.tcx().sess, + span, + E0220, + "associated type `{}` not found for `{}`", + assoc_name, + ty_param_name + ); let all_candidate_names: Vec<_> = all_candidates() .map(|r| self.tcx().associated_items(r.def_id())) .flatten() - .filter_map(|item| - if item.kind == ty::AssocKind::Type { - Some(item.ident.name) - } else { - None - } + .filter_map( + |item| if item.kind == ty::AssocKind::Type { Some(item.ident.name) } else { None }, ) .collect(); - if let Some(suggested_name) = find_best_match_for_name( - all_candidate_names.iter(), - &assoc_name.as_str(), - None, - ) { + if let Some(suggested_name) = + find_best_match_for_name(all_candidate_names.iter(), &assoc_name.as_str(), None) + { err.span_suggestion( span, "there is an associated type with a similar name", @@ -1684,10 +1706,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { Applicability::MaybeIncorrect, ); } else { - err.span_label( - span, - format!("associated type `{}` not found", assoc_name) - ); + err.span_label(span, format!("associated type `{}` not found", assoc_name)); } err.emit(); @@ -1717,9 +1736,10 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let mut variant_resolution = None; if let ty::Adt(adt_def, _) = qself_ty.kind { if adt_def.is_enum() { - let variant_def = adt_def.variants.iter().find(|vd| { - tcx.hygienic_eq(assoc_ident, vd.ident, adt_def.did) - }); + let variant_def = adt_def + .variants + .iter() + .find(|vd| tcx.hygienic_eq(assoc_ident, vd.ident, adt_def.did)); if let Some(variant_def) = variant_def { if permit_variants { tcx.check_stability(variant_def.def_id, Some(hir_ref_id), span); @@ -1750,11 +1770,11 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { || traits::supertraits(tcx, ty::Binder::bind(trait_ref)), "Self", assoc_ident, - span + span, )? } - (&ty::Param(_), Res::SelfTy(Some(param_did), None)) | - (&ty::Param(_), Res::Def(DefKind::TyParam, param_did)) => { + (&ty::Param(_), Res::SelfTy(Some(param_did), None)) + | (&ty::Param(_), Res::Def(DefKind::TyParam, param_did)) => { self.find_bound_for_assoc_item(param_did, assoc_ident, span)? 
} _ => { @@ -1809,10 +1829,10 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let trait_did = bound.def_id(); let (assoc_ident, def_scope) = tcx.adjust_ident_and_get_scope(assoc_ident, trait_did, hir_ref_id); - let item = tcx.associated_items(trait_did).find(|i| { - Namespace::from(i.kind) == Namespace::Type && - i.ident.modern() == assoc_ident - }).expect("missing associated type"); + let item = tcx + .associated_items(trait_did) + .find(|i| Namespace::from(i.kind) == Namespace::Type && i.ident.modern() == assoc_ident) + .expect("missing associated type"); let ty = self.projected_ty_from_poly_trait_ref(span, item.def_id, assoc_segment, bound); let ty = self.normalize_ty(span, ty); @@ -1833,8 +1853,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ); let mut could_refer_to = |kind: DefKind, def_id, also| { - let note_msg = format!("`{}` could{} refer to {} defined here", - assoc_ident, also, kind.descr(def_id)); + let note_msg = format!( + "`{}` could{} refer to {} defined here", + assoc_ident, + also, + kind.descr(def_id) + ); err.span_note(tcx.def_span(def_id), ¬e_msg); }; could_refer_to(DefKind::Variant, variant_def_id, ""); @@ -1845,20 +1869,21 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { "use fully-qualified syntax", format!("<{} as {}>::{}", qself_ty, tcx.item_name(trait_did), assoc_ident), Applicability::MachineApplicable, - ).emit(); + ) + .emit(); } Ok((ty, kind, item.def_id)) } - fn qpath_to_ty(&self, - span: Span, - opt_self_ty: Option<Ty<'tcx>>, - item_def_id: DefId, - trait_segment: &hir::PathSegment, - item_segment: &hir::PathSegment) - -> Ty<'tcx> - { + fn qpath_to_ty( + &self, + span: Span, + opt_self_ty: Option<Ty<'tcx>>, + item_def_id: DefId, + trait_segment: &hir::PathSegment, + item_segment: &hir::PathSegment, + ) -> Ty<'tcx> { let tcx = self.tcx(); let trait_def_id = tcx.parent(item_def_id).unwrap(); @@ -1874,7 +1899,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { debug!("qpath_to_ty: self.item_def_id()={:?}", def_id); - let parent_def_id = def_id.and_then(|def_id| tcx.hir().as_local_hir_id(def_id)) + let parent_def_id = def_id + .and_then(|def_id| tcx.hir().as_local_hir_id(def_id)) .map(|hir_id| tcx.hir().get_parent_did(hir_id)); debug!("qpath_to_ty: parent_def_id={:?}", parent_def_id); @@ -1901,10 +1927,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { debug!("qpath_to_ty: self_type={:?}", self_ty); - let trait_ref = self.ast_path_to_mono_trait_ref(span, - trait_def_id, - self_ty, - trait_segment); + let trait_ref = self.ast_path_to_mono_trait_ref(span, trait_def_id, self_ty, trait_segment); let item_substs = self.create_substs_for_associated_item( tcx, @@ -1920,26 +1943,34 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } pub fn prohibit_generics<'a, T: IntoIterator<Item = &'a hir::PathSegment>>( - &self, segments: T) -> bool { + &self, + segments: T, + ) -> bool { let mut has_err = false; for segment in segments { let (mut err_for_lt, mut err_for_ty, mut err_for_ct) = (false, false, false); for arg in &segment.generic_args().args { let (span, kind) = match arg { hir::GenericArg::Lifetime(lt) => { - if err_for_lt { continue } + if err_for_lt { + continue; + } err_for_lt = true; has_err = true; (lt.span, "lifetime") } hir::GenericArg::Type(ty) => { - if err_for_ty { continue } + if err_for_ty { + continue; + } err_for_ty = true; has_err = true; (ty.span, "type") } hir::GenericArg::Const(ct) => { - if err_for_ct { continue } + if err_for_ct { + continue; + } err_for_ct = true; (ct.span, "const") } @@ -1967,8 +1998,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } pub fn 
prohibit_assoc_ty_binding(tcx: TyCtxt<'_>, span: Span) { - let mut err = struct_span_err!(tcx.sess, span, E0229, - "associated type bindings are not allowed here"); + let mut err = struct_span_err!( + tcx.sess, + span, + E0229, + "associated type bindings are not allowed here" + ); err.span_label(span, "associated type not allowed here").emit(); } @@ -2045,8 +2080,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } // Case 2. Reference to a variant constructor. - DefKind::Ctor(CtorOf::Variant, ..) - | DefKind::Variant => { + DefKind::Ctor(CtorOf::Variant, ..) | DefKind::Variant => { let adt_def = self_ty.map(|t| t.ty_adt_def().unwrap()); let (generics_def_id, index) = if let Some(adt_def) = adt_def { debug_assert!(adt_def.is_enum()); @@ -2079,16 +2113,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } // Case 3. Reference to a top-level value. - DefKind::Fn - | DefKind::Const - | DefKind::ConstParam - | DefKind::Static => { + DefKind::Fn | DefKind::Const | DefKind::ConstParam | DefKind::Static => { path_segs.push(PathSeg(def_id, last)); } // Case 4. Reference to a method or associated const. - DefKind::Method - | DefKind::AssocConst => { + DefKind::Method | DefKind::AssocConst => { if segments.len() >= 2 { let generics = tcx.generics_of(def_id); path_segs.push(PathSeg(generics.parent.unwrap(), last - 1)); @@ -2105,15 +2135,18 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } // Check a type `Path` and convert it to a `Ty`. - pub fn res_to_ty(&self, - opt_self_ty: Option<Ty<'tcx>>, - path: &hir::Path, - permit_variants: bool) - -> Ty<'tcx> { + pub fn res_to_ty( + &self, + opt_self_ty: Option<Ty<'tcx>>, + path: &hir::Path, + permit_variants: bool, + ) -> Ty<'tcx> { let tcx = self.tcx(); - debug!("res_to_ty(res={:?}, opt_self_ty={:?}, path_segments={:?})", - path.res, opt_self_ty, path.segments); + debug!( + "res_to_ty(res={:?}, opt_self_ty={:?}, path_segments={:?})", + path.res, opt_self_ty, path.segments + ); let span = path.span; match path.res { @@ -2123,10 +2156,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let item_segment = path.segments.split_last().unwrap(); self.prohibit_generics(item_segment.1); let substs = self.ast_path_substs_for_ty(span, did, item_segment.0); - self.normalize_ty( - span, - tcx.mk_opaque(did, substs), - ) + self.normalize_ty(span, tcx.mk_opaque(did, substs)) } Res::Def(DefKind::Enum, did) | Res::Def(DefKind::TyAlias, did) @@ -2146,13 +2176,11 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { self.def_ids_for_value_path_segments(&path.segments, None, kind, def_id); let generic_segs: FxHashSet<_> = path_segs.iter().map(|PathSeg(_, index)| index).collect(); - self.prohibit_generics(path.segments.iter().enumerate().filter_map(|(index, seg)| { - if !generic_segs.contains(&index) { - Some(seg) - } else { - None - } - })); + self.prohibit_generics(path.segments.iter().enumerate().filter_map( + |(index, seg)| { + if !generic_segs.contains(&index) { Some(seg) } else { None } + }, + )); let PathSeg(def_id, index) = path_segs.last().unwrap(); self.ast_path_to_ty(span, *def_id, &path.segments[*index]) @@ -2184,11 +2212,13 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { Res::Def(DefKind::AssocTy, def_id) => { debug_assert!(path.segments.len() >= 2); self.prohibit_generics(&path.segments[..path.segments.len() - 2]); - self.qpath_to_ty(span, - opt_self_ty, - def_id, - &path.segments[path.segments.len() - 2], - path.segments.last().unwrap()) + self.qpath_to_ty( + span, + opt_self_ty, + def_id, + &path.segments[path.segments.len() - 2], + path.segments.last().unwrap(), + ) } Res::PrimTy(prim_ty) => { 
assert_eq!(opt_self_ty, None); @@ -2199,44 +2229,36 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { hir::Int(it) => tcx.mk_mach_int(it), hir::Uint(uit) => tcx.mk_mach_uint(uit), hir::Float(ft) => tcx.mk_mach_float(ft), - hir::Str => tcx.mk_str() + hir::Str => tcx.mk_str(), } } Res::Err => { self.set_tainted_by_errors(); return self.tcx().types.err; } - _ => span_bug!(span, "unexpected resolution: {:?}", path.res) + _ => span_bug!(span, "unexpected resolution: {:?}", path.res), } } /// Parses the programmer's textual representation of a type into our /// internal notion of a type. pub fn ast_ty_to_ty(&self, ast_ty: &hir::Ty) -> Ty<'tcx> { - debug!("ast_ty_to_ty(id={:?}, ast_ty={:?} ty_ty={:?})", - ast_ty.hir_id, ast_ty, ast_ty.kind); + debug!("ast_ty_to_ty(id={:?}, ast_ty={:?} ty_ty={:?})", ast_ty.hir_id, ast_ty, ast_ty.kind); let tcx = self.tcx(); let result_ty = match ast_ty.kind { - hir::TyKind::Slice(ref ty) => { - tcx.mk_slice(self.ast_ty_to_ty(&ty)) - } + hir::TyKind::Slice(ref ty) => tcx.mk_slice(self.ast_ty_to_ty(&ty)), hir::TyKind::Ptr(ref mt) => { - tcx.mk_ptr(ty::TypeAndMut { - ty: self.ast_ty_to_ty(&mt.ty), - mutbl: mt.mutbl - }) + tcx.mk_ptr(ty::TypeAndMut { ty: self.ast_ty_to_ty(&mt.ty), mutbl: mt.mutbl }) } hir::TyKind::Rptr(ref region, ref mt) => { let r = self.ast_region_to_region(region, None); debug!("ast_ty_to_ty: r={:?}", r); let t = self.ast_ty_to_ty(&mt.ty); - tcx.mk_ref(r, ty::TypeAndMut {ty: t, mutbl: mt.mutbl}) + tcx.mk_ref(r, ty::TypeAndMut { ty: t, mutbl: mt.mutbl }) } - hir::TyKind::Never => { - tcx.types.never - }, + hir::TyKind::Never => tcx.types.never, hir::TyKind::Tup(ref fields) => { tcx.mk_tup(fields.iter().map(|t| self.ast_ty_to_ty(&t))) } @@ -2249,9 +2271,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } hir::TyKind::Path(hir::QPath::Resolved(ref maybe_qself, ref path)) => { debug!("ast_ty_to_ty: maybe_qself={:?} path={:?}", maybe_qself, path); - let opt_self_ty = maybe_qself.as_ref().map(|qself| { - self.ast_ty_to_ty(qself) - }); + let opt_self_ty = maybe_qself.as_ref().map(|qself| self.ast_ty_to_ty(qself)); self.res_to_ty(opt_self_ty, path, false) } hir::TyKind::Def(item_id, ref lifetimes) => { @@ -2268,7 +2288,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { Res::Err }; self.associated_path_to_ty(ast_ty.hir_id, ast_ty.span, ty, res, segment, false) - .map(|(ty, _, _)| ty).unwrap_or(tcx.types.err) + .map(|(ty, _, _)| ty) + .unwrap_or(tcx.types.err) } hir::TyKind::Array(ref ty, ref length) => { let length = self.ast_const_to_const(length, tcx.types.usize); @@ -2276,10 +2297,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { self.normalize_ty(ast_ty.span, array_ty) } hir::TyKind::Typeof(ref _e) => { - struct_span_err!(tcx.sess, ast_ty.span, E0516, - "`typeof` is a reserved keyword but unimplemented") - .span_label(ast_ty.span, "reserved keyword") - .emit(); + struct_span_err!( + tcx.sess, + ast_ty.span, + E0516, + "`typeof` is a reserved keyword but unimplemented" + ) + .span_label(ast_ty.span, "reserved keyword") + .emit(); tcx.types.err } @@ -2290,9 +2315,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // handled specially and will not descend into this routine. self.ty_infer(None, ast_ty.span) } - hir::TyKind::Err => { - tcx.types.err - } + hir::TyKind::Err => tcx.types.err, }; debug!("ast_ty_to_ty: result_ty={:?}", result_ty); @@ -2306,8 +2329,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // Unwrap a block, so that e.g. `{ P }` is recognised as a parameter. Const arguments // currently have to be wrapped in curly brackets, so it's necessary to special-case. 
let expr = match &expr.kind { - ExprKind::Block(block, _) if block.stmts.is_empty() && block.expr.is_some() => - block.expr.as_ref().unwrap(), + ExprKind::Block(block, _) if block.stmts.is_empty() && block.expr.is_some() => { + block.expr.as_ref().unwrap() + } _ => expr, }; @@ -2323,7 +2347,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { pub fn ast_const_to_const( &self, ast_const: &hir::AnonConst, - ty: Ty<'tcx> + ty: Ty<'tcx>, ) -> &'tcx ty::Const<'tcx> { debug!("ast_const_to_const(id={:?}, ast_const={:?})", ast_const.hir_id, ast_const); @@ -2331,10 +2355,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let def_id = tcx.hir().local_def_id(ast_const.hir_id); let mut const_ = ty::Const { - val: ty::ConstKind::Unevaluated( - def_id, - InternalSubsts::identity_for_item(tcx, def_id), - ), + val: ty::ConstKind::Unevaluated(def_id, InternalSubsts::identity_for_item(tcx, def_id)), ty, }; @@ -2354,11 +2375,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { tcx.mk_const(const_) } - pub fn impl_trait_ty_to_ty( - &self, - def_id: DefId, - lifetimes: &[hir::GenericArg], - ) -> Ty<'tcx> { + pub fn impl_trait_ty_to_ty(&self, def_id: DefId, lifetimes: &[hir::GenericArg]) -> Ty<'tcx> { debug!("impl_trait_ty_to_ty(def_id={:?}, lifetimes={:?})", def_id, lifetimes); let tcx = self.tcx(); @@ -2376,15 +2393,13 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { bug!() } } - _ => bug!() + _ => bug!(), } } else { // Replace all parent lifetimes with `'static`. match param.kind { - GenericParamDefKind::Lifetime => { - tcx.lifetimes.re_static.into() - } - _ => tcx.mk_param_from_def(param) + GenericParamDefKind::Lifetime => tcx.lifetimes.re_static.into(), + _ => tcx.mk_param_from_def(param), } } }); @@ -2395,11 +2410,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ty } - pub fn ty_of_arg(&self, - ty: &hir::Ty, - expected_ty: Option<Ty<'tcx>>) - -> Ty<'tcx> - { + pub fn ty_of_arg(&self, ty: &hir::Ty, expected_ty: Option<Ty<'tcx>>) -> Ty<'tcx> { match ty.kind { hir::TyKind::Infer if expected_ty.is_some() => { self.record_ty(ty.hir_id, expected_ty.unwrap(), ty.span); @@ -2409,16 +2420,16 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } } - pub fn ty_of_fn(&self, - unsafety: hir::Unsafety, - abi: abi::Abi, - decl: &hir::FnDecl) - -> ty::PolyFnSig<'tcx> { + pub fn ty_of_fn( + &self, + unsafety: hir::Unsafety, + abi: abi::Abi, + decl: &hir::FnDecl, + ) -> ty::PolyFnSig<'tcx> { debug!("ty_of_fn"); let tcx = self.tcx(); - let input_tys = - decl.inputs.iter().map(|a| self.ty_of_arg(a, None)); + let input_tys = decl.inputs.iter().map(|a| self.ty_of_arg(a, None)); let output_ty = match decl.output { hir::Return(ref output) => self.ast_ty_to_ty(output), @@ -2427,13 +2438,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { debug!("ty_of_fn: output_ty={:?}", output_ty); - let bare_fn_ty = ty::Binder::bind(tcx.mk_fn_sig( - input_tys, - output_ty, - decl.c_variadic, - unsafety, - abi - )); + let bare_fn_ty = + ty::Binder::bind(tcx.mk_fn_sig(input_tys, output_ty, decl.c_variadic, unsafety, abi)); // Find any late-bound regions declared in return type that do // not appear in the arguments. These are not well-formed. 
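
The hunks above and below are rustfmt reflowing long signatures, `where` clauses, and multi-line calls into one consistent layout. As a reference point, here is a minimal, self-contained Rust sketch (made-up names, not compiler code) written in the shape this commit converges on: the `where` keyword on its own line, one bound per line, and trailing commas on multi-line lists.

```rust
use std::fmt::Debug;

// Illustrative only: a toy function laid out in the style `x.py fmt`
// (rustfmt) enforces in this commit — `where` on its own line, one
// bound per line, and a trailing comma after the last bound.
fn describe_pair<A, B>(left: A, right: B, separator: &str) -> String
where
    A: Debug,
    B: Debug,
{
    format!("{:?}{}{:?}", left, separator, right)
}

fn main() {
    println!("{}", describe_pair(1, "two", " | "));
}
```
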
@@ -2442,8 +2448,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // for<'a> fn() -> &'a str <-- 'a is bad // for<'a> fn(&'a String) -> &'a str <-- 'a is ok let inputs = bare_fn_ty.inputs(); - let late_bound_in_args = tcx.collect_constrained_late_bound_regions( - &inputs.map_bound(|i| i.to_owned())); + let late_bound_in_args = + tcx.collect_constrained_late_bound_regions(&inputs.map_bound(|i| i.to_owned())); let output = bare_fn_ty.output(); let late_bound_in_ret = tcx.collect_referenced_late_bound_regions(&output); for br in late_bound_in_ret.difference(&late_bound_in_args) { @@ -2451,12 +2457,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ty::BrNamed(_, name) => format!("lifetime `{}`,", name), ty::BrAnon(_) | ty::BrEnv => "an anonymous lifetime".to_string(), }; - let mut err = struct_span_err!(tcx.sess, - decl.output.span(), - E0581, - "return type references {} \ + let mut err = struct_span_err!( + tcx.sess, + decl.output.span(), + E0581, + "return type references {} \ which is not constrained by the fn input types", - lifetime_name); + lifetime_name + ); if let ty::BrAnon(_) = *br { // The only way for an anonymous lifetime to wind up // in the return type but **also** be unconstrained is @@ -2464,8 +2472,10 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // input. See #47511 for an example. In this case, // though we can easily give a hint that ought to be // relevant. - err.note("lifetimes appearing in an associated type \ - are not considered constrained"); + err.note( + "lifetimes appearing in an associated type \ + are not considered constrained", + ); } err.emit(); } @@ -2478,20 +2488,19 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { /// provided, if they provided one, and otherwise search the supertypes of trait bounds /// for region bounds. It may be that we can derive no bound at all, in which case /// we return `None`. - fn compute_object_lifetime_bound(&self, + fn compute_object_lifetime_bound( + &self, span: Span, - existential_predicates: ty::Binder<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>) - -> Option<ty::Region<'tcx>> // if None, use the default + existential_predicates: ty::Binder<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>, + ) -> Option<ty::Region<'tcx>> // if None, use the default { let tcx = self.tcx(); - debug!("compute_opt_region_bound(existential_predicates={:?})", - existential_predicates); + debug!("compute_opt_region_bound(existential_predicates={:?})", existential_predicates); // No explicit region bound specified. Therefore, examine trait // bounds and see if we can derive region bounds from those. - let derived_region_bounds = - object_region_bounds(tcx, existential_predicates); + let derived_region_bounds = object_region_bounds(tcx, existential_predicates); // If there are no derived region bounds, then report back that we // can find no region bound. The caller will use the default. @@ -2510,8 +2519,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // error. 
let r = derived_region_bounds[0]; if derived_region_bounds[1..].iter().any(|r1| r != *r1) { - span_err!(tcx.sess, span, E0227, - "ambiguous lifetime bound, explicit lifetime bound required"); + span_err!( + tcx.sess, + span, + E0227, + "ambiguous lifetime bound, explicit lifetime bound required" + ); } return Some(r); } @@ -2573,29 +2586,36 @@ impl<'tcx> Bounds<'tcx> { tcx.lang_items().sized_trait().map(|sized| { let trait_ref = ty::Binder::bind(ty::TraitRef { def_id: sized, - substs: tcx.mk_substs_trait(param_ty, &[]) + substs: tcx.mk_substs_trait(param_ty, &[]), }); (trait_ref.to_predicate(), span) }) }); - sized_predicate.into_iter().chain( - self.region_bounds.iter().map(|&(region_bound, span)| { - // Account for the binder being introduced below; no need to shift `param_ty` - // because, at present at least, it either only refers to early-bound regions, - // or it's a generic associated type that deliberately has escaping bound vars. - let region_bound = ty::fold::shift_region(tcx, region_bound, 1); - let outlives = ty::OutlivesPredicate(param_ty, region_bound); - (ty::Binder::bind(outlives).to_predicate(), span) - }).chain( - self.trait_bounds.iter().map(|&(bound_trait_ref, span)| { - (bound_trait_ref.to_predicate(), span) - }) - ).chain( - self.projection_bounds.iter().map(|&(projection, span)| { - (projection.to_predicate(), span) - }) + sized_predicate + .into_iter() + .chain( + self.region_bounds + .iter() + .map(|&(region_bound, span)| { + // Account for the binder being introduced below; no need to shift `param_ty` + // because, at present at least, it either only refers to early-bound regions, + // or it's a generic associated type that deliberately has escaping bound vars. + let region_bound = ty::fold::shift_region(tcx, region_bound, 1); + let outlives = ty::OutlivesPredicate(param_ty, region_bound); + (ty::Binder::bind(outlives).to_predicate(), span) + }) + .chain( + self.trait_bounds + .iter() + .map(|&(bound_trait_ref, span)| (bound_trait_ref.to_predicate(), span)), + ) + .chain( + self.projection_bounds + .iter() + .map(|&(projection, span)| (projection.to_predicate(), span)), + ), ) - ).collect() + .collect() } } diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 182c64e9069..8a74143de01 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -1,9 +1,9 @@ -use crate::check::{FnCtxt, Expectation, Diverges, Needs}; use crate::check::coercion::CoerceMany; +use crate::check::{Diverges, Expectation, FnCtxt, Needs}; use rustc::hir::{self, ExprKind}; use rustc::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; +use rustc::traits::ObligationCauseCode; use rustc::traits::{IfExpressionCause, MatchExpressionArmCause, ObligationCause}; -use rustc::traits::{ObligationCauseCode}; use rustc::ty::Ty; use syntax_pos::Span; @@ -56,20 +56,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // rust-lang/rust#55810: Typecheck patterns first (via eager // collection into `Vec`), so we get types for all bindings. 
- let all_arm_pats_diverge: Vec<_> = arms.iter().map(|arm| { - let mut all_pats_diverge = Diverges::WarnedAlways; - self.diverges.set(Diverges::Maybe); - self.check_pat_top(&arm.pat, discrim_ty, Some(discrim.span)); - all_pats_diverge &= self.diverges.get(); + let all_arm_pats_diverge: Vec<_> = arms + .iter() + .map(|arm| { + let mut all_pats_diverge = Diverges::WarnedAlways; + self.diverges.set(Diverges::Maybe); + self.check_pat_top(&arm.pat, discrim_ty, Some(discrim.span)); + all_pats_diverge &= self.diverges.get(); - // As discussed with @eddyb, this is for disabling unreachable_code - // warnings on patterns (they're now subsumed by unreachable_patterns - // warnings). - match all_pats_diverge { - Diverges::Maybe => Diverges::Maybe, - Diverges::Always { .. } | Diverges::WarnedAlways => Diverges::WarnedAlways, - } - }).collect(); + // As discussed with @eddyb, this is for disabling unreachable_code + // warnings on patterns (they're now subsumed by unreachable_patterns + // warnings). + match all_pats_diverge { + Diverges::Maybe => Diverges::Maybe, + Diverges::Always { .. } | Diverges::WarnedAlways => Diverges::WarnedAlways, + } + }) + .collect(); // Now typecheck the blocks. // @@ -100,7 +103,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { CoerceMany::with_coercion_sites(coerce_first, arms) }; - let mut other_arms = vec![]; // used only for diagnostics + let mut other_arms = vec![]; // used only for diagnostics let mut prior_arm_ty = None; for (i, (arm, pats_diverge)) in arms.iter().zip(all_arm_pats_diverge).enumerate() { if let Some(g) = &arm.guard { @@ -113,11 +116,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } self.diverges.set(pats_diverge); - let arm_ty = if source_if && if_no_else && i != 0 && self.if_fallback_coercion( - expr.span, - &arms[0].body, - &mut coercion, - ) { + let arm_ty = if source_if + && if_no_else + && i != 0 + && self.if_fallback_coercion(expr.span, &arms[0].body, &mut coercion) + { tcx.types.err } else { // Only call this if this is not an `if` expr with an expected type and no `else` @@ -147,15 +150,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // The reason for the first arm to fail is not that the match arms diverge, // but rather that there's a prior obligation that doesn't hold. 
0 => (arm_span, ObligationCauseCode::BlockTailExpression(arm.body.hir_id)), - _ => (expr.span, - ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause { + _ => ( + expr.span, + ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause { arm_span, source: match_src, prior_arms: other_arms.clone(), last_ty: prior_arm_ty.unwrap(), discrim_hir_id: discrim.hir_id, - }) - ), + }), + ), }; let cause = self.cause(span, code); coercion.coerce(self, &cause, &arm.body, arm_ty); @@ -177,8 +181,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { all_arms_diverge = Diverges::Always { span: expr.span, custom_note: Some( - "any code following this `match` expression is unreachable, as all arms diverge" - ) + "any code following this `match` expression is unreachable, as all arms diverge", + ), }; } @@ -218,18 +222,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let ret_reason = self.maybe_get_coercion_reason(then_expr.hir_id, span); let cause = self.cause(span, ObligationCauseCode::IfExpressionWithNoElse); let mut error = false; - coercion.coerce_forced_unit(self, &cause, &mut |err| { - if let Some((span, msg)) = &ret_reason { - err.span_label(*span, msg.as_str()); - } else if let ExprKind::Block(block, _) = &then_expr.kind { - if let Some(expr) = &block.expr { - err.span_label(expr.span, "found here".to_string()); + coercion.coerce_forced_unit( + self, + &cause, + &mut |err| { + if let Some((span, msg)) = &ret_reason { + err.span_label(*span, msg.as_str()); + } else if let ExprKind::Block(block, _) = &then_expr.kind { + if let Some(expr) = &block.expr { + err.span_label(expr.span, "found here".to_string()); + } } - } - err.note("`if` expressions without `else` evaluate to `()`"); - err.help("consider adding an `else` block that evaluates to the expected type"); - error = true; - }, ret_reason.is_none()); + err.note("`if` expressions without `else` evaluate to `()`"); + err.help("consider adding an `else` block that evaluates to the expected type"); + error = true; + }, + ret_reason.is_none(), + ); error } @@ -244,20 +253,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let node = hir.get(containing_id); if let Block(block) = node { // check that the body's parent is an fn - let parent = hir.get( - hir.get_parent_node( - hir.get_parent_node(block.hir_id), - ), - ); - if let (Some(expr), Item(hir::Item { - kind: hir::ItemKind::Fn(..), .. - })) = (&block.expr, parent) { + let parent = hir.get(hir.get_parent_node(hir.get_parent_node(block.hir_id))); + if let (Some(expr), Item(hir::Item { kind: hir::ItemKind::Fn(..), .. 
})) = + (&block.expr, parent) + { // check that the `if` expr without `else` is the fn body's expr if expr.span == span { - return self.get_fn_decl(hir_id).map(|(fn_decl, _)| ( - fn_decl.output.span(), - format!("expected `{}` because of this return type", fn_decl.output), - )); + return self.get_fn_decl(hir_id).map(|(fn_decl, _)| { + ( + fn_decl.output.span(), + format!("expected `{}` because of this return type", fn_decl.output), + ) + }); } } } @@ -309,7 +316,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // possibly incorrect trailing `;` in the else arm remove_semicolon = self.could_remove_semicolon(block, then_ty); stmt.span - } else { // empty block; point at its entirety + } else { + // empty block; point at its entirety // Avoid overlapping spans that aren't as readable: // ``` // 2 | let x = if true { @@ -342,7 +350,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else_expr.span } - } else { // shouldn't happen unless the parser has done something weird + } else { + // shouldn't happen unless the parser has done something weird else_expr.span }; @@ -354,20 +363,25 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // possibly incorrect trailing `;` in the else arm remove_semicolon = remove_semicolon.or(self.could_remove_semicolon(block, else_ty)); stmt.span - } else { // empty block; point at its entirety - outer_sp = None; // same as in `error_sp`; cleanup output + } else { + // empty block; point at its entirety + outer_sp = None; // same as in `error_sp`; cleanup output then_expr.span } - } else { // shouldn't happen unless the parser has done something weird + } else { + // shouldn't happen unless the parser has done something weird then_expr.span }; // Finally construct the cause: - self.cause(error_sp, ObligationCauseCode::IfExpression(box IfExpressionCause { - then: then_sp, - outer: outer_sp, - semicolon: remove_semicolon, - })) + self.cause( + error_sp, + ObligationCauseCode::IfExpression(box IfExpressionCause { + then: then_sp, + outer: outer_sp, + semicolon: remove_semicolon, + }), + ) } fn demand_discriminant_type( @@ -427,7 +441,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // (once introduced) is populated by the time we get here. // // See #44848. 
- let contains_ref_bindings = arms.iter() + let contains_ref_bindings = arms + .iter() .filter_map(|a| a.pat.contains_explicit_ref_binding()) .max_by_key(|m| match *m { hir::Mutability::Mut => 1, diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index e44b00d74bf..7daa489374f 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -55,23 +55,23 @@ use errors::DiagnosticBuilder; use rustc::hir; use rustc::hir::def_id::DefId; use rustc::hir::ptr::P; -use rustc::infer::{Coercion, InferResult, InferOk}; use rustc::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; +use rustc::infer::{Coercion, InferOk, InferResult}; use rustc::traits::{self, ObligationCause, ObligationCauseCode}; use rustc::ty::adjustment::{ - Adjustment, Adjust, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, PointerCast + Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, PointerCast, }; -use rustc::ty::{self, TypeAndMut, Ty}; -use rustc::ty::fold::TypeFoldable; use rustc::ty::error::TypeError; +use rustc::ty::fold::TypeFoldable; use rustc::ty::relate::RelateResult; use rustc::ty::subst::SubstsRef; +use rustc::ty::{self, Ty, TypeAndMut}; +use rustc_target::spec::abi::Abi; use smallvec::{smallvec, SmallVec}; use std::ops::Deref; use syntax::feature_gate; use syntax::symbol::sym; use syntax_pos; -use rustc_target::spec::abi::Abi; use rustc_error_codes::*; @@ -97,31 +97,32 @@ impl<'a, 'tcx> Deref for Coerce<'a, 'tcx> { type CoerceResult<'tcx> = InferResult<'tcx, (Vec<Adjustment<'tcx>>, Ty<'tcx>)>; -fn coerce_mutbls<'tcx>(from_mutbl: hir::Mutability, - to_mutbl: hir::Mutability) - -> RelateResult<'tcx, ()> { +fn coerce_mutbls<'tcx>( + from_mutbl: hir::Mutability, + to_mutbl: hir::Mutability, +) -> RelateResult<'tcx, ()> { match (from_mutbl, to_mutbl) { - (hir::Mutability::Mut, hir::Mutability::Mut) | - (hir::Mutability::Not, hir::Mutability::Not) | - (hir::Mutability::Mut, hir::Mutability::Not) => Ok(()), + (hir::Mutability::Mut, hir::Mutability::Mut) + | (hir::Mutability::Not, hir::Mutability::Not) + | (hir::Mutability::Mut, hir::Mutability::Not) => Ok(()), (hir::Mutability::Not, hir::Mutability::Mut) => Err(TypeError::Mutability), } } -fn identity(_: Ty<'_>) -> Vec<Adjustment<'_>> { vec![] } +fn identity(_: Ty<'_>) -> Vec<Adjustment<'_>> { + vec![] +} fn simple<'tcx>(kind: Adjust<'tcx>) -> impl FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>> { move |target| vec![Adjustment { kind, target }] } -fn success<'tcx>(adj: Vec<Adjustment<'tcx>>, - target: Ty<'tcx>, - obligations: traits::PredicateObligations<'tcx>) - -> CoerceResult<'tcx> { - Ok(InferOk { - value: (adj, target), - obligations - }) +fn success<'tcx>( + adj: Vec<Adjustment<'tcx>>, + target: Ty<'tcx>, + obligations: traits::PredicateObligations<'tcx>, +) -> CoerceResult<'tcx> { + Ok(InferOk { value: (adj, target), obligations }) } impl<'f, 'tcx> Coerce<'f, 'tcx> { @@ -130,12 +131,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { cause: ObligationCause<'tcx>, allow_two_phase: AllowTwoPhase, ) -> Self { - Coerce { - fcx, - cause, - allow_two_phase, - use_lub: false, - } + Coerce { fcx, cause, allow_two_phase, use_lub: false } } fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { @@ -151,13 +147,12 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } /// Unify two types (using sub or lub) and produce a specific coercion. 
- fn unify_and<F>(&self, a: Ty<'tcx>, b: Ty<'tcx>, f: F) - -> CoerceResult<'tcx> - where F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>> + fn unify_and<F>(&self, a: Ty<'tcx>, b: Ty<'tcx>, f: F) -> CoerceResult<'tcx> + where + F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>, { - self.unify(&a, &b).and_then(|InferOk { value: ty, obligations }| { - success(f(ty), ty, obligations) - }) + self.unify(&a, &b) + .and_then(|InferOk { value: ty, obligations }| success(f(ty), ty, obligations)) } fn coerce(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { @@ -181,12 +176,10 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { return if self.shallow_resolve(b).is_ty_var() { // Micro-optimization: no need for this if `b` is // already resolved in some way. - let diverging_ty = self.next_diverging_ty_var( - TypeVariableOrigin { - kind: TypeVariableOriginKind::AdjustmentType, - span: self.cause.span, - }, - ); + let diverging_ty = self.next_diverging_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::AdjustmentType, + span: self.cause.span, + }); self.unify_and(&b, &diverging_ty, simple(Adjust::NeverToAny)) } else { success(simple(Adjust::NeverToAny)(b), b, vec![]) @@ -259,13 +252,13 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { /// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`. /// To match `A` with `B`, autoderef will be performed, /// calling `deref`/`deref_mut` where necessary. - fn coerce_borrowed_pointer(&self, - a: Ty<'tcx>, - b: Ty<'tcx>, - r_b: ty::Region<'tcx>, - mt_b: TypeAndMut<'tcx>) - -> CoerceResult<'tcx> - { + fn coerce_borrowed_pointer( + &self, + a: Ty<'tcx>, + b: Ty<'tcx>, + r_b: ty::Region<'tcx>, + mt_b: TypeAndMut<'tcx>, + ) -> CoerceResult<'tcx> { debug!("coerce_borrowed_pointer(a={:?}, b={:?})", a, b); // If we have a parameter of type `&M T_a` and the value @@ -380,11 +373,13 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } r_borrow_var.unwrap() }; - let derefd_ty_a = self.tcx.mk_ref(r, - TypeAndMut { - ty: referent_ty, - mutbl: mt_b.mutbl, // [1] above - }); + let derefd_ty_a = self.tcx.mk_ref( + r, + TypeAndMut { + ty: referent_ty, + mutbl: mt_b.mutbl, // [1] above + }, + ); match self.unify(derefd_ty_a, b) { Ok(ok) => { found = Some(ok); @@ -429,8 +424,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } let needs = Needs::maybe_mut_place(mt_b.mutbl); - let InferOk { value: mut adjustments, obligations: o } - = autoderef.adjust_steps_as_infer_ok(self, needs); + let InferOk { value: mut adjustments, obligations: o } = + autoderef.adjust_steps_as_infer_ok(self, needs); obligations.extend(o); obligations.extend(autoderef.into_obligations()); @@ -442,31 +437,28 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { }; let mutbl = match mt_b.mutbl { hir::Mutability::Not => AutoBorrowMutability::Not, - hir::Mutability::Mut => AutoBorrowMutability::Mut { - allow_two_phase_borrow: self.allow_two_phase, + hir::Mutability::Mut => { + AutoBorrowMutability::Mut { allow_two_phase_borrow: self.allow_two_phase } } }; adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mutbl)), - target: ty + target: ty, }); - debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", - ty, - adjustments); + debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments); success(adjustments, ty, obligations) } - // &[T; n] or &mut [T; n] -> &[T] // or &mut [T; n] -> &mut [T] // or &Concrete -> &Trait, etc. 
fn coerce_unsized(&self, source: Ty<'tcx>, target: Ty<'tcx>) -> CoerceResult<'tcx> { debug!("coerce_unsized(source={:?}, target={:?})", source, target); - let traits = (self.tcx.lang_items().unsize_trait(), - self.tcx.lang_items().coerce_unsized_trait()); + let traits = + (self.tcx.lang_items().unsize_trait(), self.tcx.lang_items().coerce_unsized_trait()); let (unsize_did, coerce_unsized_did) = if let (Some(u), Some(cu)) = traits { (u, cu) } else { @@ -493,32 +485,28 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // implementation. If it happens that this coercion is a function argument, // the reborrow in coerce_borrowed_ptr will pick it up. allow_two_phase_borrow: AllowTwoPhase::No, - } + }, }; - Some((Adjustment { - kind: Adjust::Deref(None), - target: ty_a - }, Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mutbl)), - target: self.tcx.mk_ref(r_borrow, ty::TypeAndMut { - mutbl: mutbl_b, - ty: ty_a - }) - })) + Some(( + Adjustment { kind: Adjust::Deref(None), target: ty_a }, + Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mutbl)), + target: self + .tcx + .mk_ref(r_borrow, ty::TypeAndMut { mutbl: mutbl_b, ty: ty_a }), + }, + )) } (&ty::Ref(_, ty_a, mt_a), &ty::RawPtr(ty::TypeAndMut { mutbl: mt_b, .. })) => { coerce_mutbls(mt_a, mt_b)?; - Some((Adjustment { - kind: Adjust::Deref(None), - target: ty_a - }, Adjustment { - kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b)), - target: self.tcx.mk_ptr(ty::TypeAndMut { - mutbl: mt_b, - ty: ty_a - }) - })) + Some(( + Adjustment { kind: Adjust::Deref(None), target: ty_a }, + Adjustment { + kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b)), + target: self.tcx.mk_ptr(ty::TypeAndMut { mutbl: mt_b, ty: ty_a }), + }, + )) } _ => None, }; @@ -534,15 +522,10 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { }; let coerce_target = self.next_ty_var(origin); let mut coercion = self.unify_and(coerce_target, target, |target| { - let unsize = Adjustment { - kind: Adjust::Pointer(PointerCast::Unsize), - target - }; + let unsize = Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target }; match reborrow { None => vec![unsize], - Some((ref deref, ref autoref)) => { - vec![deref.clone(), autoref.clone(), unsize] - } + Some((ref deref, ref autoref)) => vec![deref.clone(), autoref.clone(), unsize], } })?; @@ -562,13 +545,14 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // and almost never more than 3. By using a SmallVec we avoid an // allocation, at the (very small) cost of (occasionally) having to // shift subsequent elements down when removing the front element. 
- let mut queue: SmallVec<[_; 4]> = - smallvec![self.tcx.predicate_for_trait_def(self.fcx.param_env, - cause, - coerce_unsized_did, - 0, - coerce_source, - &[coerce_target.into()])]; + let mut queue: SmallVec<[_; 4]> = smallvec![self.tcx.predicate_for_trait_def( + self.fcx.param_env, + cause, + coerce_unsized_did, + 0, + coerce_source, + &[coerce_target.into()] + )]; let mut has_unsized_tuple_coercion = false; @@ -604,8 +588,9 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let unsize_ty = trait_ref.skip_binder().input_types().nth(1).unwrap(); debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_ref); match (&self_ty.kind, &unsize_ty.kind) { - (ty::Infer(ty::TyVar(v)), - ty::Dynamic(..)) if self.type_var_is_sized(*v) => { + (ty::Infer(ty::TyVar(v)), ty::Dynamic(..)) + if self.type_var_is_sized(*v) => + { debug!("coerce_unsized: have sized infer {:?}", v); coercion.obligations.push(obligation); // `$0: Unsize<dyn Trait>` where we know that `$0: Sized`, try going @@ -637,9 +622,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // be silent, as it causes a type mismatch later. } - Ok(Some(vtable)) => { - queue.extend(vtable.nested_obligations()) - } + Ok(Some(vtable)) => queue.extend(vtable.nested_obligations()), } } @@ -656,19 +639,21 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { Ok(coercion) } - fn coerce_from_safe_fn<F, G>(&self, - a: Ty<'tcx>, - fn_ty_a: ty::PolyFnSig<'tcx>, - b: Ty<'tcx>, - to_unsafe: F, - normal: G) - -> CoerceResult<'tcx> - where F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>, - G: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>> + fn coerce_from_safe_fn<F, G>( + &self, + a: Ty<'tcx>, + fn_ty_a: ty::PolyFnSig<'tcx>, + b: Ty<'tcx>, + to_unsafe: F, + normal: G, + ) -> CoerceResult<'tcx> + where + F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>, + G: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>, { if let ty::FnPtr(fn_ty_b) = b.kind { - if let (hir::Unsafety::Normal, hir::Unsafety::Unsafe) - = (fn_ty_a.unsafety(), fn_ty_b.unsafety()) + if let (hir::Unsafety::Normal, hir::Unsafety::Unsafe) = + (fn_ty_a.unsafety(), fn_ty_b.unsafety()) { let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a); return self.unify_and(unsafe_a, b, to_unsafe); @@ -677,11 +662,12 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { self.unify_and(a, b, normal) } - fn coerce_from_fn_pointer(&self, - a: Ty<'tcx>, - fn_ty_a: ty::PolyFnSig<'tcx>, - b: Ty<'tcx>) - -> CoerceResult<'tcx> { + fn coerce_from_fn_pointer( + &self, + a: Ty<'tcx>, + fn_ty_a: ty::PolyFnSig<'tcx>, + b: Ty<'tcx>, + ) -> CoerceResult<'tcx> { //! Attempts to coerce from the type of a Rust function item //! into a closure or a `proc`. //! @@ -689,14 +675,16 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let b = self.shallow_resolve(b); debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b); - self.coerce_from_safe_fn(a, fn_ty_a, b, - simple(Adjust::Pointer(PointerCast::UnsafeFnPointer)), identity) + self.coerce_from_safe_fn( + a, + fn_ty_a, + b, + simple(Adjust::Pointer(PointerCast::UnsafeFnPointer)), + identity, + ) } - fn coerce_from_fn_item(&self, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> CoerceResult<'tcx> { + fn coerce_from_fn_item(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { //! Attempts to coerce from the type of a Rust function item //! into a closure or a `proc`. 
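The `coerce_from_safe_fn` hunk above handles the safe-to-unsafe function-pointer cast (`PointerCast::UnsafeFnPointer`). A brief sketch, with an illustrative function name that is not part of the commit:

```rust
fn safe_add(a: i32, b: i32) -> i32 {
    a + b
}

fn main() {
    // A safe `fn` item first reifies to a safe fn pointer, then coerces to an
    // unsafe fn pointer.
    let f: unsafe fn(i32, i32) -> i32 = safe_add;
    // Calling through the unsafe pointer requires an `unsafe` block.
    let sum = unsafe { f(1, 2) };
    assert_eq!(sum, 3);
}
```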
@@ -707,9 +695,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { ty::FnPtr(_) => { let a_sig = a.fn_sig(self.tcx); // Intrinsics are not coercible to function pointers - if a_sig.abi() == Abi::RustIntrinsic || - a_sig.abi() == Abi::PlatformIntrinsic { - return Err(TypeError::IntrinsicCast); + if a_sig.abi() == Abi::RustIntrinsic || a_sig.abi() == Abi::PlatformIntrinsic { + return Err(TypeError::IntrinsicCast); } let InferOk { value: a_sig, mut obligations } = self.normalize_associated_types_in_as_infer_ok(self.cause.span, &a_sig); @@ -723,15 +710,15 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { vec![ Adjustment { kind: Adjust::Pointer(PointerCast::ReifyFnPointer), - target: a_fn_pointer + target: a_fn_pointer, }, Adjustment { kind: Adjust::Pointer(PointerCast::UnsafeFnPointer), - target: unsafe_ty + target: unsafe_ty, }, ] }, - simple(Adjust::Pointer(PointerCast::ReifyFnPointer)) + simple(Adjust::Pointer(PointerCast::ReifyFnPointer)), )?; obligations.extend(o2); @@ -741,12 +728,13 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } } - fn coerce_closure_to_fn(&self, - a: Ty<'tcx>, - def_id_a: DefId, - substs_a: SubstsRef<'tcx>, - b: Ty<'tcx>) - -> CoerceResult<'tcx> { + fn coerce_closure_to_fn( + &self, + a: Ty<'tcx>, + def_id_a: DefId, + substs_a: SubstsRef<'tcx>, + b: Ty<'tcx>, + ) -> CoerceResult<'tcx> { //! Attempts to coerce from the type of a non-capturing closure //! into a function pointer. //! @@ -764,52 +752,46 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let sig = self.closure_sig(def_id_a, substs_a); let unsafety = fn_ty.unsafety(); let pointer_ty = self.tcx.coerce_closure_fn_ty(sig, unsafety); - debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})", - a, b, pointer_ty); - self.unify_and(pointer_ty, b, simple( - Adjust::Pointer(PointerCast::ClosureFnPointer(unsafety)) - )) + debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})", a, b, pointer_ty); + self.unify_and( + pointer_ty, + b, + simple(Adjust::Pointer(PointerCast::ClosureFnPointer(unsafety))), + ) } _ => self.unify_and(a, b, identity), } } - fn coerce_unsafe_ptr(&self, - a: Ty<'tcx>, - b: Ty<'tcx>, - mutbl_b: hir::Mutability) - -> CoerceResult<'tcx> { + fn coerce_unsafe_ptr( + &self, + a: Ty<'tcx>, + b: Ty<'tcx>, + mutbl_b: hir::Mutability, + ) -> CoerceResult<'tcx> { debug!("coerce_unsafe_ptr(a={:?}, b={:?})", a, b); let (is_ref, mt_a) = match a.kind { ty::Ref(_, ty, mutbl) => (true, ty::TypeAndMut { ty, mutbl }), ty::RawPtr(mt) => (false, mt), - _ => return self.unify_and(a, b, identity) + _ => return self.unify_and(a, b, identity), }; // Check that the types which they point at are compatible. - let a_unsafe = self.tcx.mk_ptr(ty::TypeAndMut { - mutbl: mutbl_b, - ty: mt_a.ty, - }); + let a_unsafe = self.tcx.mk_ptr(ty::TypeAndMut { mutbl: mutbl_b, ty: mt_a.ty }); coerce_mutbls(mt_a.mutbl, mutbl_b)?; // Although references and unsafe ptrs have the same // representation, we still register an Adjust::DerefRef so that // regionck knows that the region for `a` must be valid here. 
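`coerce_from_fn_item` and `coerce_closure_to_fn`, reformatted above, reify function items and non-capturing closures into function pointers (`ReifyFnPointer` and `ClosureFnPointer`). An illustrative example, assuming nothing beyond the standard coercion rules:

```rust
fn main() {
    // A non-capturing closure coerces to a plain fn pointer.
    let double: fn(i32) -> i32 = |x| x * 2;
    assert_eq!(double(4), 8);

    // It can also coerce directly to an *unsafe* fn pointer; the
    // `ClosureFnPointer(unsafety)` cast carries the target's unsafety.
    let double_unsafe: unsafe fn(i32) -> i32 = |x| x * 2;
    assert_eq!(unsafe { double_unsafe(4) }, 8);
}
```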
if is_ref { self.unify_and(a_unsafe, b, |target| { - vec![Adjustment { - kind: Adjust::Deref(None), - target: mt_a.ty - }, Adjustment { - kind: Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), - target - }] + vec![ + Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }, + Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), target }, + ] }) } else if mt_a.mutbl != mutbl_b { - self.unify_and( - a_unsafe, b, simple(Adjust::Pointer(PointerCast::MutToConstPointer)) - ) + self.unify_and(a_unsafe, b, simple(Adjust::Pointer(PointerCast::MutToConstPointer))) } else { self.unify_and(a_unsafe, b, identity) } @@ -837,11 +819,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let (adjustments, _) = self.register_infer_ok_obligations(ok); self.apply_adjustments(expr, adjustments); - Ok(if expr_ty.references_error() { - self.tcx.types.err - } else { - target - }) + Ok(if expr_ty.references_error() { self.tcx.types.err } else { target }) } /// Same as `try_coerce()`, but without side-effects. @@ -861,14 +839,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// /// This is really an internal helper. From outside the coercion /// module, you should instantiate a `CoerceMany` instance. - fn try_find_coercion_lub<E>(&self, - cause: &ObligationCause<'tcx>, - exprs: &[E], - prev_ty: Ty<'tcx>, - new: &hir::Expr, - new_ty: Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> - where E: AsCoercionSite + fn try_find_coercion_lub<E>( + &self, + cause: &ObligationCause<'tcx>, + exprs: &[E], + prev_ty: Ty<'tcx>, + new: &hir::Expr, + new_ty: Ty<'tcx>, + ) -> RelateResult<'tcx, Ty<'tcx>> + where + E: AsCoercionSite, { let prev_ty = self.resolve_vars_with_obligations(prev_ty); let new_ty = self.resolve_vars_with_obligations(new_ty); @@ -879,10 +859,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let (&ty::FnDef(..), &ty::FnDef(..)) = (&prev_ty.kind, &new_ty.kind) { // Don't reify if the function types have a LUB, i.e., they // are the same function and their parameters have a LUB. - let lub_ty = self.commit_if_ok(|_| { - self.at(cause, self.param_env) - .lub(prev_ty, new_ty) - }).map(|ok| self.register_infer_ok_obligations(ok)); + let lub_ty = self + .commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty)) + .map(|ok| self.register_infer_ok_obligations(ok)); if lub_ty.is_ok() { // We have a LUB of prev_ty and new_ty, just return it. @@ -894,20 +873,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let a_sig = self.normalize_associated_types_in(new.span, &a_sig); let b_sig = new_ty.fn_sig(self.tcx); let b_sig = self.normalize_associated_types_in(new.span, &b_sig); - let sig = self.at(cause, self.param_env) - .trace(prev_ty, new_ty) - .lub(&a_sig, &b_sig) - .map(|ok| self.register_infer_ok_obligations(ok))?; + let sig = self + .at(cause, self.param_env) + .trace(prev_ty, new_ty) + .lub(&a_sig, &b_sig) + .map(|ok| self.register_infer_ok_obligations(ok))?; // Reify both sides and return the reified fn pointer type. let fn_ptr = self.tcx.mk_fn_ptr(sig); for expr in exprs.iter().map(|e| e.as_coercion_site()).chain(Some(new)) { // The only adjustment that can produce an fn item is // `NeverToAny`, so this should always be valid. 
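`coerce_unsafe_ptr`, reflowed above, covers reference-to-raw-pointer coercions and the `*mut` to `*const` cast (`MutToConstPointer`). A minimal sketch of those user-visible coercions:

```rust
fn main() {
    let mut x = 7i32;
    let y = 1i32;

    // `&mut i32` coerces to `*mut i32` and `&i32` to `*const i32`
    // (a `Deref` plus a raw-pointer borrow adjustment is registered).
    let p: *mut i32 = &mut x;
    let q: *const i32 = &y;

    // `*mut T` additionally coerces to `*const T`.
    let p_const: *const i32 = p;

    unsafe {
        *p = 8;
        assert_eq!((*p_const, *q), (8, 1));
    }
}
```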
- self.apply_adjustments(expr, vec![Adjustment { - kind: Adjust::Pointer(PointerCast::ReifyFnPointer), - target: fn_ptr - }]); + self.apply_adjustments( + expr, + vec![Adjustment { + kind: Adjust::Pointer(PointerCast::ReifyFnPointer), + target: fn_ptr, + }], + ); } return Ok(fn_ptr); } @@ -941,10 +924,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { for expr in exprs { let expr = expr.as_coercion_site(); let noop = match self.tables.borrow().expr_adjustments(expr) { - &[ - Adjustment { kind: Adjust::Deref(_), .. }, - Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(_, mutbl_adj)), .. } - ] => { + &[Adjustment { kind: Adjust::Deref(_), .. }, Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(_, mutbl_adj)), .. }] => + { match self.node_ty(expr.hir_id).kind { ty::Ref(_, _, mt_orig) => { let mutbl_adj: hir::Mutability = mutbl_adj.into(); @@ -961,10 +942,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; if !noop { - return self.commit_if_ok(|_| - self.at(cause, self.param_env) - .lub(prev_ty, new_ty) - ).map(|ok| self.register_infer_ok_obligations(ok)); + return self + .commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty)) + .map(|ok| self.register_infer_ok_obligations(ok)); } } @@ -974,10 +954,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let Some(e) = first_error { Err(e) } else { - self.commit_if_ok(|_| - self.at(cause, self.param_env) - .lub(prev_ty, new_ty) - ).map(|ok| self.register_infer_ok_obligations(ok)) + self.commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty)) + .map(|ok| self.register_infer_ok_obligations(ok)) } } Ok(ok) => { @@ -1062,19 +1040,12 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { /// expected to pass each element in the slice to `coerce(...)` in /// order. This is used with arrays in particular to avoid /// needlessly cloning the slice. - pub fn with_coercion_sites(expected_ty: Ty<'tcx>, - coercion_sites: &'exprs [E]) - -> Self { + pub fn with_coercion_sites(expected_ty: Ty<'tcx>, coercion_sites: &'exprs [E]) -> Self { Self::make(expected_ty, Expressions::UpFront(coercion_sites)) } fn make(expected_ty: Ty<'tcx>, expressions: Expressions<'tcx, 'exprs, E>) -> Self { - CoerceMany { - expected_ty, - final_ty: None, - expressions, - pushed: 0, - } + CoerceMany { expected_ty, final_ty: None, expressions, pushed: 0 } } /// Returns the "expected type" with which this coercion was @@ -1109,11 +1080,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { expression: &'tcx hir::Expr, expression_ty: Ty<'tcx>, ) { - self.coerce_inner(fcx, - cause, - Some(expression), - expression_ty, - None, false) + self.coerce_inner(fcx, cause, Some(expression), expression_ty, None, false) } /// Indicates that one of the inputs is a "forced unit". This @@ -1135,12 +1102,14 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { augment_error: &mut dyn FnMut(&mut DiagnosticBuilder<'_>), label_unit_as_expected: bool, ) { - self.coerce_inner(fcx, - cause, - None, - fcx.tcx.mk_unit(), - Some(augment_error), - label_unit_as_expected) + self.coerce_inner( + fcx, + cause, + None, + fcx.tcx.mk_unit(), + Some(augment_error), + label_unit_as_expected, + ) } /// The inner coercion "engine". If `expression` is `None`, this @@ -1212,11 +1181,11 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { // Another example is `break` with no argument expression. 
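`try_find_coercion_lub`, reformatted above, reifies distinct `FnDef` types at every coercion site when their signatures have a LUB. A small example of that behavior (the helper names are made up for illustration):

```rust
fn inc(x: i32) -> i32 {
    x + 1
}
fn dec(x: i32) -> i32 {
    x - 1
}

fn main() {
    let flag = true;
    // The two arms have distinct zero-sized fn-item types, so both sites are
    // reified (`ReifyFnPointer`) to the common pointer type `fn(i32) -> i32`.
    let op = if flag { inc } else { dec };
    assert_eq!(op(10), 11);
}
```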
assert!(expression_ty.is_unit(), "if let hack without unit type"); fcx.at(cause, fcx.param_env) - .eq_exp(label_expression_as_expected, expression_ty, self.merged_ty()) - .map(|infer_ok| { - fcx.register_infer_ok_obligations(infer_ok); - expression_ty - }) + .eq_exp(label_expression_as_expected, expression_ty, self.merged_ty()) + .map(|infer_ok| { + fcx.register_infer_ok_obligations(infer_ok); + expression_ty + }) }; match result { @@ -1228,8 +1197,10 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { Expressions::UpFront(coercion_sites) => { // if the user gave us an array to validate, check that we got // the next expression in the list, as expected - assert_eq!(coercion_sites[self.pushed].as_coercion_site().hir_id, - e.hir_id); + assert_eq!( + coercion_sites[self.pushed].as_coercion_site().hir_id, + e.hir_id + ); } } self.pushed += 1; @@ -1255,8 +1226,11 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { match cause.code { ObligationCauseCode::ReturnNoExpression => { err = struct_span_err!( - fcx.tcx.sess, cause.span, E0069, - "`return;` in a function whose return type is not `()`"); + fcx.tcx.sess, + cause.span, + E0069, + "`return;` in a function whose return type is not `()`" + ); err.span_label(cause.span, "return type is not `()`"); } ObligationCauseCode::BlockTailExpression(blk_id) => { @@ -1273,7 +1247,14 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { } ObligationCauseCode::ReturnValue(id) => { err = self.report_return_mismatched_types( - cause, expected, found, coercion_error, fcx, id, None); + cause, + expected, + found, + coercion_error, + fcx, + id, + None, + ); } _ => { err = fcx.report_mismatched_types(cause, expected, found, coercion_error); @@ -1330,12 +1311,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { let parent_id = fcx.tcx.hir().get_parent_node(id); let fn_decl = if let Some((expr, blk_id)) = expression { pointing_at_return_type = fcx.suggest_mismatched_types_on_tail( - &mut err, - expr, - expected, - found, - cause.span, - blk_id, + &mut err, expr, expected, found, cause.span, blk_id, ); let parent = fcx.tcx.hir().get(parent_id); if let (Some(match_expr), true, false) = ( @@ -1356,7 +1332,12 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { if let (Some((fn_decl, can_suggest)), _) = (fn_decl, pointing_at_return_type) { if expression.is_none() { pointing_at_return_type |= fcx.suggest_missing_return_type( - &mut err, &fn_decl, expected, found, can_suggest); + &mut err, + &fn_decl, + expected, + found, + can_suggest, + ); } if !pointing_at_return_type { return_sp = Some(fn_decl.output.span()); // `impl Trait` return type @@ -1403,7 +1384,8 @@ impl AsCoercionSite for P<hir::Expr> { } impl<'a, T> AsCoercionSite for &'a T - where T: AsCoercionSite +where + T: AsCoercionSite, { fn as_coercion_site(&self) -> &hir::Expr { (**self).as_coercion_site() diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index d04baf7dd07..0b26933459f 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -1,17 +1,17 @@ use super::{probe, MethodCallee}; use crate::astconv::AstConv; -use crate::check::{FnCtxt, PlaceOp, callee, Needs}; -use crate::hir::GenericArg; +use crate::check::{callee, FnCtxt, Needs, PlaceOp}; use crate::hir::def_id::DefId; -use rustc::ty::subst::{Subst, SubstsRef}; +use crate::hir::GenericArg; +use rustc::hir; +use rustc::infer::{self, InferOk}; use 
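The `E0069` diagnostic whose `struct_span_err!` call is reflowed above fires on a bare `return;` in a function whose return type is not `()`. A deliberately rejected snippet, shown only to illustrate when the error is emitted (it does not compile by design):

```rust
// error[E0069]: `return;` in a function whose return type is not `()`
// The bare `return` forces the unit type where `i32` is expected.
fn answer() -> i32 {
    return;
}
```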
rustc::traits; -use rustc::ty::{self, Ty, GenericParamDefKind}; -use rustc::ty::adjustment::{Adjustment, Adjust, OverloadedDeref, PointerCast}; +use rustc::ty::adjustment::{Adjust, Adjustment, OverloadedDeref, PointerCast}; use rustc::ty::adjustment::{AllowTwoPhase, AutoBorrow, AutoBorrowMutability}; use rustc::ty::fold::TypeFoldable; -use rustc::infer::{self, InferOk}; -use rustc::hir; +use rustc::ty::subst::{Subst, SubstsRef}; +use rustc::ty::{self, GenericParamDefKind, Ty}; use syntax_pos::Span; use std::ops::Deref; @@ -47,9 +47,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) -> ConfirmResult<'tcx> { debug!( "confirm(unadjusted_self_ty={:?}, pick={:?}, generic_args={:?})", - unadjusted_self_ty, - pick, - segment.args, + unadjusted_self_ty, pick, segment.args, ); let mut confirm_cx = ConfirmContext::new(self, span, self_expr, call_expr); @@ -64,12 +62,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { self_expr: &'tcx hir::Expr, call_expr: &'tcx hir::Expr, ) -> ConfirmContext<'a, 'tcx> { - ConfirmContext { - fcx, - span, - self_expr, - call_expr, - } + ConfirmContext { fcx, span, self_expr, call_expr } } fn confirm( @@ -125,11 +118,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { } // Create the final `MethodCallee`. - let callee = MethodCallee { - def_id: pick.item.def_id, - substs: all_substs, - sig: method_sig, - }; + let callee = MethodCallee { def_id: pick.item.def_id, substs: all_substs, sig: method_sig }; if let Some(hir::Mutability::Mut) = pick.autoref { self.convert_place_derefs_to_mutable(); @@ -167,32 +156,23 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { if let Some(mutbl) = pick.autoref { let region = self.next_region_var(infer::Autoref(self.span)); - target = self.tcx.mk_ref(region, ty::TypeAndMut { - mutbl, - ty: target - }); + target = self.tcx.mk_ref(region, ty::TypeAndMut { mutbl, ty: target }); let mutbl = match mutbl { hir::Mutability::Not => AutoBorrowMutability::Not, hir::Mutability::Mut => AutoBorrowMutability::Mut { // Method call receivers are the primary use case // for two-phase borrows. allow_two_phase_borrow: AllowTwoPhase::Yes, - } + }, }; - adjustments.push(Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)), - target - }); + adjustments + .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)), target }); if let Some(unsize_target) = pick.unsize { - target = self.tcx.mk_ref(region, ty::TypeAndMut { - mutbl: mutbl.into(), - ty: unsize_target - }); - adjustments.push(Adjustment { - kind: Adjust::Pointer(PointerCast::Unsize), - target - }); + target = self + .tcx + .mk_ref(region, ty::TypeAndMut { mutbl: mutbl.into(), ty: unsize_target }); + adjustments.push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target }); } } else { // No unsizing should be performed without autoref (at @@ -216,16 +196,19 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { /// /// Note that this substitution may include late-bound regions from the impl level. If so, /// these are instantiated later in the `instantiate_method_sig` routine. 
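The `confirm.rs` hunk above reformats `adjust_self_ty`, which autorefs (and possibly unsizes) method-call receivers. A user-level sketch of those adjustments, using only standard library methods:

```rust
fn main() {
    let arr = [1, 2, 3];
    // Receiver adjustment for a slice method on an array: autoref to `&[i32; 3]`,
    // then unsize to `&[i32]` (the `pick.autoref` / `pick.unsize` handling above).
    assert_eq!(arr.first(), Some(&1));

    let mut v = vec![10];
    // `push` needs `&mut Vec<i32>`, so the receiver gets a mutable autoref with
    // two-phase borrows allowed, as noted in the hunk.
    v.push(20);
    assert_eq!(v.len(), 2);
}
```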
- fn fresh_receiver_substs(&mut self, - self_ty: Ty<'tcx>, - pick: &probe::Pick<'tcx>) - -> SubstsRef<'tcx> { + fn fresh_receiver_substs( + &mut self, + self_ty: Ty<'tcx>, + pick: &probe::Pick<'tcx>, + ) -> SubstsRef<'tcx> { match pick.kind { probe::InherentImplPick => { let impl_def_id = pick.item.container.id(); - assert!(self.tcx.impl_trait_ref(impl_def_id).is_none(), - "impl {:?} is not an inherent impl", - impl_def_id); + assert!( + self.tcx.impl_trait_ref(impl_def_id).is_none(), + "impl {:?} is not an inherent impl", + impl_def_id + ); self.impl_self_ty(self.span, impl_def_id).substs } @@ -246,10 +229,10 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { let upcast_poly_trait_ref = this.upcast(original_poly_trait_ref, trait_def_id); let upcast_trait_ref = this.replace_bound_vars_with_fresh_vars(&upcast_poly_trait_ref); - debug!("original_poly_trait_ref={:?} upcast_trait_ref={:?} target_trait={:?}", - original_poly_trait_ref, - upcast_trait_ref, - trait_def_id); + debug!( + "original_poly_trait_ref={:?} upcast_trait_ref={:?} target_trait={:?}", + original_poly_trait_ref, upcast_trait_ref, trait_def_id + ); upcast_trait_ref.substs }) } @@ -286,22 +269,24 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { self.fcx .autoderef(self.span, self_ty) .include_raw_pointers() - .filter_map(|(ty, _)| - match ty.kind { - ty::Dynamic(ref data, ..) => { - Some(closure(self, ty, data.principal().unwrap_or_else(|| { - span_bug!(self.span, "calling trait method on empty object?") - }))) - }, - _ => None, - } - ) + .filter_map(|(ty, _)| match ty.kind { + ty::Dynamic(ref data, ..) => Some(closure( + self, + ty, + data.principal().unwrap_or_else(|| { + span_bug!(self.span, "calling trait method on empty object?") + }), + )), + _ => None, + }) .next() - .unwrap_or_else(|| - span_bug!(self.span, - "self-type `{}` for ObjectPick never dereferenced to an object", - self_ty) - ) + .unwrap_or_else(|| { + span_bug!( + self.span, + "self-type `{}` for ObjectPick never dereferenced to an object", + self_ty + ) + }) } fn instantiate_method_substs( @@ -315,11 +300,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { // variables. let generics = self.tcx.generics_of(pick.item.def_id); AstConv::check_generic_arg_count_for_call( - self.tcx, - self.span, - &generics, - &seg, - true, // `is_method_call` + self.tcx, self.span, &generics, &seg, true, // `is_method_call` ); // Create subst for early-bound lifetime parameters, combining @@ -335,26 +316,18 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { // Provide the generic args, and whether types should be inferred. |_| { // The last argument of the returned tuple here is unimportant. - if let Some(ref data) = seg.args { - (Some(data), false) - } else { - (None, false) - } + if let Some(ref data) = seg.args { (Some(data), false) } else { (None, false) } }, // Provide substitutions for parameters for which (valid) arguments have been provided. - |param, arg| { - match (¶m.kind, arg) { - (GenericParamDefKind::Lifetime, GenericArg::Lifetime(lt)) => { - AstConv::ast_region_to_region(self.fcx, lt, Some(param)).into() - } - (GenericParamDefKind::Type { .. }, GenericArg::Type(ty)) => { - self.to_ty(ty).into() - } - (GenericParamDefKind::Const, GenericArg::Const(ct)) => { - self.to_const(&ct.value, self.tcx.type_of(param.def_id)).into() - } - _ => unreachable!(), + |param, arg| match (¶m.kind, arg) { + (GenericParamDefKind::Lifetime, GenericArg::Lifetime(lt)) => { + AstConv::ast_region_to_region(self.fcx, lt, Some(param)).into() } + (GenericParamDefKind::Type { .. 
}, GenericArg::Type(ty)) => self.to_ty(ty).into(), + (GenericParamDefKind::Const, GenericArg::Const(ct)) => { + self.to_const(&ct.value, self.tcx.type_of(param.def_id)).into() + } + _ => unreachable!(), }, // Provide substitutions for parameters for which arguments are inferred. |_, param, _| self.var_for_def(self.span, param), @@ -367,10 +340,12 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { self.register_predicates(obligations); } Err(_) => { - span_bug!(self.span, - "{} was a subtype of {} but now is not?", - self_ty, - method_self_ty); + span_bug!( + self.span, + "{} was a subtype of {} but now is not?", + self_ty, + method_self_ty + ); } } } @@ -378,20 +353,18 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { // NOTE: this returns the *unnormalized* predicates and method sig. Because of // inference guessing, the predicates and method signature can't be normalized // until we unify the `Self` type. - fn instantiate_method_sig(&mut self, - pick: &probe::Pick<'tcx>, - all_substs: SubstsRef<'tcx>) - -> (ty::FnSig<'tcx>, ty::InstantiatedPredicates<'tcx>) { - debug!("instantiate_method_sig(pick={:?}, all_substs={:?})", - pick, - all_substs); + fn instantiate_method_sig( + &mut self, + pick: &probe::Pick<'tcx>, + all_substs: SubstsRef<'tcx>, + ) -> (ty::FnSig<'tcx>, ty::InstantiatedPredicates<'tcx>) { + debug!("instantiate_method_sig(pick={:?}, all_substs={:?})", pick, all_substs); // Instantiate the bounds on the method with the // type/early-bound-regions substitutions performed. There can // be no late-bound regions appearing here. let def_id = pick.item.def_id; - let method_predicates = self.tcx.predicates_of(def_id) - .instantiate(self.tcx, all_substs); + let method_predicates = self.tcx.predicates_of(def_id).instantiate(self.tcx, all_substs); debug!("method_predicates after subst = {:?}", method_predicates); @@ -404,8 +377,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { // `instantiate_type_scheme` can normalize associated types that // may reference those regions. let method_sig = self.replace_bound_vars_with_fresh_vars(&sig); - debug!("late-bound lifetimes from method instantiated, method_sig={:?}", - method_sig); + debug!("late-bound lifetimes from method instantiated, method_sig={:?}", method_sig); let method_sig = method_sig.subst(self.tcx, all_substs); debug!("type scheme substituted, method_sig={:?}", method_sig); @@ -413,17 +385,21 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { (method_sig, method_predicates) } - fn add_obligations(&mut self, - fty: Ty<'tcx>, - all_substs: SubstsRef<'tcx>, - method_predicates: &ty::InstantiatedPredicates<'tcx>) { - debug!("add_obligations: fty={:?} all_substs={:?} method_predicates={:?}", - fty, - all_substs, - method_predicates); + fn add_obligations( + &mut self, + fty: Ty<'tcx>, + all_substs: SubstsRef<'tcx>, + method_predicates: &ty::InstantiatedPredicates<'tcx>, + ) { + debug!( + "add_obligations: fty={:?} all_substs={:?} method_predicates={:?}", + fty, all_substs, method_predicates + ); - self.add_obligations_for_parameters(traits::ObligationCause::misc(self.span, self.body_id), - method_predicates); + self.add_obligations_for_parameters( + traits::ObligationCause::misc(self.span, self.body_id), + method_predicates, + ); // this is a projection from a trait reference, so we have to // make sure that the trait reference inputs are well-formed. 
@@ -447,9 +423,9 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { loop { match exprs.last().unwrap().kind { - hir::ExprKind::Field(ref expr, _) | - hir::ExprKind::Index(ref expr, _) | - hir::ExprKind::Unary(hir::UnDeref, ref expr) => exprs.push(&expr), + hir::ExprKind::Field(ref expr, _) + | hir::ExprKind::Index(ref expr, _) + | hir::ExprKind::Unary(hir::UnDeref, ref expr) => exprs.push(&expr), _ => break, } } @@ -467,10 +443,8 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { // Do not mutate adjustments in place, but rather take them, // and replace them after mutating them, to avoid having the // tables borrowed during (`deref_mut`) method resolution. - let previous_adjustments = self.tables - .borrow_mut() - .adjustments_mut() - .remove(expr.hir_id); + let previous_adjustments = + self.tables.borrow_mut().adjustments_mut().remove(expr.hir_id); if let Some(mut adjustments) = previous_adjustments { let needs = Needs::MutPlace; for adjustment in &mut adjustments { @@ -478,10 +452,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { if let Some(ok) = self.try_overloaded_deref(expr.span, source, needs) { let method = self.register_infer_ok_obligations(ok); if let ty::Ref(region, _, mutbl) = method.sig.output().kind { - *deref = OverloadedDeref { - region, - mutbl, - }; + *deref = OverloadedDeref { region, mutbl }; } } } @@ -494,44 +465,49 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { hir::ExprKind::Index(ref base_expr, ref index_expr) => { let index_expr_ty = self.node_ty(index_expr.hir_id); self.convert_place_op_to_mutable( - PlaceOp::Index, expr, base_expr, &[index_expr_ty]); + PlaceOp::Index, + expr, + base_expr, + &[index_expr_ty], + ); } hir::ExprKind::Unary(hir::UnDeref, ref base_expr) => { - self.convert_place_op_to_mutable( - PlaceOp::Deref, expr, base_expr, &[]); + self.convert_place_op_to_mutable(PlaceOp::Deref, expr, base_expr, &[]); } _ => {} } } } - fn convert_place_op_to_mutable(&self, - op: PlaceOp, - expr: &hir::Expr, - base_expr: &hir::Expr, - arg_tys: &[Ty<'tcx>]) - { - debug!("convert_place_op_to_mutable({:?}, {:?}, {:?}, {:?})", - op, expr, base_expr, arg_tys); + fn convert_place_op_to_mutable( + &self, + op: PlaceOp, + expr: &hir::Expr, + base_expr: &hir::Expr, + arg_tys: &[Ty<'tcx>], + ) { + debug!("convert_place_op_to_mutable({:?}, {:?}, {:?}, {:?})", op, expr, base_expr, arg_tys); if !self.tables.borrow().is_method_call(expr) { debug!("convert_place_op_to_mutable - builtin, nothing to do"); - return + return; } - let base_ty = self.tables.borrow().expr_adjustments(base_expr).last() + let base_ty = self + .tables + .borrow() + .expr_adjustments(base_expr) + .last() .map_or_else(|| self.node_ty(expr.hir_id), |adj| adj.target); let base_ty = self.resolve_vars_if_possible(&base_ty); // Need to deref because overloaded place ops take self by-reference. 
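`convert_place_derefs_to_mutable` and `convert_place_op_to_mutable`, reformatted above, re-resolve overloaded place operations to their mutable forms once a mutable use is seen. A short illustration relying only on the standard `IndexMut`/`DerefMut` impls:

```rust
fn main() {
    let mut v = vec![1, 2, 3];
    // `v[0]` is first checked with the immutable `Index` impl; because it is the
    // target of an assignment, the recorded adjustments are later rewritten to
    // use `index_mut`.
    v[0] = 10;

    let mut b = Box::new(1);
    // Likewise `*b = 2` is re-resolved from `Deref` to `DerefMut`.
    *b = 2;

    assert_eq!((v[0], *b), (10, 2));
}
```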
- let base_ty = base_ty.builtin_deref(false) - .expect("place op takes something that is not a ref") - .ty; + let base_ty = + base_ty.builtin_deref(false).expect("place op takes something that is not a ref").ty; - let method = self.try_overloaded_place_op( - expr.span, base_ty, arg_tys, Needs::MutPlace, op); + let method = self.try_overloaded_place_op(expr.span, base_ty, arg_tys, Needs::MutPlace, op); let method = match method { Some(ok) => self.register_infer_ok_obligations(ok), - None => return self.tcx.sess.delay_span_bug(expr.span, "re-trying op failed") + None => return self.tcx.sess.delay_span_bug(expr.span, "re-trying op failed"), }; debug!("convert_place_op_to_mutable: method={:?}", method); self.write_method_call(expr.hir_id, method); @@ -545,10 +521,9 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { // Convert the autoref in the base expr to mutable with the correct // region and mutability. let base_expr_ty = self.node_ty(base_expr.hir_id); - if let Some(adjustments) = self.tables - .borrow_mut() - .adjustments_mut() - .get_mut(base_expr.hir_id) { + if let Some(adjustments) = + self.tables.borrow_mut().adjustments_mut().get_mut(base_expr.hir_id) + { let mut source = base_expr_ty; for adjustment in &mut adjustments[..] { if let Adjust::Borrow(AutoBorrow::Ref(..)) = adjustment.kind { @@ -560,21 +535,19 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { // deployment, conservatively omit // overloaded operators. allow_two_phase_borrow: AllowTwoPhase::No, - } + }, }; adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(region, mutbl)); - adjustment.target = self.tcx.mk_ref(region, ty::TypeAndMut { - ty: source, - mutbl: mutbl.into(), - }); + adjustment.target = + self.tcx.mk_ref(region, ty::TypeAndMut { ty: source, mutbl: mutbl.into() }); } source = adjustment.target; } // If we have an autoref followed by unsizing at the end, fix the unsize target. match adjustments[..] { - [.., Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. }, - Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target }] => { + [.., Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. }, Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target }] => + { *target = method.sig.inputs()[0]; } _ => {} @@ -585,27 +558,25 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { /////////////////////////////////////////////////////////////////////////// // MISCELLANY - fn predicates_require_illegal_sized_bound(&self, - predicates: &ty::InstantiatedPredicates<'tcx>) - -> bool { + fn predicates_require_illegal_sized_bound( + &self, + predicates: &ty::InstantiatedPredicates<'tcx>, + ) -> bool { let sized_def_id = match self.tcx.lang_items().sized_trait() { Some(def_id) => def_id, None => return false, }; traits::elaborate_predicates(self.tcx, predicates.predicates.clone()) - .filter_map(|predicate| { - match predicate { - ty::Predicate::Trait(trait_pred) if trait_pred.def_id() == sized_def_id => - Some(trait_pred), - _ => None, + .filter_map(|predicate| match predicate { + ty::Predicate::Trait(trait_pred) if trait_pred.def_id() == sized_def_id => { + Some(trait_pred) } + _ => None, }) - .any(|trait_pred| { - match trait_pred.skip_binder().self_ty().kind { - ty::Dynamic(..) => true, - _ => false, - } + .any(|trait_pred| match trait_pred.skip_binder().self_ty().kind { + ty::Dynamic(..) 
=> true, + _ => false, }) } @@ -619,27 +590,31 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { } } - fn upcast(&mut self, - source_trait_ref: ty::PolyTraitRef<'tcx>, - target_trait_def_id: DefId) - -> ty::PolyTraitRef<'tcx> { - let upcast_trait_refs = self.tcx - .upcast_choices(source_trait_ref.clone(), target_trait_def_id); + fn upcast( + &mut self, + source_trait_ref: ty::PolyTraitRef<'tcx>, + target_trait_def_id: DefId, + ) -> ty::PolyTraitRef<'tcx> { + let upcast_trait_refs = + self.tcx.upcast_choices(source_trait_ref.clone(), target_trait_def_id); // must be exactly one trait ref or we'd get an ambig error etc if upcast_trait_refs.len() != 1 { - span_bug!(self.span, - "cannot uniquely upcast `{:?}` to `{:?}`: `{:?}`", - source_trait_ref, - target_trait_def_id, - upcast_trait_refs); + span_bug!( + self.span, + "cannot uniquely upcast `{:?}` to `{:?}`: `{:?}`", + source_trait_ref, + target_trait_def_id, + upcast_trait_refs + ); } upcast_trait_refs.into_iter().next().unwrap() } fn replace_bound_vars_with_fresh_vars<T>(&self, value: &ty::Binder<T>) -> T - where T: TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { self.fcx.replace_bound_vars_with_fresh_vars(self.span, infer::FnCall, value).0 } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index ea295103982..8e79cc13895 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -67,102 +67,100 @@ type parameter). */ -mod autoderef; -pub mod dropck; pub mod _match; -mod pat; -pub mod writeback; -mod regionck; -pub mod coercion; -pub mod demand; -mod expr; -pub mod method; -mod upvar; -mod wfcheck; +mod autoderef; +mod callee; mod cast; mod closure; -mod callee; +pub mod coercion; mod compare_method; +pub mod demand; +pub mod dropck; +mod expr; mod generator_interior; pub mod intrinsic; +pub mod method; mod op; +mod pat; +mod regionck; +mod upvar; +mod wfcheck; +pub mod writeback; use crate::astconv::{AstConv, PathSeg}; -use errors::{Applicability, DiagnosticBuilder, DiagnosticId, pluralize}; -use rustc::hir::{self, ExprKind, GenericArg, ItemKind, Node, PatKind, QPath}; -use rustc::hir::def::{CtorOf, Res, DefKind}; +use crate::middle::lang_items; +use crate::namespace::Namespace; +use errors::{pluralize, Applicability, DiagnosticBuilder, DiagnosticId}; +use rustc::hir::def::{CtorOf, DefKind, Res}; use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; -use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; +use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor}; use rustc::hir::itemlikevisit::ItemLikeVisitor; use rustc::hir::ptr::P; -use crate::middle::lang_items; -use crate::namespace::Namespace; -use rustc::infer::{self, InferCtxt, InferOk, InferResult}; +use rustc::hir::{self, ExprKind, GenericArg, ItemKind, Node, PatKind, QPath}; use rustc::infer::canonical::{Canonical, OriginalQueryValues, QueryResponse}; -use rustc_index::vec::Idx; -use rustc_target::spec::abi::Abi; +use rustc::infer::error_reporting::TypeAnnotationNeeded::E0282; use rustc::infer::opaque_types::OpaqueTypeDecl; use rustc::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; -use rustc::infer::error_reporting::TypeAnnotationNeeded::E0282; use rustc::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind}; +use rustc::infer::{self, InferCtxt, InferOk, InferResult}; use rustc::middle::region; use rustc::mir::interpret::ConstValue; use rustc::traits::{self, ObligationCause, ObligationCauseCode, TraitEngine}; -use rustc::ty::{ - self, AdtKind, CanonicalUserType, Ty, 
TyCtxt, Const, GenericParamDefKind, - ToPolyTraitRef, ToPredicate, RegionKind, UserType -}; use rustc::ty::adjustment::{ - Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, PointerCast + Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, PointerCast, }; use rustc::ty::fold::{TypeFoldable, TypeFolder}; +use rustc::ty::layout::VariantIdx; use rustc::ty::query::Providers; -use rustc::ty::subst::{ - GenericArgKind, Subst, InternalSubsts, SubstsRef, UserSelfTy, UserSubsts, +use rustc::ty::subst::{GenericArgKind, InternalSubsts, Subst, SubstsRef, UserSelfTy, UserSubsts}; +use rustc::ty::util::{Discr, IntTypeExt, Representability}; +use rustc::ty::{ + self, AdtKind, CanonicalUserType, Const, GenericParamDefKind, RegionKind, ToPolyTraitRef, + ToPredicate, Ty, TyCtxt, UserType, }; -use rustc::ty::util::{Representability, IntTypeExt, Discr}; -use rustc::ty::layout::VariantIdx; -use syntax_pos::{self, BytePos, Span, MultiSpan}; -use syntax_pos::hygiene::DesugaringKind; +use rustc_index::vec::Idx; +use rustc_target::spec::abi::Abi; use syntax::ast; use syntax::attr; use syntax::feature_gate::feature_err; -use syntax::source_map::{DUMMY_SP, original_sp}; +use syntax::source_map::{original_sp, DUMMY_SP}; use syntax::symbol::{kw, sym, Ident}; use syntax::util::parser::ExprPrecedence; +use syntax_pos::hygiene::DesugaringKind; +use syntax_pos::{self, BytePos, MultiSpan, Span}; use rustc_error_codes::*; -use std::cell::{Cell, RefCell, Ref, RefMut}; -use std::collections::hash_map::Entry; +use std::cell::{Cell, Ref, RefCell, RefMut}; use std::cmp; +use std::collections::hash_map::Entry; use std::iter; use std::mem::replace; use std::ops::{self, Deref}; use std::slice; +use crate::lint; use crate::require_c_abi_if_c_variadic; -use crate::session::Session; use crate::session::config::EntryFnType; -use crate::TypeAndSubsts; -use crate::lint; +use crate::session::Session; use crate::util::captures::Captures; -use crate::util::common::{ErrorReported, indenter}; +use crate::util::common::{indenter, ErrorReported}; use crate::util::nodemap::{DefIdMap, DefIdSet, FxHashMap, FxHashSet, HirIdMap}; +use crate::TypeAndSubsts; -pub use self::Expectation::*; use self::autoderef::Autoderef; use self::callee::DeferredCallResolution; use self::coercion::{CoerceMany, DynamicCoerceMany}; -use self::compare_method::{compare_impl_method, compare_const_impl, compare_ty_impl}; +use self::compare_method::{compare_const_impl, compare_impl_method, compare_ty_impl}; use self::method::{MethodCallee, SelfSource}; +pub use self::Expectation::*; use self::TupleArgumentsFlag::*; /// The type of a local binding, including the revealed type for anon types. #[derive(Copy, Clone, Debug)] pub struct LocalTy<'tcx> { decl_ty: Ty<'tcx>, - revealed_ty: Ty<'tcx> + revealed_ty: Ty<'tcx>, } /// A wrapper for `InferCtxt`'s `in_progress_tables` field. 
@@ -175,18 +173,14 @@ impl<'a, 'tcx> MaybeInProgressTables<'a, 'tcx> { fn borrow(self) -> Ref<'a, ty::TypeckTables<'tcx>> { match self.maybe_tables { Some(tables) => tables.borrow(), - None => { - bug!("MaybeInProgressTables: inh/fcx.tables.borrow() with no tables") - } + None => bug!("MaybeInProgressTables: inh/fcx.tables.borrow() with no tables"), } } fn borrow_mut(self) -> RefMut<'a, ty::TypeckTables<'tcx>> { match self.maybe_tables { Some(tables) => tables.borrow_mut(), - None => { - bug!("MaybeInProgressTables: inh/fcx.tables.borrow_mut() with no tables") - } + None => bug!("MaybeInProgressTables: inh/fcx.tables.borrow_mut() with no tables"), } } } @@ -296,16 +290,10 @@ impl<'a, 'tcx> Expectation<'tcx> { match *self { ExpectHasType(ety) => { let ety = fcx.shallow_resolve(ety); - if !ety.is_ty_var() { - ExpectHasType(ety) - } else { - NoExpectation - } - } - ExpectRvalueLikeUnsized(ety) => { - ExpectRvalueLikeUnsized(ety) + if !ety.is_ty_var() { ExpectHasType(ety) } else { NoExpectation } } - _ => NoExpectation + ExpectRvalueLikeUnsized(ety) => ExpectRvalueLikeUnsized(ety), + _ => NoExpectation, } } @@ -330,10 +318,8 @@ impl<'a, 'tcx> Expectation<'tcx> { /// for examples of where this comes up,. fn rvalue_hint(fcx: &FnCtxt<'a, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> { match fcx.tcx.struct_tail_without_normalization(ty).kind { - ty::Slice(_) | ty::Str | ty::Dynamic(..) => { - ExpectRvalueLikeUnsized(ty) - } - _ => ExpectHasType(ty) + ty::Slice(_) | ty::Str | ty::Dynamic(..) => ExpectRvalueLikeUnsized(ty), + _ => ExpectHasType(ty), } } @@ -343,24 +329,16 @@ impl<'a, 'tcx> Expectation<'tcx> { fn resolve(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> { match self { NoExpectation => NoExpectation, - ExpectCastableToType(t) => { - ExpectCastableToType(fcx.resolve_vars_if_possible(&t)) - } - ExpectHasType(t) => { - ExpectHasType(fcx.resolve_vars_if_possible(&t)) - } - ExpectRvalueLikeUnsized(t) => { - ExpectRvalueLikeUnsized(fcx.resolve_vars_if_possible(&t)) - } + ExpectCastableToType(t) => ExpectCastableToType(fcx.resolve_vars_if_possible(&t)), + ExpectHasType(t) => ExpectHasType(fcx.resolve_vars_if_possible(&t)), + ExpectRvalueLikeUnsized(t) => ExpectRvalueLikeUnsized(fcx.resolve_vars_if_possible(&t)), } } fn to_option(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> { match self.resolve(fcx) { NoExpectation => None, - ExpectCastableToType(ty) | - ExpectHasType(ty) | - ExpectRvalueLikeUnsized(ty) => Some(ty), + ExpectCastableToType(ty) | ExpectHasType(ty) | ExpectRvalueLikeUnsized(ty) => Some(ty), } } @@ -378,20 +356,16 @@ impl<'a, 'tcx> Expectation<'tcx> { /// Like `only_has_type`, but instead of returning `None` if no /// hard constraint exists, creates a fresh type variable. fn coercion_target_type(self, fcx: &FnCtxt<'a, 'tcx>, span: Span) -> Ty<'tcx> { - self.only_has_type(fcx) - .unwrap_or_else(|| { - fcx.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::MiscVariable, - span, - }) - }) + self.only_has_type(fcx).unwrap_or_else(|| { + fcx.next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span }) + }) } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum Needs { MutPlace, - None + None, } impl Needs { @@ -408,7 +382,7 @@ pub struct UnsafetyState { pub def: hir::HirId, pub unsafety: hir::Unsafety, pub unsafe_push_count: u32, - from_fn: bool + from_fn: bool, } impl UnsafetyState { @@ -426,19 +400,18 @@ impl UnsafetyState { unsafety => { let (unsafety, def, count) = match blk.rules { - hir::PushUnsafeBlock(..) 
=> - (unsafety, blk.hir_id, self.unsafe_push_count.checked_add(1).unwrap()), - hir::PopUnsafeBlock(..) => - (unsafety, blk.hir_id, self.unsafe_push_count.checked_sub(1).unwrap()), - hir::UnsafeBlock(..) => - (hir::Unsafety::Unsafe, blk.hir_id, self.unsafe_push_count), - hir::DefaultBlock => - (unsafety, self.def, self.unsafe_push_count), + hir::PushUnsafeBlock(..) => { + (unsafety, blk.hir_id, self.unsafe_push_count.checked_add(1).unwrap()) + } + hir::PopUnsafeBlock(..) => { + (unsafety, blk.hir_id, self.unsafe_push_count.checked_sub(1).unwrap()) + } + hir::UnsafeBlock(..) => { + (hir::Unsafety::Unsafe, blk.hir_id, self.unsafe_push_count) + } + hir::DefaultBlock => (unsafety, self.def, self.unsafe_push_count), }; - UnsafetyState{ def, - unsafety, - unsafe_push_count: count, - from_fn: false } + UnsafetyState { def, unsafety, unsafe_push_count: count, from_fn: false } } } } @@ -447,7 +420,7 @@ impl UnsafetyState { #[derive(Debug, Copy, Clone)] pub enum PlaceOp { Deref, - Index + Index, } /// Tracks whether executing a node may exit normally (versus @@ -475,12 +448,12 @@ pub enum Diverges { /// If this is `None`, a default messsage /// will be generated, which is suitable /// for most cases. - custom_note: Option<&'static str> + custom_note: Option<&'static str>, }, /// Same as `Always` but with a reachability /// warning already emitted. - WarnedAlways + WarnedAlways, } // Convenience impls for combining `Diverges`. @@ -514,20 +487,14 @@ impl ops::BitOrAssign for Diverges { impl Diverges { /// Creates a `Diverges::Always` with the provided `span` and the default note message. fn always(span: Span) -> Diverges { - Diverges::Always { - span, - custom_note: None - } + Diverges::Always { span, custom_note: None } } fn is_always(self) -> bool { // Enum comparison ignores the // contents of fields, so we just // fill them in with garbage here. 
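The `Diverges` enum reformatted above tracks whether a node can exit normally and drives unreachable-code warnings. A tiny example (illustrative only) that compiles but triggers such a warning:

```rust
fn pick() -> u32 {
    return 1;
    // warning: unreachable expression. The `return` above diverges, so the
    // checker records `Diverges::Always` and lints the rest of the block.
    2
}

fn main() {
    assert_eq!(pick(), 1);
}
```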
- self >= Diverges::Always { - span: DUMMY_SP, - custom_note: None - } + self >= Diverges::Always { span: DUMMY_SP, custom_note: None } } } @@ -688,14 +655,12 @@ impl Inherited<'a, 'tcx> { let body = tcx.hir().body(body_id); tcx.mk_region(ty::ReScope(region::Scope { id: body.value.hir_id.local_id, - data: region::ScopeData::CallSite + data: region::ScopeData::CallSite, })) }); Inherited { - tables: MaybeInProgressTables { - maybe_tables: infcx.in_progress_tables, - }, + tables: MaybeInProgressTables { maybe_tables: infcx.in_progress_tables }, infcx, fulfillment_cx: RefCell::new(TraitEngine::new(tcx)), locals: RefCell::new(Default::default()), @@ -713,16 +678,14 @@ impl Inherited<'a, 'tcx> { fn register_predicate(&self, obligation: traits::PredicateObligation<'tcx>) { debug!("register_predicate({:?})", obligation); if obligation.has_escaping_bound_vars() { - span_bug!(obligation.cause.span, "escaping bound vars in predicate {:?}", - obligation); + span_bug!(obligation.cause.span, "escaping bound vars in predicate {:?}", obligation); } - self.fulfillment_cx - .borrow_mut() - .register_predicate_obligation(self, obligation); + self.fulfillment_cx.borrow_mut().register_predicate_obligation(self, obligation); } fn register_predicates<I>(&self, obligations: I) - where I: IntoIterator<Item = traits::PredicateObligation<'tcx>> + where + I: IntoIterator<Item = traits::PredicateObligation<'tcx>>, { for obligation in obligations { self.register_predicate(obligation); @@ -734,12 +697,15 @@ impl Inherited<'a, 'tcx> { infer_ok.value } - fn normalize_associated_types_in<T>(&self, - span: Span, - body_id: hir::HirId, - param_env: ty::ParamEnv<'tcx>, - value: &T) -> T - where T : TypeFoldable<'tcx> + fn normalize_associated_types_in<T>( + &self, + span: Span, + body_id: hir::HirId, + param_env: ty::ParamEnv<'tcx>, + value: &T, + ) -> T + where + T: TypeFoldable<'tcx>, { let ok = self.partially_normalize_associated_types_in(span, body_id, param_env, value); self.register_infer_ok_obligations(ok) @@ -754,8 +720,8 @@ impl ItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'tcx> { fn visit_item(&mut self, i: &'tcx hir::Item<'tcx>) { check_item_type(self.tcx, i); } - fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem<'tcx>) { } - fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem<'tcx>) { } + fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem<'tcx>) {} + fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem<'tcx>) {} } pub fn check_wf_new(tcx: TyCtxt<'_>) { @@ -821,37 +787,29 @@ fn primary_body_of( id: hir::HirId, ) -> Option<(hir::BodyId, Option<&hir::Ty>, Option<&hir::FnHeader>, Option<&hir::FnDecl>)> { match tcx.hir().get(id) { - Node::Item(item) => { - match item.kind { - hir::ItemKind::Const(ref ty, body) | - hir::ItemKind::Static(ref ty, _, body) => - Some((body, Some(ty), None, None)), - hir::ItemKind::Fn(ref sig, .., body) => - Some((body, None, Some(&sig.header), Some(&sig.decl))), - _ => - None, + Node::Item(item) => match item.kind { + hir::ItemKind::Const(ref ty, body) | hir::ItemKind::Static(ref ty, _, body) => { + Some((body, Some(ty), None, None)) } - } - Node::TraitItem(item) => { - match item.kind { - hir::TraitItemKind::Const(ref ty, Some(body)) => - Some((body, Some(ty), None, None)), - hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => - Some((body, None, Some(&sig.header), Some(&sig.decl))), - _ => - None, + hir::ItemKind::Fn(ref sig, .., body) => { + Some((body, None, Some(&sig.header), Some(&sig.decl))) } - } - Node::ImplItem(item) => { - match item.kind { - 
hir::ImplItemKind::Const(ref ty, body) => - Some((body, Some(ty), None, None)), - hir::ImplItemKind::Method(ref sig, body) => - Some((body, None, Some(&sig.header), Some(&sig.decl))), - _ => - None, + _ => None, + }, + Node::TraitItem(item) => match item.kind { + hir::TraitItemKind::Const(ref ty, Some(body)) => Some((body, Some(ty), None, None)), + hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => { + Some((body, None, Some(&sig.header), Some(&sig.decl))) } - } + _ => None, + }, + Node::ImplItem(item) => match item.kind { + hir::ImplItemKind::Const(ref ty, body) => Some((body, Some(ty), None, None)), + hir::ImplItemKind::Method(ref sig, body) => { + Some((body, None, Some(&sig.header), Some(&sig.decl))) + } + _ => None, + }, Node::AnonConst(constant) => Some((constant.body, None, None, None)), _ => None, } @@ -904,9 +862,12 @@ fn used_trait_imports(tcx: TyCtxt<'_>, def_id: DefId) -> &DefIdSet { /// conclude that we don't have a defining use of `MyItem`. By mapping inference /// variables back to the actual generic parameters, we will correctly see that /// we have a defining use of `MyItem` -fn fixup_opaque_types<'tcx, T>(tcx: TyCtxt<'tcx>, val: &T) -> T where T: TypeFoldable<'tcx> { +fn fixup_opaque_types<'tcx, T>(tcx: TyCtxt<'tcx>, val: &T) -> T +where + T: TypeFoldable<'tcx>, +{ struct FixupFolder<'tcx> { - tcx: TyCtxt<'tcx> + tcx: TyCtxt<'tcx>, } impl<'tcx> TypeFolder<'tcx> for FixupFolder<'tcx> { @@ -937,18 +898,21 @@ fn fixup_opaque_types<'tcx, T>(tcx: TyCtxt<'tcx>, val: &T) -> T where T: TypeFol } else { old_param.fold_with(self) } - }, + } GenericArgKind::Const(old_const) => { if let ty::ConstKind::Infer(_) = old_const.val { - // This should never happen - we currently do not support - // 'const projections', e.g.: - // `impl<T: SomeTrait> MyTrait for T where <T as SomeTrait>::MyConst == 25` - // which should be the only way for us to end up with a const inference - // variable after projection. If Rust ever gains support for this kind - // of projection, this should *probably* be changed to - // `self.tcx.mk_param_from_def(param)` - bug!("Found infer const: `{:?}` in opaque type: {:?}", - old_const, ty); + // This should never happen - we currently do not support + // 'const projections', e.g.: + // `impl<T: SomeTrait> MyTrait for T where <T as SomeTrait>::MyConst == 25` + // which should be the only way for us to end up with a const inference + // variable after projection. If Rust ever gains support for this kind + // of projection, this should *probably* be changed to + // `self.tcx.mk_param_from_def(param)` + bug!( + "Found infer const: `{:?}` in opaque type: {:?}", + old_const, + ty + ); } else { old_param.fold_with(self) } @@ -968,8 +932,8 @@ fn fixup_opaque_types<'tcx, T>(tcx: TyCtxt<'tcx>, val: &T) -> T where T: TypeFol } else { ty } - }, - _ => ty.super_fold_with(self) + } + _ => ty.super_fold_with(self), } } } @@ -990,10 +954,9 @@ fn typeck_tables_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::TypeckTables<'_> { let span = tcx.hir().span(id); // Figure out what primary body this item has. 
- let (body_id, body_ty, fn_header, fn_decl) = primary_body_of(tcx, id) - .unwrap_or_else(|| { - span_bug!(span, "can't type-check body of {:?}", def_id); - }); + let (body_id, body_ty, fn_header, fn_decl) = primary_body_of(tcx, id).unwrap_or_else(|| { + span_bug!(span, "can't type-check body of {:?}", def_id); + }); let body = tcx.hir().body(body_id); let tables = Inherited::build(tcx, def_id).enter(|inh| { @@ -1009,13 +972,13 @@ fn typeck_tables_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::TypeckTables<'_> { check_abi(tcx, span, fn_sig.abi()); // Compute the fty from point of view of inside the fn. - let fn_sig = - tcx.liberate_late_bound_regions(def_id, &fn_sig); - let fn_sig = - inh.normalize_associated_types_in(body.value.span, - body_id.hir_id, - param_env, - &fn_sig); + let fn_sig = tcx.liberate_late_bound_regions(def_id, &fn_sig); + let fn_sig = inh.normalize_associated_types_in( + body.value.span, + body_id.hir_id, + param_env, + &fn_sig, + ); let fn_sig = fixup_opaque_types(tcx, &fn_sig); @@ -1023,25 +986,23 @@ fn typeck_tables_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::TypeckTables<'_> { fcx } else { let fcx = FnCtxt::new(&inh, param_env, body.value.hir_id); - let expected_type = body_ty.and_then(|ty| match ty.kind { - hir::TyKind::Infer => Some(AstConv::ast_ty_to_ty(&fcx, ty)), - _ => None - }).unwrap_or_else(|| tcx.type_of(def_id)); + let expected_type = body_ty + .and_then(|ty| match ty.kind { + hir::TyKind::Infer => Some(AstConv::ast_ty_to_ty(&fcx, ty)), + _ => None, + }) + .unwrap_or_else(|| tcx.type_of(def_id)); let expected_type = fcx.normalize_associated_types_in(body.value.span, &expected_type); fcx.require_type_is_sized(expected_type, body.value.span, traits::ConstSized); let revealed_ty = if tcx.features().impl_trait_in_bindings { - fcx.instantiate_opaque_types_from_value( - id, - &expected_type, - body.value.span, - ) + fcx.instantiate_opaque_types_from_value(id, &expected_type, body.value.span) } else { expected_type }; // Gather locals in statics (because of block expressions). 
- GatherLocalsVisitor { fcx: &fcx, parent_id: id, }.visit_body(body); + GatherLocalsVisitor { fcx: &fcx, parent_id: id }.visit_body(body); fcx.check_expr_coercable_to_type(&body.value, revealed_ty); @@ -1129,8 +1090,14 @@ fn typeck_tables_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::TypeckTables<'_> { fn check_abi(tcx: TyCtxt<'_>, span: Span, abi: Abi) { if !tcx.sess.target.target.is_abi_supported(abi) { - struct_span_err!(tcx.sess, span, E0570, - "The ABI `{}` is not supported for the current target", abi).emit() + struct_span_err!( + tcx.sess, + span, + E0570, + "The ABI `{}` is not supported for the current target", + abi + ) + .emit() } } @@ -1148,10 +1115,10 @@ impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> { kind: TypeVariableOriginKind::TypeInference, span, }); - self.fcx.locals.borrow_mut().insert(nid, LocalTy { - decl_ty: var_ty, - revealed_ty: var_ty - }); + self.fcx + .locals + .borrow_mut() + .insert(nid, LocalTy { decl_ty: var_ty, revealed_ty: var_ty }); var_ty } Some(typ) => { @@ -1175,32 +1142,34 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { let o_ty = self.fcx.to_ty(&ty); let revealed_ty = if self.fcx.tcx.features().impl_trait_in_bindings { - self.fcx.instantiate_opaque_types_from_value( - self.parent_id, - &o_ty, - ty.span, - ) + self.fcx.instantiate_opaque_types_from_value(self.parent_id, &o_ty, ty.span) } else { o_ty }; - let c_ty = self.fcx.inh.infcx.canonicalize_user_type_annotation( - &UserType::Ty(revealed_ty) + let c_ty = self + .fcx + .inh + .infcx + .canonicalize_user_type_annotation(&UserType::Ty(revealed_ty)); + debug!( + "visit_local: ty.hir_id={:?} o_ty={:?} revealed_ty={:?} c_ty={:?}", + ty.hir_id, o_ty, revealed_ty, c_ty ); - debug!("visit_local: ty.hir_id={:?} o_ty={:?} revealed_ty={:?} c_ty={:?}", - ty.hir_id, o_ty, revealed_ty, c_ty); self.fcx.tables.borrow_mut().user_provided_types_mut().insert(ty.hir_id, c_ty); Some(LocalTy { decl_ty: o_ty, revealed_ty }) - }, + } None => None, }; self.assign(local.span, local.hir_id, local_ty); - debug!("local variable {:?} is assigned type {}", - local.pat, - self.fcx.ty_to_string( - self.fcx.locals.borrow().get(&local.hir_id).unwrap().clone().decl_ty)); + debug!( + "local variable {:?} is assigned type {}", + local.pat, + self.fcx + .ty_to_string(self.fcx.locals.borrow().get(&local.hir_id).unwrap().clone().decl_ty) + ); intravisit::walk_local(self, local); } @@ -1210,15 +1179,16 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { let var_ty = self.assign(p.span, p.hir_id, None); if !self.fcx.tcx.features().unsized_locals { - self.fcx.require_type_is_sized(var_ty, p.span, - traits::VariableType(p.hir_id)); + self.fcx.require_type_is_sized(var_ty, p.span, traits::VariableType(p.hir_id)); } - debug!("pattern binding {} is assigned to {} with type {:?}", - ident, - self.fcx.ty_to_string( - self.fcx.locals.borrow().get(&p.hir_id).unwrap().clone().decl_ty), - var_ty); + debug!( + "pattern binding {} is assigned to {} with type {:?}", + ident, + self.fcx + .ty_to_string(self.fcx.locals.borrow().get(&p.hir_id).unwrap().clone().decl_ty), + var_ty + ); } intravisit::walk_pat(self, p); } @@ -1231,7 +1201,8 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { _: hir::BodyId, _: Span, _: hir::HirId, - ) { } + ) { + } } /// When `check_fn` is invoked on a generator (i.e., a body that @@ -1274,11 +1245,8 @@ fn check_fn<'a, 'tcx>( let declared_ret_ty = fn_sig.output(); fcx.require_type_is_sized(declared_ret_ty, decl.output.span(), traits::SizedReturnType); - let revealed_ret_ty = 
fcx.instantiate_opaque_types_from_value( - fn_id, - &declared_ret_ty, - decl.output.span(), - ); + let revealed_ret_ty = + fcx.instantiate_opaque_types_from_value(fn_id, &declared_ret_ty, decl.output.span()); debug!("check_fn: declared_ret_ty: {}, revealed_ret_ty: {}", declared_ret_ty, revealed_ret_ty); fcx.ret_coercion = Some(RefCell::new(CoerceMany::new(revealed_ret_ty))); fn_sig = fcx.tcx.mk_fn_sig( @@ -1286,7 +1254,7 @@ fn check_fn<'a, 'tcx>( revealed_ret_ty, fn_sig.c_variadic, fn_sig.unsafety, - fn_sig.abi + fn_sig.abi, ); let span = body.value.span; @@ -1294,17 +1262,15 @@ fn check_fn<'a, 'tcx>( fn_maybe_err(fcx.tcx, span, fn_sig.abi); if body.generator_kind.is_some() && can_be_generator.is_some() { - let yield_ty = fcx.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::TypeInference, - span, - }); + let yield_ty = fcx + .next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span }); fcx.require_type_is_sized(yield_ty, span, traits::SizedYieldType); fcx.yield_ty = Some(yield_ty); } let outer_def_id = fcx.tcx.closure_base_def_id(fcx.tcx.hir().local_def_id(fn_id)); let outer_hir_id = fcx.tcx.hir().as_local_hir_id(outer_def_id).unwrap(); - GatherLocalsVisitor { fcx: &fcx, parent_id: outer_hir_id, }.visit_body(body); + GatherLocalsVisitor { fcx: &fcx, parent_id: outer_hir_id }.visit_body(body); // C-variadic fns also have a `VaList` input that's not listed in `fn_sig` // (as it's created inside the body itself, not passed in from outside). @@ -1315,7 +1281,7 @@ fn check_fn<'a, 'tcx>( ); let region = fcx.tcx.mk_region(ty::ReScope(region::Scope { id: body.value.hir_id.local_id, - data: region::ScopeData::CallSite + data: region::ScopeData::CallSite, })); Some(fcx.tcx.type_of(va_list_did).subst(fcx.tcx, &[region.into()])) @@ -1324,11 +1290,7 @@ fn check_fn<'a, 'tcx>( }; // Add formal parameters. - for (param_ty, param) in - fn_sig.inputs().iter().copied() - .chain(maybe_va_list) - .zip(body.params) - { + for (param_ty, param) in fn_sig.inputs().iter().copied().chain(maybe_va_list).zip(body.params) { // Check the pattern. fcx.check_pat_top(¶m.pat, param_ty, None); @@ -1351,10 +1313,8 @@ fn check_fn<'a, 'tcx>( // This ensures that all nested generators appear before the entry of this generator. // resolve_generator_interiors relies on this property. 
let gen_ty = if let (Some(_), Some(gen_kind)) = (can_be_generator, body.generator_kind) { - let interior = fcx.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::MiscVariable, - span, - }); + let interior = fcx + .next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span }); fcx.deferred_generator_interiors.borrow_mut().push((body.id(), interior, gen_kind)); Some(GeneratorTypes { yield_ty: fcx.yield_ty.unwrap(), @@ -1391,12 +1351,10 @@ fn check_fn<'a, 'tcx>( let coercion = fcx.ret_coercion.take().unwrap().into_inner(); let mut actual_return_ty = coercion.complete(&fcx); if actual_return_ty.is_never() { - actual_return_ty = fcx.next_diverging_ty_var( - TypeVariableOrigin { - kind: TypeVariableOriginKind::DivergingFn, - span, - }, - ); + actual_return_ty = fcx.next_diverging_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::DivergingFn, + span, + }); } fcx.demand_suptype(span, revealed_ret_ty, actual_return_ty); @@ -1409,11 +1367,16 @@ fn check_fn<'a, 'tcx>( let trait_ref = ty::TraitRef::new(term_id, substs); let return_ty_span = decl.output.span(); let cause = traits::ObligationCause::new( - return_ty_span, fn_id, ObligationCauseCode::MainFunctionType); + return_ty_span, + fn_id, + ObligationCauseCode::MainFunctionType, + ); - inherited.register_predicate( - traits::Obligation::new( - cause, param_env, trait_ref.to_predicate())); + inherited.register_predicate(traits::Obligation::new( + cause, + param_env, + trait_ref.to_predicate(), + )); } } } @@ -1423,10 +1386,7 @@ fn check_fn<'a, 'tcx>( if panic_impl_did == fcx.tcx.hir().local_def_id(fn_id) { if let Some(panic_info_did) = fcx.tcx.lang_items().panic_info() { if declared_ret_ty.kind != ty::Never { - fcx.tcx.sess.span_err( - decl.output.span(), - "return type should be `!`", - ); + fcx.tcx.sess.span_err(decl.output.span(), "return type should be `!`"); } let inputs = fn_sig.inputs(); @@ -1435,29 +1395,25 @@ fn check_fn<'a, 'tcx>( let arg_is_panic_info = match inputs[0].kind { ty::Ref(region, ty, mutbl) => match ty.kind { ty::Adt(ref adt, _) => { - adt.did == panic_info_did && - mutbl == hir::Mutability::Not && - *region != RegionKind::ReStatic - }, + adt.did == panic_info_did + && mutbl == hir::Mutability::Not + && *region != RegionKind::ReStatic + } _ => false, }, _ => false, }; if !arg_is_panic_info { - fcx.tcx.sess.span_err( - decl.inputs[0].span, - "argument should be `&PanicInfo`", - ); + fcx.tcx + .sess + .span_err(decl.inputs[0].span, "argument should be `&PanicInfo`"); } if let Node::Item(item) = fcx.tcx.hir().get(fn_id) { if let ItemKind::Fn(_, ref generics, _) = item.kind { if !generics.params.is_empty() { - fcx.tcx.sess.span_err( - span, - "should have no type parameters", - ); + fcx.tcx.sess.span_err(span, "should have no type parameters"); } } } @@ -1476,27 +1432,19 @@ fn check_fn<'a, 'tcx>( if alloc_error_handler_did == fcx.tcx.hir().local_def_id(fn_id) { if let Some(alloc_layout_did) = fcx.tcx.lang_items().alloc_layout() { if declared_ret_ty.kind != ty::Never { - fcx.tcx.sess.span_err( - decl.output.span(), - "return type should be `!`", - ); + fcx.tcx.sess.span_err(decl.output.span(), "return type should be `!`"); } let inputs = fn_sig.inputs(); let span = fcx.tcx.hir().span(fn_id); if inputs.len() == 1 { let arg_is_alloc_layout = match inputs[0].kind { - ty::Adt(ref adt, _) => { - adt.did == alloc_layout_did - }, + ty::Adt(ref adt, _) => adt.did == alloc_layout_did, _ => false, }; if !arg_is_alloc_layout { - fcx.tcx.sess.span_err( - decl.inputs[0].span, - "argument should be 
`Layout`", - ); + fcx.tcx.sess.span_err(decl.inputs[0].span, "argument should be `Layout`"); } if let Node::Item(item) = fcx.tcx.hir().get(fn_id) { @@ -1560,11 +1508,14 @@ fn check_union_fields(tcx: TyCtxt<'_>, span: Span, item_def_id: DefId) -> bool { let field_span = tcx.hir().span_if_local(field.did).unwrap(); let param_env = tcx.param_env(field.did); if field_ty.needs_drop(tcx, param_env) { - struct_span_err!(tcx.sess, field_span, E0740, - "unions may not contain fields that need dropping") - .span_note(field_span, - "`std::mem::ManuallyDrop` can be used to wrap the type") - .emit(); + struct_span_err!( + tcx.sess, + field_span, + E0740, + "unions may not contain fields that need dropping" + ) + .span_note(field_span, "`std::mem::ManuallyDrop` can be used to wrap the type") + .emit(); return false; } } @@ -1589,15 +1540,13 @@ fn check_opaque<'tcx>( /// Checks that an opaque type does not use `Self` or `T::Foo` projections that would result /// in "inheriting lifetimes". -fn check_opaque_for_inheriting_lifetimes( - tcx: TyCtxt<'tcx>, - def_id: DefId, - span: Span, -) { - let item = tcx.hir().expect_item( - tcx.hir().as_local_hir_id(def_id).expect("opaque type is not local")); - debug!("check_opaque_for_inheriting_lifetimes: def_id={:?} span={:?} item={:?}", - def_id, span, item); +fn check_opaque_for_inheriting_lifetimes(tcx: TyCtxt<'tcx>, def_id: DefId, span: Span) { + let item = + tcx.hir().expect_item(tcx.hir().as_local_hir_id(def_id).expect("opaque type is not local")); + debug!( + "check_opaque_for_inheriting_lifetimes: def_id={:?} span={:?} item={:?}", + def_id, span, item + ); #[derive(Debug)] struct ProhibitOpaqueVisitor<'tcx> { @@ -1622,18 +1571,20 @@ fn check_opaque_for_inheriting_lifetimes( } let prohibit_opaque = match item.kind { - ItemKind::OpaqueTy(hir::OpaqueTy { origin: hir::OpaqueTyOrigin::AsyncFn, .. }) | - ItemKind::OpaqueTy(hir::OpaqueTy { origin: hir::OpaqueTyOrigin::FnReturn, .. }) => { + ItemKind::OpaqueTy(hir::OpaqueTy { origin: hir::OpaqueTyOrigin::AsyncFn, .. }) + | ItemKind::OpaqueTy(hir::OpaqueTy { origin: hir::OpaqueTyOrigin::FnReturn, .. 
}) => { let mut visitor = ProhibitOpaqueVisitor { - opaque_identity_ty: tcx.mk_opaque( - def_id, InternalSubsts::identity_for_item(tcx, def_id)), + opaque_identity_ty: tcx + .mk_opaque(def_id, InternalSubsts::identity_for_item(tcx, def_id)), generics: tcx.generics_of(def_id), }; debug!("check_opaque_for_inheriting_lifetimes: visitor={:?}", visitor); - tcx.predicates_of(def_id).predicates.iter().any( - |(predicate, _)| predicate.visit_with(&mut visitor)) - }, + tcx.predicates_of(def_id) + .predicates + .iter() + .any(|(predicate, _)| predicate.visit_with(&mut visitor)) + } _ => false, }; @@ -1665,18 +1616,13 @@ fn check_opaque_for_cycles<'tcx>( ) { if let Err(partially_expanded_type) = tcx.try_expand_impl_trait_type(def_id, substs) { if let hir::OpaqueTyOrigin::AsyncFn = origin { - struct_span_err!( - tcx.sess, span, E0733, - "recursion in an `async fn` requires boxing", - ) - .span_label(span, "recursive `async fn`") - .note("a recursive `async fn` must be rewritten to return a boxed `dyn Future`.") - .emit(); + struct_span_err!(tcx.sess, span, E0733, "recursion in an `async fn` requires boxing",) + .span_label(span, "recursive `async fn`") + .note("a recursive `async fn` must be rewritten to return a boxed `dyn Future`.") + .emit(); } else { - let mut err = struct_span_err!( - tcx.sess, span, E0720, - "opaque type expands to a recursive type", - ); + let mut err = + struct_span_err!(tcx.sess, span, E0720, "opaque type expands to a recursive type",); err.span_label(span, "expands to a recursive type"); if let ty::Opaque(..) = partially_expanded_type.kind { err.note("type resolves to itself"); @@ -1750,7 +1696,7 @@ pub fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, it: &'tcx hir::Item<'tcx>) { hir::ItemKind::Union(..) => { check_union(tcx, it.hir_id, it.span); } - hir::ItemKind::OpaqueTy(hir::OpaqueTy{origin, ..}) => { + hir::ItemKind::OpaqueTy(hir::OpaqueTy { origin, .. }) => { let def_id = tcx.hir().local_def_id(it.hir_id); let substs = InternalSubsts::identity_for_item(tcx, def_id); @@ -1791,10 +1737,9 @@ pub fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, it: &'tcx hir::Item<'tcx>) { E0044, "foreign items may not have {} parameters", kinds, - ).span_label( - item.span, - &format!("can't have {} parameters", kinds), - ).help( + ) + .span_label(item.span, &format!("can't have {} parameters", kinds)) + .help( // FIXME: once we start storing spans for type arguments, turn this // into a suggestion. 
&format!( @@ -1803,7 +1748,8 @@ pub fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, it: &'tcx hir::Item<'tcx>) { kinds_pl, egs.map(|egs| format!(" like `{}`", egs)).unwrap_or_default(), ), - ).emit(); + ) + .emit(); } if let hir::ForeignItemKind::Fn(ref fn_decl, _, _) = item.kind { @@ -1819,13 +1765,13 @@ pub fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, it: &'tcx hir::Item<'tcx>) { fn maybe_check_static_with_link_section(tcx: TyCtxt<'_>, id: DefId, span: Span) { // Only restricted on wasm32 target for now if !tcx.sess.opts.target_triple.triple().starts_with("wasm32") { - return + return; } // If `#[link_section]` is missing, then nothing to verify let attrs = tcx.codegen_fn_attrs(id); if attrs.link_section.is_none() { - return + return; } // For the wasm32 target statics with `#[link_section]` are placed into custom @@ -1863,18 +1809,22 @@ fn report_forbidden_specialization( parent_impl: DefId, ) { let mut err = struct_span_err!( - tcx.sess, impl_item.span, E0520, + tcx.sess, + impl_item.span, + E0520, "`{}` specializes an item from a parent `impl`, but \ that item is not marked `default`", - impl_item.ident); - err.span_label(impl_item.span, format!("cannot specialize default item `{}`", - impl_item.ident)); + impl_item.ident + ); + err.span_label(impl_item.span, format!("cannot specialize default item `{}`", impl_item.ident)); match tcx.span_of_impl(parent_impl) { Ok(span) => { err.span_label(span, "parent `impl` is here"); - err.note(&format!("to specialize, `{}` in the parent `impl` must be marked `default`", - impl_item.ident)); + err.note(&format!( + "to specialize, `{}` in the parent `impl` must be marked `default`", + impl_item.ident + )); } Err(cname) => { err.note(&format!("parent implementation is in crate `{}`", cname)); @@ -1898,7 +1848,8 @@ fn check_specialization_validity<'tcx>( hir::ImplItemKind::TyAlias(_) => ty::AssocKind::Type, }; - let mut ancestor_impls = trait_def.ancestors(tcx, impl_id) + let mut ancestor_impls = trait_def + .ancestors(tcx, impl_id) .skip(1) .filter_map(|parent| { if parent.is_from_trait() { @@ -1923,17 +1874,17 @@ fn check_specialization_validity<'tcx>( } // Parent impl contains item and makes it specializable. - Some(_) => { - Some(Ok(())) - } + Some(_) => Some(Ok(())), // Parent impl doesn't mention the item. This means it's inherited from the // grandparent. In that case, if parent is a `default impl`, inherited items use the // "defaultness" from the grandparent, else they are final. - None => if tcx.impl_is_default(parent_impl.def_id()) { - None - } else { - Some(Err(parent_impl.def_id())) + None => { + if tcx.impl_is_default(parent_impl.def_id()) { + None + } else { + Some(Err(parent_impl.def_id())) + } } } }); @@ -1959,7 +1910,9 @@ fn check_impl_items_against_trait<'tcx>( // If the trait reference itself is erroneous (so the compilation is going // to fail), skip checking the items here -- the `impl_item` table in `tcx` // isn't populated for such impls. 
- if impl_trait_ref.references_error() { return; } + if impl_trait_ref.references_error() { + return; + } // Locate trait definition and items let trait_def = tcx.trait_def(impl_trait_ref.def_id); @@ -1970,15 +1923,17 @@ fn check_impl_items_against_trait<'tcx>( // Check existing impl methods to see if they are both present in trait // and compatible with trait signature for impl_item in impl_items() { - let ty_impl_item = tcx.associated_item( - tcx.hir().local_def_id(impl_item.hir_id)); - let ty_trait_item = tcx.associated_items(impl_trait_ref.def_id) - .find(|ac| Namespace::from(&impl_item.kind) == Namespace::from(ac.kind) && - tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id)) + let ty_impl_item = tcx.associated_item(tcx.hir().local_def_id(impl_item.hir_id)); + let ty_trait_item = tcx + .associated_items(impl_trait_ref.def_id) + .find(|ac| { + Namespace::from(&impl_item.kind) == Namespace::from(ac.kind) + && tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id) + }) .or_else(|| { // Not compatible, but needed for the error message tcx.associated_items(impl_trait_ref.def_id) - .find(|ac| tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id)) + .find(|ac| tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id)) }); // Check that impl definition matches trait definition @@ -1987,50 +1942,61 @@ fn check_impl_items_against_trait<'tcx>( hir::ImplItemKind::Const(..) => { // Find associated const definition. if ty_trait_item.kind == ty::AssocKind::Const { - compare_const_impl(tcx, - &ty_impl_item, - impl_item.span, - &ty_trait_item, - impl_trait_ref); + compare_const_impl( + tcx, + &ty_impl_item, + impl_item.span, + &ty_trait_item, + impl_trait_ref, + ); } else { - let mut err = struct_span_err!(tcx.sess, impl_item.span, E0323, - "item `{}` is an associated const, \ + let mut err = struct_span_err!( + tcx.sess, + impl_item.span, + E0323, + "item `{}` is an associated const, \ which doesn't match its trait `{}`", - ty_impl_item.ident, - impl_trait_ref.print_only_trait_path()); - err.span_label(impl_item.span, "does not match trait"); - // We can only get the spans from local trait definition - // Same for E0324 and E0325 - if let Some(trait_span) = tcx.hir().span_if_local(ty_trait_item.def_id) { + ty_impl_item.ident, + impl_trait_ref.print_only_trait_path() + ); + err.span_label(impl_item.span, "does not match trait"); + // We can only get the spans from local trait definition + // Same for E0324 and E0325 + if let Some(trait_span) = tcx.hir().span_if_local(ty_trait_item.def_id) { err.span_label(trait_span, "item in trait"); - } - err.emit() + } + err.emit() } } hir::ImplItemKind::Method(..) 
=> { let opt_trait_span = tcx.hir().span_if_local(ty_trait_item.def_id); if ty_trait_item.kind == ty::AssocKind::Method { - compare_impl_method(tcx, - &ty_impl_item, - impl_item.span, - &ty_trait_item, - impl_trait_ref, - opt_trait_span); + compare_impl_method( + tcx, + &ty_impl_item, + impl_item.span, + &ty_trait_item, + impl_trait_ref, + opt_trait_span, + ); } else { - let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324, + let mut err = struct_span_err!( + tcx.sess, + impl_item.span, + E0324, "item `{}` is an associated method, \ which doesn't match its trait `{}`", ty_impl_item.ident, - impl_trait_ref.print_only_trait_path()); - err.span_label(impl_item.span, "does not match trait"); - if let Some(trait_span) = opt_trait_span { + impl_trait_ref.print_only_trait_path() + ); + err.span_label(impl_item.span, "does not match trait"); + if let Some(trait_span) = opt_trait_span { err.span_label(trait_span, "item in trait"); - } - err.emit() + } + err.emit() } } - hir::ImplItemKind::OpaqueTy(..) | - hir::ImplItemKind::TyAlias(_) => { + hir::ImplItemKind::OpaqueTy(..) | hir::ImplItemKind::TyAlias(_) => { let opt_trait_span = tcx.hir().span_if_local(ty_trait_item.def_id); if ty_trait_item.kind == ty::AssocKind::Type { if ty_trait_item.defaultness.has_value() { @@ -2045,16 +2011,20 @@ fn check_impl_items_against_trait<'tcx>( opt_trait_span, ) } else { - let mut err = struct_span_err!(tcx.sess, impl_item.span, E0325, + let mut err = struct_span_err!( + tcx.sess, + impl_item.span, + E0325, "item `{}` is an associated type, \ which doesn't match its trait `{}`", ty_impl_item.ident, - impl_trait_ref.print_only_trait_path()); - err.span_label(impl_item.span, "does not match trait"); - if let Some(trait_span) = opt_trait_span { + impl_trait_ref.print_only_trait_path() + ); + err.span_label(impl_item.span, "does not match trait"); + if let Some(trait_span) = opt_trait_span { err.span_label(trait_span, "item in trait"); - } - err.emit() + } + err.emit() } } } @@ -2068,7 +2038,8 @@ fn check_impl_items_against_trait<'tcx>( let mut invalidated_items = Vec::new(); let associated_type_overridden = overridden_associated_type.is_some(); for trait_item in tcx.associated_items(impl_trait_ref.def_id) { - let is_implemented = trait_def.ancestors(tcx, impl_id) + let is_implemented = trait_def + .ancestors(tcx, impl_id) .leaf_def(tcx, trait_item.ident, trait_item.kind) .map(|node_item| !node_item.node.is_from_trait()) .unwrap_or(false); @@ -2094,9 +2065,7 @@ fn check_impl_items_against_trait<'tcx>( E0399, "the following trait items need to be reimplemented as `{}` was overridden: `{}`", invalidator.ident, - invalidated_items.iter() - .map(|name| name.to_string()) - .collect::<Vec<_>>().join("`, `") + invalidated_items.iter().map(|name| name.to_string()).collect::<Vec<_>>().join("`, `") ) } } @@ -2107,9 +2076,11 @@ fn missing_items_err( missing_items: &[ty::AssocItem], full_impl_span: Span, ) { - let missing_items_msg = missing_items.iter() + let missing_items_msg = missing_items + .iter() .map(|trait_item| trait_item.ident.to_string()) - .collect::<Vec<_>>().join("`, `"); + .collect::<Vec<_>>() + .join("`, `"); let mut err = struct_span_err!( tcx.sess, @@ -2147,38 +2118,33 @@ fn missing_items_err( /// Return placeholder code for the given function. 
fn fn_sig_suggestion(sig: &ty::FnSig<'_>, ident: Ident) -> String { - let args = sig.inputs() + let args = sig + .inputs() .iter() - .map(|ty| Some(match ty.kind { - ty::Param(param) if param.name == kw::SelfUpper => "self".to_string(), - ty::Ref(reg, ref_ty, mutability) => { - let reg = match &format!("{}", reg)[..] { - "'_" | "" => String::new(), - reg => format!("{} ", reg), - }; - match ref_ty.kind { - ty::Param(param) if param.name == kw::SelfUpper => { - format!("&{}{}self", reg, mutability.prefix_str()) + .map(|ty| { + Some(match ty.kind { + ty::Param(param) if param.name == kw::SelfUpper => "self".to_string(), + ty::Ref(reg, ref_ty, mutability) => { + let reg = match &format!("{}", reg)[..] { + "'_" | "" => String::new(), + reg => format!("{} ", reg), + }; + match ref_ty.kind { + ty::Param(param) if param.name == kw::SelfUpper => { + format!("&{}{}self", reg, mutability.prefix_str()) + } + _ => format!("_: {:?}", ty), } - _ => format!("_: {:?}", ty), } - } - _ => format!("_: {:?}", ty), - })) - .chain(std::iter::once(if sig.c_variadic { - Some("...".to_string()) - } else { - None - })) + _ => format!("_: {:?}", ty), + }) + }) + .chain(std::iter::once(if sig.c_variadic { Some("...".to_string()) } else { None })) .filter_map(|arg| arg) .collect::<Vec<String>>() .join(", "); let output = sig.output(); - let output = if !output.is_unit() { - format!(" -> {:?}", output) - } else { - String::new() - }; + let output = if !output.is_unit() { format!(" -> {:?}", output) } else { String::new() }; let unsafety = sig.unsafety.prefix_str(); // FIXME: this is not entirely correct, as the lifetimes from borrowed params will @@ -2230,7 +2196,7 @@ fn check_representable(tcx: TyCtxt<'_>, sp: Span, item_def_id: DefId) -> bool { err.span_label(span, "recursive without indirection"); } err.emit(); - return false + return false; } Representability::Representable | Representability::ContainsRecursive => (), } @@ -2249,16 +2215,20 @@ pub fn check_simd(tcx: TyCtxt<'_>, sp: Span, def_id: DefId) { let e = fields[0].ty(tcx, substs); if !fields.iter().all(|f| f.ty(tcx, substs) == e) { struct_span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous") - .span_label(sp, "SIMD elements must have the same type") - .emit(); + .span_label(sp, "SIMD elements must have the same type") + .emit(); return; } match e.kind { ty::Param(_) => { /* struct<T>(T, T, T, T) is ok */ } _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ } _ => { - span_err!(tcx.sess, sp, E0077, - "SIMD vector element type should be machine type"); + span_err!( + tcx.sess, + sp, + E0077, + "SIMD vector element type should be machine type" + ); return; } } @@ -2275,21 +2245,33 @@ fn check_packed(tcx: TyCtxt<'_>, sp: Span, def_id: DefId) { if let Some(repr_pack) = repr.pack { if pack as u64 != repr_pack.bytes() { struct_span_err!( - tcx.sess, sp, E0634, + tcx.sess, + sp, + E0634, "type has conflicting packed representation hints" - ).emit(); + ) + .emit(); } } } } } if repr.align.is_some() { - struct_span_err!(tcx.sess, sp, E0587, - "type has conflicting packed and align representation hints").emit(); - } - else if check_packed_inner(tcx, def_id, &mut Vec::new()) { - struct_span_err!(tcx.sess, sp, E0588, - "packed type cannot transitively contain a `[repr(align)]` type").emit(); + struct_span_err!( + tcx.sess, + sp, + E0587, + "type has conflicting packed and align representation hints" + ) + .emit(); + } else if check_packed_inner(tcx, def_id, &mut Vec::new()) { + struct_span_err!( + tcx.sess, + sp, + E0588, + "packed type cannot 
transitively contain a `[repr(align)]` type" + ) + .emit(); } } } @@ -2324,13 +2306,12 @@ fn check_packed_inner(tcx: TyCtxt<'_>, def_id: DefId, stack: &mut Vec<DefId>) -> /// Emit an error when encountering more or less than one variant in a transparent enum. fn bad_variant_count<'tcx>(tcx: TyCtxt<'tcx>, adt: &'tcx ty::AdtDef, sp: Span, did: DefId) { - let variant_spans: Vec<_> = adt.variants.iter().map(|variant| { - tcx.hir().span_if_local(variant.def_id).unwrap() - }).collect(); - let msg = format!( - "needs exactly one variant, but has {}", - adt.variants.len(), - ); + let variant_spans: Vec<_> = adt + .variants + .iter() + .map(|variant| tcx.hir().span_if_local(variant.def_id).unwrap()) + .collect(); + let msg = format!("needs exactly one variant, but has {}", adt.variants.len(),); let mut err = struct_span_err!(tcx.sess, sp, E0731, "transparent enum {}", msg); err.span_label(sp, &msg); if let [start @ .., end] = &*variant_spans { @@ -2415,11 +2396,8 @@ fn check_transparent(tcx: TyCtxt<'_>, sp: Span, def_id: DefId) { (span, zst, align1) }); - let non_zst_fields = field_infos.clone().filter_map(|(span, zst, _align1)| if !zst { - Some(span) - } else { - None - }); + let non_zst_fields = + field_infos.clone().filter_map(|(span, zst, _align1)| if !zst { Some(span) } else { None }); let non_zst_count = non_zst_fields.clone().count(); if non_zst_count != 1 { bad_non_zero_sized_fields(tcx, adt, non_zst_count, non_zst_fields, sp); @@ -2432,7 +2410,9 @@ fn check_transparent(tcx: TyCtxt<'_>, sp: Span, def_id: DefId) { E0691, "zero-sized field in transparent {} has alignment larger than 1", adt.descr(), - ).span_label(span, "has alignment larger than 1").emit(); + ) + .span_label(span, "has alignment larger than 1") + .emit(); } } } @@ -2452,10 +2432,13 @@ pub fn check_enum<'tcx>( let attributes = tcx.get_attrs(def_id); if let Some(attr) = attr::find_by_name(&attributes, sym::repr) { struct_span_err!( - tcx.sess, attr.span, E0084, - "unsupported representation for zero-variant enum") - .span_label(sp, "zero-variant enum") - .emit(); + tcx.sess, + attr.span, + E0084, + "unsupported representation for zero-variant enum" + ) + .span_label(sp, "zero-variant enum") + .emit(); } } @@ -2479,11 +2462,10 @@ pub fn check_enum<'tcx>( } if tcx.adt_def(def_id).repr.int.is_none() && tcx.features().arbitrary_enum_discriminant { - let is_unit = - |var: &hir::Variant<'_>| match var.data { - hir::VariantData::Unit(..) => true, - _ => false - }; + let is_unit = |var: &hir::Variant<'_>| match var.data { + hir::VariantData::Unit(..) 
=> true, + _ => false, + }; let has_disr = |var: &hir::Variant<'_>| var.disr_expr.is_some(); let has_non_units = vs.iter().any(|var| !is_unit(var)); @@ -2491,8 +2473,8 @@ pub fn check_enum<'tcx>( let disr_non_unit = vs.iter().any(|var| !is_unit(&var) && has_disr(&var)); if disr_non_unit || (disr_units && has_non_units) { - let mut err = struct_span_err!(tcx.sess, sp, E0732, - "`#[repr(inttype)]` must be specified"); + let mut err = + struct_span_err!(tcx.sess, sp, E0732, "`#[repr(inttype)]` must be specified"); err.emit(); } } @@ -2506,17 +2488,22 @@ pub fn check_enum<'tcx>( let variant_i = tcx.hir().expect_variant(variant_i_hir_id); let i_span = match variant_i.disr_expr { Some(ref expr) => tcx.hir().span(expr.hir_id), - None => tcx.hir().span(variant_i_hir_id) + None => tcx.hir().span(variant_i_hir_id), }; let span = match v.disr_expr { Some(ref expr) => tcx.hir().span(expr.hir_id), - None => v.span + None => v.span, }; - struct_span_err!(tcx.sess, span, E0081, - "discriminant value `{}` already exists", disr_vals[i]) - .span_label(i_span, format!("first use of `{}`", disr_vals[i])) - .span_label(span , format!("enum already has `{}`", disr_vals[i])) - .emit(); + struct_span_err!( + tcx.sess, + span, + E0081, + "discriminant value `{}` already exists", + disr_vals[i] + ) + .span_label(i_span, format!("first use of `{}`", disr_vals[i])) + .span_label(span, format!("enum already has `{}`", disr_vals[i])) + .emit(); } disr_vals.push(discr); } @@ -2526,10 +2513,14 @@ pub fn check_enum<'tcx>( } fn report_unexpected_variant_res(tcx: TyCtxt<'_>, res: Res, span: Span, qpath: &QPath) { - span_err!(tcx.sess, span, E0533, - "expected unit struct, unit variant or constant, found {} `{}`", - res.descr(), - hir::print::to_string(tcx.hir(), |s| s.print_qpath(qpath, false))); + span_err!( + tcx.sess, + span, + E0533, + "expected unit struct, unit variant or constant, found {} `{}`", + res.descr(), + hir::print::to_string(tcx.hir(), |s| s.print_qpath(qpath, false)) + ); } impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { @@ -2550,28 +2541,25 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { let index = generics.param_def_id_to_index[&def_id]; ty::GenericPredicates { parent: None, - predicates: tcx.arena.alloc_from_iter( - self.param_env.caller_bounds.iter().filter_map(|&predicate| match predicate { + predicates: tcx.arena.alloc_from_iter(self.param_env.caller_bounds.iter().filter_map( + |&predicate| match predicate { ty::Predicate::Trait(ref data) - if data.skip_binder().self_ty().is_param(index) => { + if data.skip_binder().self_ty().is_param(index) => + { // HACK(eddyb) should get the original `Span`. 
let span = tcx.def_span(def_id); Some((predicate, span)) } - _ => None - }), - ), + _ => None, + }, + )), } } - fn re_infer( - &self, - def: Option<&ty::GenericParamDef>, - span: Span, - ) -> Option<ty::Region<'tcx>> { + fn re_infer(&self, def: Option<&ty::GenericParamDef>, span: Span) -> Option<ty::Region<'tcx>> { let v = match def { Some(def) => infer::EarlyBoundRegion(span, def.name), - None => infer::MiscVariable(span) + None => infer::MiscVariable(span), }; Some(self.next_region_var(v)) } @@ -2602,24 +2590,24 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { } unreachable!() } else { - self.next_const_var(ty, ConstVariableOrigin { - kind: ConstVariableOriginKind::ConstInference, - span, - }) + self.next_const_var( + ty, + ConstVariableOrigin { kind: ConstVariableOriginKind::ConstInference, span }, + ) } } - fn projected_ty_from_poly_trait_ref(&self, - span: Span, - item_def_id: DefId, - item_segment: &hir::PathSegment, - poly_trait_ref: ty::PolyTraitRef<'tcx>) - -> Ty<'tcx> - { + fn projected_ty_from_poly_trait_ref( + &self, + span: Span, + item_def_id: DefId, + item_segment: &hir::PathSegment, + poly_trait_ref: ty::PolyTraitRef<'tcx>, + ) -> Ty<'tcx> { let (trait_ref, _) = self.replace_bound_vars_with_fresh_vars( span, infer::LateBoundRegionConversionTime::AssocTypeProjection(item_def_id), - &poly_trait_ref + &poly_trait_ref, ); let item_substs = <dyn AstConv<'tcx>>::create_substs_for_associated_item( @@ -2696,8 +2684,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ret_coercion: None, ret_coercion_span: RefCell::new(None), yield_ty: None, - ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, - hir::CRATE_HIR_ID)), + ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, hir::CRATE_HIR_ID)), diverges: Cell::new(Diverges::Maybe), has_errors: Cell::new(false), enclosing_breakables: RefCell::new(EnclosingBreakables { @@ -2725,16 +2712,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // If span arose from a desugaring of `if` or `while`, then it is the condition itself, // which diverges, that we are about to lint on. This gives suboptimal diagnostics. // Instead, stop here so that the `if`- or `while`-expression's block is linted instead. - if !span.is_desugaring(DesugaringKind::CondTemporary) && - !span.is_desugaring(DesugaringKind::Async) && - !orig_span.is_desugaring(DesugaringKind::Await) + if !span.is_desugaring(DesugaringKind::CondTemporary) + && !span.is_desugaring(DesugaringKind::Async) + && !orig_span.is_desugaring(DesugaringKind::Await) { self.diverges.set(Diverges::WarnedAlways); debug!("warn_if_unreachable: id={:?} span={:?} kind={}", id, span, kind); let msg = format!("unreachable {}", kind); - self.tcx().struct_span_lint_hir(lint::builtin::UNREACHABLE_CODE, id, span, &msg) + self.tcx() + .struct_span_lint_hir(lint::builtin::UNREACHABLE_CODE, id, span, &msg) .span_label(span, &msg) .span_label( orig_span, @@ -2745,10 +2733,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - pub fn cause(&self, - span: Span, - code: ObligationCauseCode<'tcx>) - -> ObligationCause<'tcx> { + pub fn cause(&self, span: Span, code: ObligationCauseCode<'tcx>) -> ObligationCause<'tcx> { ObligationCause::new(span, self.body_id, code) } @@ -2771,7 +2756,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // If `ty` is a type variable, see whether we already know what it is. 
ty = self.resolve_vars_if_possible(&ty); - if !ty.has_infer_types() && !ty.has_infer_consts() { + if !ty.has_infer_types() && !ty.has_infer_consts() { debug!("resolve_vars_with_obligations: ty={:?}", ty); return ty; } @@ -2809,16 +2794,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } pub fn local_ty(&self, span: Span, nid: hir::HirId) -> LocalTy<'tcx> { - self.locals.borrow().get(&nid).cloned().unwrap_or_else(|| - span_bug!(span, "no type for local variable {}", - self.tcx.hir().node_to_string(nid)) - ) + self.locals.borrow().get(&nid).cloned().unwrap_or_else(|| { + span_bug!(span, "no type for local variable {}", self.tcx.hir().node_to_string(nid)) + }) } #[inline] pub fn write_ty(&self, id: hir::HirId, ty: Ty<'tcx>) { - debug!("write_ty({:?}, {:?}) in fcx {}", - id, self.resolve_vars_if_possible(&ty), self.tag()); + debug!( + "write_ty({:?}, {:?}) in fcx {}", + id, + self.resolve_vars_if_possible(&ty), + self.tag() + ); self.tables.borrow_mut().node_types_mut().insert(id, ty); if ty.references_error() { @@ -2835,9 +2823,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tables.borrow_mut().type_dependent_defs_mut().insert(hir_id, r); } - pub fn write_method_call(&self, - hir_id: hir::HirId, - method: MethodCallee<'tcx>) { + pub fn write_method_call(&self, hir_id: hir::HirId, method: MethodCallee<'tcx>) { debug!("write_method_call(hir_id={:?}, method={:?})", hir_id, method); self.write_resolution(hir_id, Ok((DefKind::Method, method.def_id))); self.write_substs(hir_id, method.substs); @@ -2886,10 +2872,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { pub fn write_substs(&self, node_id: hir::HirId, substs: SubstsRef<'tcx>) { if !substs.is_noop() { - debug!("write_substs({:?}, {:?}) in fcx {}", - node_id, - substs, - self.tag()); + debug!("write_substs({:?}, {:?}) in fcx {}", node_id, substs, self.tag()); self.tables.borrow_mut().node_substs_mut().insert(node_id, substs); } @@ -2912,16 +2895,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { debug!( "write_user_type_annotation_from_substs: hir_id={:?} def_id={:?} substs={:?} \ user_self_ty={:?} in fcx {}", - hir_id, def_id, substs, user_self_ty, self.tag(), + hir_id, + def_id, + substs, + user_self_ty, + self.tag(), ); if Self::can_contain_user_lifetime_bounds((substs, user_self_ty)) { - let canonicalized = self.infcx.canonicalize_user_type_annotation( - &UserType::TypeOf(def_id, UserSubsts { - substs, - user_self_ty, - }) - ); + let canonicalized = self.infcx.canonicalize_user_type_annotation(&UserType::TypeOf( + def_id, + UserSubsts { substs, user_self_ty }, + )); debug!("write_user_type_annotation_from_substs: canonicalized={:?}", canonicalized); self.write_user_type_annotation(hir_id, canonicalized); } @@ -2934,13 +2919,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) { debug!( "write_user_type_annotation: hir_id={:?} canonical_user_type_annotation={:?} tag={}", - hir_id, canonical_user_type_annotation, self.tag(), + hir_id, + canonical_user_type_annotation, + self.tag(), ); if !canonical_user_type_annotation.is_identity() { - self.tables.borrow_mut().user_provided_types_mut().insert( - hir_id, canonical_user_type_annotation - ); + self.tables + .borrow_mut() + .user_provided_types_mut() + .insert(hir_id, canonical_user_type_annotation); } else { debug!("write_user_type_annotation: skipping identity substs"); } @@ -2954,7 +2942,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } match self.tables.borrow_mut().adjustments_mut().entry(expr.hir_id) { - Entry::Vacant(entry) => { entry.insert(adj); }, + Entry::Vacant(entry) => { + entry.insert(adj); + } Entry::Occupied(mut entry) => { debug!(" 
- composing on top of {:?}", entry.get()); match (&entry.get()[..], &adj[..]) { @@ -2985,19 +2975,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Basically whenever we are converting from a type scheme into /// the fn body space, we always want to normalize associated /// types as well. This function combines the two. - fn instantiate_type_scheme<T>(&self, - span: Span, - substs: SubstsRef<'tcx>, - value: &T) - -> T - where T : TypeFoldable<'tcx> + fn instantiate_type_scheme<T>(&self, span: Span, substs: SubstsRef<'tcx>, value: &T) -> T + where + T: TypeFoldable<'tcx>, { let value = value.subst(self.tcx, substs); let result = self.normalize_associated_types_in(span, &value); - debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}", - value, - substs, - result); + debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}", value, substs, result); result } @@ -3015,10 +2999,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let result = self.normalize_associated_types_in(span, &result); debug!( "instantiate_bounds(bounds={:?}, substs={:?}) = {:?}, {:?}", - bounds, - substs, - result, - spans, + bounds, substs, result, spans, ); (result, spans) } @@ -3033,19 +3014,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { value_span: Span, ) -> T { let parent_def_id = self.tcx.hir().local_def_id(parent_id); - debug!("instantiate_opaque_types_from_value(parent_def_id={:?}, value={:?})", - parent_def_id, - value); + debug!( + "instantiate_opaque_types_from_value(parent_def_id={:?}, value={:?})", + parent_def_id, value + ); - let (value, opaque_type_map) = self.register_infer_ok_obligations( - self.instantiate_opaque_types( + let (value, opaque_type_map) = + self.register_infer_ok_obligations(self.instantiate_opaque_types( parent_def_id, self.body_id, self.param_env, value, value_span, - ) - ); + )); let mut opaque_types = self.opaque_types.borrow_mut(); let mut opaque_types_vars = self.opaque_types_vars.borrow_mut(); @@ -3058,31 +3039,31 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T - where T : TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { self.inh.normalize_associated_types_in(span, self.body_id, self.param_env, value) } - fn normalize_associated_types_in_as_infer_ok<T>(&self, span: Span, value: &T) - -> InferOk<'tcx, T> - where T : TypeFoldable<'tcx> + fn normalize_associated_types_in_as_infer_ok<T>( + &self, + span: Span, + value: &T, + ) -> InferOk<'tcx, T> + where + T: TypeFoldable<'tcx>, { - self.inh.partially_normalize_associated_types_in(span, - self.body_id, - self.param_env, - value) + self.inh.partially_normalize_associated_types_in(span, self.body_id, self.param_env, value) } - pub fn require_type_meets(&self, - ty: Ty<'tcx>, - span: Span, - code: traits::ObligationCauseCode<'tcx>, - def_id: DefId) - { - self.register_bound( - ty, - def_id, - traits::ObligationCause::new(span, self.body_id, code)); + pub fn require_type_meets( + &self, + ty: Ty<'tcx>, + span: Span, + code: traits::ObligationCauseCode<'tcx>, + def_id: DefId, + ) { + self.register_bound(ty, def_id, traits::ObligationCause::new(span, self.body_id, code)); } pub fn require_type_is_sized( @@ -3115,8 +3096,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { cause: traits::ObligationCause<'tcx>, ) { if !ty.references_error() { - self.fulfillment_cx.borrow_mut() - .register_bound(self, self.param_env, ty, def_id, cause); + self.fulfillment_cx.borrow_mut().register_bound( + self, + self.param_env, + ty, + def_id, + cause, + ); } } @@ -3157,7 +3143,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> 
{ // sufficiently enforced with erased regions. =) fn can_contain_user_lifetime_bounds<T>(t: T) -> bool where - T: TypeFoldable<'tcx> + T: TypeFoldable<'tcx>, { t.has_free_regions() || t.has_projections() || t.has_infer_types() } @@ -3167,9 +3153,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(&t) => t, None if self.is_tainted_by_errors() => self.tcx.types.err, None => { - bug!("no type for node {}: {} in fcx {}", - id, self.tcx.hir().node_to_string(id), - self.tag()); + bug!( + "no type for node {}: {} in fcx {}", + id, + self.tcx.hir().node_to_string(id), + self.tag() + ); } } } @@ -3184,9 +3173,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) { // WF obligations never themselves fail, so no real need to give a detailed cause: let cause = traits::ObligationCause::new(span, self.body_id, code); - self.register_predicate( - traits::Obligation::new(cause, self.param_env, ty::Predicate::WellFormed(ty)), - ); + self.register_predicate(traits::Obligation::new( + cause, + self.param_env, + ty::Predicate::WellFormed(ty), + )); } /// Registers obligations that all types appearing in `substs` are well-formed. @@ -3216,14 +3207,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a` /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally. - pub fn add_obligations_for_parameters(&self, - cause: traits::ObligationCause<'tcx>, - predicates: &ty::InstantiatedPredicates<'tcx>) - { + pub fn add_obligations_for_parameters( + &self, + cause: traits::ObligationCause<'tcx>, + predicates: &ty::InstantiatedPredicates<'tcx>, + ) { assert!(!predicates.has_escaping_bound_vars()); - debug!("add_obligations_for_parameters(predicates={:?})", - predicates); + debug!("add_obligations_for_parameters(predicates={:?})", predicates); for obligation in traits::predicates_for_generics(cause, self.param_env, predicates) { self.register_predicate(obligation); @@ -3271,7 +3262,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // The return value indicates whether fallback has occurred. fn fallback_if_possible(&self, ty: Ty<'tcx>, mode: FallbackMode) -> bool { use rustc::ty::error::UnconstrainedNumeric::Neither; - use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat}; + use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedFloat, UnconstrainedInt}; assert!(ty.is_ty_infer()); let fallback = match self.type_is_unconstrained_numeric(ty) { @@ -3316,8 +3307,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // variable fall back to the opaque type itself. if let FallbackMode::All = mode { if let Some(opaque_ty) = self.opaque_types_vars.borrow().get(ty) { - debug!("fallback_if_possible: falling back opaque type var {:?} to {:?}", - ty, opaque_ty); + debug!( + "fallback_if_possible: falling back opaque type var {:?} to {:?}", + ty, opaque_ty + ); *opaque_ty } else { return false; @@ -3325,7 +3318,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else { return false; } - }, + } }; debug!("fallback_if_possible: defaulting `{:?}` to `{:?}`", ty, fallback); self.demand_eqtype(syntax_pos::DUMMY_SP, ty, fallback); @@ -3356,10 +3349,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// returns a type of `&T`, but the actual type we assign to the /// *expression* is `T`. So this function just peels off the return /// type by one layer to yield `T`. 
- fn make_overloaded_place_return_type(&self, - method: MethodCallee<'tcx>) - -> ty::TypeAndMut<'tcx> - { + fn make_overloaded_place_return_type( + &self, + method: MethodCallee<'tcx>, + ) -> ty::TypeAndMut<'tcx> { // extract method return type, which will be &T; let ret_ty = method.sig.output(); @@ -3402,12 +3395,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { index_ty: Ty<'tcx>, ) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> { let adjusted_ty = autoderef.unambiguous_final_ty(self); - debug!("try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \ + debug!( + "try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \ index_ty={:?})", - expr, - base_expr, - adjusted_ty, - index_ty); + expr, base_expr, adjusted_ty, index_ty + ); for &unsize in &[false, true] { let mut self_ty = adjusted_ty; @@ -3428,7 +3420,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { span: base_expr.span, }); let method = self.try_overloaded_place_op( - expr.span, self_ty, &[input_ty], needs, PlaceOp::Index); + expr.span, + self_ty, + &[input_ty], + needs, + PlaceOp::Index, + ); let result = method.map(|ok| { debug!("try_index_step: success, using overloaded indexing"); @@ -3444,20 +3441,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // See the documentation of AllowTwoPhase for why that's // not the case today. allow_two_phase_borrow: AllowTwoPhase::No, - } + }, }; adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)), - target: self.tcx.mk_ref(region, ty::TypeAndMut { - mutbl: r_mutbl, - ty: adjusted_ty - }) + target: self + .tcx + .mk_ref(region, ty::TypeAndMut { mutbl: r_mutbl, ty: adjusted_ty }), }); } if unsize { adjustments.push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), - target: method.sig.inputs()[0] + target: method.sig.inputs()[0], }); } self.apply_adjustments(base_expr, adjustments); @@ -3483,19 +3479,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { (tr, ast::Ident::with_dummy_span(name)) } - fn try_overloaded_place_op(&self, - span: Span, - base_ty: Ty<'tcx>, - arg_tys: &[Ty<'tcx>], - needs: Needs, - op: PlaceOp) - -> Option<InferOk<'tcx, MethodCallee<'tcx>>> - { - debug!("try_overloaded_place_op({:?},{:?},{:?},{:?})", - span, - base_ty, - needs, - op); + fn try_overloaded_place_op( + &self, + span: Span, + base_ty: Ty<'tcx>, + arg_tys: &[Ty<'tcx>], + needs: Needs, + op: PlaceOp, + ) -> Option<InferOk<'tcx, MethodCallee<'tcx>>> { + debug!("try_overloaded_place_op({:?},{:?},{:?},{:?})", span, base_ty, needs, op); // Try Mut first, if needed. let (mut_tr, mut_op) = self.resolve_place_op(op, true); @@ -3527,12 +3519,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { tuple_arguments: TupleArgumentsFlag, expected: Expectation<'tcx>, ) -> Ty<'tcx> { - let has_error = match method { - Ok(method) => { - method.substs.references_error() || method.sig.references_error() - } - Err(_) => true + Ok(method) => method.substs.references_error() || method.sig.references_error(), + Err(_) => true, }; if has_error { let err_inputs = self.err_args(args_no_rcvr.len()); @@ -3561,7 +3550,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { sp, expected, method.sig.output(), - &method.sig.inputs()[1..] 
+ &method.sig.inputs()[1..], ); self.check_argument_types( sp, @@ -3594,7 +3583,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { debug!("self_type_matches_expected_vid - found_vid={:?}", found_vid); expected_vid == found_vid } - _ => false + _ => false, } } @@ -3602,24 +3591,27 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &'b self, self_ty: ty::TyVid, ) -> impl Iterator<Item = (ty::PolyTraitRef<'tcx>, traits::PredicateObligation<'tcx>)> - + Captures<'tcx> - + 'b { + + Captures<'tcx> + + 'b { // FIXME: consider using `sub_root_var` here so we // can see through subtyping. let ty_var_root = self.root_var(self_ty); - debug!("obligations_for_self_ty: self_ty={:?} ty_var_root={:?} pending_obligations={:?}", - self_ty, ty_var_root, - self.fulfillment_cx.borrow().pending_obligations()); + debug!( + "obligations_for_self_ty: self_ty={:?} ty_var_root={:?} pending_obligations={:?}", + self_ty, + ty_var_root, + self.fulfillment_cx.borrow().pending_obligations() + ); self.fulfillment_cx .borrow() .pending_obligations() .into_iter() .filter_map(move |obligation| match obligation.predicate { - ty::Predicate::Projection(ref data) => - Some((data.to_poly_trait_ref(self.tcx), obligation)), - ty::Predicate::Trait(ref data) => - Some((data.to_poly_trait_ref(), obligation)), + ty::Predicate::Projection(ref data) => { + Some((data.to_poly_trait_ref(self.tcx), obligation)) + } + ty::Predicate::Trait(ref data) => Some((data.to_poly_trait_ref(), obligation)), ty::Predicate::Subtype(..) => None, ty::Predicate::RegionOutlives(..) => None, ty::Predicate::TypeOutlives(..) => None, @@ -3635,13 +3627,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // code is looking for a self type of a unresolved // inference variable. ty::Predicate::ClosureKind(..) => None, - }).filter(move |(tr, _)| self.self_type_matches_expected_vid(*tr, ty_var_root)) + }) + .filter(move |(tr, _)| self.self_type_matches_expected_vid(*tr, ty_var_root)) } fn type_var_is_sized(&self, self_ty: ty::TyVid) -> bool { - self.obligations_for_self_ty(self_ty).any(|(tr, _)| { - Some(tr.def_id()) == self.tcx.lang_items().sized_trait() - }) + self.obligations_for_self_ty(self_ty) + .any(|(tr, _)| Some(tr.def_id()) == self.tcx.lang_items().sized_trait()) } /// Generic function that factors out common logic from function calls, @@ -3660,11 +3652,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let tcx = self.tcx; // Grab the argument types, supplying fresh type variables // if the wrong number of arguments were supplied - let supplied_arg_count = if tuple_arguments == DontTupleArguments { - args.len() - } else { - 1 - }; + let supplied_arg_count = if tuple_arguments == DontTupleArguments { args.len() } else { 1 }; // All the input types from the fn signature must outlive the call // so as to validate implied bounds. 
@@ -3679,13 +3667,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { error_code: &str, c_variadic: bool, sugg_unit: bool| { - let mut err = tcx.sess.struct_span_err_with_code(sp, - &format!("this function takes {}{} but {} {} supplied", + let mut err = tcx.sess.struct_span_err_with_code( + sp, + &format!( + "this function takes {}{} but {} {} supplied", if c_variadic { "at least " } else { "" }, potentially_plural_count(expected_count, "parameter"), potentially_plural_count(arg_count, "parameter"), - if arg_count == 1 {"was"} else {"were"}), - DiagnosticId::Error(error_code.to_owned())); + if arg_count == 1 { "was" } else { "were" } + ), + DiagnosticId::Error(error_code.to_owned()), + ); if let Some(def_s) = def_span.map(|sp| tcx.sess.source_map().def_span(sp)) { err.span_label(def_s, "defined here"); @@ -3698,11 +3690,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { sugg_span, "expected the unit value `()`; create it with empty parentheses", String::from("()"), - Applicability::MachineApplicable); + Applicability::MachineApplicable, + ); } else { - err.span_label(sp, format!("expected {}{}", - if c_variadic { "at least " } else { "" }, - potentially_plural_count(expected_count, "parameter"))); + err.span_label( + sp, + format!( + "expected {}{}", + if c_variadic { "at least " } else { "" }, + potentially_plural_count(expected_count, "parameter") + ), + ); } err.emit(); }; @@ -3728,9 +3726,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { arg_types.iter().map(|k| k.expect_ty()).collect() } _ => { - span_err!(tcx.sess, sp, E0059, + span_err!( + tcx.sess, + sp, + E0059, "cannot use call notation; the first type parameter \ - for the function trait is neither a tuple nor unit"); + for the function trait is neither a tuple nor unit" + ); expected_arg_tys = vec![]; self.err_args(args.len()) } @@ -3760,15 +3762,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.err_args(supplied_arg_count) }; - debug!("check_argument_types: formal_tys={:?}", - formal_tys.iter().map(|t| self.ty_to_string(*t)).collect::<Vec<String>>()); + debug!( + "check_argument_types: formal_tys={:?}", + formal_tys.iter().map(|t| self.ty_to_string(*t)).collect::<Vec<String>>() + ); // If there is no expectation, expect formal_tys. - let expected_arg_tys = if !expected_arg_tys.is_empty() { - expected_arg_tys - } else { - formal_tys.clone() - }; + let expected_arg_tys = + if !expected_arg_tys.is_empty() { expected_arg_tys } else { formal_tys.clone() }; let mut final_arg_types: Vec<(usize, Ty<'_>, Ty<'_>)> = vec![]; @@ -3815,7 +3816,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let is_closure = match arg.kind { ExprKind::Closure(..) => true, - _ => false + _ => false, }; if is_closure != check_closures { @@ -3850,7 +3851,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // arguments which we skipped above. if c_variadic { fn variadic_error<'tcx>(s: &Session, span: Span, t: Ty<'tcx>, cast_ty: &str) { - use crate::structured_errors::{VariadicError, StructuredDiagnostic}; + use crate::structured_errors::{StructuredDiagnostic, VariadicError}; VariadicError::new(s, span, t, cast_ty).diagnostic().emit(); } @@ -3901,7 +3902,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // We *do not* do this for desugared call spans to keep good diagnostics when involving // the `?` operator. if call_sp.desugaring_kind().is_some() { - return + return; } for error in errors { @@ -3909,13 +3910,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Otherwise, it means that the cause is somewhere else and we should not change // anything because we can break the correct span. 
if !call_sp.contains(error.obligation.cause.span) { - continue + continue; } if let ty::Predicate::Trait(predicate) = error.obligation.predicate { // Collect the argument position for all arguments that could have caused this // `FulfillmentError`. - let mut referenced_in = final_arg_types.iter() + let mut referenced_in = final_arg_types + .iter() .map(|(i, checked_ty, _)| (i, checked_ty)) .chain(final_arg_types.iter().map(|(i, _, coerced_ty)| (i, coerced_ty))) .flat_map(|(i, ty)| { @@ -3958,14 +3960,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let ty::Predicate::Trait(predicate) = error.obligation.predicate { // If any of the type arguments in this path segment caused the // `FullfillmentError`, point at its span (#61860). - for arg in path.segments.iter() + for arg in path + .segments + .iter() .filter_map(|seg| seg.args.as_ref()) .flat_map(|a| a.args.iter()) { if let hir::GenericArg::Type(hir_ty) = &arg { - if let hir::TyKind::Path( - hir::QPath::TypeRelative(..), - ) = &hir_ty.kind { + if let hir::TyKind::Path(hir::QPath::TypeRelative(..)) = + &hir_ty.kind + { // Avoid ICE with associated types. As this is best // effort only, it's ok to ignore the case. It // would trigger in `is_send::<T::AssocType>();` @@ -3987,42 +3991,33 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } // AST fragment checking - fn check_lit(&self, - lit: &hir::Lit, - expected: Expectation<'tcx>) - -> Ty<'tcx> - { + fn check_lit(&self, lit: &hir::Lit, expected: Expectation<'tcx>) -> Ty<'tcx> { let tcx = self.tcx; match lit.node { ast::LitKind::Str(..) => tcx.mk_static_str(), ast::LitKind::ByteStr(ref v) => { - tcx.mk_imm_ref(tcx.lifetimes.re_static, - tcx.mk_array(tcx.types.u8, v.len() as u64)) + tcx.mk_imm_ref(tcx.lifetimes.re_static, tcx.mk_array(tcx.types.u8, v.len() as u64)) } ast::LitKind::Byte(_) => tcx.types.u8, ast::LitKind::Char(_) => tcx.types.char, ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => tcx.mk_mach_int(t), ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => tcx.mk_mach_uint(t), ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => { - let opt_ty = expected.to_option(self).and_then(|ty| { - match ty.kind { - ty::Int(_) | ty::Uint(_) => Some(ty), - ty::Char => Some(tcx.types.u8), - ty::RawPtr(..) => Some(tcx.types.usize), - ty::FnDef(..) | ty::FnPtr(_) => Some(tcx.types.usize), - _ => None - } + let opt_ty = expected.to_option(self).and_then(|ty| match ty.kind { + ty::Int(_) | ty::Uint(_) => Some(ty), + ty::Char => Some(tcx.types.u8), + ty::RawPtr(..) => Some(tcx.types.usize), + ty::FnDef(..) | ty::FnPtr(_) => Some(tcx.types.usize), + _ => None, }); opt_ty.unwrap_or_else(|| self.next_int_var()) } ast::LitKind::Float(_, ast::LitFloatType::Suffixed(t)) => tcx.mk_mach_float(t), ast::LitKind::Float(_, ast::LitFloatType::Unsuffixed) => { - let opt_ty = expected.to_option(self).and_then(|ty| { - match ty.kind { - ty::Float(_) => Some(ty), - _ => None - } + let opt_ty = expected.to_option(self).and_then(|ty| match ty.kind { + ty::Float(_) => Some(ty), + _ => None, }); opt_ty.unwrap_or_else(|| self.next_float_var()) } @@ -4035,10 +4030,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>` // would return `($0, $1)` where `$0` and `$1` are freshly instantiated type // variables. 
- pub fn impl_self_ty(&self, - span: Span, // (potential) receiver for this impl - did: DefId) - -> TypeAndSubsts<'tcx> { + pub fn impl_self_ty( + &self, + span: Span, // (potential) receiver for this impl + did: DefId, + ) -> TypeAndSubsts<'tcx> { let ity = self.tcx.type_of(did); debug!("impl_self_ty: ity={:?}", ity); @@ -4050,63 +4046,67 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Unifies the output type with the expected type early, for more coercions /// and forward type information on the input expressions. - fn expected_inputs_for_expected_output(&self, - call_span: Span, - expected_ret: Expectation<'tcx>, - formal_ret: Ty<'tcx>, - formal_args: &[Ty<'tcx>]) - -> Vec<Ty<'tcx>> { + fn expected_inputs_for_expected_output( + &self, + call_span: Span, + expected_ret: Expectation<'tcx>, + formal_ret: Ty<'tcx>, + formal_args: &[Ty<'tcx>], + ) -> Vec<Ty<'tcx>> { let formal_ret = self.resolve_vars_with_obligations(formal_ret); let ret_ty = match expected_ret.only_has_type(self) { Some(ret) => ret, - None => return Vec::new() + None => return Vec::new(), }; - let expect_args = self.fudge_inference_if_ok(|| { - // Attempt to apply a subtyping relationship between the formal - // return type (likely containing type variables if the function - // is polymorphic) and the expected return type. - // No argument expectations are produced if unification fails. - let origin = self.misc(call_span); - let ures = self.at(&origin, self.param_env).sup(ret_ty, &formal_ret); - - // FIXME(#27336) can't use ? here, Try::from_error doesn't default - // to identity so the resulting type is not constrained. - match ures { - Ok(ok) => { - // Process any obligations locally as much as - // we can. We don't care if some things turn - // out unconstrained or ambiguous, as we're - // just trying to get hints here. - self.save_and_restore_in_snapshot_flag(|_| { - let mut fulfill = TraitEngine::new(self.tcx); - for obligation in ok.obligations { - fulfill.register_predicate_obligation(self, obligation); - } - fulfill.select_where_possible(self) - }).map_err(|_| ())?; + let expect_args = self + .fudge_inference_if_ok(|| { + // Attempt to apply a subtyping relationship between the formal + // return type (likely containing type variables if the function + // is polymorphic) and the expected return type. + // No argument expectations are produced if unification fails. + let origin = self.misc(call_span); + let ures = self.at(&origin, self.param_env).sup(ret_ty, &formal_ret); + + // FIXME(#27336) can't use ? here, Try::from_error doesn't default + // to identity so the resulting type is not constrained. + match ures { + Ok(ok) => { + // Process any obligations locally as much as + // we can. We don't care if some things turn + // out unconstrained or ambiguous, as we're + // just trying to get hints here. + self.save_and_restore_in_snapshot_flag(|_| { + let mut fulfill = TraitEngine::new(self.tcx); + for obligation in ok.obligations { + fulfill.register_predicate_obligation(self, obligation); + } + fulfill.select_where_possible(self) + }) + .map_err(|_| ())?; + } + Err(_) => return Err(()), } - Err(_) => return Err(()), - } - // Record all the argument types, with the substitutions - // produced from the above subtyping unification. 
- Ok(formal_args.iter().map(|ty| { - self.resolve_vars_if_possible(ty) - }).collect()) - }).unwrap_or_default(); - debug!("expected_inputs_for_expected_output(formal={:?} -> {:?}, expected={:?} -> {:?})", - formal_args, formal_ret, - expect_args, expected_ret); + // Record all the argument types, with the substitutions + // produced from the above subtyping unification. + Ok(formal_args.iter().map(|ty| self.resolve_vars_if_possible(ty)).collect()) + }) + .unwrap_or_default(); + debug!( + "expected_inputs_for_expected_output(formal={:?} -> {:?}, expected={:?} -> {:?})", + formal_args, formal_ret, expect_args, expected_ret + ); expect_args } - pub fn check_struct_path(&self, - qpath: &QPath, - hir_id: hir::HirId) - -> Option<(&'tcx ty::VariantDef, Ty<'tcx>)> { + pub fn check_struct_path( + &self, + qpath: &QPath, + hir_id: hir::HirId, + ) -> Option<(&'tcx ty::VariantDef, Ty<'tcx>)> { let path_span = match *qpath { QPath::Resolved(_, ref path) => path.span, - QPath::TypeRelative(ref qself, _) => qself.span + QPath::TypeRelative(ref qself, _) => qself.span, }; let (def, ty) = self.finish_resolving_struct_path(qpath, path_span, hir_id); let variant = match def { @@ -4114,27 +4114,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.set_tainted_by_errors(); return None; } - Res::Def(DefKind::Variant, _) => { - match ty.kind { - ty::Adt(adt, substs) => { - Some((adt.variant_of_res(def), adt.did, substs)) - } - _ => bug!("unexpected type: {:?}", ty) - } - } + Res::Def(DefKind::Variant, _) => match ty.kind { + ty::Adt(adt, substs) => Some((adt.variant_of_res(def), adt.did, substs)), + _ => bug!("unexpected type: {:?}", ty), + }, Res::Def(DefKind::Struct, _) | Res::Def(DefKind::Union, _) | Res::Def(DefKind::TyAlias, _) | Res::Def(DefKind::AssocTy, _) - | Res::SelfTy(..) => { - match ty.kind { - ty::Adt(adt, substs) if !adt.is_enum() => { - Some((adt.non_enum_variant(), adt.did, substs)) - } - _ => None, + | Res::SelfTy(..) => match ty.kind { + ty::Adt(adt, substs) if !adt.is_enum() => { + Some((adt.non_enum_variant(), adt.did, substs)) } - } - _ => bug!("unexpected definition: {:?}", def) + _ => None, + }, + _ => bug!("unexpected definition: {:?}", def), }; if let Some((variant, did, substs)) = variant { @@ -4143,32 +4137,33 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Check bounds on type arguments used in the path. let (bounds, _) = self.instantiate_bounds(path_span, did, substs); - let cause = traits::ObligationCause::new( - path_span, - self.body_id, - traits::ItemObligation(did), - ); + let cause = + traits::ObligationCause::new(path_span, self.body_id, traits::ItemObligation(did)); self.add_obligations_for_parameters(cause, &bounds); Some((variant, ty)) } else { - struct_span_err!(self.tcx.sess, path_span, E0071, - "expected struct, variant or union type, found {}", - ty.sort_string(self.tcx)) - .span_label(path_span, "not a struct") - .emit(); + struct_span_err!( + self.tcx.sess, + path_span, + E0071, + "expected struct, variant or union type, found {}", + ty.sort_string(self.tcx) + ) + .span_label(path_span, "not a struct") + .emit(); None } } // Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary. // The newly resolved definition is written into `type_dependent_defs`. 
- fn finish_resolving_struct_path(&self, - qpath: &QPath, - path_span: Span, - hir_id: hir::HirId) - -> (Res, Ty<'tcx>) - { + fn finish_resolving_struct_path( + &self, + qpath: &QPath, + path_span: Span, + hir_id: hir::HirId, + ) -> (Res, Ty<'tcx>) { match *qpath { QPath::Resolved(ref maybe_qself, ref path) => { let self_ty = maybe_qself.as_ref().map(|qself| self.to_ty(qself)); @@ -4183,15 +4178,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else { Res::Err }; - let result = AstConv::associated_path_to_ty( - self, - hir_id, - path_span, - ty, - res, - segment, - true, - ); + let result = + AstConv::associated_path_to_ty(self, hir_id, path_span, ty, res, segment, true); let ty = result.map(|(ty, _, _)| ty).unwrap_or(self.tcx().types.err); let result = result.map(|(_, kind, def_id)| (kind, def_id)); @@ -4205,28 +4193,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Resolves an associated value path into a base type and associated constant, or method /// resolution. The newly resolved definition is written into `type_dependent_defs`. - pub fn resolve_ty_and_res_ufcs<'b>(&self, - qpath: &'b QPath, - hir_id: hir::HirId, - span: Span) - -> (Res, Option<Ty<'tcx>>, &'b [hir::PathSegment]) - { + pub fn resolve_ty_and_res_ufcs<'b>( + &self, + qpath: &'b QPath, + hir_id: hir::HirId, + span: Span, + ) -> (Res, Option<Ty<'tcx>>, &'b [hir::PathSegment]) { debug!("resolve_ty_and_res_ufcs: qpath={:?} hir_id={:?} span={:?}", qpath, hir_id, span); let (ty, qself, item_segment) = match *qpath { QPath::Resolved(ref opt_qself, ref path) => { - return (path.res, - opt_qself.as_ref().map(|qself| self.to_ty(qself)), - &path.segments[..]); - } - QPath::TypeRelative(ref qself, ref segment) => { - (self.to_ty(qself), qself, segment) + return ( + path.res, + opt_qself.as_ref().map(|qself| self.to_ty(qself)), + &path.segments[..], + ); } + QPath::TypeRelative(ref qself, ref segment) => (self.to_ty(qself), qself, segment), }; if let Some(&cached_result) = self.tables.borrow().type_dependent_defs().get(hir_id) { // Return directly on cache hit. This is useful to avoid doubly reporting // errors with default match binding modes. See #44614. - let def = cached_result.map(|(kind, def_id)| Res::Def(kind, def_id)) - .unwrap_or(Res::Err); + let def = + cached_result.map(|(kind, def_id)| Res::Def(kind, def_id)).unwrap_or(Res::Err); return (def, Some(ty), slice::from_ref(&**item_segment)); } let item_name = item_segment.ident; @@ -4243,7 +4231,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { SelfSource::QPath(qself), error, None, - ).map(|mut e| e.emit()); + ) + .map(|mut e| e.emit()); } result }); @@ -4305,10 +4294,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Override the types everywhere with `types.err` to avoid knock down errors. self.write_ty(local.hir_id, ty); self.write_ty(local.pat.hir_id, ty); - let local_ty = LocalTy { - decl_ty, - revealed_ty: ty, - }; + let local_ty = LocalTy { decl_ty, revealed_ty: ty }; self.locals.borrow_mut().insert(local.hir_id, local_ty); self.locals.borrow_mut().insert(local.pat.hir_id, local_ty); } @@ -4381,24 +4367,26 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// ``` fn get_expr_coercion_span(&self, expr: &hir::Expr) -> syntax_pos::Span { if let hir::ExprKind::Match(_, arms, _) = &expr.kind { - let arm_spans: Vec<Span> = arms.iter().filter_map(|arm| { - self.in_progress_tables - .and_then(|tables| tables.borrow().node_type_opt(arm.body.hir_id)) - .and_then(|arm_ty| { - if arm_ty.is_never() { - None - } else { - Some(match &arm.body.kind { - // Point at the tail expression when possible. 
- hir::ExprKind::Block(block, _) => block.expr - .as_ref() - .map(|e| e.span) - .unwrap_or(block.span), - _ => arm.body.span, - }) - } - }) - }).collect(); + let arm_spans: Vec<Span> = arms + .iter() + .filter_map(|arm| { + self.in_progress_tables + .and_then(|tables| tables.borrow().node_type_opt(arm.body.hir_id)) + .and_then(|arm_ty| { + if arm_ty.is_never() { + None + } else { + Some(match &arm.body.kind { + // Point at the tail expression when possible. + hir::ExprKind::Block(block, _) => { + block.expr.as_ref().map(|e| e.span).unwrap_or(block.span) + } + _ => arm.body.span, + }) + } + }) + }) + .collect(); if arm_spans.len() == 1 { return arm_spans[0]; } @@ -4446,10 +4434,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; let prev_diverges = self.diverges.get(); - let ctxt = BreakableCtxt { - coerce: Some(coerce), - may_break: false, - }; + let ctxt = BreakableCtxt { coerce: Some(coerce), may_break: false }; let (ctxt, ()) = self.with_breakable_ctxt(blk.hir_id, ctxt, || { for s in &blk.stmts { @@ -4499,18 +4484,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } } - coerce.coerce_forced_unit(self, &self.misc(sp), &mut |err| { - if let Some(expected_ty) = expected.only_has_type(self) { - self.consider_hint_about_removing_semicolon(blk, expected_ty, err); - } - if let Some(fn_span) = fn_span { - err.span_label( - fn_span, - "implicitly returns `()` as its body has no tail or `return` \ + coerce.coerce_forced_unit( + self, + &self.misc(sp), + &mut |err| { + if let Some(expected_ty) = expected.only_has_type(self) { + self.consider_hint_about_removing_semicolon(blk, expected_ty, err); + } + if let Some(fn_span) = fn_span { + err.span_label( + fn_span, + "implicitly returns `()` as its body has no tail or `return` \ expression", - ); - } - }, false); + ); + } + }, + false, + ); } } }); @@ -4536,10 +4526,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn parent_item_span(&self, id: hir::HirId) -> Option<Span> { let node = self.tcx.hir().get(self.tcx.hir().get_parent_item(id)); match node { - Node::Item(&hir::Item { - kind: hir::ItemKind::Fn(_, _, body_id), .. - }) | - Node::ImplItem(&hir::ImplItem { + Node::Item(&hir::Item { kind: hir::ItemKind::Fn(_, _, body_id), .. }) + | Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Method(_, body_id), .. }) => { let body = self.tcx.hir().body(body_id); @@ -4561,19 +4549,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Given a function `Node`, return its `FnDecl` if it exists, or `None` otherwise. fn get_node_fn_decl(&self, node: Node<'tcx>) -> Option<(&'tcx hir::FnDecl, ast::Ident, bool)> { match node { - Node::Item(&hir::Item { - ident, kind: hir::ItemKind::Fn(ref sig, ..), .. - }) => { + Node::Item(&hir::Item { ident, kind: hir::ItemKind::Fn(ref sig, ..), .. }) => { // This is less than ideal, it will not suggest a return type span on any // method called `main`, regardless of whether it is actually the entry point, // but it will still present it as the reason for the expected type. Some((&sig.decl, ident, ident.name != sym::main)) } Node::TraitItem(&hir::TraitItem { - ident, kind: hir::TraitItemKind::Method(ref sig, ..), .. + ident, + kind: hir::TraitItemKind::Method(ref sig, ..), + .. }) => Some((&sig.decl, ident, true)), Node::ImplItem(&hir::ImplItem { - ident, kind: hir::ImplItemKind::Method(ref sig, ..), .. + ident, + kind: hir::ImplItemKind::Method(ref sig, ..), + .. 
}) => Some((&sig.decl, ident, false)), _ => None, } @@ -4608,8 +4598,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.suggest_missing_semicolon(err, expr, expected, cause_span); let mut pointing_at_return_type = false; if let Some((fn_decl, can_suggest)) = self.get_fn_decl(blk_id) { - pointing_at_return_type = self.suggest_missing_return_type( - err, &fn_decl, expected, found, can_suggest); + pointing_at_return_type = + self.suggest_missing_return_type(err, &fn_decl, expected, found, can_suggest); } pointing_at_return_type } @@ -4634,17 +4624,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // We don't use `closure_sig` to account for malformed closures like // `|_: [_; continue]| {}` and instead we don't suggest anything. let closure_sig_ty = substs.as_closure().sig_ty(def_id, self.tcx); - (def_id, match closure_sig_ty.kind { - ty::FnPtr(sig) => sig, - _ => return false, - }) + ( + def_id, + match closure_sig_ty.kind { + ty::FnPtr(sig) => sig, + _ => return false, + }, + ) } _ => return false, }; - let sig = self - .replace_bound_vars_with_fresh_vars(expr.span, infer::FnCall, &sig) - .0; + let sig = self.replace_bound_vars_with_fresh_vars(expr.span, infer::FnCall, &sig).0; let sig = self.normalize_associated_types_in(expr.span, &sig); if self.can_coerce(sig.output(), expected) { let (mut sugg_call, applicability) = if sig.inputs().is_empty() { @@ -4654,25 +4645,29 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; let mut msg = "call this function"; match hir.get_if_local(def_id) { - Some(Node::Item(hir::Item { - kind: ItemKind::Fn(.., body_id), - .. - })) | - Some(Node::ImplItem(hir::ImplItem { + Some(Node::Item(hir::Item { kind: ItemKind::Fn(.., body_id), .. })) + | Some(Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Method(_, body_id), .. - })) | - Some(Node::TraitItem(hir::TraitItem { + })) + | Some(Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Method(.., hir::TraitMethod::Provided(body_id)), .. })) => { let body = hir.body(*body_id); - sugg_call = body.params.iter() + sugg_call = body + .params + .iter() .map(|param| match &param.pat.kind { hir::PatKind::Binding(_, _, ident, None) - if ident.name != kw::SelfLower => ident.to_string(), + if ident.name != kw::SelfLower => + { + ident.to_string() + } _ => "_".to_string(), - }).collect::<Vec<_>>().join(", "); + }) + .collect::<Vec<_>>() + .join(", "); } Some(Node::Expr(hir::Expr { kind: ExprKind::Closure(_, _, body_id, closure_span, _), @@ -4685,12 +4680,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.span_label(*closure_span, "closure defined here"); msg = "call this closure"; let body = hir.body(*body_id); - sugg_call = body.params.iter() + sugg_call = body + .params + .iter() .map(|param| match &param.pat.kind { hir::PatKind::Binding(_, _, ident, None) - if ident.name != kw::SelfLower => ident.to_string(), + if ident.name != kw::SelfLower => + { + ident.to_string() + } _ => "_".to_string(), - }).collect::<Vec<_>>().join(", "); + }) + .collect::<Vec<_>>() + .join(", "); } Some(Node::Ctor(hir::VariantData::Tuple(fields, _))) => { sugg_call = fields.iter().map(|_| "_").collect::<Vec<_>>().join(", "); @@ -4707,23 +4709,35 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(Node::ForeignItem(hir::ForeignItem { kind: hir::ForeignItemKind::Fn(_, idents, _), ..
- })) => sugg_call = idents.iter() - .map(|ident| if ident.name != kw::SelfLower { - ident.to_string() - } else { - "_".to_string() - }).collect::<Vec<_>>() - .join(", "), + })) => { + sugg_call = idents + .iter() + .map(|ident| { + if ident.name != kw::SelfLower { + ident.to_string() + } else { + "_".to_string() + } + }) + .collect::<Vec<_>>() + .join(", ") + } Some(Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Method(.., hir::TraitMethod::Required(idents)), .. - })) => sugg_call = idents.iter() - .map(|ident| if ident.name != kw::SelfLower { - ident.to_string() - } else { - "_".to_string() - }).collect::<Vec<_>>() - .join(", "), + })) => { + sugg_call = idents + .iter() + .map(|ident| { + if ident.name != kw::SelfLower { + ident.to_string() + } else { + "_".to_string() + } + }) + .collect::<Vec<_>>() + .join(", ") + } _ => {} } if let Ok(code) = self.sess().source_map().span_to_snippet(expr.span) { @@ -4747,36 +4761,30 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { found: Ty<'tcx>, ) { if let Some((sp, msg, suggestion)) = self.check_ref(expr, found, expected) { - err.span_suggestion( - sp, - msg, - suggestion, - Applicability::MachineApplicable, - ); - } else if let (ty::FnDef(def_id, ..), true) = ( - &found.kind, - self.suggest_fn_call(err, expr, expected, found), - ) { + err.span_suggestion(sp, msg, suggestion, Applicability::MachineApplicable); + } else if let (ty::FnDef(def_id, ..), true) = + (&found.kind, self.suggest_fn_call(err, expr, expected, found)) + { if let Some(sp) = self.tcx.hir().span_if_local(*def_id) { let sp = self.sess().source_map().def_span(sp); err.span_label(sp, &format!("{} defined here", found)); } } else if !self.check_for_cast(err, expr, found, expected) { - let is_struct_pat_shorthand_field = self.is_hir_id_from_struct_pattern_shorthand_field( - expr.hir_id, - expr.span, - ); + let is_struct_pat_shorthand_field = + self.is_hir_id_from_struct_pattern_shorthand_field(expr.hir_id, expr.span); let methods = self.get_conversion_methods(expr.span, expected, found); if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) { - let mut suggestions = iter::repeat(&expr_text).zip(methods.iter()) + let mut suggestions = iter::repeat(&expr_text) + .zip(methods.iter()) .filter_map(|(receiver, method)| { let method_call = format!(".{}()", method.ident); if receiver.ends_with(&method_call) { - None // do not suggest code that is already there (#53348) + None // do not suggest code that is already there (#53348) } else { let method_call_list = [".to_vec()", ".to_string()"]; let sugg = if receiver.ends_with(".clone()") - && method_call_list.contains(&method_call.as_str()) { + && method_call_list.contains(&method_call.as_str()) + { let max_len = receiver.rfind(".").unwrap(); format!("{}{}", &receiver[..max_len], method_call) } else { @@ -4792,7 +4800,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { sugg }) } - }).peekable(); + }) + .peekable(); if suggestions.peek().is_some() { err.span_suggestions( expr.span, @@ -4832,14 +4841,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { format!("Box::new({})", snippet), Applicability::MachineApplicable, ); - err.note("for more on the distinction between the stack and the \ + err.note( + "for more on the distinction between the stack and the \ heap, read https://doc.rust-lang.org/book/ch15-01-box.html, \ https://doc.rust-lang.org/rust-by-example/std/box.html, and \ - https://doc.rust-lang.org/std/boxed/index.html"); + https://doc.rust-lang.org/std/boxed/index.html", + ); } } - /// A common error is to forget to add a semicolon at the end 
of a block, e.g., /// /// ``` @@ -4862,17 +4872,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // `BlockTailExpression` only relevant if the tail expr would be // useful on its own. match expression.kind { - ExprKind::Call(..) | - ExprKind::MethodCall(..) | - ExprKind::Loop(..) | - ExprKind::Match(..) | - ExprKind::Block(..) => { + ExprKind::Call(..) + | ExprKind::MethodCall(..) + | ExprKind::Loop(..) + | ExprKind::Match(..) + | ExprKind::Block(..) => { let sp = self.tcx.sess.source_map().next_point(cause_span); err.span_suggestion( sp, "try adding a semicolon", ";".to_string(), - Applicability::MachineApplicable); + Applicability::MachineApplicable, + ); } _ => (), } @@ -4906,7 +4917,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { span, "try adding a return type", format!("-> {} ", self.resolve_vars_with_obligations(found)), - Applicability::MachineApplicable); + Applicability::MachineApplicable, + ); true } (&hir::FunctionRetTy::DefaultReturn(span), false, true, true) => { @@ -4929,8 +4941,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { debug!("suggest_missing_return_type: return type {:?}", ty); debug!("suggest_missing_return_type: expected type {:?}", ty); if ty.kind == expected.kind { - err.span_label(sp, format!("expected `{}` because of return type", - expected)); + err.span_label(sp, format!("expected `{}` because of return type", expected)); return true; } false @@ -4974,19 +4985,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // prove: `<T as Future>::Output == U` let future_trait = self.tcx.lang_items().future_trait().unwrap(); let item_def_id = self.tcx.associated_items(future_trait).next().unwrap().def_id; - let predicate = ty::Predicate::Projection(ty::Binder::bind(ty::ProjectionPredicate { - // `<T as Future>::Output` - projection_ty: ty::ProjectionTy { - // `T` - substs: self.tcx.mk_substs_trait( - found, - self.fresh_substs_for_item(sp, item_def_id) - ), - // `Future::Output` - item_def_id, - }, - ty: expected, - })); + let predicate = + ty::Predicate::Projection(ty::Binder::bind(ty::ProjectionPredicate { + // `<T as Future>::Output` + projection_ty: ty::ProjectionTy { + // `T` + substs: self.tcx.mk_substs_trait( + found, + self.fresh_substs_for_item(sp, item_def_id), + ), + // `Future::Output` + item_def_id, + }, + ty: expected, + })); let obligation = traits::Obligation::new(self.misc(sp), self.param_env, predicate); debug!("suggest_missing_await: trying obligation {:?}", obligation); if self.infcx.predicate_may_hold(&obligation) { @@ -5053,27 +5065,26 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Instantiates the given path, which must refer to an item with the given // number of type parameters and type. 
- pub fn instantiate_value_path(&self, - segments: &[hir::PathSegment], - self_ty: Option<Ty<'tcx>>, - res: Res, - span: Span, - hir_id: hir::HirId) - -> (Ty<'tcx>, Res) { + pub fn instantiate_value_path( + &self, + segments: &[hir::PathSegment], + self_ty: Option<Ty<'tcx>>, + res: Res, + span: Span, + hir_id: hir::HirId, + ) -> (Ty<'tcx>, Res) { debug!( "instantiate_value_path(segments={:?}, self_ty={:?}, res={:?}, hir_id={})", - segments, - self_ty, - res, - hir_id, + segments, self_ty, res, hir_id, ); let tcx = self.tcx; let path_segs = match res { Res::Local(_) | Res::SelfCtor(_) => vec![], - Res::Def(kind, def_id) => - AstConv::def_ids_for_value_path_segments(self, segments, self_ty, kind, def_id), + Res::Def(kind, def_id) => { + AstConv::def_ids_for_value_path_segments(self, segments, self_ty, kind, def_id) + } _ => bug!("instantiate_value_path on {:?}", res), }; @@ -5083,15 +5094,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Res::Def(DefKind::Ctor(CtorOf::Variant, _), _) => { if let Some(self_ty) = self_ty { let adt_def = self_ty.ty_adt_def().unwrap(); - user_self_ty = Some(UserSelfTy { - impl_def_id: adt_def.did, - self_ty, - }); + user_self_ty = Some(UserSelfTy { impl_def_id: adt_def.did, self_ty }); is_alias_variant_ctor = true; } } - Res::Def(DefKind::Method, def_id) - | Res::Def(DefKind::AssocConst, def_id) => { + Res::Def(DefKind::Method, def_id) | Res::Def(DefKind::AssocConst, def_id) => { let container = tcx.associated_item(def_id).container; debug!("instantiate_value_path: def_id={:?} container={:?}", def_id, container); match container { @@ -5106,10 +5113,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // `T` for posterity (see `UserSelfTy` for // details). let self_ty = self_ty.expect("UFCS sugared assoc missing Self"); - user_self_ty = Some(UserSelfTy { - impl_def_id, - self_ty, - }); + user_self_ty = Some(UserSelfTy { impl_def_id, self_ty }); } } } @@ -5124,13 +5128,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let generic_segs: FxHashSet<_> = path_segs.iter().map(|PathSeg(_, index)| index).collect(); let generics_has_err = AstConv::prohibit_generics( - self, segments.iter().enumerate().filter_map(|(index, seg)| { - if !generic_segs.contains(&index) || is_alias_variant_ctor { - Some(seg) - } else { - None - } - })); + self, + segments.iter().enumerate().filter_map(|(index, seg)| { + if !generic_segs.contains(&index) || is_alias_variant_ctor { + Some(seg) + } else { + None + } + }), + ); if let Res::Local(hid) = res { let ty = self.local_ty(span, hid).decl_ty; @@ -5160,11 +5166,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // parameter's value explicitly, so we have to do some error- // checking here. 
let suppress_errors = AstConv::check_generic_arg_count_for_call( - tcx, - span, - &generics, - &seg, - false, // `is_method_call` + tcx, span, &generics, &seg, false, // `is_method_call` ); if suppress_errors { infer_args_for_err.insert(index); @@ -5172,9 +5174,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - let has_self = path_segs.last().map(|PathSeg(def_id, _)| { - tcx.generics_of(*def_id).has_self - }).unwrap_or(false); + let has_self = path_segs + .last() + .map(|PathSeg(def_id, _)| tcx.generics_of(*def_id).has_self) + .unwrap_or(false); let (res, self_ctor_substs) = if let Res::SelfCtor(impl_def_id) = res { let ty = self.impl_self_ty(span, impl_def_id).ty; @@ -5190,15 +5193,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) } _ => { - let mut err = tcx.sess.struct_span_err(span, - "the `Self` constructor can only be used with tuple or unit structs"); + let mut err = tcx.sess.struct_span_err( + span, + "the `Self` constructor can only be used with tuple or unit structs", + ); if let Some(adt_def) = adt_def { match adt_def.adt_kind() { AdtKind::Enum => { err.help("did you mean to use one of the enum's variants?"); - }, - AdtKind::Struct | - AdtKind::Union => { + } + AdtKind::Struct | AdtKind::Union => { err.span_suggestion( span, "use curly brackets", @@ -5210,7 +5214,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } err.emit(); - return (tcx.types.err, res) + return (tcx.types.err, res); } } } else { @@ -5222,33 +5226,33 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // escaping late-bound regions, and nor should the base type scheme. let ty = tcx.type_of(def_id); - let substs = self_ctor_substs.unwrap_or_else(|| AstConv::create_substs_for_generic_args( - tcx, - def_id, - &[][..], - has_self, - self_ty, - // Provide the generic args, and whether types should be inferred. - |def_id| { - if let Some(&PathSeg(_, index)) = path_segs.iter().find(|&PathSeg(did, _)| { - *did == def_id - }) { - // If we've encountered an `impl Trait`-related error, we're just - // going to infer the arguments for better error messages. - if !infer_args_for_err.contains(&index) { - // Check whether the user has provided generic arguments. - if let Some(ref data) = segments[index].args { - return (Some(data), segments[index].infer_args); + let substs = self_ctor_substs.unwrap_or_else(|| { + AstConv::create_substs_for_generic_args( + tcx, + def_id, + &[][..], + has_self, + self_ty, + // Provide the generic args, and whether types should be inferred. + |def_id| { + if let Some(&PathSeg(_, index)) = + path_segs.iter().find(|&PathSeg(did, _)| *did == def_id) + { + // If we've encountered an `impl Trait`-related error, we're just + // going to infer the arguments for better error messages. + if !infer_args_for_err.contains(&index) { + // Check whether the user has provided generic arguments. + if let Some(ref data) = segments[index].args { + return (Some(data), segments[index].infer_args); + } } + return (None, segments[index].infer_args); } - return (None, segments[index].infer_args); - } - (None, true) - }, - // Provide substitutions for parameters for which (valid) arguments have been provided. - |param, arg| { - match (&param.kind, arg) { + (None, true) + }, + // Provide substitutions for parameters for which (valid) arguments have been provided.
+ |param, arg| match (&param.kind, arg) { (GenericParamDefKind::Lifetime, GenericArg::Lifetime(lt)) => { AstConv::ast_region_to_region(self, lt, Some(param)).into() } @@ -5259,40 +5263,41 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.to_const(&ct.value, self.tcx.type_of(param.def_id)).into() } _ => unreachable!(), - } - }, - // Provide substitutions for parameters for which arguments are inferred. - |substs, param, infer_args| { - match param.kind { - GenericParamDefKind::Lifetime => { - self.re_infer(Some(param), span).unwrap().into() - } - GenericParamDefKind::Type { has_default, .. } => { - if !infer_args && has_default { - // If we have a default, then we it doesn't matter that we're not - // inferring the type arguments: we provide the default where any - // is missing. - let default = tcx.type_of(param.def_id); - self.normalize_ty( - span, - default.subst_spanned(tcx, substs.unwrap(), Some(span)) - ).into() - } else { - // If no type arguments were provided, we have to infer them. - // This case also occurs as a result of some malformed input, e.g. - // a lifetime argument being given instead of a type parameter. - // Using inference instead of `Error` gives better error messages. + }, + // Provide substitutions for parameters for which arguments are inferred. + |substs, param, infer_args| { + match param.kind { + GenericParamDefKind::Lifetime => { + self.re_infer(Some(param), span).unwrap().into() + } + GenericParamDefKind::Type { has_default, .. } => { + if !infer_args && has_default { + // If we have a default, then we it doesn't matter that we're not + // inferring the type arguments: we provide the default where any + // is missing. + let default = tcx.type_of(param.def_id); + self.normalize_ty( + span, + default.subst_spanned(tcx, substs.unwrap(), Some(span)), + ) + .into() + } else { + // If no type arguments were provided, we have to infer them. + // This case also occurs as a result of some malformed input, e.g. + // a lifetime argument being given instead of a type parameter. + // Using inference instead of `Error` gives better error messages. + self.var_for_def(span, param) + } + } + GenericParamDefKind::Const => { + // FIXME(const_generics:defaults) + // No const parameters were provided, we have to infer them. self.var_for_def(span, param) } } - GenericParamDefKind::Const => { - // FIXME(const_generics:defaults) - // No const parameters were provided, we have to infer them.
- self.var_for_def(span, param) - } - } - }, - )); + }, + ) + }); assert!(!substs.has_escaping_bound_vars()); assert!(!ty.has_escaping_bound_vars()); @@ -5328,9 +5333,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.check_rustc_args_require_const(def_id, hir_id, span); - debug!("instantiate_value_path: type of {:?} is {:?}", - hir_id, - ty_substituted); + debug!("instantiate_value_path: type of {:?} is {:?}", hir_id, ty_substituted); self.write_substs(hir_id, substs); (ty_substituted, res) @@ -5341,14 +5344,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let (bounds, spans) = self.instantiate_bounds(span, def_id, &substs); for (i, mut obligation) in traits::predicates_for_generics( - traits::ObligationCause::new( - span, - self.body_id, - traits::ItemObligation(def_id), - ), + traits::ObligationCause::new(span, self.body_id, traits::ItemObligation(def_id)), self.param_env, &bounds, - ).into_iter().enumerate() { + ) + .into_iter() + .enumerate() + { // This makes the error point at the bound, but we want to point at the argument if let Some(span) = spans.get(i) { obligation.cause.code = traits::BindingObligation(def_id, *span); @@ -5357,30 +5359,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - fn check_rustc_args_require_const(&self, - def_id: DefId, - hir_id: hir::HirId, - span: Span) { + fn check_rustc_args_require_const(&self, def_id: DefId, hir_id: hir::HirId, span: Span) { // We're only interested in functions tagged with // #[rustc_args_required_const], so ignore anything that's not. if !self.tcx.has_attr(def_id, sym::rustc_args_required_const) { - return + return; } // If our calling expression is indeed the function itself, we're good! // If not, generate an error that this can only be called directly. - if let Node::Expr(expr) = self.tcx.hir().get( - self.tcx.hir().get_parent_node(hir_id)) - { + if let Node::Expr(expr) = self.tcx.hir().get(self.tcx.hir().get_parent_node(hir_id)) { if let ExprKind::Call(ref callee, ..) = expr.kind { if callee.hir_id == hir_id { - return + return; } } } - self.tcx.sess.span_err(span, "this function can only be invoked \ - directly, not through a function pointer"); + self.tcx.sess.span_err( + span, + "this function can only be invoked \ + directly, not through a function pointer", + ); } /// Resolves `typ` by a single level if `typ` is a type variable. @@ -5431,13 +5431,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { span: Span, original_values: &OriginalQueryValues<'tcx>, query_result: &Canonical<'tcx, QueryResponse<'tcx, Ty<'tcx>>>, - ) -> InferResult<'tcx, Ty<'tcx>> - { + ) -> InferResult<'tcx, Ty<'tcx>> { self.instantiate_query_response_and_region_obligations( &traits::ObligationCause::misc(span, self.body_id), self.param_env, original_values, - query_result) + query_result, + ) } /// Returns `true` if an expression is contained inside the LHS of an assignment expression. @@ -5467,9 +5467,7 @@ pub fn check_bounds_are_used<'tcx>(tcx: TyCtxt<'tcx>, generics: &ty::Generics, t let own_counts = generics.own_counts(); debug!( "check_bounds_are_used(n_tys={}, n_cts={}, ty={:?})", - own_counts.types, - own_counts.consts, - ty + own_counts.types, own_counts.consts, ty ); if own_counts.types == 0 { @@ -5515,9 +5513,10 @@ fn fatally_break_rust(sess: &Session) { handler.note_without_error("the compiler expectedly panicked. 
this is a feature."); handler.note_without_error( "we would appreciate a joke overview: \ - https://github.com/rust-lang/rust/issues/43162#issuecomment-320764675" + https://github.com/rust-lang/rust/issues/43162#issuecomment-320764675", ); - handler.note_without_error(&format!("rustc {} running on {}", + handler.note_without_error(&format!( + "rustc {} running on {}", option_env!("CFG_VERSION").unwrap_or("unknown_version"), crate::session::config::host_triple(), )); diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs index e974b070dbd..4c349a27c21 100644 --- a/src/librustc_typeck/check/wfcheck.rs +++ b/src/librustc_typeck/check/wfcheck.rs @@ -1,22 +1,22 @@ -use crate::check::{Inherited, FnCtxt}; +use crate::check::{FnCtxt, Inherited}; use crate::constrained_generic_params::{identify_constrained_generic_params, Parameter}; use crate::hir::def_id::DefId; -use rustc::traits::{self, ObligationCause, ObligationCauseCode}; -use rustc::ty::{self, Ty, TyCtxt, GenericParamDefKind, TypeFoldable, ToPredicate}; -use rustc::ty::subst::{Subst, InternalSubsts}; -use rustc::util::nodemap::{FxHashSet, FxHashMap}; -use rustc::middle::lang_items; use rustc::infer::opaque_types::may_define_opaque_type; +use rustc::middle::lang_items; +use rustc::traits::{self, ObligationCause, ObligationCauseCode}; +use rustc::ty::subst::{InternalSubsts, Subst}; +use rustc::ty::{self, GenericParamDefKind, ToPredicate, Ty, TyCtxt, TypeFoldable}; +use rustc::util::nodemap::{FxHashMap, FxHashSet}; +use errors::DiagnosticBuilder; use syntax::ast; use syntax::feature_gate; -use syntax_pos::Span; use syntax::symbol::sym; -use errors::DiagnosticBuilder; +use syntax_pos::Span; -use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc::hir; +use rustc::hir::itemlikevisit::ParItemLikeVisitor; use rustc_error_codes::*; @@ -73,9 +73,11 @@ pub fn check_item_well_formed(tcx: TyCtxt<'_>, def_id: DefId) { let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap(); let item = tcx.hir().expect_item(hir_id); - debug!("check_item_well_formed(it.hir_id={:?}, it.name={})", - item.hir_id, - tcx.def_path_str(def_id)); + debug!( + "check_item_well_formed(it.hir_id={:?}, it.name={})", + item.hir_id, + tcx.def_path_str(def_id) + ); match item.kind { // Right now we check that every default trait implementation @@ -96,7 +98,8 @@ pub fn check_item_well_formed(tcx: TyCtxt<'_>, def_id: DefId) { // won't be allowed unless there's an *explicit* implementation of `Send` // for `T` hir::ItemKind::Impl(_, _, defaultness, _, ref trait_ref, ref self_ty, _) => { - let is_auto = tcx.impl_trait_ref(tcx.hir().local_def_id(item.hir_id)) + let is_auto = tcx + .impl_trait_ref(tcx.hir().local_def_id(item.hir_id)) .map_or(false, |trait_ref| tcx.trait_is_auto(trait_ref.def_id)); let polarity = tcx.impl_polarity(def_id); if let (hir::Defaultness::Default { .. }, true) = (defaultness, is_auto) { @@ -109,9 +112,13 @@ pub fn check_item_well_formed(tcx: TyCtxt<'_>, def_id: DefId) { ty::ImplPolarity::Negative => { // FIXME(#27579): what amount of WF checking do we need for neg impls? if trait_ref.is_some() && !is_auto { - span_err!(tcx.sess, item.span, E0192, - "negative impls are only allowed for \ - auto traits (e.g., `Send` and `Sync`)") + span_err!( + tcx.sess, + item.span, + E0192, + "negative impls are only allowed for \ + auto traits (e.g., `Send` and `Sync`)" + ) } } ty::ImplPolarity::Reservation => { @@ -128,29 +135,25 @@ pub fn check_item_well_formed(tcx: TyCtxt<'_>, def_id: DefId) { hir::ItemKind::Const(ref ty, ..) 
=> { check_item_type(tcx, item.hir_id, ty.span, false); } - hir::ItemKind::ForeignMod(ref module) => for it in module.items.iter() { - if let hir::ForeignItemKind::Static(ref ty, ..) = it.kind { - check_item_type(tcx, it.hir_id, ty.span, true); + hir::ItemKind::ForeignMod(ref module) => { + for it in module.items.iter() { + if let hir::ForeignItemKind::Static(ref ty, ..) = it.kind { + check_item_type(tcx, it.hir_id, ty.span, true); + } } - }, + } hir::ItemKind::Struct(ref struct_def, ref ast_generics) => { - check_type_defn(tcx, item, false, |fcx| { - vec![fcx.non_enum_variant(struct_def)] - }); + check_type_defn(tcx, item, false, |fcx| vec![fcx.non_enum_variant(struct_def)]); check_variances_for_type_defn(tcx, item, ast_generics); } hir::ItemKind::Union(ref struct_def, ref ast_generics) => { - check_type_defn(tcx, item, true, |fcx| { - vec![fcx.non_enum_variant(struct_def)] - }); + check_type_defn(tcx, item, true, |fcx| vec![fcx.non_enum_variant(struct_def)]); check_variances_for_type_defn(tcx, item, ast_generics); } hir::ItemKind::Enum(ref enum_def, ref ast_generics) => { - check_type_defn(tcx, item, true, |fcx| { - fcx.enum_variants(enum_def) - }); + check_type_defn(tcx, item, true, |fcx| fcx.enum_variants(enum_def)); check_variances_for_type_defn(tcx, item, ast_generics); } @@ -170,7 +173,7 @@ pub fn check_trait_item(tcx: TyCtxt<'_>, def_id: DefId) { let method_sig = match trait_item.kind { hir::TraitItemKind::Method(ref sig, _) => Some(sig), - _ => None + _ => None, }; check_associated_item(tcx, trait_item.hir_id, trait_item.span, method_sig); } @@ -181,7 +184,7 @@ pub fn check_impl_item(tcx: TyCtxt<'_>, def_id: DefId) { let method_sig = match impl_item.kind { hir::ImplItemKind::Method(ref sig, _) => Some(sig), - _ => None + _ => None, }; check_associated_item(tcx, impl_item.hir_id, impl_item.span, method_sig); @@ -201,8 +204,9 @@ fn check_associated_item( let (mut implied_bounds, self_ty) = match item.container { ty::TraitContainer(_) => (vec![], fcx.tcx.types.self_param), - ty::ImplContainer(def_id) => (fcx.impl_implied_bounds(def_id, span), - fcx.tcx.type_of(def_id)) + ty::ImplContainer(def_id) => { + (fcx.impl_implied_bounds(def_id, span), fcx.tcx.type_of(def_id)) + } }; match item.kind { @@ -214,8 +218,7 @@ fn check_associated_item( ty::AssocKind::Method => { let sig = fcx.tcx.fn_sig(item.def_id); let sig = fcx.normalize_associated_types_in(span, &sig); - check_fn_or_method(tcx, fcx, span, sig, - item.def_id, &mut implied_bounds); + check_fn_or_method(tcx, fcx, span, sig, item.def_id, &mut implied_bounds); let sig_if_method = sig_if_method.expect("bad signature for method"); check_method_receiver(fcx, sig_if_method, &item, self_ty); } @@ -271,27 +274,20 @@ fn check_type_defn<'tcx, F>( let ty = variant.fields.last().unwrap().ty; let ty = fcx.tcx.erase_regions(&ty); if ty.has_local_value() { - fcx_tcx.sess.delay_span_bug( - item.span, &format!("inference variables in {:?}", ty)); - // Just treat unresolved type expression as if it needs drop. - true + fcx_tcx + .sess + .delay_span_bug(item.span, &format!("inference variables in {:?}", ty)); + // Just treat unresolved type expression as if it needs drop. 
+ true } else { ty.needs_drop(fcx_tcx, fcx_tcx.param_env(def_id)) } } }; - let all_sized = - all_sized || - variant.fields.is_empty() || - needs_drop_copy(); - let unsized_len = if all_sized { - 0 - } else { - 1 - }; - for (idx, field) in variant.fields[..variant.fields.len() - unsized_len] - .iter() - .enumerate() + let all_sized = all_sized || variant.fields.is_empty() || needs_drop_copy(); + let unsized_len = if all_sized { 0 } else { 1 }; + for (idx, field) in + variant.fields[..variant.fields.len() - unsized_len].iter().enumerate() { let last = idx == variant.fields.len() - 1; fcx.register_bound( @@ -305,16 +301,19 @@ fn check_type_defn<'tcx, F>( Some(i) => i, None => bug!(), }, - last - } - ) + last, + }, + ), ); } // All field types must be well-formed. for field in &variant.fields { - fcx.register_wf_obligation(field.ty, field.span, - ObligationCauseCode::MiscObligation) + fcx.register_wf_obligation( + field.ty, + field.span, + ObligationCauseCode::MiscObligation, + ) } } @@ -338,7 +337,8 @@ fn check_trait(tcx: TyCtxt<'_>, item: &hir::Item<'_>) { tcx.def_span(*associated_def_id), E0714, "marker traits cannot have associated items", - ).emit(); + ) + .emit(); } } @@ -354,18 +354,12 @@ fn check_item_fn(tcx: TyCtxt<'_>, item: &hir::Item<'_>) { let sig = fcx.tcx.fn_sig(def_id); let sig = fcx.normalize_associated_types_in(item.span, &sig); let mut implied_bounds = vec![]; - check_fn_or_method(tcx, fcx, item.span, sig, - def_id, &mut implied_bounds); + check_fn_or_method(tcx, fcx, item.span, sig, def_id, &mut implied_bounds); implied_bounds }) } -fn check_item_type( - tcx: TyCtxt<'_>, - item_id: hir::HirId, - ty_span: Span, - allow_foreign_ty: bool, -) { +fn check_item_type(tcx: TyCtxt<'_>, item_id: hir::HirId, ty_span: Span, allow_foreign_ty: bool) { debug!("check_item_type: {:?}", item_id); for_id(tcx, item_id, ty_span).with_fcx(|fcx, tcx| { @@ -411,10 +405,8 @@ fn check_impl<'tcx>( // therefore don't need to be WF (the trait's `Self: Trait` predicate // won't hold). let trait_ref = fcx.tcx.impl_trait_ref(item_def_id).unwrap(); - let trait_ref = fcx.normalize_associated_types_in( - ast_trait_ref.path.span, - &trait_ref, - ); + let trait_ref = + fcx.normalize_associated_types_in(ast_trait_ref.path.span, &trait_ref); let obligations = ty::wf::trait_obligations( fcx, fcx.param_env, @@ -430,8 +422,11 @@ fn check_impl<'tcx>( None => { let self_ty = fcx.tcx.type_of(item_def_id); let self_ty = fcx.normalize_associated_types_in(item.span, &self_ty); - fcx.register_wf_obligation(self_ty, ast_self_ty.span, - ObligationCauseCode::MiscObligation); + fcx.register_wf_obligation( + self_ty, + ast_self_ty.span, + ObligationCauseCode::MiscObligation, + ); } } @@ -454,13 +449,11 @@ fn check_where_clauses<'tcx, 'fcx>( let predicates = fcx.tcx.predicates_of(def_id); let generics = tcx.generics_of(def_id); - let is_our_default = |def: &ty::GenericParamDef| { - match def.kind { - GenericParamDefKind::Type { has_default, .. } => { - has_default && def.index >= generics.parent_count as u32 - } - _ => unreachable!() + let is_our_default = |def: &ty::GenericParamDef| match def.kind { + GenericParamDefKind::Type { has_default, .. } => { + has_default && def.index >= generics.parent_count as u32 } + _ => unreachable!(), }; // Check that concrete defaults are well-formed. See test `type-check-defaults.rs`. @@ -477,8 +470,11 @@ fn check_where_clauses<'tcx, 'fcx>( // parameter includes another (e.g., `<T, U = T>`). In those cases, we can't // be sure if it will error or not as user might always specify the other. 
if !ty.needs_subst() { - fcx.register_wf_obligation(ty, fcx.tcx.def_span(param.def_id), - ObligationCauseCode::MiscObligation); + fcx.register_wf_obligation( + ty, + fcx.tcx.def_span(param.def_id), + ObligationCauseCode::MiscObligation, + ); } } } @@ -521,55 +517,62 @@ fn check_where_clauses<'tcx, 'fcx>( }); // Now we build the substituted predicates. - let default_obligations = predicates.predicates.iter().flat_map(|&(pred, sp)| { - #[derive(Default)] - struct CountParams { params: FxHashSet<u32> } - impl<'tcx> ty::fold::TypeVisitor<'tcx> for CountParams { - fn visit_ty(&mut self, t: Ty<'tcx>) -> bool { - if let ty::Param(param) = t.kind { - self.params.insert(param.index); - } - t.super_visit_with(self) + let default_obligations = predicates + .predicates + .iter() + .flat_map(|&(pred, sp)| { + #[derive(Default)] + struct CountParams { + params: FxHashSet<u32>, } + impl<'tcx> ty::fold::TypeVisitor<'tcx> for CountParams { + fn visit_ty(&mut self, t: Ty<'tcx>) -> bool { + if let ty::Param(param) = t.kind { + self.params.insert(param.index); + } + t.super_visit_with(self) + } - fn visit_region(&mut self, _: ty::Region<'tcx>) -> bool { - true - } + fn visit_region(&mut self, _: ty::Region<'tcx>) -> bool { + true + } - fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> bool { - if let ty::ConstKind::Param(param) = c.val { - self.params.insert(param.index); + fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> bool { + if let ty::ConstKind::Param(param) = c.val { + self.params.insert(param.index); + } + c.super_visit_with(self) } - c.super_visit_with(self) } - } - let mut param_count = CountParams::default(); - let has_region = pred.visit_with(&mut param_count); - let substituted_pred = pred.subst(fcx.tcx, substs); - // Don't check non-defaulted params, dependent defaults (including lifetimes) - // or preds with multiple params. - if substituted_pred.references_error() || param_count.params.len() > 1 || has_region { - None - } else if predicates.predicates.iter().any(|&(p, _)| p == substituted_pred) { - // Avoid duplication of predicates that contain no parameters, for example. - None - } else { - Some((substituted_pred, sp)) - } - }).map(|(pred, sp)| { - // Convert each of those into an obligation. So if you have - // something like `struct Foo<T: Copy = String>`, we would - // take that predicate `T: Copy`, substitute to `String: Copy` - // (actually that happens in the previous `flat_map` call), - // and then try to prove it (in this case, we'll fail). - // - // Note the subtle difference from how we handle `predicates` - // below: there, we are not trying to prove those predicates - // to be *true* but merely *well-formed*. - let pred = fcx.normalize_associated_types_in(sp, &pred); - let cause = traits::ObligationCause::new(sp, fcx.body_id, traits::ItemObligation(def_id)); - traits::Obligation::new(cause, fcx.param_env, pred) - }); + let mut param_count = CountParams::default(); + let has_region = pred.visit_with(&mut param_count); + let substituted_pred = pred.subst(fcx.tcx, substs); + // Don't check non-defaulted params, dependent defaults (including lifetimes) + // or preds with multiple params. + if substituted_pred.references_error() || param_count.params.len() > 1 || has_region { + None + } else if predicates.predicates.iter().any(|&(p, _)| p == substituted_pred) { + // Avoid duplication of predicates that contain no parameters, for example. + None + } else { + Some((substituted_pred, sp)) + } + }) + .map(|(pred, sp)| { + // Convert each of those into an obligation. 
So if you have + // something like `struct Foo<T: Copy = String>`, we would + // take that predicate `T: Copy`, substitute to `String: Copy` + // (actually that happens in the previous `flat_map` call), + // and then try to prove it (in this case, we'll fail). + // + // Note the subtle difference from how we handle `predicates` + // below: there, we are not trying to prove those predicates + // to be *true* but merely *well-formed*. + let pred = fcx.normalize_associated_types_in(sp, &pred); + let cause = + traits::ObligationCause::new(sp, fcx.body_id, traits::ItemObligation(def_id)); + traits::Obligation::new(cause, fcx.param_env, pred) + }); let mut predicates = predicates.instantiate_identity(fcx.tcx); @@ -580,14 +583,10 @@ fn check_where_clauses<'tcx, 'fcx>( let predicates = fcx.normalize_associated_types_in(span, &predicates); debug!("check_where_clauses: predicates={:?}", predicates.predicates); - let wf_obligations = - predicates.predicates - .iter() - .flat_map(|p| ty::wf::predicate_obligations(fcx, - fcx.param_env, - fcx.body_id, - p, - span)); + let wf_obligations = predicates + .predicates + .iter() + .flat_map(|p| ty::wf::predicate_obligations(fcx, fcx.param_env, fcx.body_id, p, span)); for obligation in wf_obligations.chain(default_obligations) { debug!("next obligation cause: {:?}", obligation.cause); @@ -681,13 +680,12 @@ fn check_opaque_types<'fcx, 'tcx>( ) .emit(); } - } + }, ty::subst::GenericArgKind::Lifetime(region) => { let param_span = tcx.def_span(param.def_id); if let ty::ReStatic = region { - tcx - .sess + tcx.sess .struct_span_err( span, "non-defining opaque type use \ @@ -724,22 +722,18 @@ fn check_opaque_types<'fcx, 'tcx>( ) .emit(); } - } + }, } // match subst } // for (subst, param) for (_, spans) in seen { if spans.len() > 1 { - tcx - .sess + tcx.sess .struct_span_err( span, "non-defining opaque type use \ in defining scope", - ). - span_note( - spans, - "lifetime used multiple times", ) + .span_note(spans, "lifetime used multiple times") .emit(); } } @@ -756,10 +750,7 @@ fn check_opaque_types<'fcx, 'tcx>( // type Foo<T: Bar> = impl Baz + 'static; // fn foo<U: Bar>() -> Foo<U> { .. *} let predicates = tcx.predicates_of(def_id); - trace!( - "check_opaque_types: may define, predicates={:#?}", - predicates, - ); + trace!("check_opaque_types: may define, predicates={:#?}", predicates,); for &(pred, _) in predicates.predicates { let substituted_pred = pred.subst(fcx.tcx, substs); // Avoid duplication of predicates that contain no parameters, for example. 
@@ -777,8 +768,7 @@ fn check_opaque_types<'fcx, 'tcx>( substituted_predicates } -const HELP_FOR_SELF_TYPE: &str = - "consider changing to `self`, `&self`, `&mut self`, `self: Box<Self>`, \ +const HELP_FOR_SELF_TYPE: &str = "consider changing to `self`, `&self`, `&mut self`, `self: Box<Self>`, \ `self: Rc<Self>`, `self: Arc<Self>`, or `self: Pin<P>` (where P is one \ of the previous types except `Self`)"; @@ -804,18 +794,13 @@ fn check_method_receiver<'fcx, 'tcx>( debug!("check_method_receiver: sig={:?}", sig); let self_ty = fcx.normalize_associated_types_in(span, &self_ty); - let self_ty = fcx.tcx.liberate_late_bound_regions( - method.def_id, - &ty::Binder::bind(self_ty) - ); + let self_ty = fcx.tcx.liberate_late_bound_regions(method.def_id, &ty::Binder::bind(self_ty)); let receiver_ty = sig.inputs()[0]; let receiver_ty = fcx.normalize_associated_types_in(span, &receiver_ty); - let receiver_ty = fcx.tcx.liberate_late_bound_regions( - method.def_id, - &ty::Binder::bind(receiver_ty) - ); + let receiver_ty = + fcx.tcx.liberate_late_bound_regions(method.def_id, &ty::Binder::bind(receiver_ty)); if fcx.tcx.features().arbitrary_self_types { if !receiver_is_valid(fcx, span, receiver_ty, self_ty, true) { @@ -851,8 +836,10 @@ fn e0307(fcx: &FnCtxt<'fcx, 'tcx>, span: Span, receiver_ty: Ty<'_>) { fcx.tcx.sess.diagnostic(), span, E0307, - "invalid `self` parameter type: {:?}", receiver_ty, - ).note("type of `self` must be `Self` or a type that dereferences to it") + "invalid `self` parameter type: {:?}", + receiver_ty, + ) + .note("type of `self` must be `Self` or a type that dereferences to it") .help(HELP_FOR_SELF_TYPE) .emit(); } @@ -882,7 +869,7 @@ fn receiver_is_valid<'fcx, 'tcx>( if let Some(mut err) = fcx.demand_eqtype_with_origin(&cause, self_ty, receiver_ty) { err.emit(); } - return true + return true; } let mut autoderef = fcx.autoderef(span, receiver_ty); @@ -895,27 +882,26 @@ fn receiver_is_valid<'fcx, 'tcx>( // The first type is `receiver_ty`, which we know its not equal to `self_ty`; skip it. autoderef.next(); - let receiver_trait_def_id = fcx.tcx.require_lang_item( - lang_items::ReceiverTraitLangItem, - None, - ); + let receiver_trait_def_id = fcx.tcx.require_lang_item(lang_items::ReceiverTraitLangItem, None); // Keep dereferencing `receiver_ty` until we get to `self_ty`. loop { if let Some((potential_self_ty, _)) = autoderef.next() { - debug!("receiver_is_valid: potential self type `{:?}` to match `{:?}`", - potential_self_ty, self_ty); + debug!( + "receiver_is_valid: potential self type `{:?}` to match `{:?}`", + potential_self_ty, self_ty + ); if can_eq_self(potential_self_ty) { autoderef.finalize(fcx); - if let Some(mut err) = fcx.demand_eqtype_with_origin( - &cause, self_ty, potential_self_ty - ) { + if let Some(mut err) = + fcx.demand_eqtype_with_origin(&cause, self_ty, potential_self_ty) + { err.emit(); } - break + break; } else { // Without `feature(arbitrary_self_types)`, we require that each step in the // deref chain implement `receiver` @@ -927,12 +913,11 @@ fn receiver_is_valid<'fcx, 'tcx>( potential_self_ty, ) { - return false + return false; } } } else { - debug!("receiver_is_valid: type `{:?}` does not deref to `{:?}`", - receiver_ty, self_ty); + debug!("receiver_is_valid: type `{:?}` does not deref to `{:?}`", receiver_ty, self_ty); // If he receiver already has errors reported due to it, consider it valid to avoid // unnecessary errors (#58712). 
return receiver_ty.references_error(); @@ -943,7 +928,7 @@ fn receiver_is_valid<'fcx, 'tcx>( if !arbitrary_self_types_enabled && !receiver_is_implemented(fcx, receiver_trait_def_id, cause.clone(), receiver_ty) { - return false + return false; } true @@ -955,22 +940,20 @@ fn receiver_is_implemented( cause: ObligationCause<'tcx>, receiver_ty: Ty<'tcx>, ) -> bool { - let trait_ref = ty::TraitRef{ + let trait_ref = ty::TraitRef { def_id: receiver_trait_def_id, substs: fcx.tcx.mk_substs_trait(receiver_ty, &[]), }; - let obligation = traits::Obligation::new( - cause, - fcx.param_env, - trait_ref.to_predicate() - ); + let obligation = traits::Obligation::new(cause, fcx.param_env, trait_ref.to_predicate()); if fcx.predicate_must_hold_modulo_regions(&obligation) { true } else { - debug!("receiver_is_implemented: type `{:?}` does not implement `Receiver` trait", - receiver_ty); + debug!( + "receiver_is_implemented: type `{:?}` does not implement `Receiver` trait", + receiver_ty + ); false } } @@ -990,18 +973,14 @@ fn check_variances_for_type_defn<'tcx>( assert_eq!(ty_predicates.parent, None); let variances = tcx.variances_of(item_def_id); - let mut constrained_parameters: FxHashSet<_> = - variances.iter().enumerate() - .filter(|&(_, &variance)| variance != ty::Bivariant) - .map(|(index, _)| Parameter(index as u32)) - .collect(); - - identify_constrained_generic_params( - tcx, - ty_predicates, - None, - &mut constrained_parameters, - ); + let mut constrained_parameters: FxHashSet<_> = variances + .iter() + .enumerate() + .filter(|&(_, &variance)| variance != ty::Bivariant) + .map(|(index, _)| Parameter(index as u32)) + .collect(); + + identify_constrained_generic_params(tcx, ty_predicates, None, &mut constrained_parameters); for (index, _) in variances.iter().enumerate() { if constrained_parameters.contains(&Parameter(index as u32)) { @@ -1011,7 +990,7 @@ fn check_variances_for_type_defn<'tcx>( let param = &hir_generics.params[index]; match param.name { - hir::ParamName::Error => { } + hir::ParamName::Error => {} _ => report_bivariance(tcx, param.span, param.name.ident().name), } } @@ -1029,7 +1008,7 @@ fn report_bivariance(tcx: TyCtxt<'_>, span: Span, param_name: ast::Name) { tcx.def_path_str(def_id), ) } else { - format!( "consider removing `{}` or referring to it in a field", param_name) + format!("consider removing `{}` or referring to it in a field", param_name) }; err.help(&msg); err.emit(); @@ -1041,10 +1020,7 @@ fn check_false_global_bounds(fcx: &FnCtxt<'_, '_>, span: Span, id: hir::HirId) { let empty_env = ty::ParamEnv::empty(); let def_id = fcx.tcx.hir().local_def_id(id); - let predicates = fcx.tcx.predicates_of(def_id).predicates - .iter() - .map(|(p, _)| *p) - .collect(); + let predicates = fcx.tcx.predicates_of(def_id).predicates.iter().map(|(p, _)| *p).collect(); // Check elaborated bounds. 
let implied_obligations = traits::elaborate_predicates(fcx.tcx, predicates); @@ -1053,11 +1029,7 @@ fn check_false_global_bounds(fcx: &FnCtxt<'_, '_>, span: Span, id: hir::HirId) { if pred.is_global() && !pred.has_late_bound_regions() { let pred = fcx.normalize_associated_types_in(span, &pred); let obligation = traits::Obligation::new( - traits::ObligationCause::new( - span, - id, - traits::TrivialBound, - ), + traits::ObligationCause::new(span, id, traits::TrivialBound), empty_env, pred, ); @@ -1074,9 +1046,7 @@ pub struct CheckTypeWellFormedVisitor<'tcx> { impl CheckTypeWellFormedVisitor<'tcx> { pub fn new(tcx: TyCtxt<'tcx>) -> CheckTypeWellFormedVisitor<'tcx> { - CheckTypeWellFormedVisitor { - tcx, - } + CheckTypeWellFormedVisitor { tcx } } } @@ -1114,22 +1084,22 @@ struct AdtField<'tcx> { impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn non_enum_variant(&self, struct_def: &hir::VariantData<'_>) -> AdtVariant<'tcx> { - let fields = struct_def.fields().iter().map(|field| { - let field_ty = self.tcx.type_of(self.tcx.hir().local_def_id(field.hir_id)); - let field_ty = self.normalize_associated_types_in(field.span, - &field_ty); - let field_ty = self.resolve_vars_if_possible(&field_ty); - debug!("non_enum_variant: type of field {:?} is {:?}", field, field_ty); - AdtField { ty: field_ty, span: field.span } - }) - .collect(); + let fields = struct_def + .fields() + .iter() + .map(|field| { + let field_ty = self.tcx.type_of(self.tcx.hir().local_def_id(field.hir_id)); + let field_ty = self.normalize_associated_types_in(field.span, &field_ty); + let field_ty = self.resolve_vars_if_possible(&field_ty); + debug!("non_enum_variant: type of field {:?} is {:?}", field, field_ty); + AdtField { ty: field_ty, span: field.span } + }) + .collect(); AdtVariant { fields } } fn enum_variants(&self, enum_def: &hir::EnumDef<'_>) -> Vec<AdtVariant<'tcx>> { - enum_def.variants.iter() - .map(|variant| self.non_enum_variant(&variant.data)) - .collect() + enum_def.variants.iter().map(|variant| self.non_enum_variant(&variant.data)).collect() } fn impl_implied_bounds(&self, impl_def_id: DefId, span: Span) -> Vec<Ty<'tcx>> { @@ -1151,13 +1121,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } -fn error_392( - tcx: TyCtxt<'_>, - span: Span, - param_name: ast::Name, -) -> DiagnosticBuilder<'_> { - let mut err = struct_span_err!(tcx.sess, span, E0392, - "parameter `{}` is never used", param_name); +fn error_392(tcx: TyCtxt<'_>, span: Span, param_name: ast::Name) -> DiagnosticBuilder<'_> { + let mut err = + struct_span_err!(tcx.sess, span, E0392, "parameter `{}` is never used", param_name); err.span_label(span, "unused parameter"); err } diff --git a/src/librustdoc/html/markdown/tests.rs b/src/librustdoc/html/markdown/tests.rs index 5d6811a29a3..862c6c56c78 100644 --- a/src/librustdoc/html/markdown/tests.rs +++ b/src/librustdoc/html/markdown/tests.rs @@ -1,16 +1,42 @@ -use super::{ErrorCodes, LangString, Markdown, MarkdownHtml, IdMap, Ignore}; use super::plain_summary_line; +use super::{ErrorCodes, IdMap, Ignore, LangString, Markdown, MarkdownHtml}; use std::cell::RefCell; use syntax::edition::{Edition, DEFAULT_EDITION}; #[test] fn test_unique_id() { - let input = ["foo", "examples", "examples", "method.into_iter","examples", - "method.into_iter", "foo", "main", "search", "methods", - "examples", "method.into_iter", "assoc_type.Item", "assoc_type.Item"]; - let expected = ["foo", "examples", "examples-1", "method.into_iter", "examples-2", - "method.into_iter-1", "foo-1", "main", "search", "methods", - "examples-3", "method.into_iter-2", 
"assoc_type.Item", "assoc_type.Item-1"]; + let input = [ + "foo", + "examples", + "examples", + "method.into_iter", + "examples", + "method.into_iter", + "foo", + "main", + "search", + "methods", + "examples", + "method.into_iter", + "assoc_type.Item", + "assoc_type.Item", + ]; + let expected = [ + "foo", + "examples", + "examples-1", + "method.into_iter", + "examples-2", + "method.into_iter-1", + "foo-1", + "main", + "search", + "methods", + "examples-3", + "method.into_iter-2", + "assoc_type.Item", + "assoc_type.Item-1", + ]; let map = RefCell::new(IdMap::new()); let test = || { @@ -25,96 +51,161 @@ fn test_unique_id() { #[test] fn test_lang_string_parse() { - fn t(s: &str, - should_panic: bool, no_run: bool, ignore: Ignore, rust: bool, test_harness: bool, - compile_fail: bool, allow_fail: bool, error_codes: Vec<String>, - edition: Option<Edition>) { - assert_eq!(LangString::parse(s, ErrorCodes::Yes, true), LangString { - should_panic, - no_run, - ignore, - rust, - test_harness, - compile_fail, - error_codes, - original: s.to_owned(), - allow_fail, - edition, - }) + fn t( + s: &str, + should_panic: bool, + no_run: bool, + ignore: Ignore, + rust: bool, + test_harness: bool, + compile_fail: bool, + allow_fail: bool, + error_codes: Vec<String>, + edition: Option<Edition>, + ) { + assert_eq!( + LangString::parse(s, ErrorCodes::Yes, true), + LangString { + should_panic, + no_run, + ignore, + rust, + test_harness, + compile_fail, + error_codes, + original: s.to_owned(), + allow_fail, + edition, + } + ) } - let ignore_foo = Ignore::Some(vec!("foo".to_string())); + let ignore_foo = Ignore::Some(vec!["foo".to_string()]); fn v() -> Vec<String> { Vec::new() } - // ignore-tidy-linelength // marker | should_panic | no_run | ignore | rust | test_harness // | compile_fail | allow_fail | error_codes | edition - t("", false, false, Ignore::None, true, false, false, false, v(), None); - t("rust", false, false, Ignore::None, true, false, false, false, v(), None); - t("sh", false, false, Ignore::None, false, false, false, false, v(), None); - t("ignore", false, false, Ignore::All, true, false, false, false, v(), None); - t("ignore-foo", false, false, ignore_foo, true, false, false, false, v(), None); - t("should_panic", true, false, Ignore::None, true, false, false, false, v(), None); - t("no_run", false, true, Ignore::None, true, false, false, false, v(), None); - t("test_harness", false, false, Ignore::None, true, true, false, false, v(), None); - t("compile_fail", false, true, Ignore::None, true, false, true, false, v(), None); - t("allow_fail", false, false, Ignore::None, true, false, false, true, v(), None); - t("{.no_run .example}", false, true, Ignore::None, true, false, false, false, v(), None); - t("{.sh .should_panic}", true, false, Ignore::None, false, false, false, false, v(), None); - t("{.example .rust}", false, false, Ignore::None, true, false, false, false, v(), None); - t("{.test_harness .rust}", false, false, Ignore::None, true, true, false, false, v(), None); - t("text, no_run", false, true, Ignore::None, false, false, false, false, v(), None); - t("text,no_run", false, true, Ignore::None, false, false, false, false, v(), None); - t("edition2015", false, false, Ignore::None, true, false, false, false, v(), Some(Edition::Edition2015)); - t("edition2018", false, false, Ignore::None, true, false, false, false, v(), Some(Edition::Edition2018)); + t("", false, false, Ignore::None, true, false, false, false, v(), None); + t("rust", false, false, Ignore::None, true, false, false, false, v(), 
None); + t("sh", false, false, Ignore::None, false, false, false, false, v(), None); + t("ignore", false, false, Ignore::All, true, false, false, false, v(), None); + t("ignore-foo", false, false, ignore_foo, true, false, false, false, v(), None); + t("should_panic", true, false, Ignore::None, true, false, false, false, v(), None); + t("no_run", false, true, Ignore::None, true, false, false, false, v(), None); + t("test_harness", false, false, Ignore::None, true, true, false, false, v(), None); + t("compile_fail", false, true, Ignore::None, true, false, true, false, v(), None); + t("allow_fail", false, false, Ignore::None, true, false, false, true, v(), None); + t("{.no_run .example}", false, true, Ignore::None, true, false, false, false, v(), None); + t("{.sh .should_panic}", true, false, Ignore::None, false, false, false, false, v(), None); + t("{.example .rust}", false, false, Ignore::None, true, false, false, false, v(), None); + t("{.test_harness .rust}", false, false, Ignore::None, true, true, false, false, v(), None); + t("text, no_run", false, true, Ignore::None, false, false, false, false, v(), None); + t("text,no_run", false, true, Ignore::None, false, false, false, false, v(), None); + t( + "edition2015", + false, + false, + Ignore::None, + true, + false, + false, + false, + v(), + Some(Edition::Edition2015), + ); + t( + "edition2018", + false, + false, + Ignore::None, + true, + false, + false, + false, + v(), + Some(Edition::Edition2018), + ); } #[test] fn test_header() { fn t(input: &str, expect: &str) { let mut map = IdMap::new(); - let output = Markdown( - input, &[], &mut map, ErrorCodes::Yes, DEFAULT_EDITION, &None).to_string(); + let output = + Markdown(input, &[], &mut map, ErrorCodes::Yes, DEFAULT_EDITION, &None).to_string(); assert_eq!(output, expect, "original: {}", input); } - t("# Foo bar", "<h1 id=\"foo-bar\" class=\"section-header\">\ - <a href=\"#foo-bar\">Foo bar</a></h1>"); - t("## Foo-bar_baz qux", "<h2 id=\"foo-bar_baz-qux\" class=\"section-\ - header\"><a href=\"#foo-bar_baz-qux\">Foo-bar_baz qux</a></h2>"); - t("### **Foo** *bar* baz!?!& -_qux_-%", - "<h3 id=\"foo-bar-baz--qux-\" class=\"section-header\">\ + t( + "# Foo bar", + "<h1 id=\"foo-bar\" class=\"section-header\">\ + <a href=\"#foo-bar\">Foo bar</a></h1>", + ); + t( + "## Foo-bar_baz qux", + "<h2 id=\"foo-bar_baz-qux\" class=\"section-\ + header\"><a href=\"#foo-bar_baz-qux\">Foo-bar_baz qux</a></h2>", + ); + t( + "### **Foo** *bar* baz!?!& -_qux_-%", + "<h3 id=\"foo-bar-baz--qux-\" class=\"section-header\">\ <a href=\"#foo-bar-baz--qux-\"><strong>Foo</strong> \ - <em>bar</em> baz!?!& -<em>qux</em>-%</a></h3>"); - t("#### **Foo?** & \\*bar?!* _`baz`_ ❤ #qux", - "<h4 id=\"foo--bar--baz--qux\" class=\"section-header\">\ + <em>bar</em> baz!?!& -<em>qux</em>-%</a></h3>", + ); + t( + "#### **Foo?** & \\*bar?!* _`baz`_ ❤ #qux", + "<h4 id=\"foo--bar--baz--qux\" class=\"section-header\">\ <a href=\"#foo--bar--baz--qux\"><strong>Foo?</strong> & *bar?!* \ - <em><code>baz</code></em> ❤ #qux</a></h4>"); + <em><code>baz</code></em> ❤ #qux</a></h4>", + ); } #[test] fn test_header_ids_multiple_blocks() { let mut map = IdMap::new(); fn t(map: &mut IdMap, input: &str, expect: &str) { - let output = Markdown(input, &[], map, - ErrorCodes::Yes, DEFAULT_EDITION, &None).to_string(); + let output = Markdown(input, &[], map, ErrorCodes::Yes, DEFAULT_EDITION, &None).to_string(); assert_eq!(output, expect, "original: {}", input); } - t(&mut map, "# Example", "<h1 id=\"example\" class=\"section-header\">\ - <a 
href=\"#example\">Example</a></h1>"); - t(&mut map, "# Panics", "<h1 id=\"panics\" class=\"section-header\">\ - <a href=\"#panics\">Panics</a></h1>"); - t(&mut map, "# Example", "<h1 id=\"example-1\" class=\"section-header\">\ - <a href=\"#example-1\">Example</a></h1>"); - t(&mut map, "# Main", "<h1 id=\"main\" class=\"section-header\">\ - <a href=\"#main\">Main</a></h1>"); - t(&mut map, "# Example", "<h1 id=\"example-2\" class=\"section-header\">\ - <a href=\"#example-2\">Example</a></h1>"); - t(&mut map, "# Panics", "<h1 id=\"panics-1\" class=\"section-header\">\ - <a href=\"#panics-1\">Panics</a></h1>"); + t( + &mut map, + "# Example", + "<h1 id=\"example\" class=\"section-header\">\ + <a href=\"#example\">Example</a></h1>", + ); + t( + &mut map, + "# Panics", + "<h1 id=\"panics\" class=\"section-header\">\ + <a href=\"#panics\">Panics</a></h1>", + ); + t( + &mut map, + "# Example", + "<h1 id=\"example-1\" class=\"section-header\">\ + <a href=\"#example-1\">Example</a></h1>", + ); + t( + &mut map, + "# Main", + "<h1 id=\"main\" class=\"section-header\">\ + <a href=\"#main\">Main</a></h1>", + ); + t( + &mut map, + "# Example", + "<h1 id=\"example-2\" class=\"section-header\">\ + <a href=\"#example-2\">Example</a></h1>", + ); + t( + &mut map, + "# Panics", + "<h1 id=\"panics-1\" class=\"section-header\">\ + <a href=\"#panics-1\">Panics</a></h1>", + ); } #[test] @@ -136,8 +227,8 @@ fn test_plain_summary_line() { fn test_markdown_html_escape() { fn t(input: &str, expect: &str) { let mut idmap = IdMap::new(); - let output = MarkdownHtml(input, &mut idmap, - ErrorCodes::Yes, DEFAULT_EDITION, &None).to_string(); + let output = + MarkdownHtml(input, &mut idmap, ErrorCodes::Yes, DEFAULT_EDITION, &None).to_string(); assert_eq!(output, expect, "original: {}", input); } diff --git a/src/libstd/sys/sgx/abi/mem.rs b/src/libstd/sys/sgx/abi/mem.rs index d9051733da2..500e62b1cb5 100644 --- a/src/libstd/sys/sgx/abi/mem.rs +++ b/src/libstd/sys/sgx/abi/mem.rs @@ -10,7 +10,7 @@ pub(crate) unsafe fn rel_ptr_mut<T>(offset: u64) -> *mut T { (image_base() + offset) as *mut T } -extern { +extern "C" { static ENCLAVE_SIZE: usize; } @@ -33,8 +33,7 @@ pub fn image_base() -> u64 { pub fn is_enclave_range(p: *const u8, len: usize) -> bool { let start = p as u64; let end = start + (len as u64); - start >= image_base() && - end <= image_base() + (unsafe { ENCLAVE_SIZE } as u64) // unsafe ok: link-time constant + start >= image_base() && end <= image_base() + (unsafe { ENCLAVE_SIZE } as u64) // unsafe ok: link-time constant } /// Returns `true` if the specified memory range is in userspace. 
@@ -44,6 +43,5 @@ pub fn is_enclave_range(p: *const u8, len: usize) -> bool { pub fn is_user_range(p: *const u8, len: usize) -> bool { let start = p as u64; let end = start + (len as u64); - end <= image_base() || - start >= image_base() + (unsafe { ENCLAVE_SIZE } as u64) // unsafe ok: link-time constant + end <= image_base() || start >= image_base() + (unsafe { ENCLAVE_SIZE } as u64) // unsafe ok: link-time constant } diff --git a/src/libstd/sys/unix/os.rs b/src/libstd/sys/unix/os.rs index 10cdb25999c..95be564b330 100644 --- a/src/libstd/sys/unix/os.rs +++ b/src/libstd/sys/unix/os.rs @@ -5,7 +5,7 @@ use crate::os::unix::prelude::*; use crate::error::Error as StdError; -use crate::ffi::{CString, CStr, OsString, OsStr}; +use crate::ffi::{CStr, CString, OsStr, OsString}; use crate::fmt; use crate::io; use crate::iter; @@ -16,12 +16,12 @@ use crate::path::{self, PathBuf}; use crate::ptr; use crate::slice; use crate::str; -use crate::sys_common::mutex::{Mutex, MutexGuard}; use crate::sys::cvt; use crate::sys::fd; +use crate::sys_common::mutex::{Mutex, MutexGuard}; use crate::vec; -use libc::{c_int, c_char, c_void}; +use libc::{c_char, c_int, c_void}; const TMPBUF_SZ: usize = 128; @@ -33,24 +33,32 @@ cfg_if::cfg_if! { } } -extern { +extern "C" { #[cfg(not(target_os = "dragonfly"))] - #[cfg_attr(any(target_os = "linux", - target_os = "emscripten", - target_os = "fuchsia", - target_os = "l4re"), - link_name = "__errno_location")] - #[cfg_attr(any(target_os = "netbsd", - target_os = "openbsd", - target_os = "android", - target_os = "redox", - target_env = "newlib"), - link_name = "__errno")] + #[cfg_attr( + any( + target_os = "linux", + target_os = "emscripten", + target_os = "fuchsia", + target_os = "l4re" + ), + link_name = "__errno_location" + )] + #[cfg_attr( + any( + target_os = "netbsd", + target_os = "openbsd", + target_os = "android", + target_os = "redox", + target_env = "newlib" + ), + link_name = "__errno" + )] #[cfg_attr(target_os = "solaris", link_name = "___errno")] - #[cfg_attr(any(target_os = "macos", - target_os = "ios", - target_os = "freebsd"), - link_name = "__error")] + #[cfg_attr( + any(target_os = "macos", target_os = "ios", target_os = "freebsd"), + link_name = "__error" + )] #[cfg_attr(target_os = "haiku", link_name = "_errnop")] fn errno_location() -> *mut c_int; } @@ -58,23 +66,18 @@ extern { /// Returns the platform-specific value of errno #[cfg(not(target_os = "dragonfly"))] pub fn errno() -> i32 { - unsafe { - (*errno_location()) as i32 - } + unsafe { (*errno_location()) as i32 } } /// Sets the platform-specific value of errno -#[cfg(all(not(target_os = "linux"), - not(target_os = "dragonfly")))] // needed for readdir and syscall! +#[cfg(all(not(target_os = "linux"), not(target_os = "dragonfly")))] // needed for readdir and syscall! pub fn set_errno(e: i32) { - unsafe { - *errno_location() = e as c_int - } + unsafe { *errno_location() = e as c_int } } #[cfg(target_os = "dragonfly")] pub fn errno() -> i32 { - extern { + extern "C" { #[thread_local] static errno: c_int; } @@ -84,7 +87,7 @@ pub fn errno() -> i32 { #[cfg(target_os = "dragonfly")] pub fn set_errno(e: i32) { - extern { + extern "C" { #[thread_local] static mut errno: c_int; } @@ -96,11 +99,9 @@ pub fn set_errno(e: i32) { /// Gets a detailed string description for the given error number. 
pub fn error_string(errno: i32) -> String { - extern { - #[cfg_attr(any(target_os = "linux", target_env = "newlib"), - link_name = "__xpg_strerror_r")] - fn strerror_r(errnum: c_int, buf: *mut c_char, - buflen: libc::size_t) -> c_int; + extern "C" { + #[cfg_attr(any(target_os = "linux", target_env = "newlib"), link_name = "__xpg_strerror_r")] + fn strerror_r(errnum: c_int, buf: *mut c_char, buflen: libc::size_t) -> c_int; } let mut buf = [0 as c_char; TMPBUF_SZ]; @@ -154,41 +155,51 @@ pub fn chdir(p: &path::Path) -> io::Result<()> { } pub struct SplitPaths<'a> { - iter: iter::Map<slice::Split<'a, u8, fn(&u8) -> bool>, - fn(&'a [u8]) -> PathBuf>, + iter: iter::Map<slice::Split<'a, u8, fn(&u8) -> bool>, fn(&'a [u8]) -> PathBuf>, } pub fn split_paths(unparsed: &OsStr) -> SplitPaths<'_> { fn bytes_to_path(b: &[u8]) -> PathBuf { PathBuf::from(<OsStr as OsStrExt>::from_bytes(b)) } - fn is_separator(b: &u8) -> bool { *b == PATH_SEPARATOR } + fn is_separator(b: &u8) -> bool { + *b == PATH_SEPARATOR + } let unparsed = unparsed.as_bytes(); SplitPaths { - iter: unparsed.split(is_separator as fn(&u8) -> bool) - .map(bytes_to_path as fn(&[u8]) -> PathBuf) + iter: unparsed + .split(is_separator as fn(&u8) -> bool) + .map(bytes_to_path as fn(&[u8]) -> PathBuf), } } impl<'a> Iterator for SplitPaths<'a> { type Item = PathBuf; - fn next(&mut self) -> Option<PathBuf> { self.iter.next() } - fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() } + fn next(&mut self) -> Option<PathBuf> { + self.iter.next() + } + fn size_hint(&self) -> (usize, Option<usize>) { + self.iter.size_hint() + } } #[derive(Debug)] pub struct JoinPathsError; pub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError> - where I: Iterator<Item=T>, T: AsRef<OsStr> +where + I: Iterator<Item = T>, + T: AsRef<OsStr>, { let mut joined = Vec::new(); for (i, path) in paths.enumerate() { let path = path.as_ref().as_bytes(); - if i > 0 { joined.push(PATH_SEPARATOR) } + if i > 0 { + joined.push(PATH_SEPARATOR) + } if path.contains(&PATH_SEPARATOR) { - return Err(JoinPathsError) + return Err(JoinPathsError); } joined.extend_from_slice(path); } @@ -202,26 +213,41 @@ impl fmt::Display for JoinPathsError { } impl StdError for JoinPathsError { - fn description(&self) -> &str { "failed to join paths" } + fn description(&self) -> &str { + "failed to join paths" + } } #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] pub fn current_exe() -> io::Result<PathBuf> { unsafe { - let mut mib = [libc::CTL_KERN as c_int, - libc::KERN_PROC as c_int, - libc::KERN_PROC_PATHNAME as c_int, - -1 as c_int]; + let mut mib = [ + libc::CTL_KERN as c_int, + libc::KERN_PROC as c_int, + libc::KERN_PROC_PATHNAME as c_int, + -1 as c_int, + ]; let mut sz = 0; - cvt(libc::sysctl(mib.as_mut_ptr(), mib.len() as libc::c_uint, - ptr::null_mut(), &mut sz, ptr::null_mut(), 0))?; + cvt(libc::sysctl( + mib.as_mut_ptr(), + mib.len() as libc::c_uint, + ptr::null_mut(), + &mut sz, + ptr::null_mut(), + 0, + ))?; if sz == 0 { - return Err(io::Error::last_os_error()) + return Err(io::Error::last_os_error()); } let mut v: Vec<u8> = Vec::with_capacity(sz); - cvt(libc::sysctl(mib.as_mut_ptr(), mib.len() as libc::c_uint, - v.as_mut_ptr() as *mut libc::c_void, &mut sz, - ptr::null_mut(), 0))?; + cvt(libc::sysctl( + mib.as_mut_ptr(), + mib.len() as libc::c_uint, + v.as_mut_ptr() as *mut libc::c_void, + &mut sz, + ptr::null_mut(), + 0, + ))?; if sz == 0 { return Err(io::Error::last_os_error()); } @@ -236,17 +262,29 @@ pub fn current_exe() -> io::Result<PathBuf> { 
unsafe { let mib = [libc::CTL_KERN, libc::KERN_PROC_ARGS, -1, libc::KERN_PROC_PATHNAME]; let mut path_len: usize = 0; - cvt(libc::sysctl(mib.as_ptr(), mib.len() as libc::c_uint, - ptr::null_mut(), &mut path_len, - ptr::null(), 0))?; + cvt(libc::sysctl( + mib.as_ptr(), + mib.len() as libc::c_uint, + ptr::null_mut(), + &mut path_len, + ptr::null(), + 0, + ))?; if path_len <= 1 { - return Err(io::Error::new(io::ErrorKind::Other, - "KERN_PROC_PATHNAME sysctl returned zero-length string")) + return Err(io::Error::new( + io::ErrorKind::Other, + "KERN_PROC_PATHNAME sysctl returned zero-length string", + )); } let mut path: Vec<u8> = Vec::with_capacity(path_len); - cvt(libc::sysctl(mib.as_ptr(), mib.len() as libc::c_uint, - path.as_ptr() as *mut libc::c_void, &mut path_len, - ptr::null(), 0))?; + cvt(libc::sysctl( + mib.as_ptr(), + mib.len() as libc::c_uint, + path.as_ptr() as *mut libc::c_void, + &mut path_len, + ptr::null(), + 0, + ))?; path.set_len(path_len - 1); // chop off NUL Ok(PathBuf::from(OsString::from_vec(path))) } @@ -256,8 +294,10 @@ pub fn current_exe() -> io::Result<PathBuf> { if curproc_exe.is_file() { return crate::fs::read_link(curproc_exe); } - Err(io::Error::new(io::ErrorKind::Other, - "/proc/curproc/exe doesn't point to regular file.")) + Err(io::Error::new( + io::ErrorKind::Other, + "/proc/curproc/exe doesn't point to regular file.", + )) } sysctl().or_else(|_| procfs()) } @@ -265,21 +305,15 @@ pub fn current_exe() -> io::Result<PathBuf> { #[cfg(target_os = "openbsd")] pub fn current_exe() -> io::Result<PathBuf> { unsafe { - let mut mib = [libc::CTL_KERN, - libc::KERN_PROC_ARGS, - libc::getpid(), - libc::KERN_PROC_ARGV]; + let mut mib = [libc::CTL_KERN, libc::KERN_PROC_ARGS, libc::getpid(), libc::KERN_PROC_ARGV]; let mib = mib.as_mut_ptr(); let mut argv_len = 0; - cvt(libc::sysctl(mib, 4, ptr::null_mut(), &mut argv_len, - ptr::null_mut(), 0))?; + cvt(libc::sysctl(mib, 4, ptr::null_mut(), &mut argv_len, ptr::null_mut(), 0))?; let mut argv = Vec::<*const libc::c_char>::with_capacity(argv_len as usize); - cvt(libc::sysctl(mib, 4, argv.as_mut_ptr() as *mut _, - &mut argv_len, ptr::null_mut(), 0))?; + cvt(libc::sysctl(mib, 4, argv.as_mut_ptr() as *mut _, &mut argv_len, ptr::null_mut(), 0))?; argv.set_len(argv_len as usize); if argv[0].is_null() { - return Err(io::Error::new(io::ErrorKind::Other, - "no current exe available")) + return Err(io::Error::new(io::ErrorKind::Other, "no current exe available")); } let argv0 = CStr::from_ptr(argv[0]).to_bytes(); if argv0[0] == b'.' || argv0.iter().any(|b| *b == b'/') { @@ -293,29 +327,30 @@ pub fn current_exe() -> io::Result<PathBuf> { #[cfg(any(target_os = "linux", target_os = "android", target_os = "emscripten"))] pub fn current_exe() -> io::Result<PathBuf> { match crate::fs::read_link("/proc/self/exe") { - Err(ref e) if e.kind() == io::ErrorKind::NotFound => { - Err(io::Error::new( - io::ErrorKind::Other, - "no /proc/self/exe available. Is /proc mounted?" - )) - }, + Err(ref e) if e.kind() == io::ErrorKind::NotFound => Err(io::Error::new( + io::ErrorKind::Other, + "no /proc/self/exe available. 
Is /proc mounted?", + )), other => other, } } #[cfg(any(target_os = "macos", target_os = "ios"))] pub fn current_exe() -> io::Result<PathBuf> { - extern { - fn _NSGetExecutablePath(buf: *mut libc::c_char, - bufsize: *mut u32) -> libc::c_int; + extern "C" { + fn _NSGetExecutablePath(buf: *mut libc::c_char, bufsize: *mut u32) -> libc::c_int; } unsafe { let mut sz: u32 = 0; _NSGetExecutablePath(ptr::null_mut(), &mut sz); - if sz == 0 { return Err(io::Error::last_os_error()); } + if sz == 0 { + return Err(io::Error::last_os_error()); + } let mut v: Vec<u8> = Vec::with_capacity(sz as usize); let err = _NSGetExecutablePath(v.as_mut_ptr() as *mut i8, &mut sz); - if err != 0 { return Err(io::Error::last_os_error()); } + if err != 0 { + return Err(io::Error::last_os_error()); + } v.set_len(sz as usize - 1); // chop off trailing NUL Ok(PathBuf::from(OsString::from_vec(v))) } @@ -323,7 +358,7 @@ pub fn current_exe() -> io::Result<PathBuf> { #[cfg(any(target_os = "solaris"))] pub fn current_exe() -> io::Result<PathBuf> { - extern { + extern "C" { fn getexecname() -> *const c_char; } unsafe { @@ -336,11 +371,7 @@ pub fn current_exe() -> io::Result<PathBuf> { // Prepend a current working directory to the path if // it doesn't contain an absolute pathname. - if filename[0] == b'/' { - Ok(path) - } else { - getcwd().map(|cwd| cwd.join(path)) - } + if filename[0] == b'/' { Ok(path) } else { getcwd().map(|cwd| cwd.join(path)) } } } } @@ -354,11 +385,11 @@ pub fn current_exe() -> io::Result<PathBuf> { type_: i32, sequence: i32, init_order: i32, - init_routine: *mut libc::c_void, // function pointer - term_routine: *mut libc::c_void, // function pointer + init_routine: *mut libc::c_void, // function pointer + term_routine: *mut libc::c_void, // function pointer device: libc::dev_t, node: libc::ino_t, - name: [libc::c_char; 1024], // MAXPATHLEN + name: [libc::c_char; 1024], // MAXPATHLEN text: *mut libc::c_void, data: *mut libc::c_void, text_size: i32, @@ -368,16 +399,20 @@ pub fn current_exe() -> io::Result<PathBuf> { } unsafe { - extern { - fn _get_next_image_info(team_id: i32, cookie: *mut i32, - info: *mut image_info, size: i32) -> i32; + extern "C" { + fn _get_next_image_info( + team_id: i32, + cookie: *mut i32, + info: *mut image_info, + size: i32, + ) -> i32; } let mut info: image_info = mem::zeroed(); let mut cookie: i32 = 0; // the executable can be found at team id 0 - let result = _get_next_image_info(0, &mut cookie, &mut info, - mem::size_of::<image_info>() as i32); + let result = + _get_next_image_info(0, &mut cookie, &mut info, mem::size_of::<image_info>() as i32); if result != 0 { use crate::io::ErrorKind; Err(io::Error::new(ErrorKind::Other, "Error getting executable path")) @@ -406,19 +441,27 @@ pub struct Env { impl Iterator for Env { type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { self.iter.next() } - fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() } + fn next(&mut self) -> Option<(OsString, OsString)> { + self.iter.next() + } + fn size_hint(&self) -> (usize, Option<usize>) { + self.iter.size_hint() + } } #[cfg(target_os = "macos")] pub unsafe fn environ() -> *mut *const *const c_char { - extern { fn _NSGetEnviron() -> *mut *const *const c_char; } + extern "C" { + fn _NSGetEnviron() -> *mut *const *const c_char; + } _NSGetEnviron() } #[cfg(not(target_os = "macos"))] pub unsafe fn environ() -> *mut *const *const c_char { - extern { static mut environ: *const *const c_char; } + extern "C" { + static mut environ: *const *const 
c_char; + } &mut environ } @@ -442,10 +485,7 @@ pub fn env() -> Env { } environ = environ.offset(1); } - return Env { - iter: result.into_iter(), - _dont_send_or_sync_me: PhantomData, - } + return Env { iter: result.into_iter(), _dont_send_or_sync_me: PhantomData }; } fn parse(input: &[u8]) -> Option<(OsString, OsString)> { @@ -457,10 +497,12 @@ pub fn env() -> Env { return None; } let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); - pos.map(|p| ( - OsStringExt::from_vec(input[..p].to_vec()), - OsStringExt::from_vec(input[p+1..].to_vec()), - )) + pos.map(|p| { + ( + OsStringExt::from_vec(input[..p].to_vec()), + OsStringExt::from_vec(input[p + 1..].to_vec()), + ) + }) } } @@ -500,9 +542,7 @@ pub fn unsetenv(n: &OsStr) -> io::Result<()> { } pub fn page_size() -> usize { - unsafe { - libc::sysconf(libc::_SC_PAGESIZE) as usize - } + unsafe { libc::sysconf(libc::_SC_PAGESIZE) as usize } } pub fn temp_dir() -> PathBuf { @@ -516,19 +556,23 @@ pub fn temp_dir() -> PathBuf { } pub fn home_dir() -> Option<PathBuf> { - return crate::env::var_os("HOME").or_else(|| unsafe { - fallback() - }).map(PathBuf::from); - - #[cfg(any(target_os = "android", - target_os = "ios", - target_os = "emscripten", - target_os = "redox"))] - unsafe fn fallback() -> Option<OsString> { None } - #[cfg(not(any(target_os = "android", - target_os = "ios", - target_os = "emscripten", - target_os = "redox")))] + return crate::env::var_os("HOME").or_else(|| unsafe { fallback() }).map(PathBuf::from); + + #[cfg(any( + target_os = "android", + target_os = "ios", + target_os = "emscripten", + target_os = "redox" + ))] + unsafe fn fallback() -> Option<OsString> { + None + } + #[cfg(not(any( + target_os = "android", + target_os = "ios", + target_os = "emscripten", + target_os = "redox" + )))] unsafe fn fallback() -> Option<OsString> { let amt = match libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) { n if n < 0 => 512 as usize, @@ -537,13 +581,18 @@ pub fn home_dir() -> Option<PathBuf> { let mut buf = Vec::with_capacity(amt); let mut passwd: libc::passwd = mem::zeroed(); let mut result = ptr::null_mut(); - match libc::getpwuid_r(libc::getuid(), &mut passwd, buf.as_mut_ptr(), - buf.capacity(), &mut result) { + match libc::getpwuid_r( + libc::getuid(), + &mut passwd, + buf.as_mut_ptr(), + buf.capacity(), + &mut result, + ) { 0 if !result.is_null() => { let ptr = passwd.pw_dir as *const _; let bytes = CStr::from_ptr(ptr).to_bytes().to_vec(); Some(OsStringExt::from_vec(bytes)) - }, + } _ => None, } } @@ -589,7 +638,7 @@ fn parse_glibc_version(version: &str) -> Option<(usize, usize)> { let mut parsed_ints = version.split('.').map(str::parse::<usize>).fuse(); match (parsed_ints.next(), parsed_ints.next()) { (Some(Ok(major)), Some(Ok(minor))) => Some((major, minor)), - _ => None + _ => None, } } diff --git a/src/libsyntax_expand/parse/lexer/tests.rs b/src/libsyntax_expand/parse/lexer/tests.rs index 75e4ee805b2..2ca0224812b 100644 --- a/src/libsyntax_expand/parse/lexer/tests.rs +++ b/src/libsyntax_expand/parse/lexer/tests.rs @@ -1,14 +1,14 @@ use rustc_data_structures::sync::Lrc; use rustc_parse::lexer::StringReader; -use syntax::token::{self, Token, TokenKind}; use syntax::sess::ParseSess; -use syntax::source_map::{SourceMap, FilePathMapping}; +use syntax::source_map::{FilePathMapping, SourceMap}; +use syntax::token::{self, Token, TokenKind}; use syntax::util::comments::is_doc_comment; use syntax::with_default_globals; use syntax_pos::symbol::Symbol; use syntax_pos::{BytePos, Span}; -use errors::{Handler, emitter::EmitterWriter}; +use 
errors::{emitter::EmitterWriter, Handler}; use std::io; use std::path::PathBuf; @@ -22,17 +22,11 @@ fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess { None, false, ); - ParseSess::with_span_handler( - Handler::with_emitter(true, None, Box::new(emitter)), - sm, - ) + ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm) } // Creates a string reader for the given string. -fn setup<'a>(sm: &SourceMap, - sess: &'a ParseSess, - teststr: String) - -> StringReader<'a> { +fn setup<'a>(sm: &SourceMap, sess: &'a ParseSess, teststr: String) -> StringReader<'a> { let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr); StringReader::new(sess, sf, None) } @@ -50,20 +44,14 @@ fn t1() { assert_eq!(string_reader.next_token(), token::Comment); assert_eq!(string_reader.next_token(), token::Whitespace); let tok1 = string_reader.next_token(); - let tok2 = Token::new( - mk_ident("fn"), - Span::with_root_ctxt(BytePos(21), BytePos(23)), - ); + let tok2 = Token::new(mk_ident("fn"), Span::with_root_ctxt(BytePos(21), BytePos(23))); assert_eq!(tok1.kind, tok2.kind); assert_eq!(tok1.span, tok2.span); assert_eq!(string_reader.next_token(), token::Whitespace); // Read another token. let tok3 = string_reader.next_token(); assert_eq!(string_reader.pos.clone(), BytePos(28)); - let tok4 = Token::new( - mk_ident("main"), - Span::with_root_ctxt(BytePos(24), BytePos(28)), - ); + let tok4 = Token::new(mk_ident("main"), Span::with_root_ctxt(BytePos(24), BytePos(28))); assert_eq!(tok3.kind, tok4.kind); assert_eq!(tok3.span, tok4.span); @@ -142,10 +130,7 @@ fn character_a() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!( - setup(&sm, &sh, "'a'".to_string()).next_token(), - mk_lit(token::Char, "a", None), - ); + assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(), mk_lit(token::Char, "a", None),); }) } @@ -154,10 +139,7 @@ fn character_space() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!( - setup(&sm, &sh, "' '".to_string()).next_token(), - mk_lit(token::Char, " ", None), - ); + assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(), mk_lit(token::Char, " ", None),); }) } @@ -213,7 +195,7 @@ fn literal_suffixes() { setup(&sm, &sh, format!("{} suffix", $input)).next_token(), mk_lit(token::$tok_type, $tok_contents, None), ); - }} + }}; } test!("'a'", Char, "a"); diff --git a/src/libsyntax_expand/parse/tests.rs b/src/libsyntax_expand/parse/tests.rs index 30e83c151e2..154ccb25621 100644 --- a/src/libsyntax_expand/parse/tests.rs +++ b/src/libsyntax_expand/parse/tests.rs @@ -1,19 +1,19 @@ use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse}; +use errors::PResult; use rustc_parse::new_parser_from_source_str; use syntax::ast::{self, Name, PatKind}; use syntax::attr::first_attr_value_str_by_name; -use syntax::sess::ParseSess; -use syntax::token::{self, Token}; use syntax::print::pprust::item_to_string; use syntax::ptr::P; +use syntax::sess::ParseSess; use syntax::source_map::FilePathMapping; use syntax::symbol::{kw, sym}; -use syntax::tokenstream::{DelimSpan, TokenTree, TokenStream}; +use syntax::token::{self, Token}; +use syntax::tokenstream::{DelimSpan, TokenStream, TokenTree}; use syntax::visit; use syntax::with_default_globals; -use syntax_pos::{Span, BytePos, Pos, FileName}; -use errors::PResult; +use syntax_pos::{BytePos, FileName, Pos, Span}; use std::path::PathBuf; @@ -25,8 
+25,11 @@ fn sess() -> ParseSess { /// /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err` /// when a syntax error occurred. -fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess) - -> PResult<'_, Option<P<ast::Item>>> { +fn parse_item_from_source_str( + name: FileName, + source: String, + sess: &ParseSess, +) -> PResult<'_, Option<P<ast::Item>>> { new_parser_from_source_str(sess, name, source).parse_item() } @@ -36,17 +39,18 @@ fn sp(a: u32, b: u32) -> Span { } /// Parses a string, return an expression. -fn string_to_expr(source_str : String) -> P<ast::Expr> { +fn string_to_expr(source_str: String) -> P<ast::Expr> { with_error_checking_parse(source_str, &sess(), |p| p.parse_expr()) } /// Parses a string, returns an item. -fn string_to_item(source_str : String) -> Option<P<ast::Item>> { +fn string_to_item(source_str: String) -> Option<P<ast::Item>> { with_error_checking_parse(source_str, &sess(), |p| p.parse_item()) } #[should_panic] -#[test] fn bad_path_expr_1() { +#[test] +fn bad_path_expr_1() { with_default_globals(|| { string_to_expr("::abc::def::return".to_string()); }) @@ -54,51 +58,38 @@ fn string_to_item(source_str : String) -> Option<P<ast::Item>> { // Checks the token-tree-ization of macros. #[test] -fn string_to_tts_macro () { +fn string_to_tts_macro() { with_default_globals(|| { let tts: Vec<_> = string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect(); let tts: &[TokenTree] = &tts[..]; match tts { - [ - TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }), - TokenTree::Token(Token { kind: token::Not, .. }), - TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }), - TokenTree::Delimited(_, macro_delim, macro_tts) - ] - if name_macro_rules == &sym::macro_rules && name_zip.as_str() == "zip" => { + [TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }), TokenTree::Token(Token { kind: token::Not, .. }), TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }), TokenTree::Delimited(_, macro_delim, macro_tts)] + if name_macro_rules == &sym::macro_rules && name_zip.as_str() == "zip" => + { let tts = ¯o_tts.trees().collect::<Vec<_>>(); match &tts[..] { - [ - TokenTree::Delimited(_, first_delim, first_tts), - TokenTree::Token(Token { kind: token::FatArrow, .. }), - TokenTree::Delimited(_, second_delim, second_tts), - ] - if macro_delim == &token::Paren => { + [TokenTree::Delimited(_, first_delim, first_tts), TokenTree::Token(Token { kind: token::FatArrow, .. }), TokenTree::Delimited(_, second_delim, second_tts)] + if macro_delim == &token::Paren => + { let tts = &first_tts.trees().collect::<Vec<_>>(); match &tts[..] { - [ - TokenTree::Token(Token { kind: token::Dollar, .. }), - TokenTree::Token(Token { kind: token::Ident(name, false), .. }), - ] - if first_delim == &token::Paren && name.as_str() == "a" => {}, + [TokenTree::Token(Token { kind: token::Dollar, .. }), TokenTree::Token(Token { kind: token::Ident(name, false), .. })] + if first_delim == &token::Paren && name.as_str() == "a" => {} _ => panic!("value 3: {:?} {:?}", first_delim, first_tts), } let tts = &second_tts.trees().collect::<Vec<_>>(); match &tts[..] { - [ - TokenTree::Token(Token { kind: token::Dollar, .. }), - TokenTree::Token(Token { kind: token::Ident(name, false), .. }), - ] - if second_delim == &token::Paren && name.as_str() == "a" => {}, + [TokenTree::Token(Token { kind: token::Dollar, .. }), TokenTree::Token(Token { kind: token::Ident(name, false), .. 
})] + if second_delim == &token::Paren && name.as_str() == "a" => {} _ => panic!("value 4: {:?} {:?}", second_delim, second_tts), } - }, + } _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts), } - }, - _ => panic!("value: {:?}",tts), + } + _ => panic!("value: {:?}", tts), } }) } @@ -118,23 +109,28 @@ fn string_to_tts_1() { TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(), TokenTree::token(token::Colon, sp(8, 9)).into(), TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(), - ]).into(), - ).into(), + ]) + .into(), + ) + .into(), TokenTree::Delimited( DelimSpan::from_pair(sp(15, 16), sp(20, 21)), token::DelimToken::Brace, TokenStream::new(vec![ TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(), TokenTree::token(token::Semi, sp(18, 19)).into(), - ]).into(), - ).into() + ]) + .into(), + ) + .into(), ]); assert_eq!(tts, expected); }) } -#[test] fn parse_use() { +#[test] +fn parse_use() { with_default_globals(|| { let use_s = "use foo::bar::baz;"; let vitem = string_to_item(use_s.to_string()).unwrap(); @@ -148,7 +144,8 @@ fn string_to_tts_1() { }) } -#[test] fn parse_extern_crate() { +#[test] +fn parse_extern_crate() { with_default_globals(|| { let ex_s = "extern crate foo;"; let vitem = string_to_item(ex_s.to_string()).unwrap(); @@ -166,12 +163,12 @@ fn get_spans_of_pat_idents(src: &str) -> Vec<Span> { let item = string_to_item(src.to_string()).unwrap(); struct PatIdentVisitor { - spans: Vec<Span> + spans: Vec<Span>, } impl<'a> visit::Visitor<'a> for PatIdentVisitor { fn visit_pat(&mut self, p: &'a ast::Pat) { match p.kind { - PatKind::Ident(_ , ref ident, _) => { + PatKind::Ident(_, ref ident, _) => { self.spans.push(ident.span.clone()); } _ => { @@ -185,27 +182,32 @@ fn get_spans_of_pat_idents(src: &str) -> Vec<Span> { return v.spans; } -#[test] fn span_of_self_arg_pat_idents_are_correct() { +#[test] +fn span_of_self_arg_pat_idents_are_correct() { with_default_globals(|| { - - let srcs = ["impl z { fn a (&self, &myarg: i32) {} }", - "impl z { fn a (&mut self, &myarg: i32) {} }", - "impl z { fn a (&'a self, &myarg: i32) {} }", - "impl z { fn a (self, &myarg: i32) {} }", - "impl z { fn a (self: Foo, &myarg: i32) {} }", - ]; + let srcs = [ + "impl z { fn a (&self, &myarg: i32) {} }", + "impl z { fn a (&mut self, &myarg: i32) {} }", + "impl z { fn a (&'a self, &myarg: i32) {} }", + "impl z { fn a (self, &myarg: i32) {} }", + "impl z { fn a (self: Foo, &myarg: i32) {} }", + ]; for &src in &srcs { let spans = get_spans_of_pat_idents(src); let (lo, hi) = (spans[0].lo(), spans[0].hi()); - assert!("self" == &src[lo.to_usize()..hi.to_usize()], - "\"{}\" != \"self\". src=\"{}\"", - &src[lo.to_usize()..hi.to_usize()], src) + assert!( + "self" == &src[lo.to_usize()..hi.to_usize()], + "\"{}\" != \"self\". src=\"{}\"", + &src[lo.to_usize()..hi.to_usize()], + src + ) } }) } -#[test] fn parse_exprs () { +#[test] +fn parse_exprs() { with_default_globals(|| { // just make sure that they parse.... 
string_to_expr("3 + 4".to_string()); @@ -213,9 +215,11 @@ fn get_spans_of_pat_idents(src: &str) -> Vec<Span> { }) } -#[test] fn attrs_fix_bug () { +#[test] +fn attrs_fix_bug() { with_default_globals(|| { - string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) + string_to_item( + "pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) -> Result<Box<Writer>, String> { #[cfg(windows)] fn wb() -> c_int { @@ -226,27 +230,32 @@ fn wb() -> c_int { fn wb() -> c_int { O_WRONLY as c_int } let mut fflags: c_int = wb(); -}".to_string()); +}" + .to_string(), + ); }) } -#[test] fn crlf_doc_comments() { +#[test] +fn crlf_doc_comments() { with_default_globals(|| { let sess = sess(); let name_1 = FileName::Custom("crlf_source_1".to_string()); let source = "/// doc comment\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name_1, source, &sess) - .unwrap().unwrap(); + let item = parse_item_from_source_str(name_1, source, &sess).unwrap().unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap(); assert_eq!(doc.as_str(), "/// doc comment"); let name_2 = FileName::Custom("crlf_source_2".to_string()); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name_2, source, &sess) - .unwrap().unwrap(); - let docs = item.attrs.iter().filter(|a| a.has_name(sym::doc)) - .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>(); + let item = parse_item_from_source_str(name_2, source, &sess).unwrap().unwrap(); + let docs = item + .attrs + .iter() + .filter(|a| a.has_name(sym::doc)) + .map(|a| a.value_str().unwrap().to_string()) + .collect::<Vec<_>>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; assert_eq!(&docs[..], b); @@ -261,15 +270,21 @@ let mut fflags: c_int = wb(); #[test] fn ttdelim_span() { fn parse_expr_from_source_str( - name: FileName, source: String, sess: &ParseSess + name: FileName, + source: String, + sess: &ParseSess, ) -> PResult<'_, P<ast::Expr>> { new_parser_from_source_str(sess, name, source).parse_expr() } with_default_globals(|| { let sess = sess(); - let expr = parse_expr_from_source_str(PathBuf::from("foo").into(), - "foo!( fn main() { body } )".to_string(), &sess).unwrap(); + let expr = parse_expr_from_source_str( + PathBuf::from("foo").into(), + "foo!( fn main() { body } )".to_string(), + &sess, + ) + .unwrap(); let tts: Vec<_> = match expr.kind { ast::ExprKind::Mac(ref mac) => mac.args.inner_tokens().trees().collect(), @@ -295,7 +310,9 @@ fn out_of_line_mod() { PathBuf::from("foo").into(), "mod foo { struct S; mod this_does_not_exist; }".to_owned(), &sess(), - ).unwrap().unwrap(); + ) + .unwrap() + .unwrap(); if let ast::ItemKind::Mod(ref m) = item.kind { assert!(m.items.len() == 2); @@ -307,31 +324,31 @@ fn out_of_line_mod() { #[test] fn eqmodws() { - assert_eq!(matches_codepattern("",""),true); - assert_eq!(matches_codepattern("","a"),false); - assert_eq!(matches_codepattern("a",""),false); - assert_eq!(matches_codepattern("a","a"),true); - assert_eq!(matches_codepattern("a b","a \n\t\r b"),true); - assert_eq!(matches_codepattern("a b ","a \n\t\r b"),true); - assert_eq!(matches_codepattern("a b","a \n\t\r b "),false); - assert_eq!(matches_codepattern("a b","a b"),true); - assert_eq!(matches_codepattern("ab","a b"),false); - assert_eq!(matches_codepattern("a b","ab"),true); - assert_eq!(matches_codepattern(" a b","ab"),true); + assert_eq!(matches_codepattern("", ""), true); + assert_eq!(matches_codepattern("", "a"), false); + 
assert_eq!(matches_codepattern("a", ""), false); + assert_eq!(matches_codepattern("a", "a"), true); + assert_eq!(matches_codepattern("a b", "a \n\t\r b"), true); + assert_eq!(matches_codepattern("a b ", "a \n\t\r b"), true); + assert_eq!(matches_codepattern("a b", "a \n\t\r b "), false); + assert_eq!(matches_codepattern("a b", "a b"), true); + assert_eq!(matches_codepattern("ab", "a b"), false); + assert_eq!(matches_codepattern("a b", "ab"), true); + assert_eq!(matches_codepattern(" a b", "ab"), true); } #[test] fn pattern_whitespace() { - assert_eq!(matches_codepattern("","\x0C"), false); - assert_eq!(matches_codepattern("a b ","a \u{0085}\n\t\r b"),true); - assert_eq!(matches_codepattern("a b","a \u{0085}\n\t\r b "),false); + assert_eq!(matches_codepattern("", "\x0C"), false); + assert_eq!(matches_codepattern("a b ", "a \u{0085}\n\t\r b"), true); + assert_eq!(matches_codepattern("a b", "a \u{0085}\n\t\r b "), false); } #[test] fn non_pattern_whitespace() { // These have the property 'White_Space' but not 'Pattern_White_Space' - assert_eq!(matches_codepattern("a b","a\u{2002}b"), false); - assert_eq!(matches_codepattern("a b","a\u{2002}b"), false); - assert_eq!(matches_codepattern("\u{205F}a b","ab"), false); - assert_eq!(matches_codepattern("a \u{3000}b","ab"), false); + assert_eq!(matches_codepattern("a b", "a\u{2002}b"), false); + assert_eq!(matches_codepattern("a b", "a\u{2002}b"), false); + assert_eq!(matches_codepattern("\u{205F}a b", "ab"), false); + assert_eq!(matches_codepattern("a \u{3000}b", "ab"), false); } diff --git a/src/libsyntax_ext/test.rs b/src/libsyntax_ext/test.rs index 8656100c921..edf427edaae 100644 --- a/src/libsyntax_ext/test.rs +++ b/src/libsyntax_ext/test.rs @@ -1,14 +1,13 @@ /// The expansion from a test function to the appropriate test struct for libtest /// Ideally, this code would be in libtest but for efficiency and error messages it lives here. 
- use crate::util::check_builtin_macro_attribute; use syntax::ast; use syntax::attr; -use syntax_expand::base::*; use syntax::print::pprust; use syntax::source_map::respan; -use syntax::symbol::{Symbol, sym}; +use syntax::symbol::{sym, Symbol}; +use syntax_expand::base::*; use syntax_pos::Span; use std::iter; @@ -24,24 +23,24 @@ pub fn expand_test_case( ecx: &mut ExtCtxt<'_>, attr_sp: Span, meta_item: &ast::MetaItem, - anno_item: Annotatable + anno_item: Annotatable, ) -> Vec<Annotatable> { check_builtin_macro_attribute(ecx, meta_item, sym::test_case); - if !ecx.ecfg.should_test { return vec![]; } + if !ecx.ecfg.should_test { + return vec![]; + } let sp = ecx.with_def_site_ctxt(attr_sp); let mut item = anno_item.expect_item(); item = item.map(|mut item| { item.vis = respan(item.vis.span, ast::VisibilityKind::Public); item.ident.span = item.ident.span.with_ctxt(sp.ctxt()); - item.attrs.push( - ecx.attribute(ecx.meta_word(sp, sym::rustc_test_marker)) - ); + item.attrs.push(ecx.attribute(ecx.meta_word(sp, sym::rustc_test_marker))); item }); - return vec![Annotatable::Item(item)] + return vec![Annotatable::Item(item)]; } pub fn expand_test( @@ -68,29 +67,39 @@ pub fn expand_test_or_bench( cx: &mut ExtCtxt<'_>, attr_sp: Span, item: Annotatable, - is_bench: bool + is_bench: bool, ) -> Vec<Annotatable> { // If we're not in test configuration, remove the annotated item - if !cx.ecfg.should_test { return vec![]; } + if !cx.ecfg.should_test { + return vec![]; + } - let item = - if let Annotatable::Item(i) = item { i } - else { - cx.parse_sess.span_diagnostic.span_fatal(item.span(), - "`#[test]` attribute is only allowed on non associated functions").raise(); - }; + let item = if let Annotatable::Item(i) = item { + i + } else { + cx.parse_sess + .span_diagnostic + .span_fatal( + item.span(), + "`#[test]` attribute is only allowed on non associated functions", + ) + .raise(); + }; if let ast::ItemKind::Mac(_) = item.kind { - cx.parse_sess.span_diagnostic.span_warn(item.span, - "`#[test]` attribute should not be used on macros. Use `#[cfg(test)]` instead."); + cx.parse_sess.span_diagnostic.span_warn( + item.span, + "`#[test]` attribute should not be used on macros. Use `#[cfg(test)]` instead.", + ); return vec![Annotatable::Item(item)]; } // has_*_signature will report any errors in the type so compilation // will fail. We shouldn't try to expand in this case because the errors // would be spurious. 
- if (!is_bench && !has_test_signature(cx, &item)) || - (is_bench && !has_bench_signature(cx, &item)) { + if (!is_bench && !has_test_signature(cx, &item)) + || (is_bench && !has_bench_signature(cx, &item)) + { return vec![Annotatable::Item(item)]; } @@ -99,19 +108,15 @@ pub fn expand_test_or_bench( let test_id = ast::Ident::new(sym::test, attr_sp); // creates test::$name - let test_path = |name| { - cx.path(sp, vec![test_id, cx.ident_of(name, sp)]) - }; + let test_path = |name| cx.path(sp, vec![test_id, cx.ident_of(name, sp)]); // creates test::ShouldPanic::$name - let should_panic_path = |name| { - cx.path(sp, vec![test_id, cx.ident_of("ShouldPanic", sp), cx.ident_of(name, sp)]) - }; + let should_panic_path = + |name| cx.path(sp, vec![test_id, cx.ident_of("ShouldPanic", sp), cx.ident_of(name, sp)]); // creates test::TestType::$name - let test_type_path = |name| { - cx.path(sp, vec![test_id, cx.ident_of("TestType", sp), cx.ident_of(name, sp)]) - }; + let test_type_path = + |name| cx.path(sp, vec![test_id, cx.ident_of("TestType", sp), cx.ident_of(name, sp)]); // creates $name: $expr let field = |name, expr| cx.field_imm(sp, cx.ident_of(name, sp), expr); @@ -120,101 +125,151 @@ pub fn expand_test_or_bench( // A simple ident for a lambda let b = cx.ident_of("b", attr_sp); - cx.expr_call(sp, cx.expr_path(test_path("StaticBenchFn")), vec![ - // |b| self::test::assert_test_result( - cx.lambda1(sp, - cx.expr_call(sp, cx.expr_path(test_path("assert_test_result")), vec![ - // super::$test_fn(b) - cx.expr_call(sp, - cx.expr_path(cx.path(sp, vec![item.ident])), - vec![cx.expr_ident(sp, b)]) - ]), - b - ) - // ) - ]) + cx.expr_call( + sp, + cx.expr_path(test_path("StaticBenchFn")), + vec![ + // |b| self::test::assert_test_result( + cx.lambda1( + sp, + cx.expr_call( + sp, + cx.expr_path(test_path("assert_test_result")), + vec![ + // super::$test_fn(b) + cx.expr_call( + sp, + cx.expr_path(cx.path(sp, vec![item.ident])), + vec![cx.expr_ident(sp, b)], + ), + ], + ), + b, + ), // ) + ], + ) } else { - cx.expr_call(sp, cx.expr_path(test_path("StaticTestFn")), vec![ - // || { - cx.lambda0(sp, - // test::assert_test_result( - cx.expr_call(sp, cx.expr_path(test_path("assert_test_result")), vec![ - // $test_fn() - cx.expr_call(sp, cx.expr_path(cx.path(sp, vec![item.ident])), vec![]) - // ) - ]) - // } - ) - // ) - ]) + cx.expr_call( + sp, + cx.expr_path(test_path("StaticTestFn")), + vec![ + // || { + cx.lambda0( + sp, + // test::assert_test_result( + cx.expr_call( + sp, + cx.expr_path(test_path("assert_test_result")), + vec![ + // $test_fn() + cx.expr_call(sp, cx.expr_path(cx.path(sp, vec![item.ident])), vec![]), // ) + ], + ), // } + ), // ) + ], + ) }; - let mut test_const = cx.item(sp, ast::Ident::new(item.ident.name, sp), + let mut test_const = cx.item( + sp, + ast::Ident::new(item.ident.name, sp), vec![ // #[cfg(test)] - cx.attribute(attr::mk_list_item(ast::Ident::new(sym::cfg, attr_sp), vec![ - attr::mk_nested_word_item(ast::Ident::new(sym::test, attr_sp)) - ])), + cx.attribute(attr::mk_list_item( + ast::Ident::new(sym::cfg, attr_sp), + vec![attr::mk_nested_word_item(ast::Ident::new(sym::test, attr_sp))], + )), // #[rustc_test_marker] cx.attribute(cx.meta_word(attr_sp, sym::rustc_test_marker)), ], // const $ident: test::TestDescAndFn = - ast::ItemKind::Const(cx.ty(sp, ast::TyKind::Path(None, test_path("TestDescAndFn"))), + ast::ItemKind::Const( + cx.ty(sp, ast::TyKind::Path(None, test_path("TestDescAndFn"))), // test::TestDescAndFn { - cx.expr_struct(sp, test_path("TestDescAndFn"), vec![ - // desc: 
test::TestDesc { - field("desc", cx.expr_struct(sp, test_path("TestDesc"), vec![ - // name: "path::to::test" - field("name", cx.expr_call(sp, cx.expr_path(test_path("StaticTestName")), - vec![ - cx.expr_str(sp, Symbol::intern(&item_path( - // skip the name of the root module - &cx.current_expansion.module.mod_path[1..], - &item.ident - ))) - ])), - // ignore: true | false - field("ignore", cx.expr_bool(sp, should_ignore(&item))), - // allow_fail: true | false - field("allow_fail", cx.expr_bool(sp, should_fail(&item))), - // should_panic: ... - field("should_panic", match should_panic(cx, &item) { - // test::ShouldPanic::No - ShouldPanic::No => cx.expr_path(should_panic_path("No")), - // test::ShouldPanic::Yes - ShouldPanic::Yes(None) => cx.expr_path(should_panic_path("Yes")), - // test::ShouldPanic::YesWithMessage("...") - ShouldPanic::Yes(Some(sym)) => cx.expr_call(sp, - cx.expr_path(should_panic_path("YesWithMessage")), - vec![cx.expr_str(sp, sym)]), - }), - // test_type: ... - field("test_type", match test_type(cx) { - // test::TestType::UnitTest - TestType::UnitTest => cx.expr_path(test_type_path("UnitTest")), - // test::TestType::IntegrationTest - TestType::IntegrationTest => cx.expr_path( - test_type_path("IntegrationTest") + cx.expr_struct( + sp, + test_path("TestDescAndFn"), + vec![ + // desc: test::TestDesc { + field( + "desc", + cx.expr_struct( + sp, + test_path("TestDesc"), + vec![ + // name: "path::to::test" + field( + "name", + cx.expr_call( + sp, + cx.expr_path(test_path("StaticTestName")), + vec![cx.expr_str( + sp, + Symbol::intern(&item_path( + // skip the name of the root module + &cx.current_expansion.module.mod_path[1..], + &item.ident, + )), + )], + ), + ), + // ignore: true | false + field("ignore", cx.expr_bool(sp, should_ignore(&item))), + // allow_fail: true | false + field("allow_fail", cx.expr_bool(sp, should_fail(&item))), + // should_panic: ... + field( + "should_panic", + match should_panic(cx, &item) { + // test::ShouldPanic::No + ShouldPanic::No => cx.expr_path(should_panic_path("No")), + // test::ShouldPanic::Yes + ShouldPanic::Yes(None) => { + cx.expr_path(should_panic_path("Yes")) + } + // test::ShouldPanic::YesWithMessage("...") + ShouldPanic::Yes(Some(sym)) => cx.expr_call( + sp, + cx.expr_path(should_panic_path("YesWithMessage")), + vec![cx.expr_str(sp, sym)], + ), + }, + ), + // test_type: ... + field( + "test_type", + match test_type(cx) { + // test::TestType::UnitTest + TestType::UnitTest => { + cx.expr_path(test_type_path("UnitTest")) + } + // test::TestType::IntegrationTest + TestType::IntegrationTest => { + cx.expr_path(test_type_path("IntegrationTest")) + } + // test::TestPath::Unknown + TestType::Unknown => { + cx.expr_path(test_type_path("Unknown")) + } + }, + ), + // }, + ], ), - // test::TestPath::Unknown - TestType::Unknown => cx.expr_path(test_type_path("Unknown")), - }), - // }, - ])), - // testfn: test::StaticTestFn(...) | test::StaticBenchFn(...) - field("testfn", test_fn) - // } - ]) - // } - )); - test_const = test_const.map(|mut tc| { tc.vis.node = ast::VisibilityKind::Public; tc}); + ), + // testfn: test::StaticTestFn(...) | test::StaticBenchFn(...) 
+ field("testfn", test_fn), // } + ], + ), // } + ), + ); + test_const = test_const.map(|mut tc| { + tc.vis.node = ast::VisibilityKind::Public; + tc + }); // extern crate test - let test_extern = cx.item(sp, - test_id, - vec![], - ast::ItemKind::ExternCrate(None) - ); + let test_extern = cx.item(sp, test_id, vec![], ast::ItemKind::ExternCrate(None)); log::debug!("synthetic test item:\n{}\n", pprust::item_to_string(&test_const)); @@ -224,13 +279,17 @@ pub fn expand_test_or_bench( // The generated test case Annotatable::Item(test_const), // The original item - Annotatable::Item(item) + Annotatable::Item(item), ] } fn item_path(mod_path: &[ast::Ident], item_ident: &ast::Ident) -> String { - mod_path.iter().chain(iter::once(item_ident)) - .map(|x| x.to_string()).collect::<Vec<String>>().join("::") + mod_path + .iter() + .chain(iter::once(item_ident)) + .map(|x| x.to_string()) + .collect::<Vec<String>>() + .join("::") } enum ShouldPanic { @@ -254,7 +313,8 @@ fn should_panic(cx: &ExtCtxt<'_>, i: &ast::Item) -> ShouldPanic { match attr.meta_item_list() { // Handle #[should_panic(expected = "foo")] Some(list) => { - let msg = list.iter() + let msg = list + .iter() .find(|mi| mi.check_name(sym::expected)) .and_then(|mi| mi.meta_item()) .and_then(|mi| mi.value_str()); @@ -262,17 +322,21 @@ fn should_panic(cx: &ExtCtxt<'_>, i: &ast::Item) -> ShouldPanic { sd.struct_span_warn( attr.span, "argument must be of the form: \ - `expected = \"error message\"`" - ).note("Errors in this attribute were erroneously \ + `expected = \"error message\"`", + ) + .note( + "Errors in this attribute were erroneously \ allowed and will become a hard error in a \ - future release.").emit(); + future release.", + ) + .emit(); ShouldPanic::Yes(None) } else { ShouldPanic::Yes(msg) } - }, + } // Handle #[should_panic] and #[should_panic = "expected"] - None => ShouldPanic::Yes(attr.value_str()) + None => ShouldPanic::Yes(attr.value_str()), } } None => ShouldPanic::No, @@ -312,27 +376,20 @@ fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool { let ref sd = cx.parse_sess.span_diagnostic; if let ast::ItemKind::Fn(ref sig, ref generics, _) = i.kind { if sig.header.unsafety == ast::Unsafety::Unsafe { - sd.span_err( - i.span, - "unsafe functions cannot be used for tests" - ); - return false + sd.span_err(i.span, "unsafe functions cannot be used for tests"); + return false; } if sig.header.asyncness.node.is_async() { - sd.span_err( - i.span, - "async functions cannot be used for tests" - ); - return false + sd.span_err(i.span, "async functions cannot be used for tests"); + return false; } - // If the termination trait is active, the compiler will check that the output // type implements the `Termination` trait as `libtest` enforces that. let has_output = match sig.decl.output { ast::FunctionRetTy::Default(..) 
=> false, ast::FunctionRetTy::Ty(ref t) if t.kind.is_unit() => false, - _ => true + _ => true, }; if !sig.decl.inputs.is_empty() { @@ -344,15 +401,16 @@ fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool { (true, true) => { sd.span_err(i.span, "functions using `#[should_panic]` must return `()`"); false - }, - (true, false) => if !generics.params.is_empty() { - sd.span_err(i.span, - "functions used as tests must have signature fn() -> ()"); - false - } else { - true - }, - (false, _) => true + } + (true, false) => { + if !generics.params.is_empty() { + sd.span_err(i.span, "functions used as tests must have signature fn() -> ()"); + false + } else { + true + } + } + (false, _) => true, } } else { sd.span_err(i.span, "only functions may be used as tests"); @@ -370,8 +428,11 @@ fn has_bench_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool { }; if !has_sig { - cx.parse_sess.span_diagnostic.span_err(i.span, "functions used as benches must have \ - signature `fn(&mut Bencher) -> impl Termination`"); + cx.parse_sess.span_diagnostic.span_err( + i.span, + "functions used as benches must have \ + signature `fn(&mut Bencher) -> impl Termination`", + ); } has_sig diff --git a/src/tools/build-manifest/src/main.rs b/src/tools/build-manifest/src/main.rs index 7e76604355d..481163a1a9a 100644 --- a/src/tools/build-manifest/src/main.rs +++ b/src/tools/build-manifest/src/main.rs @@ -6,16 +6,16 @@ #![deny(warnings)] -use toml; use serde::Serialize; +use toml; use std::collections::BTreeMap; +use std::collections::HashMap; use std::env; use std::fs::{self, File}; use std::io::{self, Read, Write}; -use std::path::{PathBuf, Path}; +use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; -use std::collections::HashMap; static HOSTS: &[&str] = &[ "aarch64-unknown-linux-gnu", @@ -154,10 +154,7 @@ static DOCS_TARGETS: &[&str] = &[ "x86_64-unknown-linux-gnu", ]; -static MINGW: &[&str] = &[ - "i686-pc-windows-gnu", - "x86_64-pc-windows-gnu", -]; +static MINGW: &[&str] = &["i686-pc-windows-gnu", "x86_64-pc-windows-gnu"]; #[derive(Serialize)] #[serde(rename_all = "kebab-case")] @@ -193,7 +190,9 @@ struct Target { } impl Target { - fn unavailable() -> Self { Self::default() } + fn unavailable() -> Self { + Self::default() + } } #[derive(Serialize)] @@ -209,10 +208,12 @@ impl Component { } macro_rules! 
t { - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; } struct Builder { @@ -323,10 +324,21 @@ fn main() { miri_git_commit_hash: None, should_sign, - }.build(); + } + .build(); } -enum PkgType { RustSrc, Cargo, Rls, Clippy, Rustfmt, LlvmTools, Lldb, Miri, Other } +enum PkgType { + RustSrc, + Cargo, + Rls, + Clippy, + Rustfmt, + LlvmTools, + Lldb, + Miri, + Other, +} impl PkgType { fn from_component(component: &str) -> Self { @@ -362,8 +374,8 @@ impl Builder { self.rls_git_commit_hash = self.git_commit_hash("rls", "x86_64-unknown-linux-gnu"); self.clippy_git_commit_hash = self.git_commit_hash("clippy", "x86_64-unknown-linux-gnu"); self.rustfmt_git_commit_hash = self.git_commit_hash("rustfmt", "x86_64-unknown-linux-gnu"); - self.llvm_tools_git_commit_hash = self.git_commit_hash("llvm-tools", - "x86_64-unknown-linux-gnu"); + self.llvm_tools_git_commit_hash = + self.git_commit_hash("llvm-tools", "x86_64-unknown-linux-gnu"); self.lldb_git_commit_hash = self.git_commit_hash("lldb", "x86_64-unknown-linux-gnu"); self.miri_git_commit_hash = self.git_commit_hash("miri", "x86_64-unknown-linux-gnu"); @@ -381,11 +393,14 @@ impl Builder { /// Right now, we do this only for Miri. fn check_toolstate(&mut self) { let toolstates: Option<HashMap<String, String>> = - File::open(self.input.join("toolstates-linux.json")).ok() + File::open(self.input.join("toolstates-linux.json")) + .ok() .and_then(|f| serde_json::from_reader(&f).ok()); let toolstates = toolstates.unwrap_or_else(|| { - println!("WARNING: `toolstates-linux.json` missing/malformed; \ - assuming all tools failed"); + println!( + "WARNING: `toolstates-linux.json` missing/malformed; \ + assuming all tools failed" + ); HashMap::default() // Use empty map if anything went wrong. }); // Mark some tools as missing based on toolstate. @@ -442,16 +457,36 @@ impl Builder { fn add_profiles_to(&mut self, manifest: &mut Manifest) { let mut profile = |name, pkgs| self.profile(name, &mut manifest.profiles, pkgs); profile("minimal", &["rustc", "cargo", "rust-std", "rust-mingw"]); - profile("default", &[ - "rustc", "cargo", "rust-std", "rust-mingw", - "rust-docs", "rustfmt-preview", "clippy-preview" - ]); - profile("complete", &[ - "rustc", "cargo", "rust-std", "rust-mingw", - "rust-docs", "rustfmt-preview", "clippy-preview", - "rls-preview", "rust-src", "llvm-tools-preview", - "lldb-preview", "rust-analysis", "miri-preview" - ]); + profile( + "default", + &[ + "rustc", + "cargo", + "rust-std", + "rust-mingw", + "rust-docs", + "rustfmt-preview", + "clippy-preview", + ], + ); + profile( + "complete", + &[ + "rustc", + "cargo", + "rust-std", + "rust-mingw", + "rust-docs", + "rustfmt-preview", + "clippy-preview", + "rls-preview", + "rust-src", + "llvm-tools-preview", + "lldb-preview", + "rust-analysis", + "miri-preview", + ], + ); // The compiler libraries are not stable for end users, and they're also huge, so we only // `rustc-dev` for nightly users, and only in the "complete" profile. 
It's still possible @@ -462,10 +497,9 @@ impl Builder { } fn add_renames_to(&self, manifest: &mut Manifest) { - let mut rename = |from: &str, to: &str| manifest.renames.insert( - from.to_owned(), - Rename { to: to.to_owned() } - ); + let mut rename = |from: &str, to: &str| { + manifest.renames.insert(from.to_owned(), Rename { to: to.to_owned() }) + }; rename("rls", "rls-preview"); rename("rustfmt", "rustfmt-preview"); rename("clippy", "clippy-preview"); @@ -474,10 +508,11 @@ impl Builder { fn rust_package(&mut self, manifest: &Manifest) -> Package { let mut pkg = Package { - version: self.cached_version("rust") - .as_ref() - .expect("Couldn't find Rust version") - .clone(), + version: self + .cached_version("rust") + .as_ref() + .expect("Couldn't find Rust version") + .clone(), git_commit_hash: self.cached_git_commit_hash("rust").clone(), target: BTreeMap::new(), }; @@ -486,7 +521,7 @@ impl Builder { pkg.target.insert(host.to_string(), target); } else { pkg.target.insert(host.to_string(), Target::unavailable()); - continue + continue; } } pkg @@ -527,14 +562,12 @@ impl Builder { ]); extensions.extend( - TARGETS.iter() + TARGETS + .iter() .filter(|&&target| target != host) - .map(|target| Component::from_str("rust-std", target)) - ); - extensions.extend( - HOSTS.iter() - .map(|target| Component::from_str("rustc-dev", target)) + .map(|target| Component::from_str("rust-std", target)), ); + extensions.extend(HOSTS.iter().map(|target| Component::from_str("rustc-dev", target))); extensions.push(Component::from_str("rust-src", "*")); // If the components/extensions don't actually exist for this @@ -542,7 +575,7 @@ impl Builder { // lists. let has_component = |c: &Component| { if c.target == "*" { - return true + return true; } let pkg = match manifest.pkg.get(&c.pkg) { Some(p) => p, @@ -564,26 +597,29 @@ impl Builder { }) } - fn profile(&mut self, - profile_name: &str, - dst: &mut BTreeMap<String, Vec<String>>, - pkgs: &[&str]) { + fn profile( + &mut self, + profile_name: &str, + dst: &mut BTreeMap<String, Vec<String>>, + pkgs: &[&str], + ) { dst.insert(profile_name.to_owned(), pkgs.iter().map(|s| (*s).to_owned()).collect()); } - fn extend_profile(&mut self, - profile_name: &str, - dst: &mut BTreeMap<String, Vec<String>>, - pkgs: &[&str]) { - dst.get_mut(profile_name).expect("existing profile") + fn extend_profile( + &mut self, + profile_name: &str, + dst: &mut BTreeMap<String, Vec<String>>, + pkgs: &[&str], + ) { + dst.get_mut(profile_name) + .expect("existing profile") .extend(pkgs.iter().map(|s| (*s).to_owned())); } - fn package(&mut self, - pkgname: &str, - dst: &mut BTreeMap<String, Package>, - targets: &[&str]) { - let (version, mut is_present) = self.cached_version(pkgname) + fn package(&mut self, pkgname: &str, dst: &mut BTreeMap<String, Package>, targets: &[&str]) { + let (version, mut is_present) = self + .cached_version(pkgname) .as_ref() .cloned() .map(|version| (version, true)) @@ -594,46 +630,52 @@ impl Builder { is_present = false; // Pretend the component is entirely missing. } - let targets = targets.iter().map(|name| { - if is_present { - // The component generally exists, but it might still be missing for this target. - let filename = self.filename(pkgname, name); - let digest = match self.digests.remove(&filename) { - Some(digest) => digest, - // This component does not exist for this target -- skip it. 
@@ -594,46 +630,52 @@ impl Builder {
             is_present = false; // Pretend the component is entirely missing.
         }

-        let targets = targets.iter().map(|name| {
-            if is_present {
-                // The component generally exists, but it might still be missing for this target.
-                let filename = self.filename(pkgname, name);
-                let digest = match self.digests.remove(&filename) {
-                    Some(digest) => digest,
-                    // This component does not exist for this target -- skip it.
-                    None => return (name.to_string(), Target::unavailable()),
-                };
-                let xz_filename = filename.replace(".tar.gz", ".tar.xz");
-                let xz_digest = self.digests.remove(&xz_filename);
-
-                (name.to_string(), Target {
-                    available: true,
-                    url: Some(self.url(&filename)),
-                    hash: Some(digest),
-                    xz_url: xz_digest.as_ref().map(|_| self.url(&xz_filename)),
-                    xz_hash: xz_digest,
-                    components: None,
-                    extensions: None,
-                })
-            } else {
-                // If the component is not present for this build add it anyway but mark it as
-                // unavailable -- this way rustup won't allow upgrades without --force
-                (name.to_string(), Target::unavailable())
-            }
-        }).collect();
-
-        dst.insert(pkgname.to_string(), Package {
-            version,
-            git_commit_hash: self.cached_git_commit_hash(pkgname).clone(),
-            target: targets,
-        });
+        let targets = targets
+            .iter()
+            .map(|name| {
+                if is_present {
+                    // The component generally exists, but it might still be missing for this target.
+                    let filename = self.filename(pkgname, name);
+                    let digest = match self.digests.remove(&filename) {
+                        Some(digest) => digest,
+                        // This component does not exist for this target -- skip it.
+                        None => return (name.to_string(), Target::unavailable()),
+                    };
+                    let xz_filename = filename.replace(".tar.gz", ".tar.xz");
+                    let xz_digest = self.digests.remove(&xz_filename);
+
+                    (
+                        name.to_string(),
+                        Target {
+                            available: true,
+                            url: Some(self.url(&filename)),
+                            hash: Some(digest),
+                            xz_url: xz_digest.as_ref().map(|_| self.url(&xz_filename)),
+                            xz_hash: xz_digest,
+                            components: None,
+                            extensions: None,
+                        },
+                    )
+                } else {
+                    // If the component is not present for this build add it anyway but mark it as
+                    // unavailable -- this way rustup won't allow upgrades without --force
+                    (name.to_string(), Target::unavailable())
+                }
+            })
+            .collect();
+
+        dst.insert(
+            pkgname.to_string(),
+            Package {
+                version,
+                git_commit_hash: self.cached_git_commit_hash(pkgname).clone(),
+                target: targets,
+            },
+        );
     }

     fn url(&self, filename: &str) -> String {
-        format!("{}/{}/{}",
-                self.s3_address,
-                self.date,
-                filename)
+        format!("{}/{}/{}", self.s3_address, self.date, filename)
     }

     fn filename(&self, component: &str, target: &str) -> String {
@@ -689,14 +731,14 @@ impl Builder {

     fn untar<F>(&self, component: &str, target: &str, dir: F) -> Option<String>
     where
-        F: FnOnce(String) -> String
+        F: FnOnce(String) -> String,
     {
         let mut cmd = Command::new("tar");
         let filename = self.filename(component, target);
         cmd.arg("xf")
-           .arg(self.input.join(&filename))
-           .arg(dir(filename.replace(".tar.gz", "")))
-           .arg("-O");
+            .arg(self.input.join(&filename))
+            .arg(dir(filename.replace(".tar.gz", "")))
+            .arg("-O");
         let output = t!(cmd.output());
         if output.status.success() {
             Some(String::from_utf8_lossy(&output.stdout).trim().to_string())
@@ -707,10 +749,11 @@ impl Builder {

     fn hash(&self, path: &Path) -> String {
         let sha = t!(Command::new("shasum")
-                        .arg("-a").arg("256")
-                        .arg(path.file_name().unwrap())
-                        .current_dir(path.parent().unwrap())
-                        .output());
+            .arg("-a")
+            .arg("256")
+            .arg(path.file_name().unwrap())
+            .current_dir(path.parent().unwrap())
+            .output());
         assert!(sha.status.success());

         let filename = path.file_name().unwrap().to_str().unwrap();
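The hash() helper above shells out to `shasum -a 256`, running it from the file's parent directory. Below is a standalone sketch of the same idea; the command invocation mirrors the hunk, while the error handling (expect instead of the file's t! macro) and the parsing of shasum's "<digest>  <name>" output are assumptions made for this example. It needs a shasum binary on PATH.

    use std::path::Path;
    use std::process::Command;

    // Compute a SHA-256 digest by invoking `shasum`, as the hash() hunk above does.
    fn sha256_of(path: &Path) -> String {
        let out = Command::new("shasum")
            .arg("-a")
            .arg("256")
            .arg(path.file_name().unwrap())
            .current_dir(path.parent().unwrap())
            .output()
            .expect("failed to run shasum");
        assert!(out.status.success());

        // shasum prints "<digest>  <filename>"; keep only the digest.
        String::from_utf8_lossy(&out.stdout)
            .split_whitespace()
            .next()
            .expect("unexpected shasum output")
            .to_string()
    }

    fn main() {
        // Example usage: hash any file that exists at this path.
        let digest = sha256_of(Path::new("./Cargo.toml"));
        println!("sha256: {}", digest);
    }

Running the command from the parent directory and passing only the file name keeps the path out of shasum's output line, which makes the digest easier to extract.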
.arg("--output") + .arg(&asc) + .arg("--detach-sign") + .arg(path) .stdin(Stdio::piped()); let mut child = t!(cmd.spawn()); t!(child.stdin.take().unwrap().write_all(self.gpg_passphrase.as_bytes())); @@ -748,8 +795,11 @@ impl Builder { fn write_channel_files(&self, channel_name: &str, manifest: &Manifest) { self.write(&toml::to_string(&manifest).unwrap(), channel_name, ".toml"); self.write(&manifest.date, channel_name, "-date.txt"); - self.write(manifest.pkg["rust"].git_commit_hash.as_ref().unwrap(), - channel_name, "-git-commit-hash.txt"); + self.write( + manifest.pkg["rust"].git_commit_hash.as_ref().unwrap(), + channel_name, + "-git-commit-hash.txt", + ); } fn write(&self, contents: &str, channel_name: &str, suffix: &str) { |
