| author | Aaron Hill <aa1ronham@gmail.com> | 2017-11-22 16:16:55 -0500 |
|---|---|---|
| committer | Aaron Hill <aa1ronham@gmail.com> | 2018-02-18 16:29:24 -0500 |
| commit | 6728f21d85d347bcd5e7ca919b4e9f0c2677572b (patch) | |
| tree | 69ccb4351afdb8b46c28191a809182f6cfbe06ce /src | |
| parent | 27a046e9338fb0455c33b13e8fe28da78212dedc (diff) | |
| download | rust-6728f21d85d347bcd5e7ca919b4e9f0c2677572b.tar.gz rust-6728f21d85d347bcd5e7ca919b4e9f0c2677572b.zip | |
Generate documentation for auto-trait impls
A new section is added to both struct and trait doc pages.
On struct/enum pages, a new 'Auto Trait Implementations' section displays any
synthetic implementations for auto traits. Currently, this is only done
for Send and Sync.
On trait pages, a new 'Auto Implementors' section displays all types
which automatically implement the trait. Effectively, this is a list of
all public types in the standard library.
Synthesized impls for a particular auto trait ('synthetic impls') take
into account generic bounds. For example, a type 'struct Foo<T>(T)' will
have 'impl<T> Send for Foo<T> where T: Send' generated for it.
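For illustration, a minimal sketch (not part of this commit; `assert_send` is a throwaway helper introduced only here) showing that the displayed synthetic impl matches what the compiler already infers for such a type:

```rust
// Hypothetical example type, mirroring the 'struct Foo<T>(T)' above.
struct Foo<T>(T);

// Helper used only for a compile-time check.
fn assert_send<T: Send>() {}

fn main() {
    // Compiles: i32 is Send, so Foo<i32> satisfies the synthetic
    // `impl<T> Send for Foo<T> where T: Send`.
    assert_send::<Foo<i32>>();

    // Would fail to compile if uncommented: Rc<i32> is not Send,
    // so the `T: Send` bound cannot be met.
    // assert_send::<Foo<std::rc::Rc<i32>>>();
}
```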
Manual implementations of auto traits are also taken into account. If we have
the following types:
'struct Foo<T>(T)'
'struct Wrapper<T>(Foo<T>)'
'unsafe impl<T> Send for Wrapper<T>' // pretend that Wrapper<T> makes
this sound somehow
Then Wrapper will have the following impl generated:
'impl<T> Send for Wrapper<T>'
reflecting the fact that 'T: Send' need not hold for 'Wrapper<T>: Send'
to hold.
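A compilable sketch of the scenario above (assuming, as the comment says, that the manual impl is somehow sound; `assert_send` is again just a compile-time check):

```rust
use std::rc::Rc;

struct Foo<T>(T);
struct Wrapper<T>(Foo<T>);

// Manual impl; pretend that Wrapper<T> makes this sound somehow.
unsafe impl<T> Send for Wrapper<T> {}

fn assert_send<T: Send>() {}

fn main() {
    // Compiles even though Rc<i32> is not Send, which is exactly what the
    // displayed `impl<T> Send for Wrapper<T>` (with no `T: Send` bound) conveys.
    assert_send::<Wrapper<Rc<i32>>>();
}
```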
Lifetimes, HRTBs, and projections (e.g. '<T as Iterator>::Item') are
taken into account by synthetic impls.
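As a hedged illustration of the projection handling (a hypothetical type, not taken from this patch), a field of type `<T as Iterator>::Item` would be expected to surface as a bound on the projection rather than on `T` itself:

```rust
// Hypothetical type: only the iterator's Item is stored, so the synthetic
// impl shown in the docs is expected to look roughly like
//
//     impl<T> Send for Buffered<T>
//     where
//         T: Iterator,
//         <T as Iterator>::Item: Send,
//
struct Buffered<T: Iterator> {
    items: Vec<T::Item>,
}
```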
However, if a type can *never* implement a particular auto trait
(e.g. 'struct MyStruct<T>(*const T)'), then a negative impl will be
generated (in this case, 'impl<T> !Send for MyStruct<T>').
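The same compile-check trick shows why the negative impl can be unconditional (sketch, not from the patch):

```rust
struct MyStruct<T>(*const T);

fn assert_send<T: Send>() {}

fn main() {
    // Fails to compile for any T if uncommented: even with T = i32 (which is
    // Send), *const T is never Send, hence the unconditional
    // `impl<T> !Send for MyStruct<T>`.
    // assert_send::<MyStruct<i32>>();
}
```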
All of this means that a user should be able to copy-paste a synthetic
impl into their code, without any observable changes in behavior
(assuming the rest of the program remains unchanged).
Diffstat (limited to 'src')
29 files changed, 2483 insertions, 151 deletions
diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example -Subproject ebb28c95b2ea68b96eddb9e71aff4d32eacc74f +Subproject 919980be7df4ea7d45a9dca8efc34da89bcf7d6 diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 43cc437e1e7..61a58a60306 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -72,6 +72,10 @@ impl DefPathTable { index } + pub fn next_id(&self, address_space: DefIndexAddressSpace) -> DefIndex { + DefIndex::from_array_index(self.index_to_key[address_space.index()].len(), address_space) + } + #[inline(always)] pub fn def_key(&self, index: DefIndex) -> DefKey { self.index_to_key[index.address_space().index()] diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index 07c5b319970..2a620ecdfff 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -180,7 +180,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { // for each body-id in this map, which will process the // obligations within. This is expected to be done 'late enough' // that all type inference variables have been bound and so forth. - region_obligations: RefCell<Vec<(ast::NodeId, RegionObligation<'tcx>)>>, + pub region_obligations: RefCell<Vec<(ast::NodeId, RegionObligation<'tcx>)>>, } /// A map returned by `skolemize_late_bound_regions()` indicating the skolemized @@ -1555,11 +1555,20 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { InferOk { value, obligations } } - fn borrow_region_constraints(&self) -> RefMut<'_, RegionConstraintCollector<'tcx>> { + pub fn borrow_region_constraints(&self) -> RefMut<'_, RegionConstraintCollector<'tcx>> { RefMut::map( self.region_constraints.borrow_mut(), |c| c.as_mut().expect("region constraints already solved")) } + + /// Clears the selection, evaluation, and projection cachesThis is useful when + /// repeatedly attemping to select an Obligation while chagning only + /// its ParamEnv, since FulfillmentContext doesn't use 'probe' + pub fn clear_caches(&self) { + self.selection_cache.clear(); + self.evaluation_cache.clear(); + self.projection_cache.borrow_mut().clear(); + } } impl<'a, 'gcx, 'tcx> TypeTrace<'tcx> { diff --git a/src/librustc/infer/region_constraints/mod.rs b/src/librustc/infer/region_constraints/mod.rs index 68d81a2dee3..be196192371 100644 --- a/src/librustc/infer/region_constraints/mod.rs +++ b/src/librustc/infer/region_constraints/mod.rs @@ -82,7 +82,7 @@ pub type VarOrigins = IndexVec<RegionVid, RegionVariableOrigin>; /// Describes constraints between the region variables and other /// regions, as well as other conditions that must be verified, or /// assumptions that can be made. -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct RegionConstraintData<'tcx> { /// Constraints of the form `A <= B`, where either `A` or `B` can /// be a region variable (or neither, as it happens). @@ -142,7 +142,7 @@ pub enum Constraint<'tcx> { /// outlive `RS`. Therefore verify that `R <= RS[i]` for some /// `i`. Inference variables may be involved (but this verification /// step doesn't influence inference). -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Verify<'tcx> { pub kind: GenericKind<'tcx>, pub origin: SubregionOrigin<'tcx>, @@ -159,7 +159,7 @@ pub enum GenericKind<'tcx> { /// When we introduce a verification step, we wish to test that a /// particular region (let's call it `'min`) meets some bound. 
/// The bound is described the by the following grammar: -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum VerifyBound<'tcx> { /// B = exists {R} --> some 'r in {R} must outlive 'min /// @@ -288,6 +288,10 @@ impl<'tcx> RegionConstraintCollector<'tcx> { &self.var_origins } + pub fn region_constraint_data(&self) -> &RegionConstraintData<'tcx> { + &self.data + } + /// Once all the constraints have been gathered, extract out the final data. /// /// Not legal during a snapshot. diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 41cc8ca601a..31836f7e3c5 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -32,8 +32,8 @@ use syntax_pos::{Span, DUMMY_SP}; pub use self::coherence::{orphan_check, overlapping_impls, OrphanCheckErr, OverlapResult}; pub use self::fulfill::FulfillmentContext; pub use self::project::MismatchedProjectionTypes; -pub use self::project::{normalize, normalize_projection_type, Normalized}; -pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal}; +pub use self::project::{normalize, normalize_projection_type, poly_project_and_unify_type}; +pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal, Normalized}; pub use self::object_safety::ObjectSafetyViolation; pub use self::object_safety::MethodViolationCode; pub use self::on_unimplemented::{OnUnimplementedDirective, OnUnimplementedNote}; diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index ae539f07336..0d0476e7c21 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -1596,6 +1596,10 @@ impl<'tcx> ProjectionCache<'tcx> { } } + pub fn clear(&mut self) { + self.map.clear(); + } + pub fn snapshot(&mut self) -> ProjectionCacheSnapshot { ProjectionCacheSnapshot { snapshot: self.map.snapshot() } } diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 4ed25646d43..e03f3e7f12c 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -93,6 +93,11 @@ pub struct SelectionContext<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { inferred_obligations: SnapshotVec<InferredObligationsSnapshotVecDelegate<'tcx>>, intercrate_ambiguity_causes: Option<Vec<IntercrateAmbiguityCause>>, + + /// Controls whether or not to filter out negative impls when selecting. 
+ /// This is used in librustdoc to distinguish between the lack of an impl + /// and a negative impl + allow_negative_impls: bool } #[derive(Clone, Debug)] @@ -424,6 +429,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { intercrate: None, inferred_obligations: SnapshotVec::new(), intercrate_ambiguity_causes: None, + allow_negative_impls: false } } @@ -436,6 +442,20 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { intercrate: Some(mode), inferred_obligations: SnapshotVec::new(), intercrate_ambiguity_causes: None, + allow_negative_impls: false + } + } + + pub fn with_negative(infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>, + allow_negative_impls: bool) -> SelectionContext<'cx, 'gcx, 'tcx> { + debug!("with_negative({:?})", allow_negative_impls); + SelectionContext { + infcx, + freshener: infcx.freshener(), + intercrate: None, + inferred_obligations: SnapshotVec::new(), + intercrate_ambiguity_causes: Vec::new(), + allow_negative_impls } } @@ -1086,7 +1106,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { fn filter_negative_impls(&self, candidate: SelectionCandidate<'tcx>) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> { if let ImplCandidate(def_id) = candidate { - if self.tcx().impl_polarity(def_id) == hir::ImplPolarity::Negative { + if !self.allow_negative_impls && + self.tcx().impl_polarity(def_id) == hir::ImplPolarity::Negative { return Err(Unimplemented) } } @@ -3337,6 +3358,10 @@ impl<'tcx> SelectionCache<'tcx> { hashmap: RefCell::new(FxHashMap()) } } + + pub fn clear(&self) { + *self.hashmap.borrow_mut() = FxHashMap() + } } impl<'tcx> EvaluationCache<'tcx> { @@ -3345,6 +3370,10 @@ impl<'tcx> EvaluationCache<'tcx> { hashmap: RefCell::new(FxHashMap()) } } + + pub fn clear(&self) { + *self.hashmap.borrow_mut() = FxHashMap() + } } impl<'o,'tcx> TraitObligationStack<'o,'tcx> { diff --git a/src/librustc_data_structures/snapshot_map/mod.rs b/src/librustc_data_structures/snapshot_map/mod.rs index cd7143ad3ce..cede6f14782 100644 --- a/src/librustc_data_structures/snapshot_map/mod.rs +++ b/src/librustc_data_structures/snapshot_map/mod.rs @@ -45,6 +45,11 @@ impl<K, V> SnapshotMap<K, V> } } + pub fn clear(&mut self) { + self.map.clear(); + self.undo_log.clear(); + } + pub fn insert(&mut self, key: K, value: V) -> bool { match self.map.insert(key.clone(), value) { None => { diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs new file mode 100644 index 00000000000..8f0d3929b52 --- /dev/null +++ b/src/librustdoc/clean/auto_trait.rs @@ -0,0 +1,1455 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use rustc::ty::TypeFoldable; + +use super::*; + +pub struct AutoTraitFinder<'a, 'tcx: 'a, 'rcx: 'a> { + pub cx: &'a core::DocContext<'a, 'tcx, 'rcx>, +} + +impl<'a, 'tcx, 'rcx> AutoTraitFinder<'a, 'tcx, 'rcx> { + pub fn get_with_def_id(&self, def_id: DefId) -> Vec<Item> { + let ty = self.cx.tcx.type_of(def_id); + + let def_ctor: fn(DefId) -> Def = match ty.sty { + ty::TyAdt(adt, _) => match adt.adt_kind() { + AdtKind::Struct => Def::Struct, + AdtKind::Enum => Def::Enum, + AdtKind::Union => Def::Union, + }, + _ => panic!("Unexpected type {:?}", def_id), + }; + + self.get_auto_trait_impls(def_id, def_ctor, None) + } + + pub fn get_with_node_id(&self, id: ast::NodeId, name: String) -> Vec<Item> { + let item = &self.cx.tcx.hir.expect_item(id).node; + let did = self.cx.tcx.hir.local_def_id(id); + + let def_ctor = match *item { + hir::ItemStruct(_, _) => Def::Struct, + hir::ItemUnion(_, _) => Def::Union, + hir::ItemEnum(_, _) => Def::Enum, + _ => panic!("Unexpected type {:?} {:?}", item, id), + }; + + self.get_auto_trait_impls(did, def_ctor, Some(name)) + } + + pub fn get_auto_trait_impls( + &self, + def_id: DefId, + def_ctor: fn(DefId) -> Def, + name: Option<String>, + ) -> Vec<Item> { + if self.cx + .tcx + .get_attrs(def_id) + .lists("doc") + .has_word("hidden") + { + debug!( + "get_auto_trait_impls(def_id={:?}, def_ctor={:?}): item has doc('hidden'), \ + aborting", + def_id, def_ctor + ); + return Vec::new(); + } + + let tcx = self.cx.tcx; + let generics = self.cx.tcx.generics_of(def_id); + + debug!( + "get_auto_trait_impls(def_id={:?}, def_ctor={:?}, generics={:?}", + def_id, def_ctor, generics + ); + let auto_traits: Vec<_> = self.cx + .send_trait + .and_then(|send_trait| { + self.get_auto_trait_impl_for( + def_id, + name.clone(), + generics.clone(), + def_ctor, + send_trait, + ) + }) + .into_iter() + .chain(self.get_auto_trait_impl_for( + def_id, + name.clone(), + generics.clone(), + def_ctor, + tcx.require_lang_item(lang_items::SyncTraitLangItem), + ).into_iter()) + .collect(); + + debug!( + "get_auto_traits: type {:?} auto_traits {:?}", + def_id, auto_traits + ); + auto_traits + } + + fn get_auto_trait_impl_for( + &self, + def_id: DefId, + name: Option<String>, + generics: ty::Generics, + def_ctor: fn(DefId) -> Def, + trait_def_id: DefId, + ) -> Option<Item> { + if !self.cx + .generated_synthetics + .borrow_mut() + .insert((def_id, trait_def_id)) + { + debug!( + "get_auto_trait_impl_for(def_id={:?}, generics={:?}, def_ctor={:?}, \ + trait_def_id={:?}): already generated, aborting", + def_id, generics, def_ctor, trait_def_id + ); + return None; + } + + let result = self.find_auto_trait_generics(def_id, trait_def_id, &generics); + + if result.is_auto() { + let trait_ = hir::TraitRef { + path: get_path_for_type(self.cx.tcx, trait_def_id, hir::def::Def::Trait), + ref_id: ast::DUMMY_NODE_ID, + }; + + let polarity; + + let new_generics = match result { + AutoTraitResult::PositiveImpl(new_generics) => { + polarity = None; + new_generics + } + AutoTraitResult::NegativeImpl => { + polarity = Some(ImplPolarity::Negative); + + // For negative impls, we use the generic params, but *not* the predicates, + // from the original type. Otherwise, the displayed impl appears to be a + // conditional negative impl, when it's really unconditional. + // + // For example, consider the struct Foo<T: Copy>(*mut T). Using + // the original predicates in our impl would cause us to generate + // `impl !Send for Foo<T: Copy>`, which makes it appear that Foo + // implements Send where T is not copy. 
+ // + // Instead, we generate `impl !Send for Foo<T>`, which better + // expresses the fact that `Foo<T>` never implements `Send`, + // regardless of the choice of `T`. + let real_generics = (&generics, &Default::default()); + + // Clean the generics, but ignore the '?Sized' bounds generated + // by the `Clean` impl + let clean_generics = real_generics.clean(self.cx); + + Generics { + params: clean_generics.params, + where_predicates: Vec::new(), + } + } + _ => unreachable!(), + }; + + let path = get_path_for_type(self.cx.tcx, def_id, def_ctor); + let mut segments = path.segments.into_vec(); + let last = segments.pop().unwrap(); + + let real_name = name.as_ref().map(|n| Symbol::from(n.as_str())); + + segments.push(hir::PathSegment::new( + real_name.unwrap_or(last.name), + self.generics_to_path_params(generics.clone()), + false, + )); + + let new_path = hir::Path { + span: path.span, + def: path.def, + segments: HirVec::from_vec(segments), + }; + + let ty = hir::Ty { + id: ast::DUMMY_NODE_ID, + node: hir::Ty_::TyPath(hir::QPath::Resolved(None, P(new_path))), + span: DUMMY_SP, + hir_id: hir::DUMMY_HIR_ID, + }; + + return Some(Item { + source: Span::empty(), + name: None, + attrs: Default::default(), + visibility: None, + def_id: self.next_def_id(def_id.krate), + stability: None, + deprecation: None, + inner: ImplItem(Impl { + unsafety: hir::Unsafety::Normal, + generics: new_generics, + provided_trait_methods: FxHashSet(), + trait_: Some(trait_.clean(self.cx)), + for_: ty.clean(self.cx), + items: Vec::new(), + polarity, + synthetic: true, + }), + }); + } + None + } + + fn generics_to_path_params(&self, generics: ty::Generics) -> hir::PathParameters { + let lifetimes = HirVec::from_vec( + generics + .regions + .iter() + .map(|p| { + let name = if p.name == "" { + hir::LifetimeName::Static + } else { + hir::LifetimeName::Name(p.name) + }; + + hir::Lifetime { + id: ast::DUMMY_NODE_ID, + span: DUMMY_SP, + name, + } + }) + .collect(), + ); + let types = HirVec::from_vec( + generics + .types + .iter() + .map(|p| P(self.ty_param_to_ty(p.clone()))) + .collect(), + ); + + hir::PathParameters { + lifetimes: lifetimes, + types: types, + bindings: HirVec::new(), + parenthesized: false, + } + } + + fn ty_param_to_ty(&self, param: ty::TypeParameterDef) -> hir::Ty { + debug!("ty_param_to_ty({:?}) {:?}", param, param.def_id); + hir::Ty { + id: ast::DUMMY_NODE_ID, + node: hir::Ty_::TyPath(hir::QPath::Resolved( + None, + P(hir::Path { + span: DUMMY_SP, + def: Def::TyParam(param.def_id), + segments: HirVec::from_vec(vec![hir::PathSegment::from_name(param.name)]), + }), + )), + span: DUMMY_SP, + hir_id: hir::DUMMY_HIR_ID, + } + } + + fn find_auto_trait_generics( + &self, + did: DefId, + trait_did: DefId, + generics: &ty::Generics, + ) -> AutoTraitResult { + let tcx = self.cx.tcx; + let ty = self.cx.tcx.type_of(did); + + let orig_params = tcx.param_env(did); + + let trait_ref = ty::TraitRef { + def_id: trait_did, + substs: tcx.mk_substs_trait(ty, &[]), + }; + + let trait_pred = ty::Binder(trait_ref); + + let bail_out = tcx.infer_ctxt().enter(|infcx| { + let mut selcx = SelectionContext::with_negative(&infcx, true); + let result = selcx.select(&Obligation::new( + ObligationCause::dummy(), + orig_params, + trait_pred.to_poly_trait_predicate(), + )); + match result { + Ok(Some(Vtable::VtableImpl(_))) => { + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): \ + manual impl found, bailing out", + did, trait_did, generics + ); + return true; + } + _ => return false, + }; + }); + + // If an 
explicit impl exists, it always takes priority over an auto impl + if bail_out { + return AutoTraitResult::ExplicitImpl; + } + + return tcx.infer_ctxt().enter(|mut infcx| { + let mut fresh_preds = FxHashSet(); + + // Due to the way projections are handled by SelectionContext, we need to run + // evaluate_predicates twice: once on the original param env, and once on the result of + // the first evaluate_predicates call + // + // The problem is this: most of rustc, including SelectionContext and traits::project, + // are designed to work with a concrete usage of a type (e.g. Vec<u8> + // fn<T>() { Vec<T> }. This information will generally never change - given + // the 'T' in fn<T>() { ... }, we'll never know anything else about 'T'. + // If we're unable to prove that 'T' implements a particular trait, we're done - + // there's nothing left to do but error out. + // + // However, synthesizing an auto trait impl works differently. Here, we start out with + // a set of initial conditions - the ParamEnv of the struct/enum/union we're dealing + // with - and progressively discover the conditions we need to fulfill for it to + // implement a certain auto trait. This ends up breaking two assumptions made by trait + // selection and projection: + // + // * We can always cache the result of a particular trait selection for the lifetime of + // an InfCtxt + // * Given a projection bound such as '<T as SomeTrait>::SomeItem = K', if 'T: + // SomeTrait' doesn't hold, then we don't need to care about the 'SomeItem = K' + // + // We fix the first assumption by manually clearing out all of the InferCtxt's caches + // in between calls to SelectionContext.select. This allows us to keep all of the + // intermediate types we create bound to the 'tcx lifetime, rather than needing to lift + // them between calls + // + // We fix the second assumption by reprocessing the result of our first call to + // evaluate_predicates. Using the example of '<T as SomeTrait>::SomeItem = K', our first + // pass will pick up 'T: SomeTrait', but not 'SomeItem = K'. On our second pass, + // traits::project will see that 'T: SomeTrait' is in our ParamEnv, allowing + // SelectionContext to return it back to us. + + let (new_env, user_env) = match self.evaluate_predicates( + &mut infcx, + did, + trait_did, + ty, + orig_params.clone(), + orig_params, + &mut fresh_preds, + false, + ) { + Some(e) => e, + None => return AutoTraitResult::NegativeImpl, + }; + + let (full_env, full_user_env) = self.evaluate_predicates( + &mut infcx, + did, + trait_did, + ty, + new_env.clone(), + user_env, + &mut fresh_preds, + true, + ).unwrap_or_else(|| { + panic!( + "Failed to fully process: {:?} {:?} {:?}", + ty, trait_did, orig_params + ) + }); + + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): fulfilling \ + with {:?}", + did, trait_did, generics, full_env + ); + infcx.clear_caches(); + + // At this point, we already have all of the bounds we need. FulfillmentContext is used + // to store all of the necessary region/lifetime bounds in the InferContext, as well as + // an additional sanity check. 
+ let mut fulfill = FulfillmentContext::new(); + fulfill.register_bound( + &infcx, + full_env, + ty, + trait_did, + ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID), + ); + fulfill.select_all_or_error(&infcx).unwrap_or_else(|e| { + panic!( + "Unable to fulfill trait {:?} for '{:?}': {:?}", + trait_did, ty, e + ) + }); + + let names_map: FxHashMap<String, Lifetime> = generics + .regions + .iter() + .map(|l| (l.name.as_str().to_string(), l.clean(self.cx))) + .collect(); + + let body_ids: FxHashSet<_> = infcx + .region_obligations + .borrow() + .iter() + .map(|&(id, _)| id) + .collect(); + + for id in body_ids { + infcx.process_registered_region_obligations(&[], None, full_env.clone(), id); + } + + let region_data = infcx + .borrow_region_constraints() + .region_constraint_data() + .clone(); + + let lifetime_predicates = self.handle_lifetimes(®ion_data, &names_map); + let vid_to_region = self.map_vid_to_region(®ion_data); + + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): computed \ + lifetime information '{:?}' '{:?}'", + did, trait_did, generics, lifetime_predicates, vid_to_region + ); + + let new_generics = self.param_env_to_generics( + infcx.tcx, + did, + full_user_env, + generics.clone(), + lifetime_predicates, + vid_to_region, + ); + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): finished with \ + {:?}", + did, trait_did, generics, new_generics + ); + return AutoTraitResult::PositiveImpl(new_generics); + }); + } + + fn clean_pred<'c, 'd, 'cx>( + &self, + infcx: &InferCtxt<'c, 'd, 'cx>, + p: ty::Predicate<'cx>, + ) -> ty::Predicate<'cx> { + infcx.freshen(p) + } + + fn evaluate_nested_obligations< + 'b, + 'c, + 'd, + 'cx, + T: Iterator<Item = Obligation<'cx, ty::Predicate<'cx>>>, + >( + &self, + ty: ty::Ty, + nested: T, + computed_preds: &'b mut FxHashSet<ty::Predicate<'cx>>, + fresh_preds: &'b mut FxHashSet<ty::Predicate<'cx>>, + predicates: &'b mut VecDeque<ty::PolyTraitPredicate<'cx>>, + select: &mut traits::SelectionContext<'c, 'd, 'cx>, + only_projections: bool, + ) -> bool { + let dummy_cause = ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID); + + for (obligation, predicate) in nested + .filter(|o| o.recursion_depth == 1) + .map(|o| (o.clone(), o.predicate.clone())) + { + let is_new_pred = + fresh_preds.insert(self.clean_pred(select.infcx(), predicate.clone())); + + match &predicate { + &ty::Predicate::Trait(ref p) => { + let substs = &p.skip_binder().trait_ref.substs; + + if self.is_of_param(substs) && !only_projections && is_new_pred { + computed_preds.insert(predicate); + } + predicates.push_back(p.clone()); + } + &ty::Predicate::Projection(p) => { + // If the projection isn't all type vars, then + // we don't want to add it as a bound + if self.is_of_param(p.skip_binder().projection_ty.substs) && is_new_pred { + computed_preds.insert(predicate); + } else { + match traits::poly_project_and_unify_type( + select, + &obligation.with(p.clone()), + ) { + Err(e) => { + debug!( + "evaluate_nested_obligations: Unable to unify predicate \ + '{:?}' '{:?}', bailing out", + ty, e + ); + return false; + } + Ok(Some(v)) => { + if !self.evaluate_nested_obligations( + ty, + v.clone().iter().cloned(), + computed_preds, + fresh_preds, + predicates, + select, + only_projections, + ) { + return false; + } + } + Ok(None) => { + panic!("Unexpected result when selecting {:?} {:?}", ty, obligation) + } + } + } + } + &ty::Predicate::RegionOutlives(ref binder) => { + if let Err(_) = select + .infcx() + 
.region_outlives_predicate(&dummy_cause, binder) + { + return false; + } + } + &ty::Predicate::TypeOutlives(ref binder) => { + match ( + binder.no_late_bound_regions(), + binder.map_bound_ref(|pred| pred.0).no_late_bound_regions(), + ) { + (None, Some(t_a)) => { + select.infcx().register_region_obligation( + ast::DUMMY_NODE_ID, + RegionObligation { + sup_type: t_a, + sub_region: select.infcx().tcx.types.re_static, + cause: dummy_cause.clone(), + }, + ); + } + (Some(ty::OutlivesPredicate(t_a, r_b)), _) => { + select.infcx().register_region_obligation( + ast::DUMMY_NODE_ID, + RegionObligation { + sup_type: t_a, + sub_region: r_b, + cause: dummy_cause.clone(), + }, + ); + } + _ => {} + }; + } + _ => panic!("Unexpected predicate {:?} {:?}", ty, predicate), + }; + } + return true; + } + + // The core logic responsible for computing the bounds for our synthesized impl. + // + // To calculate the bounds, we call SelectionContext.select in a loop. Like FulfillmentContext, + // we recursively select the nested obligations of predicates we encounter. However, whenver we + // encounter an UnimplementedError involving a type parameter, we add it to our ParamEnv. Since + // our goal is to determine when a particular type implements an auto trait, Unimplemented + // errors tell us what conditions need to be met. + // + // This method ends up working somewhat similary to FulfillmentContext, but with a few key + // differences. FulfillmentContext works under the assumption that it's dealing with concrete + // user code. According, it considers all possible ways that a Predicate could be met - which + // isn't always what we want for a synthesized impl. For example, given the predicate 'T: + // Iterator', FulfillmentContext can end up reporting an Unimplemented error for T: + // IntoIterator - since there's an implementation of Iteratpr where T: IntoIterator, + // FulfillmentContext will drive SelectionContext to consider that impl before giving up. If we + // were to rely on FulfillmentContext's decision, we might end up synthesizing an impl like + // this: + // 'impl<T> Send for Foo<T> where T: IntoIterator' + // + // While it might be technically true that Foo implements Send where T: IntoIterator, + // the bound is overly restrictive - it's really only necessary that T: Iterator. + // + // For this reason, evaluate_predicates handles predicates with type variables specially. When + // we encounter an Unimplemented error for a bound such as 'T: Iterator', we immediately add it + // to our ParamEnv, and add it to our stack for recursive evaluation. When we later select it, + // we'll pick up any nested bounds, without ever inferring that 'T: IntoIterator' needs to + // hold. + // + // One additonal consideration is supertrait bounds. Normally, a ParamEnv is only ever + // consutrcted once for a given type. As part of the construction process, the ParamEnv will + // have any supertrait bounds normalized - e.g. if we have a type 'struct Foo<T: Copy>', the + // ParamEnv will contain 'T: Copy' and 'T: Clone', since 'Copy: Clone'. When we construct our + // own ParamEnv, we need to do this outselves, through traits::elaborate_predicates, or else + // SelectionContext will choke on the missing predicates. However, this should never show up in + // the final synthesized generics: we don't want our generated docs page to contain something + // like 'T: Copy + Clone', as that's redundant. 
Therefore, we keep track of a separate + // 'user_env', which only holds the predicates that will actually be displayed to the user. + fn evaluate_predicates<'b, 'gcx, 'c>( + &self, + infcx: &mut InferCtxt<'b, 'tcx, 'c>, + ty_did: DefId, + trait_did: DefId, + ty: ty::Ty<'c>, + param_env: ty::ParamEnv<'c>, + user_env: ty::ParamEnv<'c>, + fresh_preds: &mut FxHashSet<ty::Predicate<'c>>, + only_projections: bool, + ) -> Option<(ty::ParamEnv<'c>, ty::ParamEnv<'c>)> { + let tcx = infcx.tcx; + + let mut select = traits::SelectionContext::new(&infcx); + + let mut already_visited = FxHashSet(); + let mut predicates = VecDeque::new(); + predicates.push_back(ty::Binder(ty::TraitPredicate { + trait_ref: ty::TraitRef { + def_id: trait_did, + substs: infcx.tcx.mk_substs_trait(ty, &[]), + }, + })); + + let mut computed_preds: FxHashSet<_> = param_env.caller_bounds.iter().cloned().collect(); + let mut user_computed_preds: FxHashSet<_> = + user_env.caller_bounds.iter().cloned().collect(); + + let mut new_env = param_env.clone(); + let dummy_cause = ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID); + + while let Some(pred) = predicates.pop_front() { + infcx.clear_caches(); + + if !already_visited.insert(pred.clone()) { + continue; + } + + let result = select.select(&Obligation::new(dummy_cause.clone(), new_env, pred)); + + match &result { + &Ok(Some(ref vtable)) => { + let obligations = vtable.clone().nested_obligations().into_iter(); + + if !self.evaluate_nested_obligations( + ty, + obligations, + &mut user_computed_preds, + fresh_preds, + &mut predicates, + &mut select, + only_projections, + ) { + return None; + } + } + &Ok(None) => {} + &Err(SelectionError::Unimplemented) => { + if self.is_of_param(pred.skip_binder().trait_ref.substs) { + already_visited.remove(&pred); + user_computed_preds.insert(ty::Predicate::Trait(pred.clone())); + predicates.push_back(pred); + } else { + debug!( + "evaluate_nested_obligations: Unimplemented found, bailing: {:?} {:?} \ + {:?}", + ty, + pred, + pred.skip_binder().trait_ref.substs + ); + return None; + } + } + _ => panic!("Unexpected error for '{:?}': {:?}", ty, result), + }; + + computed_preds.extend(user_computed_preds.iter().cloned()); + let normalized_preds = + traits::elaborate_predicates(tcx, computed_preds.clone().into_iter().collect()); + new_env = ty::ParamEnv::new(tcx.mk_predicates(normalized_preds), param_env.reveal); + } + + let final_user_env = ty::ParamEnv::new( + tcx.mk_predicates(user_computed_preds.into_iter()), + user_env.reveal, + ); + debug!( + "evaluate_nested_obligations(ty_did={:?}, trait_did={:?}): succeeded with '{:?}' \ + '{:?}'", + ty_did, trait_did, new_env, final_user_env + ); + + return Some((new_env, final_user_env)); + } + + fn is_of_param(&self, substs: &Substs) -> bool { + if substs.is_noop() { + return false; + } + + return match substs.type_at(0).sty { + ty::TyParam(_) => true, + ty::TyProjection(p) => self.is_of_param(p.substs), + _ => false, + }; + } + + fn get_lifetime(&self, region: Region, names_map: &FxHashMap<String, Lifetime>) -> Lifetime { + self.region_name(region) + .map(|name| { + names_map.get(&name).unwrap_or_else(|| { + panic!("Missing lifetime with name {:?} for {:?}", name, region) + }) + }) + .unwrap_or(&Lifetime::statik()) + .clone() + } + + fn region_name(&self, region: Region) -> Option<String> { + match region { + &ty::ReEarlyBound(r) => Some(r.name.as_str().to_string()), + _ => None, + } + } + + // This is very simiiar to handle_lifetimes. 
Instead of matching ty::Region's to ty::Region's, + // however, we match ty::RegionVid's to ty::Region's + fn map_vid_to_region<'cx>( + &self, + regions: &RegionConstraintData<'cx>, + ) -> FxHashMap<ty::RegionVid, ty::Region<'cx>> { + let mut vid_map: FxHashMap<RegionTarget<'cx>, RegionDeps<'cx>> = FxHashMap(); + let mut finished_map = FxHashMap(); + + for constraint in regions.constraints.keys() { + match constraint { + &Constraint::VarSubVar(r1, r2) => { + { + let deps1 = vid_map + .entry(RegionTarget::RegionVid(r1)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::RegionVid(r2)); + } + + let deps2 = vid_map + .entry(RegionTarget::RegionVid(r2)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::RegionVid(r1)); + } + &Constraint::RegSubVar(region, vid) => { + { + let deps1 = vid_map + .entry(RegionTarget::Region(region)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::RegionVid(vid)); + } + + let deps2 = vid_map + .entry(RegionTarget::RegionVid(vid)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::Region(region)); + } + &Constraint::VarSubReg(vid, region) => { + finished_map.insert(vid, region); + } + &Constraint::RegSubReg(r1, r2) => { + { + let deps1 = vid_map + .entry(RegionTarget::Region(r1)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::Region(r2)); + } + + let deps2 = vid_map + .entry(RegionTarget::Region(r2)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::Region(r1)); + } + } + } + + while !vid_map.is_empty() { + let target = vid_map.keys().next().expect("Keys somehow empty").clone(); + let deps = vid_map.remove(&target).expect("Entry somehow missing"); + + for smaller in deps.smaller.iter() { + for larger in deps.larger.iter() { + match (smaller, larger) { + (&RegionTarget::Region(_), &RegionTarget::Region(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let larger_deps = v.into_mut(); + larger_deps.smaller.insert(*smaller); + larger_deps.smaller.remove(&target); + } + } + (&RegionTarget::RegionVid(v1), &RegionTarget::Region(r1)) => { + finished_map.insert(v1, r1); + } + (&RegionTarget::Region(_), &RegionTarget::RegionVid(_)) => { + // Do nothing - we don't care about regions that are smaller than vids + } + (&RegionTarget::RegionVid(_), &RegionTarget::RegionVid(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let larger_deps = v.into_mut(); + larger_deps.smaller.insert(*smaller); + larger_deps.smaller.remove(&target); + } + } + } + } + } + } + finished_map + } + + // This method calculates two things: Lifetime constraints of the form 'a: 'b, + // and region constraints of the form ReVar: 'a + // + // This is essentially a simplified version of lexical_region_resolve. However, + // handle_lifetimes determines what *needs be* true in order for an impl to hold. + // lexical_region_resolve, along with much of the rest of the compiler, is concerned + // with determining if a given set up constraints/predicates *are* met, given some + // starting conditions (e.g. user-provided code). 
For this reason, it's easier + // to perform the calculations we need on our own, rather than trying to make + // existing inference/solver code do what we want + fn handle_lifetimes<'cx>( + &self, + regions: &RegionConstraintData<'cx>, + names_map: &FxHashMap<String, Lifetime>, + ) -> Vec<WherePredicate> { + // Our goal is to 'flatten' the list of constraints by eliminating + // all intermediate RegionVids. At the end, all constraints should + // be between Regions (aka region variables). This gives us the information + // we need to create the Generics + // + let mut finished = FxHashMap(); + + let mut vid_map: FxHashMap<RegionTarget, RegionDeps> = FxHashMap(); + + // Flattening is done in two parts. First, we insert all of the constraints + // into a map. Each RegionTarget (either a RegionVid or a Region) maps + // to its smaller and larger regions. Note that 'larger' regions correspond + // to sub-regions in Rust code (e.g. in 'a: 'b, 'a is the larger region). + for constraint in regions.constraints.keys() { + match constraint { + &Constraint::VarSubVar(r1, r2) => { + { + let deps1 = vid_map + .entry(RegionTarget::RegionVid(r1)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::RegionVid(r2)); + } + + let deps2 = vid_map + .entry(RegionTarget::RegionVid(r2)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::RegionVid(r1)); + } + &Constraint::RegSubVar(region, vid) => { + let deps = vid_map + .entry(RegionTarget::RegionVid(vid)) + .or_insert_with(|| Default::default()); + deps.smaller.insert(RegionTarget::Region(region)); + } + &Constraint::VarSubReg(vid, region) => { + let deps = vid_map + .entry(RegionTarget::RegionVid(vid)) + .or_insert_with(|| Default::default()); + deps.larger.insert(RegionTarget::Region(region)); + } + &Constraint::RegSubReg(r1, r2) => { + // The constraint is already in the form that we want, so we're done with it + // Desired order is 'larger, smaller', so flip then + if self.region_name(r1) != self.region_name(r2) { + finished + .entry(self.region_name(r2).unwrap()) + .or_insert_with(|| Vec::new()) + .push(r1); + } + } + } + } + + // Here, we 'flatten' the map one element at a time. + // All of the element's sub and super regions are connected + // to each other. For example, if we have a graph that looks like this: + // + // (A, B) - C - (D, E) + // Where (A, B) are subregions, and (D,E) are super-regions + // + // then after deleting 'C', the graph will look like this: + // ... - A - (D, E ...) + // ... - B - (D, E, ...) + // (A, B, ...) - D - ... + // (A, B, ...) - E - ... + // + // where '...' signifies the existing sub and super regions of an entry + // When two adjacent ty::Regions are encountered, we've computed a final + // constraint, and add it to our list. Since we make sure to never re-add + // deleted items, this process will always finish. 
+ while !vid_map.is_empty() { + let target = vid_map.keys().next().expect("Keys somehow empty").clone(); + let deps = vid_map.remove(&target).expect("Entry somehow missing"); + + for smaller in deps.smaller.iter() { + for larger in deps.larger.iter() { + match (smaller, larger) { + (&RegionTarget::Region(r1), &RegionTarget::Region(r2)) => { + if self.region_name(r1) != self.region_name(r2) { + finished + .entry(self.region_name(r2).unwrap()) + .or_insert_with(|| Vec::new()) + .push(r1) // Larger, smaller + } + } + (&RegionTarget::RegionVid(_), &RegionTarget::Region(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + } + (&RegionTarget::Region(_), &RegionTarget::RegionVid(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let deps = v.into_mut(); + deps.smaller.insert(*smaller); + deps.smaller.remove(&target); + } + } + (&RegionTarget::RegionVid(_), &RegionTarget::RegionVid(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let larger_deps = v.into_mut(); + larger_deps.smaller.insert(*smaller); + larger_deps.smaller.remove(&target); + } + } + } + } + } + } + + let lifetime_predicates = names_map + .iter() + .flat_map(|(name, lifetime)| { + let empty = Vec::new(); + let bounds: FxHashSet<Lifetime> = finished + .get(name) + .unwrap_or(&empty) + .iter() + .map(|region| self.get_lifetime(region, names_map)) + .collect(); + + if bounds.is_empty() { + return None; + } + Some(WherePredicate::RegionPredicate { + lifetime: lifetime.clone(), + bounds: bounds.into_iter().collect(), + }) + }) + .collect(); + + lifetime_predicates + } + + fn extract_for_generics<'b, 'c, 'd>( + &self, + tcx: TyCtxt<'b, 'c, 'd>, + pred: ty::Predicate<'d>, + ) -> FxHashSet<GenericParam> { + pred.walk_tys() + .flat_map(|t| { + let mut regions = FxHashSet(); + tcx.collect_regions(&t, &mut regions); + + regions.into_iter().flat_map(|r| { + match r { + // We only care about late bound regions, as we need to add them + // to the 'for<>' section + &ty::ReLateBound(_, ty::BoundRegion::BrNamed(_, name)) => { + Some(GenericParam::Lifetime(Lifetime(name.as_str().to_string()))) + } + &ty::ReVar(_) | &ty::ReEarlyBound(_) => None, + _ => panic!("Unexpected region type {:?}", r), + } + }) + }) + .collect() + } + + fn make_final_bounds<'b, 'c, 'cx>( + &self, + ty_to_bounds: FxHashMap<Type, FxHashSet<TyParamBound>>, + ty_to_fn: FxHashMap<Type, (Option<PolyTrait>, Option<Type>)>, + lifetime_to_bounds: FxHashMap<Lifetime, FxHashSet<Lifetime>>, + ) -> Vec<WherePredicate> { + let final_predicates = ty_to_bounds + .into_iter() + .flat_map(|(ty, mut bounds)| { + if let Some(data) = ty_to_fn.get(&ty) { + let (poly_trait, output) = + (data.0.as_ref().unwrap().clone(), data.1.as_ref().cloned()); + let new_ty = match &poly_trait.trait_ { + &Type::ResolvedPath { + ref path, + ref typarams, + ref did, + ref is_generic, + } => { + let mut new_path = path.clone(); + let last_segment = new_path.segments.pop().unwrap(); + + let (old_input, old_output) = match last_segment.params { + PathParameters::AngleBracketed { types, .. } => (types, None), + PathParameters::Parenthesized { inputs, output, .. 
} => { + (inputs, output) + } + }; + + if old_output.is_some() && old_output != output { + panic!( + "Output mismatch for {:?} {:?} {:?}", + ty, old_output, data.1 + ); + } + + let new_params = PathParameters::Parenthesized { + inputs: old_input, + output, + }; + + new_path.segments.push(PathSegment { + name: last_segment.name, + params: new_params, + }); + + Type::ResolvedPath { + path: new_path, + typarams: typarams.clone(), + did: did.clone(), + is_generic: *is_generic, + } + } + _ => panic!("Unexpected data: {:?}, {:?}", ty, data), + }; + bounds.insert(TyParamBound::TraitBound( + PolyTrait { + trait_: new_ty, + generic_params: poly_trait.generic_params, + }, + hir::TraitBoundModifier::None, + )); + } + if bounds.is_empty() { + return None; + } + + Some(WherePredicate::BoundPredicate { + ty, + bounds: bounds.into_iter().collect(), + }) + }) + .chain( + lifetime_to_bounds + .into_iter() + .filter(|&(_, ref bounds)| !bounds.is_empty()) + .map(|(lifetime, bounds)| WherePredicate::RegionPredicate { + lifetime, + bounds: bounds.into_iter().collect(), + }), + ) + .collect(); + + final_predicates + } + + // Converts the calculated ParamEnv and lifetime information to a clean::Generics, suitable for + // display on the docs page. Cleaning the Predicates produces sub-optimal WherePredicate's, + // so we fix them up: + // + // * Multiple bounds for the same type are coalesced into one: e.g. 'T: Copy', 'T: Debug' + // becomes 'T: Copy + Debug' + // * Fn bounds are handled specially - instead of leaving it as 'T: Fn(), <T as Fn::Output> = + // K', we use the dedicated syntax 'T: Fn() -> K' + // * We explcitly add a '?Sized' bound if we didn't find any 'Sized' predicates for a type + fn param_env_to_generics<'b, 'c, 'cx>( + &self, + tcx: TyCtxt<'b, 'c, 'cx>, + did: DefId, + param_env: ty::ParamEnv<'cx>, + type_generics: ty::Generics, + mut existing_predicates: Vec<WherePredicate>, + vid_to_region: FxHashMap<ty::RegionVid, ty::Region<'cx>>, + ) -> Generics { + debug!( + "param_env_to_generics(did={:?}, param_env={:?}, type_generics={:?}, \ + existing_predicates={:?})", + did, param_env, type_generics, existing_predicates + ); + + // The `Sized` trait must be handled specially, since we only only display it when + // it is *not* required (i.e. '?Sized') + let sized_trait = self.cx + .tcx + .require_lang_item(lang_items::SizedTraitLangItem); + + let mut replacer = RegionReplacer { + vid_to_region: &vid_to_region, + tcx, + }; + + let orig_bounds: FxHashSet<_> = self.cx.tcx.param_env(did).caller_bounds.iter().collect(); + let clean_where_predicates = param_env + .caller_bounds + .iter() + .filter(|p| { + !orig_bounds.contains(p) || match p { + &&ty::Predicate::Trait(pred) => pred.def_id() == sized_trait, + _ => false, + } + }) + .map(|p| { + let replaced = p.fold_with(&mut replacer); + (replaced.clone(), replaced.clean(self.cx)) + }); + + let full_generics = (&type_generics, &tcx.predicates_of(did)); + let Generics { + params: mut generic_params, + .. 
+ } = full_generics.clean(self.cx); + + let mut has_sized = FxHashSet(); + let mut ty_to_bounds = FxHashMap(); + let mut lifetime_to_bounds = FxHashMap(); + let mut ty_to_traits: FxHashMap<Type, FxHashSet<Type>> = FxHashMap(); + + let mut ty_to_fn: FxHashMap<Type, (Option<PolyTrait>, Option<Type>)> = FxHashMap(); + + for (orig_p, p) in clean_where_predicates { + match p { + WherePredicate::BoundPredicate { ty, mut bounds } => { + // Writing a projection trait bound of the form + // <T as Trait>::Name : ?Sized + // is illegal, because ?Sized bounds can only + // be written in the (here, nonexistant) definition + // of the type. + // Therefore, we make sure that we never add a ?Sized + // bound for projections + match &ty { + &Type::QPath { .. } => { + has_sized.insert(ty.clone()); + } + _ => {} + } + + if bounds.is_empty() { + continue; + } + + let mut for_generics = self.extract_for_generics(tcx, orig_p.clone()); + + assert!(bounds.len() == 1); + let mut b = bounds.pop().unwrap(); + + if b.is_sized_bound(self.cx) { + has_sized.insert(ty.clone()); + } else if !b.get_trait_type() + .and_then(|t| { + ty_to_traits + .get(&ty) + .map(|bounds| bounds.contains(&strip_type(t.clone()))) + }) + .unwrap_or(false) + { + // If we've already added a projection bound for the same type, don't add + // this, as it would be a duplicate + + // Handle any 'Fn/FnOnce/FnMut' bounds specially, + // as we want to combine them with any 'Output' qpaths + // later + + let is_fn = match &mut b { + &mut TyParamBound::TraitBound(ref mut p, _) => { + // Insert regions into the for_generics hash map first, to ensure + // that we don't end up with duplicate bounds (e.g. for<'b, 'b>) + for_generics.extend(p.generic_params.clone()); + p.generic_params = for_generics.into_iter().collect(); + self.is_fn_ty(&tcx, &p.trait_) + } + _ => false, + }; + + let poly_trait = b.get_poly_trait().unwrap(); + + if is_fn { + ty_to_fn + .entry(ty.clone()) + .and_modify(|e| *e = (Some(poly_trait.clone()), e.1.clone())) + .or_insert(((Some(poly_trait.clone())), None)); + + ty_to_bounds + .entry(ty.clone()) + .or_insert_with(|| FxHashSet()); + } else { + ty_to_bounds + .entry(ty.clone()) + .or_insert_with(|| FxHashSet()) + .insert(b.clone()); + } + } + } + WherePredicate::RegionPredicate { lifetime, bounds } => { + lifetime_to_bounds + .entry(lifetime) + .or_insert_with(|| FxHashSet()) + .extend(bounds); + } + WherePredicate::EqPredicate { lhs, rhs } => { + match &lhs { + &Type::QPath { + name: ref left_name, + ref self_type, + ref trait_, + } => { + let ty = &*self_type; + match **trait_ { + Type::ResolvedPath { + path: ref trait_path, + ref typarams, + ref did, + ref is_generic, + } => { + let mut new_trait_path = trait_path.clone(); + + if self.is_fn_ty(&tcx, trait_) && left_name == FN_OUTPUT_NAME { + ty_to_fn + .entry(*ty.clone()) + .and_modify(|e| *e = (e.0.clone(), Some(rhs.clone()))) + .or_insert((None, Some(rhs))); + continue; + } + + // FIXME: Remove this scope when NLL lands + { + let params = + &mut new_trait_path.segments.last_mut().unwrap().params; + + match params { + // Convert somethiung like '<T as Iterator::Item> = u8' + // to 'T: Iterator<Item=u8>' + &mut PathParameters::AngleBracketed { + ref mut bindings, + .. + } => { + bindings.push(TypeBinding { + name: left_name.clone(), + ty: rhs, + }); + } + &mut PathParameters::Parenthesized { .. 
} => { + existing_predicates.push( + WherePredicate::EqPredicate { + lhs: lhs.clone(), + rhs, + }, + ); + continue; // If something other than a Fn ends up + // with parenthesis, leave it alone + } + } + } + + let bounds = ty_to_bounds + .entry(*ty.clone()) + .or_insert_with(|| FxHashSet()); + + bounds.insert(TyParamBound::TraitBound( + PolyTrait { + trait_: Type::ResolvedPath { + path: new_trait_path, + typarams: typarams.clone(), + did: did.clone(), + is_generic: *is_generic, + }, + generic_params: Vec::new(), + }, + hir::TraitBoundModifier::None, + )); + + // Remove any existing 'plain' bound (e.g. 'T: Iterator`) so + // that we don't see a + // duplicate bound like `T: Iterator + Iterator<Item=u8>` + // on the docs page. + bounds.remove(&TyParamBound::TraitBound( + PolyTrait { + trait_: *trait_.clone(), + generic_params: Vec::new(), + }, + hir::TraitBoundModifier::None, + )); + // Avoid creating any new duplicate bounds later in the outer + // loop + ty_to_traits + .entry(*ty.clone()) + .or_insert_with(|| FxHashSet()) + .insert(*trait_.clone()); + } + _ => panic!("Unexpected trait {:?} for {:?}", trait_, did), + } + } + _ => panic!("Unexpected LHS {:?} for {:?}", lhs, did), + } + } + }; + } + + let final_bounds = self.make_final_bounds(ty_to_bounds, ty_to_fn, lifetime_to_bounds); + + existing_predicates.extend(final_bounds); + + for p in generic_params.iter_mut() { + match p { + &mut GenericParam::Type(ref mut ty) => { + // We never want something like 'impl<T=Foo>' + ty.default.take(); + + let generic_ty = Type::Generic(ty.name.clone()); + + if !has_sized.contains(&generic_ty) { + ty.bounds.insert(0, TyParamBound::maybe_sized(self.cx)); + } + } + _ => {} + } + } + + Generics { + params: generic_params, + where_predicates: existing_predicates, + } + } + + fn is_fn_ty(&self, tcx: &TyCtxt, ty: &Type) -> bool { + match &ty { + &&Type::ResolvedPath { ref did, .. } => { + *did == tcx.require_lang_item(lang_items::FnTraitLangItem) + || *did == tcx.require_lang_item(lang_items::FnMutTraitLangItem) + || *did == tcx.require_lang_item(lang_items::FnOnceTraitLangItem) + } + _ => false, + } + } + + // This is an ugly hack, but it's the simplest way to handle synthetic impls without greatly + // refactorying either librustdoc or librustc. In particular, allowing new DefIds to be + // registered after the AST is constructed would require storing the defid mapping in a + // RefCell, decreasing the performance for normal compilation for very little gain. + // + // Instead, we construct 'fake' def ids, which start immediately after the last DefId in + // DefIndexAddressSpace::Low. 
In the Debug impl for clean::Item, we explicitly check for fake + // def ids, as we'll end up with a panic if we use the DefId Debug impl for fake DefIds + fn next_def_id(&self, crate_num: CrateNum) -> DefId { + let start_def_id = { + let next_id = if crate_num == LOCAL_CRATE { + self.cx + .tcx + .hir + .definitions() + .def_path_table() + .next_id(DefIndexAddressSpace::Low) + } else { + self.cx + .cstore + .def_path_table(crate_num) + .next_id(DefIndexAddressSpace::Low) + }; + + DefId { + krate: crate_num, + index: next_id, + } + }; + + let mut fake_ids = self.cx.fake_def_ids.borrow_mut(); + + let def_id = fake_ids.entry(crate_num).or_insert(start_def_id).clone(); + fake_ids.insert( + crate_num, + DefId { + krate: crate_num, + index: DefIndex::from_array_index( + def_id.index.as_array_index() + 1, + def_id.index.address_space(), + ), + }, + ); + + MAX_DEF_ID.with(|m| { + m.borrow_mut() + .entry(def_id.krate.clone()) + .or_insert(start_def_id); + }); + + self.cx.all_fake_def_ids.borrow_mut().insert(def_id); + + def_id.clone() + } +} + +// Replaces all ReVars in a type with ty::Region's, using the provided map +struct RegionReplacer<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { + vid_to_region: &'a FxHashMap<ty::RegionVid, ty::Region<'tcx>>, + tcx: TyCtxt<'a, 'gcx, 'tcx>, +} + +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionReplacer<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { + self.tcx + } + + fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { + (match r { + &ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(), + _ => None, + }).unwrap_or_else(|| r.super_fold_with(self)) + } +} diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index 5eb3e38d5b3..a769771f8aa 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -25,7 +25,7 @@ use syntax_pos::Span; use html::escape::Escape; -#[derive(Clone, RustcEncodable, RustcDecodable, Debug, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug, PartialEq, Eq, Hash)] pub enum Cfg { /// Accepts all configurations. 
True, diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 9aba399b3b0..f32dbcb8a08 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -12,8 +12,8 @@ use std::collections::BTreeMap; use std::io; -use std::iter::once; use std::rc::Rc; +use std::iter::once; use syntax::ast; use rustc::hir; @@ -25,7 +25,7 @@ use rustc::util::nodemap::FxHashSet; use core::{DocContext, DocAccessLevels}; use doctree; -use clean::{self, GetDefId}; +use clean::{self, GetDefId, get_auto_traits_with_def_id}; use super::Clean; @@ -50,7 +50,7 @@ pub fn try_inline(cx: &DocContext, def: Def, name: ast::Name) let inner = match def { Def::Trait(did) => { record_extern_fqn(cx, did, clean::TypeKind::Trait); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, false)); clean::TraitItem(build_external_trait(cx, did)) } Def::Fn(did) => { @@ -59,27 +59,27 @@ pub fn try_inline(cx: &DocContext, def: Def, name: ast::Name) } Def::Struct(did) => { record_extern_fqn(cx, did, clean::TypeKind::Struct); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::StructItem(build_struct(cx, did)) } Def::Union(did) => { record_extern_fqn(cx, did, clean::TypeKind::Union); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::UnionItem(build_union(cx, did)) } Def::TyAlias(did) => { record_extern_fqn(cx, did, clean::TypeKind::Typedef); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::TypedefItem(build_type_alias(cx, did), false) } Def::Enum(did) => { record_extern_fqn(cx, did, clean::TypeKind::Enum); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::EnumItem(build_enum(cx, did)) } Def::TyForeign(did) => { record_extern_fqn(cx, did, clean::TypeKind::Foreign); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, false)); clean::ForeignTypeItem } // Never inline enum variants but leave them shown as re-exports. @@ -120,11 +120,17 @@ pub fn load_attrs(cx: &DocContext, did: DefId) -> clean::Attributes { cx.tcx.get_attrs(did).clean(cx) } + /// Record an external fully qualified name in the external_paths cache. /// /// These names are used later on by HTML rendering to generate things like /// source links back to the original item. 
pub fn record_extern_fqn(cx: &DocContext, did: DefId, kind: clean::TypeKind) { + if did.is_local() { + debug!("record_extern_fqn(did={:?}, kind+{:?}): def_id is local, aborting", did, kind); + return; + } + let crate_name = cx.tcx.crate_name(did.krate).to_string(); let relative = cx.tcx.def_path(did).data.into_iter().filter_map(|elem| { // extern blocks have an empty name @@ -144,6 +150,7 @@ pub fn record_extern_fqn(cx: &DocContext, did: DefId, kind: clean::TypeKind) { } pub fn build_external_trait(cx: &DocContext, did: DefId) -> clean::Trait { + let auto_trait = cx.tcx.trait_def(did).has_auto_impl; let trait_items = cx.tcx.associated_items(did).map(|item| item.clean(cx)).collect(); let predicates = cx.tcx.predicates_of(did); let generics = (cx.tcx.generics_of(did), &predicates).clean(cx); @@ -152,6 +159,7 @@ pub fn build_external_trait(cx: &DocContext, did: DefId) -> clean::Trait { let is_spotlight = load_attrs(cx, did).has_doc_flag("spotlight"); let is_auto = cx.tcx.trait_is_auto(did); clean::Trait { + auto: auto_trait, unsafety: cx.tcx.trait_def(did).unsafety, generics, items: trait_items, @@ -227,7 +235,7 @@ fn build_type_alias(cx: &DocContext, did: DefId) -> clean::Typedef { } } -pub fn build_impls(cx: &DocContext, did: DefId) -> Vec<clean::Item> { +pub fn build_impls(cx: &DocContext, did: DefId, auto_traits: bool) -> Vec<clean::Item> { let tcx = cx.tcx; let mut impls = Vec::new(); @@ -235,6 +243,16 @@ pub fn build_impls(cx: &DocContext, did: DefId) -> Vec<clean::Item> { build_impl(cx, did, &mut impls); } + if auto_traits { + let auto_impls = get_auto_traits_with_def_id(cx, did); + let mut renderinfo = cx.renderinfo.borrow_mut(); + + let new_impls: Vec<clean::Item> = auto_impls.into_iter() + .filter(|i| renderinfo.inlined.insert(i.def_id)).collect(); + + impls.extend(new_impls); + } + // If this is the first time we've inlined something from another crate, then // we inline *all* impls from all the crates into this crate. 
Note that there's // currently no way for us to filter this based on type, and we likely need @@ -249,6 +267,7 @@ pub fn build_impls(cx: &DocContext, did: DefId) -> Vec<clean::Item> { cx.populated_all_crate_impls.set(true); + for &cnum in tcx.crates().iter() { for did in tcx.all_trait_implementations(cnum).iter() { build_impl(cx, *did, &mut impls); @@ -347,13 +366,14 @@ pub fn build_impl(cx: &DocContext, did: DefId, ret: &mut Vec<clean::Item>) { ret.push(clean::Item { inner: clean::ImplItem(clean::Impl { - unsafety: hir::Unsafety::Normal, // FIXME: this should be decoded + unsafety: hir::Unsafety::Normal, + generics: (tcx.generics_of(did), &predicates).clean(cx), provided_trait_methods: provided, trait_, for_, - generics: (tcx.generics_of(did), &predicates).clean(cx), items: trait_items, polarity: Some(polarity.clean(cx)), + synthetic: false }), source: tcx.def_span(did).clean(cx), name: None, diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 7f51b8f68ae..fcefe6cf642 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -26,31 +26,41 @@ use syntax::codemap::Spanned; use syntax::feature_gate::UnstableFeatures; use syntax::ptr::P; use syntax::symbol::keywords; +use syntax::symbol::Symbol; use syntax_pos::{self, DUMMY_SP, Pos, FileName}; use rustc::middle::const_val::ConstVal; use rustc::middle::privacy::AccessLevels; use rustc::middle::resolve_lifetime as rl; +use rustc::ty::fold::TypeFolder; use rustc::middle::lang_items; -use rustc::hir::def::{Def, CtorKind}; -use rustc::hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; +use rustc::hir::{self, HirVec}; +use rustc::hir::def::{self, Def, CtorKind}; +use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE}; +use rustc::hir::def_id::DefIndexAddressSpace; +use rustc::traits; use rustc::ty::subst::Substs; -use rustc::ty::{self, Ty, AdtKind}; +use rustc::ty::{self, TyCtxt, Region, RegionVid, Ty, AdtKind}; use rustc::middle::stability; use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc_typeck::hir_ty_to_ty; - -use rustc::hir; +use rustc::infer::{InferCtxt, RegionObligation}; +use rustc::infer::region_constraints::{RegionConstraintData, Constraint}; +use rustc::traits::*; +use std::collections::hash_map::Entry; +use std::collections::VecDeque; +use std::fmt; use rustc_const_math::ConstInt; use std::default::Default; use std::{mem, slice, vec}; -use std::iter::FromIterator; +use std::iter::{FromIterator, once}; use std::rc::Rc; +use std::cell::RefCell; use std::sync::Arc; use std::u32; -use core::DocContext; +use core::{self, DocContext}; use doctree; use visit_ast; use html::item_type::ItemType; @@ -59,8 +69,14 @@ use html::markdown::markdown_links; pub mod inline; pub mod cfg; mod simplify; +mod auto_trait; use self::cfg::Cfg; +use self::auto_trait::AutoTraitFinder; + +thread_local!(static MAX_DEF_ID: RefCell<FxHashMap<CrateNum, DefId>> = RefCell::new(FxHashMap())); + +const FN_OUTPUT_NAME: &'static str = "Output"; // extract the stability index for a node from tcx, if possible fn get_stability(cx: &DocContext, def_id: DefId) -> Option<Stability> { @@ -282,7 +298,7 @@ impl Clean<ExternalCrate> for CrateNum { /// Anything with a source location and set of attributes and, optionally, a /// name. That is, anything that can be documented. This doesn't correspond /// directly to the AST's concept of an item; it's a strict superset. 
-#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Item { /// Stringified span pub source: Span, @@ -296,6 +312,26 @@ pub struct Item { pub deprecation: Option<Deprecation>, } +impl fmt::Debug for Item { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + + let fake = MAX_DEF_ID.with(|m| m.borrow().get(&self.def_id.krate) + .map(|id| self.def_id >= *id).unwrap_or(false)); + let def_id: &fmt::Debug = if fake { &"**FAKE**" } else { &self.def_id }; + + fmt.debug_struct("Item") + .field("source", &self.source) + .field("name", &self.name) + .field("attrs", &self.attrs) + .field("inner", &self.inner) + .field("visibility", &self.visibility) + .field("def_id", def_id) + .field("stability", &self.stability) + .field("deprecation", &self.deprecation) + .finish() + } +} + impl Item { /// Finds the `doc` attribute as a NameValue and returns the corresponding /// value found. @@ -492,9 +528,9 @@ impl Clean<Item> for doctree::Module { let mut items: Vec<Item> = vec![]; items.extend(self.extern_crates.iter().map(|x| x.clean(cx))); items.extend(self.imports.iter().flat_map(|x| x.clean(cx))); - items.extend(self.structs.iter().map(|x| x.clean(cx))); - items.extend(self.unions.iter().map(|x| x.clean(cx))); - items.extend(self.enums.iter().map(|x| x.clean(cx))); + items.extend(self.structs.iter().flat_map(|x| x.clean(cx))); + items.extend(self.unions.iter().flat_map(|x| x.clean(cx))); + items.extend(self.enums.iter().flat_map(|x| x.clean(cx))); items.extend(self.fns.iter().map(|x| x.clean(cx))); items.extend(self.foreigns.iter().flat_map(|x| x.clean(cx))); items.extend(self.mods.iter().map(|x| x.clean(cx))); @@ -601,7 +637,7 @@ impl<I: IntoIterator<Item=ast::NestedMetaItem>> NestedAttributesExt for I { /// Included files are kept separate from inline doc comments so that proper line-number /// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are /// kept separate because of issue #42760. -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum DocFragment { // FIXME #44229 (misdreavus): sugared and raw doc comments can be brought back together once // hoedown is completely removed from rustdoc. 
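[Editor's note] A quick aside on the `map` -> `flat_map` switch in the module collector above: structs, unions and enums now clean into a `Vec<Item>` (the item itself plus any synthetic auto-trait impls), so the per-item results have to be flattened into the module's item list. The following is a minimal standalone sketch of that pattern with a placeholder `Item` type and hypothetical impl strings, not rustdoc's actual `clean` machinery:

    // Minimal sketch with a placeholder Item type (not rustdoc's): cleaning a
    // struct now yields the struct plus its synthetic auto-trait impls, so the
    // module collector flattens the per-item vectors with flat_map.
    struct Item(String);

    fn clean_struct(name: &str) -> Vec<Item> {
        vec![
            Item(format!("struct {}", name)),
            Item(format!("impl Send for {}", name)), // hypothetical synthetic impl
            Item(format!("impl Sync for {}", name)), // hypothetical synthetic impl
        ]
    }

    fn main() {
        let structs = ["Foo", "Bar"];
        let items: Vec<Item> = structs.iter().flat_map(|s| clean_struct(s)).collect();
        assert_eq!(items.len(), 6); // two structs, three items each
    }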
@@ -653,7 +689,7 @@ impl<'a> FromIterator<&'a DocFragment> for String { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug, Default)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Default, Hash)] pub struct Attributes { pub doc_strings: Vec<DocFragment>, pub other_attrs: Vec<ast::Attribute>, @@ -1177,7 +1213,7 @@ impl Clean<Attributes> for [ast::Attribute] { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct TyParam { pub name: String, pub did: DefId, @@ -1212,7 +1248,7 @@ impl<'tcx> Clean<TyParam> for ty::TypeParameterDef { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum TyParamBound { RegionBound(Lifetime), TraitBound(PolyTrait, hir::TraitBoundModifier) @@ -1245,6 +1281,21 @@ impl TyParamBound { } false } + + fn get_poly_trait(&self) -> Option<PolyTrait> { + if let TyParamBound::TraitBound(ref p, _) = *self { + return Some(p.clone()) + } + None + } + + fn get_trait_type(&self) -> Option<Type> { + + if let TyParamBound::TraitBound(PolyTrait { ref trait_, .. }, _) = *self { + return Some(trait_.clone()); + } + None + } } impl Clean<TyParamBound> for hir::TyParamBound { @@ -1363,7 +1414,7 @@ impl<'tcx> Clean<Option<Vec<TyParamBound>>> for Substs<'tcx> { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Lifetime(String); impl Lifetime { @@ -1380,17 +1431,19 @@ impl Lifetime { impl Clean<Lifetime> for hir::Lifetime { fn clean(&self, cx: &DocContext) -> Lifetime { - let hir_id = cx.tcx.hir.node_to_hir_id(self.id); - let def = cx.tcx.named_region(hir_id); - match def { - Some(rl::Region::EarlyBound(_, node_id, _)) | - Some(rl::Region::LateBound(_, node_id, _)) | - Some(rl::Region::Free(_, node_id)) => { - if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() { - return lt; + if self.id != ast::DUMMY_NODE_ID { + let hir_id = cx.tcx.hir.node_to_hir_id(self.id); + let def = cx.tcx.named_region(hir_id); + match def { + Some(rl::Region::EarlyBound(_, node_id, _)) | + Some(rl::Region::LateBound(_, node_id, _)) | + Some(rl::Region::Free(_, node_id)) => { + if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() { + return lt; + } } + _ => {} } - _ => {} } Lifetime(self.name.name().to_string()) } @@ -1437,7 +1490,7 @@ impl Clean<Option<Lifetime>> for ty::RegionKind { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum WherePredicate { BoundPredicate { ty: Type, bounds: Vec<TyParamBound> }, RegionPredicate { lifetime: Lifetime, bounds: Vec<Lifetime>}, @@ -1562,7 +1615,7 @@ impl<'tcx> Clean<Type> for ty::ProjectionTy<'tcx> { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum GenericParam { Lifetime(Lifetime), Type(TyParam), @@ -1577,7 +1630,8 @@ impl Clean<GenericParam> for hir::GenericParam { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug, Default)] +// maybe use a Generic enum and use Vec<Generic>? 
+#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Default, Hash)] pub struct Generics { pub params: Vec<GenericParam>, pub where_predicates: Vec<WherePredicate>, @@ -1747,7 +1801,7 @@ impl Clean<Item> for doctree::Function { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct FnDecl { pub inputs: Arguments, pub output: FunctionRetTy, @@ -1765,7 +1819,7 @@ impl FnDecl { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Arguments { pub values: Vec<Argument>, } @@ -1840,7 +1894,7 @@ impl<'a, 'tcx> Clean<FnDecl> for (DefId, ty::PolyFnSig<'tcx>) { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Argument { pub type_: Type, pub name: String, @@ -1870,7 +1924,7 @@ impl Argument { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum FunctionRetTy { Return(Type), DefaultReturn, @@ -1896,6 +1950,7 @@ impl GetDefId for FunctionRetTy { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Trait { + pub auto: bool, pub unsafety: hir::Unsafety, pub items: Vec<Item>, pub generics: Generics, @@ -1917,6 +1972,7 @@ impl Clean<Item> for doctree::Trait { stability: self.stab.clean(cx), deprecation: self.depr.clean(cx), inner: TraitItem(Trait { + auto: self.is_auto.clean(cx), unsafety: self.unsafety, items: self.items.clean(cx), generics: self.generics.clean(cx), @@ -2158,7 +2214,7 @@ impl<'tcx> Clean<Item> for ty::AssociatedItem { } /// A trait reference, which may have higher ranked lifetimes. -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct PolyTrait { pub trait_: Type, pub generic_params: Vec<GenericParam>, @@ -2167,7 +2223,7 @@ pub struct PolyTrait { /// A representation of a Type suitable for hyperlinking purposes. Ideally one can get the original /// type out of the AST/TyCtxt given one of these, if more information is needed. Most importantly /// it does not preserve mutability or boxes. 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum Type { /// structs/enums/traits (most that'd be an hir::TyPath) ResolvedPath { @@ -2782,10 +2838,13 @@ pub struct Union { pub fields_stripped: bool, } -impl Clean<Item> for doctree::Struct { - fn clean(&self, cx: &DocContext) -> Item { - Item { - name: Some(self.name.clean(cx)), +impl Clean<Vec<Item>> for doctree::Struct { + fn clean(&self, cx: &DocContext) -> Vec<Item> { + let name = self.name.clean(cx); + let mut ret = get_auto_traits_with_node_id(cx, self.id, name.clone()); + + ret.push(Item { + name: Some(name), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.tcx.hir.local_def_id(self.id), @@ -2798,14 +2857,19 @@ impl Clean<Item> for doctree::Struct { fields: self.fields.clean(cx), fields_stripped: false, }), - } + }); + + ret } } -impl Clean<Item> for doctree::Union { - fn clean(&self, cx: &DocContext) -> Item { - Item { - name: Some(self.name.clean(cx)), +impl Clean<Vec<Item>> for doctree::Union { + fn clean(&self, cx: &DocContext) -> Vec<Item> { + let name = self.name.clean(cx); + let mut ret = get_auto_traits_with_node_id(cx, self.id, name.clone()); + + ret.push(Item { + name: Some(name), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.tcx.hir.local_def_id(self.id), @@ -2818,7 +2882,9 @@ impl Clean<Item> for doctree::Union { fields: self.fields.clean(cx), fields_stripped: false, }), - } + }); + + ret } } @@ -2849,10 +2915,13 @@ pub struct Enum { pub variants_stripped: bool, } -impl Clean<Item> for doctree::Enum { - fn clean(&self, cx: &DocContext) -> Item { - Item { - name: Some(self.name.clean(cx)), +impl Clean<Vec<Item>> for doctree::Enum { + fn clean(&self, cx: &DocContext) -> Vec<Item> { + let name = self.name.clean(cx); + let mut ret = get_auto_traits_with_node_id(cx, self.id, name.clone()); + + ret.push(Item { + name: Some(name), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.tcx.hir.local_def_id(self.id), @@ -2864,7 +2933,9 @@ impl Clean<Item> for doctree::Enum { generics: self.generics.clean(cx), variants_stripped: false, }), - } + }); + + ret } } @@ -2989,7 +3060,7 @@ impl Clean<Span> for syntax_pos::Span { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Path { pub global: bool, pub def: Def, @@ -3027,7 +3098,7 @@ impl Clean<Path> for hir::Path { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum PathParameters { AngleBracketed { lifetimes: Vec<Lifetime>, @@ -3062,7 +3133,7 @@ impl Clean<PathParameters> for hir::PathParameters { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct PathSegment { pub name: String, pub params: PathParameters, @@ -3077,6 +3148,50 @@ impl Clean<PathSegment> for hir::PathSegment { } } +fn strip_type(ty: Type) -> Type { + match ty { + Type::ResolvedPath { path, typarams, did, is_generic } => { + Type::ResolvedPath { path: strip_path(&path), typarams, did, is_generic } + }, + Type::Tuple(inner_tys) => { + Type::Tuple(inner_tys.iter().map(|t| strip_type(t.clone())).collect()) + }, + Type::Slice(inner_ty) => Type::Slice(Box::new(strip_type(*inner_ty))), + Type::Array(inner_ty, s) => 
Type::Array(Box::new(strip_type(*inner_ty)), s), + Type::Unique(inner_ty) => Type::Unique(Box::new(strip_type(*inner_ty))), + Type::RawPointer(m, inner_ty) => Type::RawPointer(m, Box::new(strip_type(*inner_ty))), + Type::BorrowedRef { lifetime, mutability, type_ } => { + Type::BorrowedRef { lifetime, mutability, type_: Box::new(strip_type(*type_)) } + }, + Type::QPath { name, self_type, trait_ } => { + Type::QPath { + name, + self_type: Box::new(strip_type(*self_type)), trait_: Box::new(strip_type(*trait_)) + } + }, + _ => ty + } +} + +fn strip_path(path: &Path) -> Path { + let segments = path.segments.iter().map(|s| { + PathSegment { + name: s.name.clone(), + params: PathParameters::AngleBracketed { + lifetimes: Vec::new(), + types: Vec::new(), + bindings: Vec::new() + } + } + }).collect(); + + Path { + global: path.global, + def: path.def.clone(), + segments + } +} + fn qpath_to_string(p: &hir::QPath) -> String { let segments = match *p { hir::QPath::Resolved(_, ref path) => &path.segments, @@ -3125,7 +3240,7 @@ impl Clean<Item> for doctree::Typedef { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct BareFunctionDecl { pub unsafety: hir::Unsafety, pub generic_params: Vec<GenericParam>, @@ -3198,7 +3313,7 @@ impl Clean<Item> for doctree::Constant { } } -#[derive(Debug, Clone, RustcEncodable, RustcDecodable, PartialEq, Copy)] +#[derive(Debug, Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Hash)] pub enum Mutability { Mutable, Immutable, @@ -3213,7 +3328,7 @@ impl Clean<Mutability> for hir::Mutability { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Copy, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Debug, Hash)] pub enum ImplPolarity { Positive, Negative, @@ -3237,6 +3352,21 @@ pub struct Impl { pub for_: Type, pub items: Vec<Item>, pub polarity: Option<ImplPolarity>, + pub synthetic: bool, +} + +pub fn get_auto_traits_with_node_id(cx: &DocContext, id: ast::NodeId, name: String) -> Vec<Item> { + let finder = AutoTraitFinder { cx }; + finder.get_with_node_id(id, name) +} + +pub fn get_auto_traits_with_def_id(cx: &DocContext, id: DefId) -> Vec<Item> { + + let finder = AutoTraitFinder { + cx, + }; + + finder.get_with_def_id(id) } impl Clean<Vec<Item>> for doctree::Impl { @@ -3274,7 +3404,8 @@ impl Clean<Vec<Item>> for doctree::Impl { for_: self.for_.clean(cx), items, polarity: Some(self.polarity.clean(cx)), - }), + synthetic: false + }) }); ret } @@ -3294,7 +3425,7 @@ fn build_deref_target_impls(cx: &DocContext, let primitive = match *target { ResolvedPath { did, .. } if did.is_local() => continue, ResolvedPath { did, .. } => { - ret.extend(inline::build_impls(cx, did)); + ret.extend(inline::build_impls(cx, did, true)); continue } _ => match target.primitive_type() { @@ -3514,7 +3645,11 @@ fn print_const_expr(cx: &DocContext, body: hir::BodyId) -> String { fn resolve_type(cx: &DocContext, path: Path, id: ast::NodeId) -> Type { - debug!("resolve_type({:?},{:?})", path, id); + if id == ast::DUMMY_NODE_ID { + debug!("resolve_type({:?})", path); + } else { + debug!("resolve_type({:?},{:?})", path, id); + } let is_generic = match path.def { Def::PrimTy(p) => match p { @@ -3669,7 +3804,7 @@ impl Clean<Deprecation> for attr::Deprecation { } /// An equality constraint on an associated type, e.g. 
`A=Bar` in `Foo<A=Bar>` -#[derive(Clone, PartialEq, RustcDecodable, RustcEncodable, Debug)] +#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug, Hash)] pub struct TypeBinding { pub name: String, pub ty: Type @@ -3683,3 +3818,191 @@ impl Clean<TypeBinding> for hir::TypeBinding { } } } + + + +pub fn def_id_to_path(cx: &DocContext, did: DefId, name: Option<String>) -> Vec<String> { + let crate_name = name.unwrap_or_else(|| cx.tcx.crate_name(did.krate).to_string()); + let relative = cx.tcx.def_path(did).data.into_iter().filter_map(|elem| { + // extern blocks have an empty name + let s = elem.data.to_string(); + if !s.is_empty() { + Some(s) + } else { + None + } + }); + once(crate_name).chain(relative).collect() +} + + +// Start of code copied from rust-clippy + +pub fn get_trait_def_id(tcx: &TyCtxt, path: &[&str], use_local: bool) -> Option<DefId> { + return if use_local { + path_to_def_local(tcx, path) + } else { + path_to_def(tcx, path) + }; +} + +pub fn path_to_def_local(tcx: &TyCtxt, path: &[&str]) -> Option<DefId> { + let krate = tcx.hir.krate(); + let mut items = krate.module.item_ids.clone(); + let mut path_it = path.iter().peekable(); + + loop { + let segment = match path_it.next() { + Some(segment) => segment, + None => return None, + }; + + for item_id in mem::replace(&mut items, HirVec::new()).iter() { + let item = tcx.hir.expect_item(item_id.id); + if item.name == *segment { + if path_it.peek().is_none() { + return Some(tcx.hir.local_def_id(item_id.id)) + } + + items = match &item.node { + &hir::ItemMod(ref m) => m.item_ids.clone(), + _ => panic!("Unexpected item {:?} in path {:?} path") + }; + break; + } + } + } + +} + +pub fn path_to_def(tcx: &TyCtxt, path: &[&str]) -> Option<DefId> { + let crates = tcx.crates(); + + + let krate = crates + .iter() + .find(|&&krate| tcx.crate_name(krate) == path[0]); + + if let Some(krate) = krate { + let krate = DefId { + krate: *krate, + index: CRATE_DEF_INDEX, + }; + let mut items = tcx.item_children(krate); + let mut path_it = path.iter().skip(1).peekable(); + + loop { + let segment = match path_it.next() { + Some(segment) => segment, + None => return None, + }; + + for item in mem::replace(&mut items, Rc::new(vec![])).iter() { + if item.ident.name == *segment { + if path_it.peek().is_none() { + return match item.def { + def::Def::Trait(did) => Some(did), + _ => None + } + } + + items = tcx.item_children(item.def.def_id()); + break; + } + } + } + } else { + None + } +} + +fn get_path_for_type(tcx: TyCtxt, def_id: DefId, def_ctor: fn(DefId) -> Def) -> hir::Path { + struct AbsolutePathBuffer { + names: Vec<String>, + } + + impl ty::item_path::ItemPathBuffer for AbsolutePathBuffer { + fn root_mode(&self) -> &ty::item_path::RootMode { + const ABSOLUTE: &'static ty::item_path::RootMode = &ty::item_path::RootMode::Absolute; + ABSOLUTE + } + + fn push(&mut self, text: &str) { + self.names.push(text.to_owned()); + } + } + + let mut apb = AbsolutePathBuffer { names: vec![] }; + + tcx.push_item_path(&mut apb, def_id); + + hir::Path { + span: DUMMY_SP, + def: def_ctor(def_id), + segments: hir::HirVec::from_vec(apb.names.iter().map(|s| hir::PathSegment { + name: ast::Name::intern(&s), + parameters: None, + infer_types: false + }).collect()) + } +} + +// End of code copied from rust-clippy + + +#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)] +enum RegionTarget<'tcx> { + Region(Region<'tcx>), + RegionVid(RegionVid) +} + +#[derive(Default, Debug, Clone)] +struct RegionDeps<'tcx> { + larger: FxHashSet<RegionTarget<'tcx>>, + smaller: 
FxHashSet<RegionTarget<'tcx>> +} + +#[derive(Eq, PartialEq, Hash, Debug)] +enum SimpleBound { + RegionBound(Lifetime), + TraitBound(Vec<PathSegment>, Vec<SimpleBound>, Vec<GenericParam>, hir::TraitBoundModifier) +} + +enum AutoTraitResult { + ExplicitImpl, + + PositiveImpl(Generics), + + NegativeImpl, +} + +impl AutoTraitResult { + + fn is_auto(&self) -> bool { + match *self { + AutoTraitResult::PositiveImpl(_) | AutoTraitResult::NegativeImpl => true, + _ => false + } + } +} + +impl From<TyParamBound> for SimpleBound { + + fn from(bound: TyParamBound) -> Self { + match bound.clone() { + TyParamBound::RegionBound(l) => SimpleBound::RegionBound(l), + TyParamBound::TraitBound(t, mod_) => match t.trait_ { + Type::ResolvedPath { path, typarams, .. } => { + SimpleBound::TraitBound(path.segments, + typarams + .map_or_else(|| Vec::new(), |v| v.iter() + .map(|p| SimpleBound::from(p.clone())) + .collect()), + t.generic_params, + mod_) + }, + _ => panic!("Unexpected bound {:?}", bound) + } + } + } +} diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 81babd803a5..57773e4e068 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -11,13 +11,14 @@ use rustc_lint; use rustc_driver::{self, driver, target_features, abort_on_err}; use rustc::session::{self, config}; -use rustc::hir::def_id::DefId; +use rustc::hir::def_id::{DefId, CrateNum}; use rustc::hir::def::Def; +use rustc::middle::cstore::CrateStore; use rustc::middle::privacy::AccessLevels; use rustc::ty::{self, TyCtxt, AllArenas}; use rustc::hir::map as hir_map; use rustc::lint; -use rustc::util::nodemap::FxHashMap; +use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc_resolve as resolve; use rustc_metadata::creader::CrateLoader; use rustc_metadata::cstore::CStore; @@ -48,6 +49,8 @@ pub struct DocContext<'a, 'tcx: 'a, 'rcx: 'a> { pub resolver: &'a RefCell<resolve::Resolver<'rcx>>, /// The stack of module NodeIds up till this point pub mod_ids: RefCell<Vec<NodeId>>, + pub crate_name: Option<String>, + pub cstore: Rc<CrateStore>, pub populated_all_crate_impls: Cell<bool>, // Note that external items for which `doc(hidden)` applies to are shown as // non-reachable while local items aren't. This is because we're reusing @@ -65,6 +68,11 @@ pub struct DocContext<'a, 'tcx: 'a, 'rcx: 'a> { pub ty_substs: RefCell<FxHashMap<Def, clean::Type>>, /// Table node id of lifetime parameter definition -> substituted lifetime pub lt_substs: RefCell<FxHashMap<DefId, clean::Lifetime>>, + pub send_trait: Option<DefId>, + pub fake_def_ids: RefCell<FxHashMap<CrateNum, DefId>>, + pub all_fake_def_ids: RefCell<FxHashSet<DefId>>, + /// Maps (type_id, trait_id) -> auto trait impl + pub generated_synthetics: RefCell<FxHashSet<(DefId, DefId)>> } impl<'a, 'tcx, 'rcx> DocContext<'a, 'tcx, 'rcx> { @@ -107,6 +115,7 @@ pub fn run_core(search_paths: SearchPaths, triple: Option<String>, maybe_sysroot: Option<PathBuf>, allow_warnings: bool, + crate_name: Option<String>, force_unstable_if_unmarked: bool) -> (clean::Crate, RenderInfo) { // Parse, resolve, and typecheck the given crate. 
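[Editor's note] The new `generated_synthetics` field added to `DocContext` above is, as far as this diff shows, a per-run set of `(type DefId, trait DefId)` pairs, so an auto-trait impl is only synthesized once per type/trait combination. A rough sketch of that dedup pattern, using plain integers in place of `DefId`:

    use std::collections::HashSet;

    fn main() {
        // Stand-ins for (type DefId, trait DefId) pairs; the real field is an
        // FxHashSet<(DefId, DefId)> behind a RefCell.
        let mut generated_synthetics: HashSet<(u32, u32)> = HashSet::new();
        let requests = [(1, 10), (1, 10), (2, 10)]; // the (1, 10) pair is requested twice

        for &pair in requests.iter() {
            // insert() returns false when the pair is already present, so the
            // second (1, 10) request is skipped instead of producing a duplicate impl.
            if generated_synthetics.insert(pair) {
                println!("synthesizing impl for {:?}", pair);
            }
        }
    }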
@@ -230,9 +239,19 @@ pub fn run_core(search_paths: SearchPaths, .collect() }; + let send_trait; + + if crate_name == Some("core".to_string()) { + send_trait = clean::get_trait_def_id(&tcx, &["marker", "Send"], true); + } else { + send_trait = clean::get_trait_def_id(&tcx, &["core", "marker", "Send"], false) + } + let ctxt = DocContext { tcx, resolver: &resolver, + crate_name, + cstore: cstore.clone(), populated_all_crate_impls: Cell::new(false), access_levels: RefCell::new(access_levels), external_traits: Default::default(), @@ -240,6 +259,10 @@ pub fn run_core(search_paths: SearchPaths, ty_substs: Default::default(), lt_substs: Default::default(), mod_ids: Default::default(), + send_trait: send_trait, + fake_def_ids: RefCell::new(FxHashMap()), + all_fake_def_ids: RefCell::new(FxHashSet()), + generated_synthetics: RefCell::new(FxHashSet()) }; debug!("crate: {:?}", tcx.hir.krate()); diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index 430236f30c4..413e5623118 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -210,6 +210,7 @@ pub struct Trait { pub depr: Option<attr::Deprecation>, } +#[derive(Debug)] pub struct Impl { pub unsafety: hir::Unsafety, pub polarity: hir::ImplPolarity, diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index d6025920e78..6e265ed0776 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -37,7 +37,7 @@ pub use self::ExternalLocation::*; use std::borrow::Cow; use std::cell::RefCell; use std::cmp::Ordering; -use std::collections::{BTreeMap, HashSet}; +use std::collections::{BTreeMap, HashSet, VecDeque}; use std::default::Default; use std::error; use std::fmt::{self, Display, Formatter, Write as FmtWrite}; @@ -169,6 +169,7 @@ pub enum ExternalLocation { Unknown, } + /// Metadata about implementations for a type or trait. #[derive(Clone)] pub struct Impl { @@ -270,6 +271,18 @@ pub struct Cache { /// generating explicit hyperlinks to other crates. pub external_paths: FxHashMap<DefId, (Vec<String>, ItemType)>, + /// Maps local def ids of exported types to fully qualified paths. + /// Unlike 'paths', this mapping ignores any renames that occur + /// due to 'use' statements. + /// + /// This map is used when writing out the special 'implementors' + /// javascript file. By using the exact path that the type + /// is declared with, we ensure that each path will be identical + /// to the path used if the corresponding type is inlined. By + /// doing this, we can detect duplicate impls on a trait page, and only display + /// the impl for the inlined type. + pub exact_paths: FxHashMap<DefId, Vec<String>>, + /// This map contains information about all known traits of this crate. 
/// Implementations of a crate should inherit the documentation of the /// parent trait if no extra documentation is specified, and default methods @@ -322,6 +335,7 @@ pub struct RenderInfo { pub inlined: FxHashSet<DefId>, pub external_paths: ::core::ExternalPaths, pub external_typarams: FxHashMap<DefId, String>, + pub exact_paths: FxHashMap<DefId, Vec<String>>, pub deref_trait_did: Option<DefId>, pub deref_mut_trait_did: Option<DefId>, pub owned_box_did: Option<DefId>, @@ -436,7 +450,9 @@ fn init_ids() -> FxHashMap<String, usize> { "required-methods", "provided-methods", "implementors", + "synthetic-implementors", "implementors-list", + "synthetic-implementors-list", "methods", "deref-methods", "implementations", @@ -507,6 +523,7 @@ pub fn run(mut krate: clean::Crate, themes, }; + // If user passed in `--playground-url` arg, we fill in crate name here if let Some(url) = playground_url { markdown::PLAYGROUND.with(|slot| { @@ -556,6 +573,7 @@ pub fn run(mut krate: clean::Crate, inlined: _, external_paths, external_typarams, + exact_paths, deref_trait_did, deref_mut_trait_did, owned_box_did, @@ -568,6 +586,7 @@ pub fn run(mut krate: clean::Crate, let mut cache = Cache { impls: FxHashMap(), external_paths, + exact_paths, paths: FxHashMap(), implementors: FxHashMap(), stack: Vec::new(), @@ -873,7 +892,10 @@ themePicker.onclick = function() {{ // should add it. if !imp.impl_item.def_id.is_local() { continue } have_impls = true; - write!(implementors, "{},", as_json(&imp.inner_impl().to_string())).unwrap(); + write!(implementors, "{{text:{},synthetic:{},types:{}}},", + as_json(&imp.inner_impl().to_string()), + imp.inner_impl().synthetic, + as_json(&collect_paths_for_type(imp.inner_impl().for_.clone()))).unwrap(); } implementors.push_str("];"); @@ -1856,8 +1878,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item, items: &[clean::Item]) -> fmt::Result { document(w, cx, item)?; - let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()) - .collect::<Vec<usize>>(); + let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()).collect::<Vec<usize>>(); // the order of item types in the listing fn reorder(ty: ItemType) -> u8 { @@ -2201,6 +2222,50 @@ fn item_function(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, document(w, cx, it) } +fn render_implementor(cx: &Context, implementor: &Impl, w: &mut fmt::Formatter, + implementor_dups: &FxHashMap<&str, (DefId, bool)>) -> Result<(), fmt::Error> { + write!(w, "<li>")?; + if let Some(l) = (Item { cx, item: &implementor.impl_item }).src_href() { + write!(w, "<div class='out-of-band'>")?; + write!(w, "<a class='srclink' href='{}' title='{}'>[src]</a>", + l, "goto source code")?; + write!(w, "</div>")?; + } + write!(w, "<code>")?; + // If there's already another implementor that has the same abbridged name, use the + // full path, for example in `std::iter::ExactSizeIterator` + let use_absolute = match implementor.inner_impl().for_ { + clean::ResolvedPath { ref path, is_generic: false, .. } | + clean::BorrowedRef { + type_: box clean::ResolvedPath { ref path, is_generic: false, .. }, + .. 
+ } => implementor_dups[path.last_name()].1, + _ => false, + }; + fmt_impl_for_trait_page(&implementor.inner_impl(), w, use_absolute)?; + for it in &implementor.inner_impl().items { + if let clean::TypedefItem(ref tydef, _) = it.inner { + write!(w, "<span class=\"where fmt-newline\"> ")?; + assoc_type(w, it, &vec![], Some(&tydef.type_), AssocItemLink::Anchor(None))?; + write!(w, ";</span>")?; + } + } + writeln!(w, "</code></li>")?; + Ok(()) +} + +fn render_impls(cx: &Context, w: &mut fmt::Formatter, + traits: Vec<&&Impl>, + containing_item: &clean::Item) -> Result<(), fmt::Error> { + for i in &traits { + let did = i.trait_did().unwrap(); + let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods); + render_impl(w, cx, i, assoc_link, + RenderMode::Normal, containing_item.stable_since(), true)?; + } + Ok(()) +} + fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, t: &clean::Trait) -> fmt::Result { let mut bounds = String::new(); @@ -2380,6 +2445,14 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, </h2> <ul class='item-list' id='implementors-list'> "; + + let synthetic_impl_header = " + <h2 id='synthetic-implementors' class='small-section-header'> + Auto implementors<a href='#synthetic-implementors' class='anchor'></a> + </h2> + <ul class='item-list' id='synthetic-implementors-list'> + "; + if let Some(implementors) = cache.implementors.get(&it.def_id) { // The DefId is for the first Type found with that name. The bool is // if any Types with the same name but different DefId have been found. @@ -2405,6 +2478,11 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, .partition::<Vec<_>, _>(|i| i.inner_impl().for_.def_id() .map_or(true, |d| cache.paths.contains_key(&d))); + + let (synthetic, concrete) = local.iter() + .partition::<Vec<_>, _>(|i| i.inner_impl().synthetic); + + if !foreign.is_empty() { write!(w, " <h2 id='foreign-impls' class='small-section-header'> @@ -2422,42 +2500,48 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, } write!(w, "{}", impl_header)?; + for implementor in concrete { + render_implementor(cx, implementor, w, &implementor_dups)?; + } + write!(w, "</ul>")?; - for implementor in local { - write!(w, "<li>")?; - if let Some(l) = (Item { cx, item: &implementor.impl_item }).src_href() { - write!(w, "<div class='out-of-band'>")?; - write!(w, "<a class='srclink' href='{}' title='{}'>[src]</a>", - l, "goto source code")?; - write!(w, "</div>")?; - } - write!(w, "<code>")?; - // If there's already another implementor that has the same abbridged name, use the - // full path, for example in `std::iter::ExactSizeIterator` - let use_absolute = match implementor.inner_impl().for_ { - clean::ResolvedPath { ref path, is_generic: false, .. } | - clean::BorrowedRef { - type_: box clean::ResolvedPath { ref path, is_generic: false, .. }, - .. 
- } => implementor_dups[path.last_name()].1, - _ => false, - }; - fmt_impl_for_trait_page(&implementor.inner_impl(), w, use_absolute)?; - for it in &implementor.inner_impl().items { - if let clean::TypedefItem(ref tydef, _) = it.inner { - write!(w, "<span class=\"where fmt-newline\"> ")?; - assoc_type(w, it, &vec![], Some(&tydef.type_), AssocItemLink::Anchor(None))?; - write!(w, ";</span>")?; - } + if t.auto { + write!(w, "{}", synthetic_impl_header)?; + for implementor in synthetic { + render_implementor(cx, implementor, w, &implementor_dups)?; } - writeln!(w, "</code></li>")?; + write!(w, "</ul>")?; } + + write!(w, r#"<script type="text/javascript"> + window.inlined_types=new Set();"#)?; + + write!(w, r#"<script type="text/javascript" async + src="{root_path}/implementors/{path}/{ty}.{name}.js"> + </script>"#, + root_path = vec![".."; cx.current.len()].join("/"), + path = if it.def_id.is_local() { + cx.current.join("/") + } else { + let (ref path, _) = cache.external_paths[&it.def_id]; + path[..path.len() - 1].join("/") + }, + ty = it.type_().css_class(), + name = *it.name.as_ref().unwrap())?; } else { // even without any implementations to write in, we still want the heading and list, so the // implementors javascript file pulled in below has somewhere to write the impls into write!(w, "{}", impl_header)?; + write!(w, "</ul>")?; + + write!(w, r#"<script type="text/javascript">window.inlined_types=new Set();</script>"#)?; + + if t.auto { + write!(w, "{}", synthetic_impl_header)?; + write!(w, "</ul>")?; + } } - write!(w, "</ul>")?; + write!(w, r#"<script type="text/javascript" async src="{root_path}/implementors/{path}/{ty}.{name}.js"> </script>"#, @@ -3075,17 +3159,28 @@ fn render_assoc_items(w: &mut fmt::Formatter, }).is_some(); render_deref_methods(w, cx, impl_, containing_item, has_deref_mut)?; } + + let (synthetic, concrete) = traits + .iter() + .partition::<Vec<_>, _>(|t| t.inner_impl().synthetic); + write!(w, " <h2 id='implementations' class='small-section-header'> Trait Implementations<a href='#implementations' class='anchor'></a> </h2> + <div id='implementations-list'> ")?; - for i in &traits { - let did = i.trait_did().unwrap(); - let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods); - render_impl(w, cx, i, assoc_link, - RenderMode::Normal, containing_item.stable_since(), true)?; - } + render_impls(cx, w, concrete, containing_item)?; + write!(w, "</div>")?; + + write!(w, " + <h2 id='synthetic-implementations' class='small-section-header'> + Auto Trait Implementations<a href='#synthetic-implementations' class='anchor'></a> + </h2> + <div id='synthetic-implementations-list'> + ")?; + render_impls(cx, w, synthetic, containing_item)?; + write!(w, "</div>")?; } Ok(()) } @@ -3586,32 +3681,48 @@ fn sidebar_assoc_items(it: &clean::Item) -> String { } } } - let mut links = HashSet::new(); - let ret = v.iter() - .filter_map(|i| { - let is_negative_impl = is_negative_impl(i.inner_impl()); - if let Some(ref i) = i.inner_impl().trait_ { - let i_display = format!("{:#}", i); - let out = Escape(&i_display); - let encoded = small_url_encode(&format!("{:#}", i)); - let generated = format!("<a href=\"#impl-{}\">{}{}</a>", - encoded, - if is_negative_impl { "!" 
} else { "" }, - out); - if !links.contains(&generated) && links.insert(generated.clone()) { - Some(generated) + let format_impls = |impls: Vec<&Impl>| { + let mut links = HashSet::new(); + impls.iter() + .filter_map(|i| { + let is_negative_impl = is_negative_impl(i.inner_impl()); + if let Some(ref i) = i.inner_impl().trait_ { + let i_display = format!("{:#}", i); + let out = Escape(&i_display); + let encoded = small_url_encode(&format!("{:#}", i)); + let generated = format!("<a href=\"#impl-{}\">{}{}</a>", + encoded, + if is_negative_impl { "!" } else { "" }, + out); + if links.insert(generated.clone()) { + Some(generated) + } else { + None + } } else { None } - } else { - None - } - }) - .collect::<String>(); - if !ret.is_empty() { + }) + .collect::<String>() + }; + + let (synthetic, concrete) = v + .iter() + .partition::<Vec<_>, _>(|i| i.inner_impl().synthetic); + + let concrete_format = format_impls(concrete); + let synthetic_format = format_impls(synthetic); + + if !concrete_format.is_empty() { out.push_str("<a class=\"sidebar-title\" href=\"#implementations\">\ Trait Implementations</a>"); - out.push_str(&format!("<div class=\"sidebar-links\">{}</div>", ret)); + out.push_str(&format!("<div class=\"sidebar-links\">{}</div>", concrete_format)); + } + + if !synthetic_format.is_empty() { + out.push_str("<a class=\"sidebar-title\" href=\"#synthetic-implementations\">\ + Auto Trait Implementations</a>"); + out.push_str(&format!("<div class=\"sidebar-links\">{}</div>", synthetic_format)); } } } @@ -3734,7 +3845,7 @@ fn sidebar_trait(fmt: &mut fmt::Formatter, it: &clean::Item, if let Some(implementors) = c.implementors.get(&it.def_id) { let res = implementors.iter() .filter(|i| i.inner_impl().for_.def_id() - .map_or(false, |d| !c.paths.contains_key(&d))) + .map_or(false, |d| !c.paths.contains_key(&d))) .filter_map(|i| { match extract_for_impl_name(&i.impl_item) { Some((ref name, ref url)) => { @@ -3755,6 +3866,10 @@ fn sidebar_trait(fmt: &mut fmt::Formatter, it: &clean::Item, } sidebar.push_str("<a class=\"sidebar-title\" href=\"#implementors\">Implementors</a>"); + if t.auto { + sidebar.push_str("<a class=\"sidebar-title\" \ + href=\"#synthetic-implementors\">Auto Implementors</a>"); + } sidebar.push_str(&sidebar_assoc_items(it)); @@ -3969,6 +4084,66 @@ fn get_index_type(clean_type: &clean::Type) -> Type { t } +/// Returns a list of all paths used in the type. +/// This is used to help deduplicate imported impls +/// for reexported types. If any of the contained +/// types are re-exported, we don't use the corresponding +/// entry from the js file, as inlining will have already +/// picked up the impl +fn collect_paths_for_type(first_ty: clean::Type) -> Vec<String> { + let mut out = Vec::new(); + let mut visited = FxHashSet(); + let mut work = VecDeque::new(); + let cache = cache(); + + work.push_back(first_ty); + + while let Some(ty) = work.pop_front() { + if !visited.insert(ty.clone()) { + continue; + } + + match ty { + clean::Type::ResolvedPath { did, .. 
} => { + let get_extern = || cache.external_paths.get(&did).map(|s| s.0.clone()); + let fqp = cache.exact_paths.get(&did).cloned().or_else(get_extern); + + match fqp { + Some(path) => { + out.push(path.join("::")); + }, + _ => {} + }; + + }, + clean::Type::Tuple(tys) => { + work.extend(tys.into_iter()); + }, + clean::Type::Slice(ty) => { + work.push_back(*ty); + } + clean::Type::Array(ty, _) => { + work.push_back(*ty); + }, + clean::Type::Unique(ty) => { + work.push_back(*ty); + }, + clean::Type::RawPointer(_, ty) => { + work.push_back(*ty); + }, + clean::Type::BorrowedRef { type_, .. } => { + work.push_back(*type_); + }, + clean::Type::QPath { self_type, trait_, .. } => { + work.push_back(*self_type); + work.push_back(*trait_); + }, + _ => {} + } + }; + out +} + fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option<String> { match *clean_type { clean::ResolvedPath { ref path, .. } => { diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index f688be89bee..5d3f6e84789 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -1563,14 +1563,29 @@ window.initSidebarItems = initSidebarItems; window.register_implementors = function(imp) { - var list = document.getElementById('implementors-list'); + var implementors = document.getElementById('implementors-list'); + var synthetic_implementors = document.getElementById('synthetic-implementors-list'); var libs = Object.getOwnPropertyNames(imp); for (var i = 0; i < libs.length; ++i) { if (libs[i] === currentCrate) { continue; } var structs = imp[libs[i]]; for (var j = 0; j < structs.length; ++j) { + var struct = structs[j]; + var list = struct.synthetic ? synthetic_implementors : implementors; + + var bail = false; + for (var k = 0; k < struct.types.length; k++) { + if (window.inlined_types.has(struct.types[k])) { + bail = true; + break; + } + } + if (bail) { + continue; + } + var code = document.createElement('code'); - code.innerHTML = structs[j]; + code.innerHTML = struct.text; var x = code.getElementsByTagName('a'); for (var k = 0; k < x.length; k++) { diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 825558648e1..033988fa9d9 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -24,6 +24,7 @@ #![feature(test)] #![feature(unicode)] #![feature(vec_remove_item)] +#![feature(entry_and_modify)] extern crate arena; extern crate getopts; @@ -549,7 +550,8 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R { let (mut krate, renderinfo) = core::run_core(paths, cfgs, externs, Input::File(cratefile), triple, maybe_sysroot, - display_warnings, force_unstable_if_unmarked); + display_warnings, crate_name.clone(), + force_unstable_if_unmarked); info!("finished with rustc"); diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 3b882827c61..9dc532953c8 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -24,12 +24,12 @@ use rustc::hir::def_id::{DefId, LOCAL_CRATE}; use rustc::middle::cstore::{LoadedMacro, CrateStore}; use rustc::middle::privacy::AccessLevel; use rustc::ty::Visibility; -use rustc::util::nodemap::FxHashSet; +use rustc::util::nodemap::{FxHashSet, FxHashMap}; use rustc::hir; use core; -use clean::{self, AttributesExt, NestedAttributesExt}; +use clean::{self, AttributesExt, NestedAttributesExt, def_id_to_path}; use doctree::*; // looks to me like the first two of these are actually @@ -41,7 +41,7 @@ use doctree::*; // framework from syntax? 
pub struct RustdocVisitor<'a, 'tcx: 'a, 'rcx: 'a> { - cstore: &'a CrateStore, + pub cstore: &'a CrateStore, pub module: Module, pub attrs: hir::HirVec<ast::Attribute>, pub cx: &'a core::DocContext<'a, 'tcx, 'rcx>, @@ -50,6 +50,7 @@ pub struct RustdocVisitor<'a, 'tcx: 'a, 'rcx: 'a> { /// Is the current module and all of its parents public? inside_public_path: bool, reexported_macros: FxHashSet<DefId>, + exact_paths: Option<FxHashMap<DefId, Vec<String>>> } impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { @@ -66,10 +67,21 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { inlining: false, inside_public_path: true, reexported_macros: FxHashSet(), + exact_paths: Some(FxHashMap()), cstore, } } + fn store_path(&mut self, did: DefId) { + // We can't use the entry api, as that keeps the mutable borrow of self active + // when we try to use cx + let exact_paths = self.exact_paths.as_mut().unwrap(); + if exact_paths.get(&did).is_none() { + let path = def_id_to_path(self.cx, did, self.cx.crate_name.clone()); + exact_paths.insert(did, path); + } + } + fn stability(&self, id: ast::NodeId) -> Option<attr::Stability> { self.cx.tcx.hir.opt_local_def_id(id) .and_then(|def_id| self.cx.tcx.lookup_stability(def_id)).cloned() @@ -94,6 +106,8 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { krate.exported_macros.iter().map(|def| self.visit_local_macro(def)).collect(); self.module.macros.extend(macro_exports); self.module.is_crate = true; + + self.cx.renderinfo.borrow_mut().exact_paths = self.exact_paths.take().unwrap(); } pub fn visit_variant_data(&mut self, item: &hir::Item, @@ -371,6 +385,12 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { renamed: Option<ast::Name>, om: &mut Module) { debug!("Visiting item {:?}", item); let name = renamed.unwrap_or(item.name); + + if item.vis == hir::Public { + let def_id = self.cx.tcx.hir.local_def_id(item.id); + self.store_path(def_id); + } + match item.node { hir::ItemForeignMod(ref fm) => { // If inlining we only want to include public functions. 
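[Editor's note] Before the test changes below, one more note on the render side: `collect_paths_for_type` walks a type with a breadth-first worklist and records every resolved path it mentions, which is what lets `register_implementors` in main.js skip entries whose types were already inlined. A self-contained sketch of that traversal shape, with a toy `Type` enum standing in for `clean::Type` (the real function also keeps a visited set and resolves paths through the cache):

    // Toy Type enum standing in for clean::Type; the traversal mirrors
    // collect_paths_for_type: pop a type, record its path if it has one, and
    // queue up any types nested inside it.
    use std::collections::VecDeque;

    enum Type {
        Path(String),
        Ref(Box<Type>),
        Tuple(Vec<Type>),
    }

    fn collect_paths(first: Type) -> Vec<String> {
        let mut out = Vec::new();
        let mut work = VecDeque::new();
        work.push_back(first);

        while let Some(ty) = work.pop_front() {
            match ty {
                Type::Path(p) => out.push(p),
                Type::Ref(inner) => work.push_back(*inner),
                Type::Tuple(tys) => work.extend(tys),
            }
        }
        out
    }

    fn main() {
        let ty = Type::Ref(Box::new(Type::Tuple(vec![
            Type::Path("collections::string::String".to_string()),
            Type::Path("alloc::vec::Vec".to_string()),
        ])));
        assert_eq!(collect_paths(ty), vec!["collections::string::String", "alloc::vec::Vec"]);
    }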
diff --git a/src/test/rustdoc/duplicate_impls/issue-33054.rs b/src/test/rustdoc/duplicate_impls/issue-33054.rs index df6ebcae107..43a425d4c5e 100644 --- a/src/test/rustdoc/duplicate_impls/issue-33054.rs +++ b/src/test/rustdoc/duplicate_impls/issue-33054.rs @@ -11,7 +11,8 @@ // @has issue_33054/impls/struct.Foo.html // @has - '//code' 'impl Foo' // @has - '//code' 'impl Bar for Foo' -// @count - '//*[@class="impl"]' 2 +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 +// @count - '//*[@id="main"]/*[@class="impl"]' 1 // @has issue_33054/impls/bar/trait.Bar.html // @has - '//code' 'impl Bar for Foo' // @count - '//*[@class="struct"]' 1 diff --git a/src/test/rustdoc/issue-21474.rs b/src/test/rustdoc/issue-21474.rs index 36f160acf1c..553bbeb0cff 100644 --- a/src/test/rustdoc/issue-21474.rs +++ b/src/test/rustdoc/issue-21474.rs @@ -17,5 +17,5 @@ mod inner { pub trait Blah { } // @count issue_21474/struct.What.html \ -// '//*[@class="impl"]' 1 +// '//*[@id="implementations-list"]/*[@class="impl"]' 1 pub struct What; diff --git a/src/test/rustdoc/issue-45584.rs b/src/test/rustdoc/issue-45584.rs index 6d6ae3dc94a..b0e64557be2 100644 --- a/src/test/rustdoc/issue-45584.rs +++ b/src/test/rustdoc/issue-45584.rs @@ -14,12 +14,12 @@ pub trait Bar<T, U> {} // @has 'foo/struct.Foo1.html' pub struct Foo1; -// @count - '//*[@class="impl"]' 1 +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 // @has - '//*[@class="impl"]' "impl Bar<Foo1, &'static Foo1> for Foo1" impl Bar<Foo1, &'static Foo1> for Foo1 {} // @has 'foo/struct.Foo2.html' pub struct Foo2; -// @count - '//*[@class="impl"]' 1 +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 // @has - '//*[@class="impl"]' "impl Bar<&'static Foo2, Foo2> for u8" impl Bar<&'static Foo2, Foo2> for u8 {} diff --git a/src/test/rustdoc/synthetic_auto/basic.rs b/src/test/rustdoc/synthetic_auto/basic.rs new file mode 100644 index 00000000000..8ff84d11a50 --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/basic.rs @@ -0,0 +1,18 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// @has basic/struct.Foo.html +// @has - '//code' 'impl<T> Send for Foo<T> where T: Send' +// @has - '//code' 'impl<T> Sync for Foo<T> where T: Sync' +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 0 +// @count - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]' 2 +pub struct Foo<T> { + field: T, +} diff --git a/src/test/rustdoc/synthetic_auto/complex.rs b/src/test/rustdoc/synthetic_auto/complex.rs new file mode 100644 index 00000000000..896d49ca79a --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/complex.rs @@ -0,0 +1,52 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +mod foo { + pub trait MyTrait<'a> { + type MyItem: ?Sized; + } + + pub struct Inner<'a, Q, R: ?Sized> { + field: Q, + field3: &'a u8, + my_foo: Foo<Q>, + field2: R, + } + + pub struct Outer<'a, T, K: ?Sized> { + my_inner: Inner<'a, T, K>, + } + + pub struct Foo<T> { + myfield: T, + } +} + +// @has complex/struct.NotOuter.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'a, T, K: \ +// ?Sized> Send for NotOuter<'a, T, K> where 'a: 'static, K: for<'b> Fn((&'b bool, &'a u8)) \ +// -> &'b i8, <T as MyTrait<'a>>::MyItem: Copy, T: MyTrait<'a>" + +pub use foo::{Foo, Inner as NotInner, MyTrait as NotMyTrait, Outer as NotOuter}; + +unsafe impl<T> Send for Foo<T> +where + T: NotMyTrait<'static>, +{ +} + +unsafe impl<'a, Q, R: ?Sized> Send for NotInner<'a, Q, R> +where + Q: NotMyTrait<'a>, + <Q as NotMyTrait<'a>>::MyItem: Copy, + /* for<'b> */ R: for<'b> Fn((&'b bool, &'a u8)) -> &'b i8, + Foo<Q>: Send, +{ +} diff --git a/src/test/rustdoc/synthetic_auto/lifetimes.rs b/src/test/rustdoc/synthetic_auto/lifetimes.rs new file mode 100644 index 00000000000..2f92627f954 --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/lifetimes.rs @@ -0,0 +1,28 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +pub struct Inner<'a, T: 'a> { + field: &'a T, +} + +unsafe impl<'a, T> Send for Inner<'a, T> +where + 'a: 'static, + T: for<'b> Fn(&'b bool) -> &'a u8, +{} + +// @has lifetimes/struct.Foo.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Send \ +// for Foo<'c, K> where 'c: 'static, K: for<'b> Fn(&'b bool) -> &'c u8" +// +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Sync \ +// for Foo<'c, K> where K: Sync" +pub struct Foo<'c, K: 'c> { + inner_field: Inner<'c, K>, +} diff --git a/src/test/rustdoc/synthetic_auto/manual.rs b/src/test/rustdoc/synthetic_auto/manual.rs new file mode 100644 index 00000000000..d81e6309dff --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/manual.rs @@ -0,0 +1,24 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// @has manual/struct.Foo.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' 'impl<T> Sync for \ +// Foo<T> where T: Sync' +// +// @has - '//*[@id="implementations-list"]/*[@class="impl"]/*/code' \ +// 'impl<T> Send for Foo<T>' +// +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 +// @count - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]' 1 +pub struct Foo<T> { + field: T, +} + +unsafe impl<T> Send for Foo<T> {} diff --git a/src/test/rustdoc/synthetic_auto/negative.rs b/src/test/rustdoc/synthetic_auto/negative.rs new file mode 100644 index 00000000000..ec9cb710f1f --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/negative.rs @@ -0,0 +1,23 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub struct Inner<T: Copy> { + field: *mut T, +} + +// @has negative/struct.Outer.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<T> !Send for \ +// Outer<T>" +// +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<T> \ +// !Sync for Outer<T>" +pub struct Outer<T: Copy> { + inner_field: Inner<T>, +} diff --git a/src/test/rustdoc/synthetic_auto/nested.rs b/src/test/rustdoc/synthetic_auto/nested.rs new file mode 100644 index 00000000000..1f33a8b13cb --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/nested.rs @@ -0,0 +1,28 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +pub struct Inner<T> { + field: T, +} + +unsafe impl<T> Send for Inner<T> +where + T: Copy, +{ +} + +// @has nested/struct.Foo.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' 'impl<T> Send for \ +// Foo<T> where T: Copy' +// +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' \ +// 'impl<T> Sync for Foo<T> where T: Sync' +pub struct Foo<T> { + inner_field: Inner<T>, +} diff --git a/src/test/rustdoc/synthetic_auto/no-redundancy.rs b/src/test/rustdoc/synthetic_auto/no-redundancy.rs new file mode 100644 index 00000000000..0b37f2ed317 --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/no-redundancy.rs @@ -0,0 +1,26 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +pub struct Inner<T> { + field: T, +} + +unsafe impl<T> Send for Inner<T> +where + T: Copy + Send, +{ +} + +// @has no_redundancy/struct.Outer.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<T> Send for \ +// Outer<T> where T: Copy + Send" +pub struct Outer<T> { + inner_field: Inner<T>, +} diff --git a/src/test/rustdoc/synthetic_auto/project.rs b/src/test/rustdoc/synthetic_auto/project.rs new file mode 100644 index 00000000000..e1b8621ff6d --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/project.rs @@ -0,0 +1,43 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub struct Inner<'a, T: 'a> { + field: &'a T, +} + +trait MyTrait { + type MyItem; +} + +trait OtherTrait {} + +unsafe impl<'a, T> Send for Inner<'a, T> +where + 'a: 'static, + T: MyTrait<MyItem = bool>, +{ +} +unsafe impl<'a, T> Sync for Inner<'a, T> +where + 'a: 'static, + T: MyTrait, + <T as MyTrait>::MyItem: OtherTrait, +{ +} + +// @has project/struct.Foo.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Send \ +// for Foo<'c, K> where 'c: 'static, K: MyTrait<MyItem = bool>" +// +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Sync \ +// for Foo<'c, K> where 'c: 'static, K: MyTrait, <K as MyTrait>::MyItem: OtherTrait" +pub struct Foo<'c, K: 'c> { + inner_field: Inner<'c, K>, +} |
