-rw-r--r--  src/doc/rustdoc/src/documentation-tests.md | 14
-rw-r--r--  src/doc/rustdoc/src/the-doc-attribute.md | 20
-rwxr-xr-x  src/etc/indenter | 2
-rwxr-xr-x  src/etc/sugarise-doc-comments.py | 4
-rw-r--r--  src/etc/test-float-parse/runtests.py | 6
-rw-r--r--  src/libproc_macro/lib.rs | 9
-rw-r--r--  src/libproc_macro/quote.rs | 2
-rw-r--r--  src/librustc/infer/error_reporting/mod.rs | 9
-rw-r--r--  src/librustc/infer/glb.rs | 28
-rw-r--r--  src/librustc/infer/higher_ranked/mod.rs | 9
-rw-r--r--  src/librustc/infer/lub.rs | 28
-rw-r--r--  src/librustc/infer/mod.rs | 5
-rw-r--r--  src/librustc/ty/error.rs | 11
-rw-r--r--  src/librustc/ty/fold.rs | 9
-rw-r--r--  src/librustc/ty/structural_impls.rs | 5
-rw-r--r--  src/librustc_driver/test.rs | 212
-rw-r--r--  src/librustc_mir/borrow_check.rs | 121
-rw-r--r--  src/librustc_trans/back/link.rs | 53
-rw-r--r--  src/librustdoc/html/static/main.js | 88
-rwxr-xr-x  src/libstd_unicode/unicode.py | 37
-rw-r--r--  src/test/compile-fail/issue-36082.rs | 23
-rw-r--r--  src/test/run-pass/lub-glb-with-unbound-infer-var.rs | 24
-rw-r--r--  src/test/ui-fulldeps/proc-macro/auxiliary/three-equals.rs | 4
-rw-r--r--  src/test/ui/lub-glb/old-lub-glb-hr.rs | 36
-rw-r--r--  src/test/ui/lub-glb/old-lub-glb-hr.stderr | 22
-rw-r--r--  src/test/ui/lub-glb/old-lub-glb-object.rs | 38
-rw-r--r--  src/test/ui/lub-glb/old-lub-glb-object.stderr | 22
27 files changed, 490 insertions, 351 deletions
diff --git a/src/doc/rustdoc/src/documentation-tests.md b/src/doc/rustdoc/src/documentation-tests.md
index eb3e6a9dd50..9c6b86d6ddc 100644
--- a/src/doc/rustdoc/src/documentation-tests.md
+++ b/src/doc/rustdoc/src/documentation-tests.md
@@ -38,17 +38,19 @@ function! Forcing you to write `main` for every example, no matter how small,
 adds friction. So `rustdoc` processes your examples slightly before
 running them. Here's the full algorithm rustdoc uses to preprocess examples:
 
-1. Any leading `#![foo]` attributes are left intact as crate attributes.
-2. Some common `allow` attributes are inserted, including
+1. Some common `allow` attributes are inserted, including
    `unused_variables`, `unused_assignments`, `unused_mut`,
    `unused_attributes`, and `dead_code`. Small examples often trigger
    these lints.
-3. If the example does not contain `extern crate`, then `extern crate
+2. Any attributes specified with `#![doc(test(attr(...)))]` are added.
+3. Any leading `#![foo]` attributes are left intact as crate attributes.
+4. If the example does not contain `extern crate`, and
+   `#![doc(test(no_crate_inject))]` was not specified, then `extern crate
    <mycrate>;` is inserted (note the lack of `#[macro_use]`).
-4. Finally, if the example does not contain `fn main`, the remainder of the
+5. Finally, if the example does not contain `fn main`, the remainder of the
    text is wrapped in `fn main() { your_code }`.
 
-For more about that caveat in rule 3, see "Documeting Macros" below.
+For more about that caveat in rule 4, see "Documenting Macros" below.
 
 ## Hiding portions of the example
 
@@ -261,4 +263,4 @@ are added.
 The `no_run` attribute will compile your code, but not run it. This is
 important for examples such as "Here's how to retrieve a web page,"
 which you would want to ensure compiles, but might be run in a test
-environment that has no network access.
\ No newline at end of file
+environment that has no network access.
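
To make the renumbered preprocessing steps above concrete, the sketch below shows roughly what rustdoc ends up compiling for a two-line example documented in a crate that also sets `#![doc(test(attr(deny(warnings))))]`. The crate name `my_crate` and the exact ordering of the injected attributes are illustrative assumptions rather than rustdoc's literal output:

```rust
// The author writes only:
//     let x = 5;
//     println!("{}", x);
// rustdoc then compiles approximately the following:

#![allow(unused_variables, unused_assignments, unused_mut,
         unused_attributes, dead_code)]   // step 1: common `allow`s
#![deny(warnings)]                        // step 2: from #![doc(test(attr(...)))]
// step 3: any leading `#![foo]` attributes from the example also stay up here
// extern crate my_crate;                 // step 4: injected unless the example has
//                                        //         one or no_crate_inject is set
fn main() {                               // step 5: wrap the rest in `fn main`
    let x = 5;
    println!("{}", x);
}
```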
diff --git a/src/doc/rustdoc/src/the-doc-attribute.md b/src/doc/rustdoc/src/the-doc-attribute.md
index 978d7656bdd..aadd72d1c90 100644
--- a/src/doc/rustdoc/src/the-doc-attribute.md
+++ b/src/doc/rustdoc/src/the-doc-attribute.md
@@ -103,6 +103,26 @@ to it in the docs. But if you include this:
 
 it will not.
 
+### `test(no_crate_inject)`
+
+By default, `rustdoc` will automatically add a line with `extern crate my_crate;` into each doctest.
+But if you include this:
+
+```rust,ignore
+#![doc(test(no_crate_inject))]
+```
+
+it will not.
+
+### `test(attr(...))`
+
+This form of the `doc` attribute allows you to add arbitrary attributes to all your doctests. For
+example, if you want your doctests to fail if they produce any warnings, you could add this:
+
+```rust,ignore
+#![doc(test(attr(deny(warnings))))]
+```
+
 ## At the item level
 
 These forms of the `#[doc]` attribute are used on individual items, to control how
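
Both of the new forms can be combined at the crate root. A hypothetical `lib.rs` that wants warning-free doctests and no automatic `extern crate` injection (not taken from this patch) might begin:

```rust
// Doctests in this crate are compiled with `#![deny(warnings)]`, and
// rustdoc does not insert an `extern crate` line into them.
#![doc(test(no_crate_inject))]
#![doc(test(attr(deny(warnings))))]

//! Crate-level documentation whose examples are affected by the
//! `doc(test(...))` attributes above.
```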
diff --git a/src/etc/indenter b/src/etc/indenter
index b3eed6a1443..21bfc448ae2 100755
--- a/src/etc/indenter
+++ b/src/etc/indenter
@@ -13,7 +13,7 @@ while True:
     if more_re.match(line):
         indent += 1
 
-    print "%03d %s%s" % (indent, " " * indent, line.strip())
+    print("%03d %s%s" % (indent, " " * indent, line.strip()))
 
     if less_re.match(line):
         indent -= 1
diff --git a/src/etc/sugarise-doc-comments.py b/src/etc/sugarise-doc-comments.py
index 62870f3ed47..ac2223f4ace 100755
--- a/src/etc/sugarise-doc-comments.py
+++ b/src/etc/sugarise-doc-comments.py
@@ -50,11 +50,11 @@ def block_trim(s):
         lns = lns[:-1]
 
     # remove leading horizontal whitespace
-    n = sys.maxint
+    n = sys.maxsize
     for ln in lns:
         if ln.strip():
             n = min(n, len(re.search('^\s*', ln).group()))
-    if n != sys.maxint:
+    if n != sys.maxsize:
         lns = [ln[n:] for ln in lns]
 
     # strip trailing whitespace
diff --git a/src/etc/test-float-parse/runtests.py b/src/etc/test-float-parse/runtests.py
index bc141877b37..75c92b9b15c 100644
--- a/src/etc/test-float-parse/runtests.py
+++ b/src/etc/test-float-parse/runtests.py
@@ -97,11 +97,15 @@ from collections import namedtuple
 from subprocess import Popen, check_call, PIPE
 from glob import glob
 import multiprocessing
-import Queue
 import threading
 import ctypes
 import binascii
 
+try:  # Python 3
+    import queue as Queue
+except ImportError:  # Python 2
+    import Queue
+
 NUM_WORKERS = 2
 UPDATE_EVERY_N = 50000
 INF = namedtuple('INF', '')()
diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs
index 8a400f3e636..22f788e34ec 100644
--- a/src/libproc_macro/lib.rs
+++ b/src/libproc_macro/lib.rs
@@ -177,9 +177,10 @@ impl TokenStream {
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub struct Span(syntax_pos::Span);
 
-#[unstable(feature = "proc_macro", issue = "38356")]
-impl Default for Span {
-    fn default() -> Span {
+impl Span {
+    /// A span that resolves at the macro definition site.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn def_site() -> Span {
         ::__internal::with_sess(|(_, mark)| {
             let call_site = mark.expn_info().unwrap().call_site;
             Span(call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark)))
@@ -351,7 +352,7 @@ pub struct TokenTree {
 #[unstable(feature = "proc_macro", issue = "38356")]
 impl From<TokenNode> for TokenTree {
     fn from(kind: TokenNode) -> TokenTree {
-        TokenTree { span: Span::default(), kind: kind }
+        TokenTree { span: Span::def_site(), kind: kind }
     }
 }
 
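For context on how the renamed constructor is used from a macro author's point of view, here is a minimal sketch of a function-like procedural macro that emits a token with a definition-site span. It assumes the unstable `proc_macro` API as it stood around this change (nightly only, in a crate built with `crate-type = ["proc-macro"]`); `Term::intern`, `TokenNode::Term`, and the `#[proc_macro]` attribute come from that era's API and are not shown in this patch:

```rust
#![feature(proc_macro)]
extern crate proc_macro;

use proc_macro::{Span, Term, TokenNode, TokenStream, TokenTree};

/// Expands to a single identifier token that resolves at the macro
/// definition site, mirroring the `From<TokenNode>` impl above but with
/// the span spelled out explicitly.
#[proc_macro]
pub fn def_site_ident(_input: TokenStream) -> TokenStream {
    let tree = TokenTree {
        span: Span::def_site(),
        kind: TokenNode::Term(Term::intern("answer")),
    };
    // A `TokenStream` can be collected from individual token trees.
    std::iter::once(tree).collect()
}
```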
diff --git a/src/libproc_macro/quote.rs b/src/libproc_macro/quote.rs
index 26f88ad6bf6..8b5add1a0f0 100644
--- a/src/libproc_macro/quote.rs
+++ b/src/libproc_macro/quote.rs
@@ -168,7 +168,7 @@ impl Quote for Term {
 
 impl Quote for Span {
     fn quote(self) -> TokenStream {
-        quote!(::Span::default())
+        quote!(::Span::def_site())
     }
 }
 
diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs
index d22eb20e70a..4f36193e197 100644
--- a/src/librustc/infer/error_reporting/mod.rs
+++ b/src/librustc/infer/error_reporting/mod.rs
@@ -762,9 +762,12 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
             }
         }
 
-        self.note_error_origin(diag, &cause);
         self.check_and_note_conflicting_crates(diag, terr, span);
         self.tcx.note_and_explain_type_err(diag, terr, span);
+
+        // It reads better to have the error origin as the final
+        // thing.
+        self.note_error_origin(diag, &cause);
     }
 
     pub fn report_and_explain_type_error(&self,
@@ -772,6 +775,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
                                          terr: &TypeError<'tcx>)
                                          -> DiagnosticBuilder<'tcx>
     {
+        debug!("report_and_explain_type_error(trace={:?}, terr={:?})",
+               trace,
+               terr);
+
         let span = trace.cause.span;
         let failure_str = trace.cause.as_failure_str();
         let mut diag = match trace.cause.code {
diff --git a/src/librustc/infer/glb.rs b/src/librustc/infer/glb.rs
index 8b42314ed97..fd14e0e40e2 100644
--- a/src/librustc/infer/glb.rs
+++ b/src/librustc/infer/glb.rs
@@ -15,6 +15,7 @@ use super::Subtype;
 
 use traits::ObligationCause;
 use ty::{self, Ty, TyCtxt};
+use ty::error::TypeError;
 use ty::relate::{Relate, RelateResult, TypeRelation};
 
 /// "Greatest lower bound" (common subtype)
@@ -74,7 +75,32 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx>
                   -> RelateResult<'tcx, ty::Binder<T>>
         where T: Relate<'tcx>
     {
-        self.fields.higher_ranked_glb(a, b, self.a_is_expected)
+        debug!("binders(a={:?}, b={:?})", a, b);
+        let was_error = self.infcx().probe(|_snapshot| {
+            // Subtle: use a fresh combine-fields here because we recover
+            // from Err. Doing otherwise could propagate obligations out
+            // through our `self.obligations` field.
+            self.infcx()
+                .combine_fields(self.fields.trace.clone(), self.fields.param_env)
+                .higher_ranked_glb(a, b, self.a_is_expected)
+                .is_err()
+        });
+        debug!("binders: was_error={:?}", was_error);
+
+        // When higher-ranked types are involved, computing the LUB is
+        // very challenging, so we switch to invariance. This is obviously
+        // overly conservative, but it works well enough in practice.
+        match self.relate_with_variance(ty::Variance::Invariant, a, b) {
+            Ok(_) => Ok(a.clone()),
+            Err(err) => {
+                debug!("binders: error occurred, was_error={:?}", was_error);
+                if !was_error {
+                    Err(TypeError::OldStyleLUB(Box::new(err)))
+                } else {
+                    Err(err)
+                }
+            }
+        }
     }
 }
 
diff --git a/src/librustc/infer/higher_ranked/mod.rs b/src/librustc/infer/higher_ranked/mod.rs
index c49b3b4b9c8..57e237fb913 100644
--- a/src/librustc/infer/higher_ranked/mod.rs
+++ b/src/librustc/infer/higher_ranked/mod.rs
@@ -19,6 +19,7 @@ use super::{CombinedSnapshot,
 use super::combine::CombineFields;
 use super::region_constraints::{TaintDirections};
 
+use std::collections::BTreeMap;
 use ty::{self, TyCtxt, Binder, TypeFoldable};
 use ty::error::TypeError;
 use ty::relate::{Relate, RelateResult, TypeRelation};
@@ -246,7 +247,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
                                              snapshot: &CombinedSnapshot,
                                              debruijn: ty::DebruijnIndex,
                                              new_vars: &[ty::RegionVid],
-                                             a_map: &FxHashMap<ty::BoundRegion, ty::Region<'tcx>>,
+                                             a_map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
                                              r0: ty::Region<'tcx>)
                                              -> ty::Region<'tcx> {
             // Regions that pre-dated the LUB computation stay as they are.
@@ -342,7 +343,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
                                              snapshot: &CombinedSnapshot,
                                              debruijn: ty::DebruijnIndex,
                                              new_vars: &[ty::RegionVid],
-                                             a_map: &FxHashMap<ty::BoundRegion, ty::Region<'tcx>>,
+                                             a_map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
                                              a_vars: &[ty::RegionVid],
                                              b_vars: &[ty::RegionVid],
                                              r0: ty::Region<'tcx>)
@@ -411,7 +412,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
 
         fn rev_lookup<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                                       span: Span,
-                                      a_map: &FxHashMap<ty::BoundRegion, ty::Region<'tcx>>,
+                                      a_map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
                                       r: ty::Region<'tcx>) -> ty::Region<'tcx>
         {
             for (a_br, a_r) in a_map {
@@ -434,7 +435,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
 }
 
 fn var_ids<'a, 'gcx, 'tcx>(fields: &CombineFields<'a, 'gcx, 'tcx>,
-                           map: &FxHashMap<ty::BoundRegion, ty::Region<'tcx>>)
+                           map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
                            -> Vec<ty::RegionVid> {
     map.iter()
        .map(|(_, &r)| match *r {
diff --git a/src/librustc/infer/lub.rs b/src/librustc/infer/lub.rs
index 4a2a7a6bdfe..55c7eef607b 100644
--- a/src/librustc/infer/lub.rs
+++ b/src/librustc/infer/lub.rs
@@ -15,6 +15,7 @@ use super::Subtype;
 
 use traits::ObligationCause;
 use ty::{self, Ty, TyCtxt};
+use ty::error::TypeError;
 use ty::relate::{Relate, RelateResult, TypeRelation};
 
 /// "Least upper bound" (common supertype)
@@ -74,7 +75,32 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx>
                   -> RelateResult<'tcx, ty::Binder<T>>
         where T: Relate<'tcx>
     {
-        self.fields.higher_ranked_lub(a, b, self.a_is_expected)
+        debug!("binders(a={:?}, b={:?})", a, b);
+        let was_error = self.infcx().probe(|_snapshot| {
+            // Subtle: use a fresh combine-fields here because we recover
+            // from Err. Doing otherwise could propagate obligations out
+            // through our `self.obligations` field.
+            self.infcx()
+                .combine_fields(self.fields.trace.clone(), self.fields.param_env)
+                .higher_ranked_lub(a, b, self.a_is_expected)
+                .is_err()
+        });
+        debug!("binders: was_error={:?}", was_error);
+
+        // When higher-ranked types are involved, computing the LUB is
+        // very challenging, so we switch to invariance. This is obviously
+        // overly conservative, but it works well enough in practice.
+        match self.relate_with_variance(ty::Variance::Invariant, a, b) {
+            Ok(_) => Ok(a.clone()),
+            Err(err) => {
+                debug!("binders: error occurred, was_error={:?}", was_error);
+                if !was_error {
+                    Err(TypeError::OldStyleLUB(Box::new(err)))
+                } else {
+                    Err(err)
+                }
+            }
+        }
     }
 }
 
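The user-visible effect of this fallback (in both `lub.rs` above and the matching `glb.rs` change) is easiest to see with an example. The sketch below is modeled on the new `old-lub-glb-hr` UI test listed in the diffstat; the test's exact contents are not part of this excerpt, so this is a reconstruction. The `match` forces a LUB of two `fn` types with different higher-ranked signatures, which the old algorithm accepted and the invariance fallback now rejects, wrapping the failure in `OldStyleLUB` so the error can carry the "previously accepted" note:

```rust
// Expected to be rejected under the new rules: the arms have types
// `fn(&u8, &u8)` and `for<'a> fn(&'a u8, &'a u8)`, and their least
// upper bound is no longer computed by the old higher-ranked LUB code.
fn foo(x: fn(&u8, &u8), y: for<'a> fn(&'a u8, &'a u8)) {
    let _z = match 22 {
        0 => x,
        _ => y,
    };
}

fn main() {}
```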
diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs
index f734ff84f63..4f923f0b249 100644
--- a/src/librustc/infer/mod.rs
+++ b/src/librustc/infer/mod.rs
@@ -31,6 +31,7 @@ use ty::relate::RelateResult;
 use traits::{self, ObligationCause, PredicateObligations, Reveal};
 use rustc_data_structures::unify::{self, UnificationTable};
 use std::cell::{Cell, RefCell, Ref, RefMut};
+use std::collections::BTreeMap;
 use std::fmt;
 use syntax::ast;
 use errors::DiagnosticBuilder;
@@ -184,7 +185,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
 
 /// A map returned by `skolemize_late_bound_regions()` indicating the skolemized
 /// region that each late-bound region was replaced with.
-pub type SkolemizationMap<'tcx> = FxHashMap<ty::BoundRegion, ty::Region<'tcx>>;
+pub type SkolemizationMap<'tcx> = BTreeMap<ty::BoundRegion, ty::Region<'tcx>>;
 
 /// See `error_reporting` module for more details
 #[derive(Clone, Debug)]
@@ -1384,7 +1385,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
         span: Span,
         lbrct: LateBoundRegionConversionTime,
         value: &ty::Binder<T>)
-        -> (T, FxHashMap<ty::BoundRegion, ty::Region<'tcx>>)
+        -> (T, BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
         where T : TypeFoldable<'tcx>
     {
         self.tcx.replace_late_bound_regions(
diff --git a/src/librustc/ty/error.rs b/src/librustc/ty/error.rs
index 5cfa72c0712..228ca76ed9a 100644
--- a/src/librustc/ty/error.rs
+++ b/src/librustc/ty/error.rs
@@ -54,6 +54,8 @@ pub enum TypeError<'tcx> {
     ProjectionBoundsLength(ExpectedFound<usize>),
     TyParamDefaultMismatch(ExpectedFound<type_variable::Default<'tcx>>),
     ExistentialMismatch(ExpectedFound<&'tcx ty::Slice<ty::ExistentialPredicate<'tcx>>>),
+
+    OldStyleLUB(Box<TypeError<'tcx>>),
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
@@ -170,6 +172,9 @@ impl<'tcx> fmt::Display for TypeError<'tcx> {
                 report_maybe_different(f, format!("trait `{}`", values.expected),
                                        format!("trait `{}`", values.found))
             }
+            OldStyleLUB(ref err) => {
+                write!(f, "{}", err)
+            }
         }
     }
 }
@@ -293,6 +298,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                 db.span_note(found.origin_span,
                              "...that also applies to the same type variable here");
             }
+            OldStyleLUB(err) => {
+                db.note("this was previously accepted by the compiler but has been phased out");
+                db.note("for more information, see https://github.com/rust-lang/rust/issues/45852");
+
+                self.note_and_explain_type_err(db, &err, sp);
+            }
             _ => {}
         }
     }
diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs
index 149999e0eee..bee11999223 100644
--- a/src/librustc/ty/fold.rs
+++ b/src/librustc/ty/fold.rs
@@ -43,7 +43,8 @@ use middle::const_val::ConstVal;
 use ty::{self, Binder, Ty, TyCtxt, TypeFlags};
 
 use std::fmt;
-use util::nodemap::{FxHashMap, FxHashSet};
+use std::collections::BTreeMap;
+use util::nodemap::FxHashSet;
 
 /// The TypeFoldable trait is implemented for every type that can be folded.
 /// Basically, every type that has a corresponding method in TypeFolder.
@@ -324,14 +325,14 @@ struct RegionReplacer<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     current_depth: u32,
     fld_r: &'a mut (FnMut(ty::BoundRegion) -> ty::Region<'tcx> + 'a),
-    map: FxHashMap<ty::BoundRegion, ty::Region<'tcx>>
+    map: BTreeMap<ty::BoundRegion, ty::Region<'tcx>>
 }
 
 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     pub fn replace_late_bound_regions<T,F>(self,
         value: &Binder<T>,
         mut f: F)
-        -> (T, FxHashMap<ty::BoundRegion, ty::Region<'tcx>>)
+        -> (T, BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
         where F : FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
               T : TypeFoldable<'tcx>,
     {
@@ -438,7 +439,7 @@ impl<'a, 'gcx, 'tcx> RegionReplacer<'a, 'gcx, 'tcx> {
             tcx,
             current_depth: 1,
             fld_r,
-            map: FxHashMap()
+            map: BTreeMap::default()
         }
     }
 }
diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs
index 5f1448cd1f1..e5c24b4fcf9 100644
--- a/src/librustc/ty/structural_impls.rs
+++ b/src/librustc/ty/structural_impls.rs
@@ -428,7 +428,8 @@ impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> {
             TyParamDefaultMismatch(ref x) => {
                 return tcx.lift(x).map(TyParamDefaultMismatch)
             }
-            ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch)
+            ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch),
+            OldStyleLUB(ref x) => return tcx.lift(x).map(OldStyleLUB),
         })
     }
 }
@@ -1174,6 +1175,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::error::TypeError<'tcx> {
             Sorts(x) => Sorts(x.fold_with(folder)),
             TyParamDefaultMismatch(ref x) => TyParamDefaultMismatch(x.fold_with(folder)),
             ExistentialMismatch(x) => ExistentialMismatch(x.fold_with(folder)),
+            OldStyleLUB(ref x) => OldStyleLUB(x.fold_with(folder)),
         }
     }
 
@@ -1191,6 +1193,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::error::TypeError<'tcx> {
                 b.visit_with(visitor)
             },
             Sorts(x) => x.visit_with(visitor),
+            OldStyleLUB(ref x) => x.visit_with(visitor),
             TyParamDefaultMismatch(ref x) => x.visit_with(visitor),
             ExistentialMismatch(x) => x.visit_with(visitor),
             Mismatch |
diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs
index 9e02065145d..78ce959e5c9 100644
--- a/src/librustc_driver/test.rs
+++ b/src/librustc_driver/test.rs
@@ -353,28 +353,10 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> {
         self.infcx.tcx.mk_imm_ref(r, self.tcx().types.isize)
     }
 
-    pub fn t_rptr_static(&self) -> Ty<'tcx> {
-        self.infcx.tcx.mk_imm_ref(self.infcx.tcx.types.re_static,
-                                  self.tcx().types.isize)
-    }
-
-    pub fn t_rptr_empty(&self) -> Ty<'tcx> {
-        self.infcx.tcx.mk_imm_ref(self.infcx.tcx.types.re_empty,
-                                  self.tcx().types.isize)
-    }
-
     pub fn sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, ()> {
         self.infcx.at(&ObligationCause::dummy(), self.param_env).sub(t1, t2)
     }
 
-    pub fn lub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
-        self.infcx.at(&ObligationCause::dummy(), self.param_env).lub(t1, t2)
-    }
-
-    pub fn glb(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
-        self.infcx.at(&ObligationCause::dummy(), self.param_env).glb(t1, t2)
-    }
-
     /// Checks that `t1 <: t2` is true (this may register additional
     /// region checks).
     pub fn check_sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) {
@@ -399,37 +381,6 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> {
             }
         }
     }
-
-    /// Checks that `LUB(t1,t2) == t_lub`
-    pub fn check_lub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>, t_lub: Ty<'tcx>) {
-        match self.lub(t1, t2) {
-            Ok(InferOk { obligations, value: t }) => {
-                // None of these tests should require nested obligations:
-                assert!(obligations.is_empty());
-
-                self.assert_eq(t, t_lub);
-            }
-            Err(ref e) => panic!("unexpected error in LUB: {}", e),
-        }
-    }
-
-    /// Checks that `GLB(t1,t2) == t_glb`
-    pub fn check_glb(&self, t1: Ty<'tcx>, t2: Ty<'tcx>, t_glb: Ty<'tcx>) {
-        debug!("check_glb(t1={}, t2={}, t_glb={})", t1, t2, t_glb);
-        match self.glb(t1, t2) {
-            Err(e) => panic!("unexpected error computing LUB: {:?}", e),
-            Ok(InferOk { obligations, value: t }) => {
-                // None of these tests should require nested obligations:
-                assert!(obligations.is_empty());
-
-                self.assert_eq(t, t_glb);
-
-                // sanity check for good measure:
-                self.assert_subtype(t, t1);
-                self.assert_subtype(t, t2);
-            }
-        }
-    }
 }
 
 #[test]
@@ -508,169 +459,6 @@ fn sub_free_bound_false_infer() {
     })
 }
 
-#[test]
-fn lub_free_bound_infer() {
-    //! Test result of:
-    //!
-    //!     LUB(fn(_#1), for<'b> fn(&'b isize))
-    //!
-    //! This should yield `fn(&'_ isize)`. We check
-    //! that it yields `fn(&'x isize)` for some free `'x`,
-    //! anyhow.
-
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
-        env.create_simple_region_hierarchy();
-        let t_infer1 = env.infcx.next_ty_var(TypeVariableOrigin::MiscVariable(DUMMY_SP));
-        let t_rptr_bound1 = env.t_rptr_late_bound(1);
-        let t_rptr_free1 = env.t_rptr_free(1);
-        env.check_lub(env.t_fn(&[t_infer1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_free1], env.tcx().types.isize));
-    });
-}
-
-#[test]
-fn lub_bound_bound() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
-        let t_rptr_bound1 = env.t_rptr_late_bound(1);
-        let t_rptr_bound2 = env.t_rptr_late_bound(2);
-        env.check_lub(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_bound2], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_bound1], env.tcx().types.isize));
-    })
-}
-
-#[test]
-fn lub_bound_free() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
-        env.create_simple_region_hierarchy();
-        let t_rptr_bound1 = env.t_rptr_late_bound(1);
-        let t_rptr_free1 = env.t_rptr_free(1);
-        env.check_lub(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_free1], env.tcx().types.isize));
-    })
-}
-
-#[test]
-fn lub_bound_static() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
-        let t_rptr_bound1 = env.t_rptr_late_bound(1);
-        let t_rptr_static = env.t_rptr_static();
-        env.check_lub(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_static], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_static], env.tcx().types.isize));
-    })
-}
-
-#[test]
-fn lub_bound_bound_inverse_order() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
-        let t_rptr_bound1 = env.t_rptr_late_bound(1);
-        let t_rptr_bound2 = env.t_rptr_late_bound(2);
-        env.check_lub(env.t_fn(&[t_rptr_bound1, t_rptr_bound2], t_rptr_bound1),
-                      env.t_fn(&[t_rptr_bound2, t_rptr_bound1], t_rptr_bound1),
-                      env.t_fn(&[t_rptr_bound1, t_rptr_bound1], t_rptr_bound1));
-    })
-}
-
-#[test]
-fn lub_free_free() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
-        env.create_simple_region_hierarchy();
-        let t_rptr_free1 = env.t_rptr_free(1);
-        let t_rptr_free2 = env.t_rptr_free(2);
-        let t_rptr_static = env.t_rptr_static();
-        env.check_lub(env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_free2], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_static], env.tcx().types.isize));
-    })
-}
-
-#[test]
-fn lub_returning_scope() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
-        env.create_simple_region_hierarchy();
-        let t_rptr_scope10 = env.t_rptr_scope(10);
-        let t_rptr_scope11 = env.t_rptr_scope(11);
-        let t_rptr_empty = env.t_rptr_empty();
-        env.check_lub(env.t_fn(&[t_rptr_scope10], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_scope11], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_empty], env.tcx().types.isize));
-    });
-}
-
-#[test]
-fn glb_free_free_with_common_scope() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
-        env.create_simple_region_hierarchy();
-        let t_rptr_free1 = env.t_rptr_free(1);
-        let t_rptr_free2 = env.t_rptr_free(2);
-        let t_rptr_scope = env.t_rptr_scope(1);
-        env.check_glb(env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_free2], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_scope], env.tcx().types.isize));
-    })
-}
-
-#[test]
-fn glb_bound_bound() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
-        let t_rptr_bound1 = env.t_rptr_late_bound(1);
-        let t_rptr_bound2 = env.t_rptr_late_bound(2);
-        env.check_glb(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_bound2], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_bound1], env.tcx().types.isize));
-    })
-}
-
-#[test]
-fn glb_bound_free() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
-        env.create_simple_region_hierarchy();
-        let t_rptr_bound1 = env.t_rptr_late_bound(1);
-        let t_rptr_free1 = env.t_rptr_free(1);
-        env.check_glb(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_bound1], env.tcx().types.isize));
-    })
-}
-
-#[test]
-fn glb_bound_free_infer() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
-        let t_rptr_bound1 = env.t_rptr_late_bound(1);
-        let t_infer1 = env.infcx.next_ty_var(TypeVariableOrigin::MiscVariable(DUMMY_SP));
-
-        // compute GLB(fn(_) -> isize, for<'b> fn(&'b isize) -> isize),
-        // which should yield for<'b> fn(&'b isize) -> isize
-        env.check_glb(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
-                      env.t_fn(&[t_infer1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_bound1], env.tcx().types.isize));
-
-        // as a side-effect, computing GLB should unify `_` with
-        // `&'_ isize`
-        let t_resolve1 = env.infcx.shallow_resolve(t_infer1);
-        match t_resolve1.sty {
-            ty::TyRef(..) => {}
-            _ => {
-                panic!("t_resolve1={:?}", t_resolve1);
-            }
-        }
-    })
-}
-
-#[test]
-fn glb_bound_static() {
-    test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
-        let t_rptr_bound1 = env.t_rptr_late_bound(1);
-        let t_rptr_static = env.t_rptr_static();
-        env.check_glb(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_static], env.tcx().types.isize),
-                      env.t_fn(&[t_rptr_bound1], env.tcx().types.isize));
-    })
-}
-
 /// Test substituting a bound region into a function, which introduces another level of binding.
 /// This requires adjusting the Debruijn index.
 #[test]
diff --git a/src/librustc_mir/borrow_check.rs b/src/librustc_mir/borrow_check.rs
index f5f7b53a235..0ff6c8622a5 100644
--- a/src/librustc_mir/borrow_check.rs
+++ b/src/librustc_mir/borrow_check.rs
@@ -20,6 +20,7 @@ use rustc::mir::{Mir, Mutability, Operand, Projection, ProjectionElem, Rvalue};
 use rustc::mir::{Statement, StatementKind, Terminator, TerminatorKind};
 use transform::nll;
 
+use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::indexed_set::{self, IdxSetBuf};
 use rustc_data_structures::indexed_vec::{Idx};
 
@@ -136,6 +137,7 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
         node_id: id,
         move_data: &mdpe.move_data,
         param_env: param_env,
+        storage_drop_or_dead_error_reported: FxHashSet(),
     };
 
     let mut state = InProgress::new(flow_borrows,
@@ -153,6 +155,10 @@ pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
     node_id: ast::NodeId,
     move_data: &'cx MoveData<'tcx>,
     param_env: ParamEnv<'gcx>,
+    /// This field keeps track of when storage drop or dead errors are reported
+    /// in order to stop duplicate error reporting and identify the conditions required
+    /// for a "temporary value dropped here while still borrowed" error. See #45360.
+    storage_drop_or_dead_error_reported: FxHashSet<Local>,
 }
 
 // (forced to be `pub` due to its use as an associated type below.)
@@ -281,10 +287,15 @@ impl<'cx, 'gcx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx
             }
 
             StatementKind::StorageDead(local) => {
-                self.access_lvalue(ContextKind::StorageDead.new(location),
-                                   (&Lvalue::Local(local), span),
-                                   (Shallow(None), Write(WriteKind::StorageDead)),
-                                   flow_state);
+                if !self.storage_drop_or_dead_error_reported.contains(&local) {
+                    let error_reported = self.access_lvalue(ContextKind::StorageDead.new(location),
+                        (&Lvalue::Local(local), span),
+                        (Shallow(None), Write(WriteKind::StorageDeadOrDrop)), flow_state);
+
+                    if error_reported {
+                        self.storage_drop_or_dead_error_reported.insert(local);
+                    }
+                }
             }
         }
     }
@@ -427,24 +438,30 @@ enum ReadKind {
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
 enum WriteKind {
-    StorageDead,
+    StorageDeadOrDrop,
     MutableBorrow(BorrowKind),
     Mutate,
     Move,
 }
 
 impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+    /// Checks an access to the given lvalue to see if it is allowed. Examines the set of borrows
+    /// that are in scope, as well as which paths have been initialized, to ensure that (a) the
+    /// lvalue is initialized and (b) it is not borrowed in some way that would prevent this
+    /// access.
+    ///
+    /// Returns true if an error is reported, false otherwise.
     fn access_lvalue(&mut self,
                      context: Context,
                      lvalue_span: (&Lvalue<'tcx>, Span),
                      kind: (ShallowOrDeep, ReadOrWrite),
-                     flow_state: &InProgress<'cx, 'gcx, 'tcx>) {
-
+                     flow_state: &InProgress<'cx, 'gcx, 'tcx>) -> bool {
         let (sd, rw) = kind;
 
         // Check permissions
         self.check_access_permissions(lvalue_span, rw);
 
+        let mut error_reported = false;
         self.each_borrow_involving_path(
             context, (sd, lvalue_span.0), flow_state, |this, _index, borrow, common_prefix| {
                 match (rw, borrow.kind) {
@@ -454,13 +471,16 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
                     (Read(kind), BorrowKind::Unique) |
                     (Read(kind), BorrowKind::Mut) => {
                         match kind {
-                            ReadKind::Copy =>
+                            ReadKind::Copy => {
+                                error_reported = true;
                                 this.report_use_while_mutably_borrowed(
-                                    context, lvalue_span, borrow),
+                                    context, lvalue_span, borrow)
+                            },
                             ReadKind::Borrow(bk) => {
                                 let end_issued_loan_span =
                                     flow_state.borrows.base_results.operator().opt_region_end_span(
                                         &borrow.region);
+                                error_reported = true;
                                 this.report_conflicting_borrow(
                                     context, common_prefix, lvalue_span, bk,
                                     &borrow, end_issued_loan_span)
@@ -474,22 +494,35 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
                                 let end_issued_loan_span =
                                     flow_state.borrows.base_results.operator().opt_region_end_span(
                                         &borrow.region);
+                                error_reported = true;
                                 this.report_conflicting_borrow(
                                     context, common_prefix, lvalue_span, bk,
                                     &borrow, end_issued_loan_span)
                             }
-                            WriteKind::StorageDead |
-                            WriteKind::Mutate =>
+                             WriteKind::StorageDeadOrDrop => {
+                                let end_span =
+                                    flow_state.borrows.base_results.operator().opt_region_end_span(
+                                        &borrow.region);
+                                error_reported = true;
+                                this.report_borrowed_value_does_not_live_long_enough(
+                                    context, lvalue_span, end_span)
+                            },
+                            WriteKind::Mutate => {
+                                error_reported = true;
                                 this.report_illegal_mutation_of_borrowed(
-                                    context, lvalue_span, borrow),
-                            WriteKind::Move =>
+                                    context, lvalue_span, borrow)
+                            },
+                            WriteKind::Move => {
+                                error_reported = true;
                                 this.report_move_out_while_borrowed(
-                                    context, lvalue_span, &borrow),
+                                    context, lvalue_span, &borrow)
+                            },
                         }
                         Control::Break
                     }
                 }
             });
+        error_reported
     }
 
     fn mutate_lvalue(&mut self,
@@ -604,12 +637,39 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
         let erased_ty = gcx.lift(&self.tcx.erase_regions(&ty)).unwrap();
         let moves_by_default = erased_ty.moves_by_default(gcx, self.param_env, DUMMY_SP);
 
-        if moves_by_default {
-            // move of lvalue: check if this is move of already borrowed path
-            self.access_lvalue(context, lvalue_span, (Deep, Write(WriteKind::Move)), flow_state);
-        } else {
-            // copy of lvalue: check if this is "copy of frozen path" (FIXME: see check_loans.rs)
-            self.access_lvalue(context, lvalue_span, (Deep, Read(ReadKind::Copy)), flow_state);
+        // Check if error has already been reported to stop duplicate reporting.
+        let has_storage_drop_or_dead_error_reported = match *lvalue {
+            Lvalue::Local(local) => self.storage_drop_or_dead_error_reported.contains(&local),
+            _ => false,
+        };
+
+        // If the error has been reported already, then we don't need the access_lvalue call.
+        if !has_storage_drop_or_dead_error_reported || consume_via_drop != ConsumeKind::Drop {
+            let error_reported;
+
+            if moves_by_default {
+                let kind = match consume_via_drop {
+                    ConsumeKind::Drop => WriteKind::StorageDeadOrDrop,
+                    _ => WriteKind::Move,
+                };
+
+                // move of lvalue: check if this is move of already borrowed path
+                error_reported = self.access_lvalue(context, lvalue_span,
+                                                    (Deep, Write(kind)), flow_state);
+            } else {
+                // copy of lvalue: check if this is "copy of frozen path"
+                // (FIXME: see check_loans.rs)
+                error_reported = self.access_lvalue(context, lvalue_span,
+                                                    (Deep, Read(ReadKind::Copy)), flow_state);
+            }
+
+            // If there was an error, then we keep track of it so as to deduplicate it.
+            // We only do this on ConsumeKind::Drop.
+            if error_reported && consume_via_drop == ConsumeKind::Drop {
+                if let Lvalue::Local(local) = *lvalue {
+                    self.storage_drop_or_dead_error_reported.insert(local);
+                }
+            }
         }
 
         // Finally, check if path was already moved.
@@ -1458,6 +1518,27 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
         err.emit();
     }
 
+    fn report_borrowed_value_does_not_live_long_enough(&mut self,
+                                                       _: Context,
+                                                       (lvalue, span): (&Lvalue, Span),
+                                                       end_span: Option<Span>) {
+        let proper_span = match *lvalue {
+            Lvalue::Local(local) => self.mir.local_decls[local].source_info.span,
+            _ => span
+        };
+
+        let mut err = self.tcx.path_does_not_live_long_enough(span, "borrowed value", Origin::Mir);
+        err.span_label(proper_span, "temporary value created here");
+        err.span_label(span, "temporary value dropped here while still borrowed");
+        err.note("consider using a `let` binding to increase its lifetime");
+
+        if let Some(end) = end_span {
+            err.span_label(end, "temporary value needs to live until here");
+        }
+
+        err.emit();
+    }
+
     fn report_illegal_mutation_of_borrowed(&mut self,
                                            _: Context,
                                            (lvalue, span): (&Lvalue<'tcx>, Span),
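
Away from the MIR-specific types, the deduplication scheme introduced in this file is a small generic pattern: run the error-reporting check only when the local has not already produced an error, and record the local when it does. A standalone sketch with invented names (`Local` here is just a stand-in for the compiler's index type, and `access` for `access_lvalue`):

```rust
use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Local(u32);

struct Checker {
    // Mirrors `storage_drop_or_dead_error_reported` above.
    reported: HashSet<Local>,
}

impl Checker {
    /// Stand-in for `access_lvalue`: returns true if it reported an error.
    fn access(&mut self, local: Local) -> bool {
        println!("error reported for {:?}", local);
        true
    }

    /// Stand-in for the drop / `StorageDead` handling: the check runs at
    /// most once per local, so a drop followed by a `StorageDead` cannot
    /// emit the same diagnostic twice.
    fn check_storage_dead(&mut self, local: Local) {
        if !self.reported.contains(&local) && self.access(local) {
            self.reported.insert(local);
        }
    }
}

fn main() {
    let mut checker = Checker { reported: HashSet::new() };
    checker.check_storage_dead(Local(0)); // reports once
    checker.check_storage_dead(Local(0)); // deduplicated: no second report
}
```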
diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs
index 1d2bfd001f1..a67cc339916 100644
--- a/src/librustc_trans/back/link.rs
+++ b/src/librustc_trans/back/link.rs
@@ -262,19 +262,31 @@ fn link_binary_output(sess: &Session,
         check_file_is_writeable(obj, sess);
     }
 
-    let tmpdir = match TempDir::new("rustc") {
-        Ok(tmpdir) => tmpdir,
-        Err(err) => sess.fatal(&format!("couldn't create a temp dir: {}", err)),
-    };
-
     let mut out_filenames = vec![];
 
     if outputs.outputs.contains_key(&OutputType::Metadata) {
         let out_filename = filename_for_metadata(sess, crate_name, outputs);
-        emit_metadata(sess, trans, &out_filename);
+        // To avoid races with another rustc process scanning the output directory,
+        // we need to write the file somewhere else and atomically move it to its
+        // final destination, with a `fs::rename` call. In order for the rename to
+        // always succeed, the temporary file needs to be on the same filesystem,
+        // which is why we create it inside the output directory specifically.
+        let metadata_tmpdir = match TempDir::new_in(out_filename.parent().unwrap(), "rmeta") {
+            Ok(tmpdir) => tmpdir,
+            Err(err) => sess.fatal(&format!("couldn't create a temp dir: {}", err)),
+        };
+        let metadata = emit_metadata(sess, trans, &metadata_tmpdir);
+        if let Err(e) = fs::rename(metadata, &out_filename) {
+            sess.fatal(&format!("failed to write {}: {}", out_filename.display(), e));
+        }
         out_filenames.push(out_filename);
     }
 
+    let tmpdir = match TempDir::new("rustc") {
+        Ok(tmpdir) => tmpdir,
+        Err(err) => sess.fatal(&format!("couldn't create a temp dir: {}", err)),
+    };
+
     if outputs.outputs.should_trans() {
         let out_filename = out_filename(sess, crate_type, outputs, crate_name);
         match crate_type {
@@ -283,10 +295,10 @@ fn link_binary_output(sess: &Session,
                           trans,
                           RlibFlavor::Normal,
                           &out_filename,
-                          tmpdir.path()).build();
+                          &tmpdir).build();
             }
             config::CrateTypeStaticlib => {
-                link_staticlib(sess, trans, &out_filename, tmpdir.path());
+                link_staticlib(sess, trans, &out_filename, &tmpdir);
             }
             _ => {
                 link_natively(sess, crate_type, &out_filename, trans, tmpdir.path());
@@ -321,14 +333,23 @@ fn archive_config<'a>(sess: &'a Session,
     }
 }
 
-fn emit_metadata<'a>(sess: &'a Session, trans: &CrateTranslation, out_filename: &Path) {
-    let result = fs::File::create(out_filename).and_then(|mut f| {
+/// We use a temp directory here to avoid races between concurrent rustc processes,
+/// such as builds in the same directory using the same filename for metadata while
+/// building an `.rlib` (stomping over one another), or writing an `.rmeta` into a
+/// directory being searched for `extern crate` (observing an incomplete file).
+/// The returned path is the temporary file containing the complete metadata.
+fn emit_metadata<'a>(sess: &'a Session, trans: &CrateTranslation, tmpdir: &TempDir)
+                     -> PathBuf {
+    let out_filename = tmpdir.path().join(METADATA_FILENAME);
+    let result = fs::File::create(&out_filename).and_then(|mut f| {
         f.write_all(&trans.metadata.raw_data)
     });
 
     if let Err(e) = result {
         sess.fatal(&format!("failed to write {}: {}", out_filename.display(), e));
     }
+
+    out_filename
 }
 
 enum RlibFlavor {
@@ -346,7 +367,7 @@ fn link_rlib<'a>(sess: &'a Session,
                  trans: &CrateTranslation,
                  flavor: RlibFlavor,
                  out_filename: &Path,
-                 tmpdir: &Path) -> ArchiveBuilder<'a> {
+                 tmpdir: &TempDir) -> ArchiveBuilder<'a> {
     info!("preparing rlib to {:?}", out_filename);
     let mut ab = ArchiveBuilder::new(archive_config(sess, out_filename, None));
 
@@ -408,12 +429,8 @@ fn link_rlib<'a>(sess: &'a Session,
     match flavor {
         RlibFlavor::Normal => {
             // Instead of putting the metadata in an object file section, rlibs
-            // contain the metadata in a separate file. We use a temp directory
-            // here so concurrent builds in the same directory don't try to use
-            // the same filename for metadata (stomping over one another)
-            let metadata = tmpdir.join(METADATA_FILENAME);
-            emit_metadata(sess, trans, &metadata);
-            ab.add_file(&metadata);
+            // contain the metadata in a separate file.
+            ab.add_file(&emit_metadata(sess, trans, tmpdir));
 
             // For LTO purposes, the bytecode of this library is also inserted
             // into the archive.
@@ -457,7 +474,7 @@ fn link_rlib<'a>(sess: &'a Session,
 fn link_staticlib(sess: &Session,
                   trans: &CrateTranslation,
                   out_filename: &Path,
-                  tempdir: &Path) {
+                  tempdir: &TempDir) {
     let mut ab = link_rlib(sess,
                            trans,
                            RlibFlavor::StaticlibBase,
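The comments added in this file describe a general pattern: write the data to a temporary location on the same filesystem as the destination, then move it into place with a single `fs::rename`, so that other processes never observe a half-written file. A standalone sketch of that pattern, using the `tempdir` crate's `TempDir::new_in` as the diff does (the function and file names here are invented for illustration):

```rust
extern crate tempdir;

use std::fs;
use std::io::{self, Write};
use std::path::Path;
use tempdir::TempDir;

/// Write `data` to `dest` without ever exposing a partially written file.
fn write_atomically(dest: &Path, data: &[u8]) -> io::Result<()> {
    // Create the temporary directory next to the destination so that the
    // final rename never crosses a filesystem boundary and stays atomic.
    let dir = dest.parent().expect("destination must have a parent directory");
    let tmpdir = TempDir::new_in(dir, "staging")?;
    let tmp_path = tmpdir.path().join("data");

    fs::File::create(&tmp_path)?.write_all(data)?;
    fs::rename(&tmp_path, dest)?;
    Ok(())
}

fn main() -> io::Result<()> {
    write_atomically(Path::new("./libfoo.rmeta"), b"metadata bytes")
}
```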
diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js
index 8d0faf261f6..632f6aa6625 100644
--- a/src/librustdoc/html/static/main.js
+++ b/src/librustdoc/html/static/main.js
@@ -381,13 +381,6 @@
                 }
             }
 
-            function min(a, b) {
-                if (a < b) {
-                    return a;
-                }
-                return b;
-            }
-
             function extractGenerics(val) {
                 val = val.toLowerCase();
                 if (val.indexOf('<') !== -1) {
@@ -425,7 +418,7 @@
                             }
                             if (lev.pos !== -1) {
                                 elems.splice(lev.pos, 1);
-                                lev_distance = min(lev.lev, lev_distance);
+                                lev_distance = Math.min(lev.lev, lev_distance);
                             } else {
                                 return MAX_LEV_DISTANCE + 1;
                             }
@@ -488,11 +481,12 @@
                 var new_lev = levenshtein(obj.name, val.name);
                 if (new_lev < lev_distance) {
                     if ((lev = checkGenerics(obj, val)) <= MAX_LEV_DISTANCE) {
-                        lev_distance = min(min(new_lev, lev), lev_distance);
+                        lev_distance = Math.min(Math.min(new_lev, lev), lev_distance);
                     }
                 } else if (obj.generics && obj.generics.length > 0) {
                     for (var x = 0; x < obj.generics.length; ++x) {
-                        lev_distance = min(levenshtein(obj.generics[x], val.name), lev_distance);
+                        lev_distance = Math.min(levenshtein(obj.generics[x], val.name),
+                                                lev_distance);
                     }
                 }
                 // Now whatever happens, the returned distance is "less good" so we should mark it
@@ -509,7 +503,7 @@
                         if (literalSearch === true && tmp === true) {
                             return true;
                         }
-                        lev_distance = min(tmp, lev_distance);
+                        lev_distance = Math.min(tmp, lev_distance);
                         if (lev_distance === 0) {
                             return 0;
                         }
@@ -526,7 +520,7 @@
                     if (literalSearch === true && tmp === true) {
                         return true;
                     }
-                    lev_distance = min(tmp, lev_distance);
+                    lev_distance = Math.min(tmp, lev_distance);
                     if (lev_distance === 0) {
                         return 0;
                     }
@@ -567,18 +561,20 @@
                     var in_args = findArg(searchIndex[i], val, true);
                     var returned = checkReturned(searchIndex[i], val, true);
                     var ty = searchIndex[i];
+                    var fullId = itemTypes[ty.ty] + ty.path + ty.name;
+
                     if (searchWords[i] === val.name) {
                         // filter type: ... queries
                         if (typePassesFilter(typeFilter, searchIndex[i].ty) &&
-                            results[ty.path + ty.name] === undefined)
+                            results[fullId] === undefined)
                         {
-                            results[ty.path + ty.name] = {id: i, index: -1};
+                            results[fullId] = {id: i, index: -1};
                             results_length += 1;
                         }
                     } else if ((in_args === true || returned === true) &&
                                typePassesFilter(typeFilter, searchIndex[i].ty)) {
-                        if (results[ty.path + ty.name] === undefined) {
-                            results[ty.path + ty.name] = {
+                        if (results[fullId] === undefined) {
+                            results[fullId] = {
                                 id: i,
                                 index: -1,
                                 dontValidate: true,
@@ -588,10 +584,10 @@
                             results_length += 1;
                         } else {
                             if (in_args === true) {
-                                results[ty.path + ty.name].in_args = true;
+                                results[fullId].in_args = true;
                             }
                             if (returned === true) {
-                                results[ty.path + ty.name].returned = true;
+                                results[fullId].returned = true;
                             }
                         }
                     }
@@ -620,6 +616,7 @@
                     if (!type) {
                         continue;
                     }
+                    var fullId = itemTypes[ty.ty] + ty.path + ty.name;
 
                     // allow searching for void (no output) functions as well
                     var typeOutput = type.output ? type.output.name : "";
@@ -638,15 +635,15 @@
                             in_args = allFound;
                         }
                         if (in_args === true || returned === true || module === true) {
-                            if (results[ty.path + ty.name] !== undefined) {
+                            if (results[fullId] !== undefined) {
                                 if (returned === true) {
-                                    results[ty.path + ty.name].returned = true;
+                                    results[fullId].returned = true;
                                 }
                                 if (in_args === true) {
-                                    results[ty.path + ty.name].in_args = true;
+                                    results[fullId].in_args = true;
                                 }
                             } else {
-                                results[ty.path + ty.name] = {
+                                results[fullId] = {
                                     id: i,
                                     index: -1,
                                     dontValidate: true,
@@ -681,48 +678,49 @@
                         var index = -1;
                         // we want lev results to go lower than others
                         var lev = MAX_LEV_DISTANCE;
+                        var fullId = itemTypes[ty.ty] + ty.path + ty.name;
 
                         if (searchWords[j].indexOf(split[i]) > -1 ||
                             searchWords[j].indexOf(val) > -1 ||
                             searchWords[j].replace(/_/g, "").indexOf(val) > -1)
                         {
                             // filter type: ... queries
-                            if (typePassesFilter(typeFilter, searchIndex[j].ty) &&
-                                results[ty.path + ty.name] === undefined) {
+                            if (typePassesFilter(typeFilter, ty) &&
+                                results[fullId] === undefined) {
                                 index = searchWords[j].replace(/_/g, "").indexOf(val);
                             }
                         }
                         if ((lev_distance = levenshtein(searchWords[j], val)) <= MAX_LEV_DISTANCE) {
-                            if (typePassesFilter(typeFilter, searchIndex[j].ty) &&
-                                (results[ty.path + ty.name] === undefined ||
-                                 results[ty.path + ty.name].lev > lev_distance)) {
-                                lev = min(lev, lev_distance);
-                                index = 0;
+                            if (typePassesFilter(typeFilter, ty) &&
+                                (results[fullId] === undefined ||
+                                 results[fullId].lev > lev_distance)) {
+                                lev = Math.min(lev, lev_distance);
+                                index = Math.max(0, index);
                             }
                         }
                         if ((lev_distance = findArg(searchIndex[j], valGenerics))
                             <= MAX_LEV_DISTANCE) {
-                            if (typePassesFilter(typeFilter, searchIndex[j].ty) &&
-                                (results[ty.path + ty.name] === undefined ||
-                                 results[ty.path + ty.name].lev > lev_distance)) {
+                            if (typePassesFilter(typeFilter, ty) &&
+                                (results[fullId] === undefined ||
+                                 results[fullId].lev > lev_distance)) {
                                 in_args = true;
-                                lev = min(lev_distance, lev);
-                                index = 0;
+                                lev = Math.min(lev_distance, lev);
+                                index = Math.max(0, index);
                             }
                         }
                         if ((lev_distance = checkReturned(searchIndex[j], valGenerics)) <=
                             MAX_LEV_DISTANCE) {
-                            if (typePassesFilter(typeFilter, searchIndex[j].ty) &&
-                                (results[ty.path + ty.name] === undefined ||
-                                 results[ty.path + ty.name].lev > lev_distance)) {
+                            if (typePassesFilter(typeFilter, ty) &&
+                                (results[fullId] === undefined ||
+                                 results[fullId].lev > lev_distance)) {
                                 returned = true;
-                                lev = min(lev_distance, lev);
-                                index = 0;
+                                lev = Math.min(lev_distance, lev);
+                                index = Math.max(0, index);
                             }
                         }
                         if (index !== -1) {
-                            if (results[ty.path + ty.name] === undefined) {
-                                results[ty.path + ty.name] = {
+                            if (results[fullId] === undefined) {
+                                results[fullId] = {
                                     id: j,
                                     index: index,
                                     lev: lev,
@@ -731,14 +729,14 @@
                                 };
                                 results_length += 1;
                             } else {
-                                if (results[ty.path + ty.name].lev > lev) {
-                                    results[ty.path + ty.name].lev = lev;
+                                if (results[fullId].lev > lev) {
+                                    results[fullId].lev = lev;
                                 }
                                 if (in_args === true) {
-                                    results[ty.path + ty.name].in_args = true;
+                                    results[fullId].in_args = true;
                                 }
                                 if (returned === true) {
-                                    results[ty.path + ty.name].returned = true;
+                                    results[fullId].returned = true;
                                 }
                             }
                         }
diff --git a/src/libstd_unicode/unicode.py b/src/libstd_unicode/unicode.py
index 1fac859242e..df79760894e 100755
--- a/src/libstd_unicode/unicode.py
+++ b/src/libstd_unicode/unicode.py
@@ -89,7 +89,7 @@ def load_unicode_data(f):
         if is_surrogate(cp):
             continue
         if range_start >= 0:
-            for i in xrange(range_start, cp):
+            for i in range(range_start, cp):
                 udict[i] = data
             range_start = -1
         if data[1].endswith(", First>"):
@@ -382,7 +382,7 @@ def compute_trie(rawdata, chunksize):
     root = []
     childmap = {}
     child_data = []
-    for i in range(len(rawdata) / chunksize):
+    for i in range(len(rawdata) // chunksize):
         data = rawdata[i * chunksize: (i + 1) * chunksize]
         child = '|'.join(map(str, data))
         if child not in childmap:
@@ -400,7 +400,7 @@ def emit_bool_trie(f, name, t_data, is_pub=True):
 
     # convert to bitmap chunks of 64 bits each
     chunks = []
-    for i in range(0x110000 / CHUNK):
+    for i in range(0x110000 // CHUNK):
         chunk = 0
         for j in range(64):
             if rawdata[i * 64 + j]:
@@ -412,12 +412,12 @@ def emit_bool_trie(f, name, t_data, is_pub=True):
         pub_string = "pub "
     f.write("    %sconst %s: &'static super::BoolTrie = &super::BoolTrie {\n" % (pub_string, name))
     f.write("        r1: [\n")
-    data = ','.join('0x%016x' % chunk for chunk in chunks[0:0x800 / CHUNK])
+    data = ','.join('0x%016x' % chunk for chunk in chunks[0:0x800 // CHUNK])
     format_table_content(f, data, 12)
     f.write("\n        ],\n")
 
     # 0x800..0x10000 trie
-    (r2, r3) = compute_trie(chunks[0x800 / CHUNK : 0x10000 / CHUNK], 64 / CHUNK)
+    (r2, r3) = compute_trie(chunks[0x800 // CHUNK : 0x10000 // CHUNK], 64 // CHUNK)
     f.write("        r2: [\n")
     data = ','.join(str(node) for node in r2)
     format_table_content(f, data, 12)
@@ -428,7 +428,7 @@ def emit_bool_trie(f, name, t_data, is_pub=True):
     f.write("\n        ],\n")
 
     # 0x10000..0x110000 trie
-    (mid, r6) = compute_trie(chunks[0x10000 / CHUNK : 0x110000 / CHUNK], 64 / CHUNK)
+    (mid, r6) = compute_trie(chunks[0x10000 // CHUNK : 0x110000 // CHUNK], 64 // CHUNK)
     (r4, r5) = compute_trie(mid, 64)
     f.write("        r4: [\n")
     data = ','.join(str(node) for node in r4)
@@ -446,14 +446,14 @@ def emit_bool_trie(f, name, t_data, is_pub=True):
     f.write("    };\n\n")
 
 def emit_small_bool_trie(f, name, t_data, is_pub=True):
-    last_chunk = max(int(hi / 64) for (lo, hi) in t_data)
+    last_chunk = max(hi // 64 for (lo, hi) in t_data)
     n_chunks = last_chunk + 1
     chunks = [0] * n_chunks
     for (lo, hi) in t_data:
         for cp in range(lo, hi + 1):
-            if int(cp / 64) >= len(chunks):
-                print(cp, int(cp / 64), len(chunks), lo, hi)
-            chunks[int(cp / 64)] |= 1 << (cp & 63)
+            if cp // 64 >= len(chunks):
+                print(cp, cp // 64, len(chunks), lo, hi)
+            chunks[cp // 64] |= 1 << (cp & 63)
 
     pub_string = ""
     if is_pub:
@@ -519,32 +519,29 @@ def emit_conversions_module(f, to_upper, to_lower, to_title):
     pfun = lambda x: "(%s,[%s,%s,%s])" % (
         escape_char(x[0]), escape_char(x[1][0]), escape_char(x[1][1]), escape_char(x[1][2]))
     emit_table(f, "to_lowercase_table",
-        sorted(to_lower.iteritems(), key=operator.itemgetter(0)),
+        sorted(to_lower.items(), key=operator.itemgetter(0)),
         is_pub=False, t_type = t_type, pfun=pfun)
     emit_table(f, "to_uppercase_table",
-        sorted(to_upper.iteritems(), key=operator.itemgetter(0)),
+        sorted(to_upper.items(), key=operator.itemgetter(0)),
         is_pub=False, t_type = t_type, pfun=pfun)
     f.write("}\n\n")
 
 def emit_norm_module(f, canon, compat, combine, norm_props):
-    canon_keys = canon.keys()
-    canon_keys.sort()
+    canon_keys = sorted(canon.keys())
 
-    compat_keys = compat.keys()
-    compat_keys.sort()
+    compat_keys = sorted(compat.keys())
 
     canon_comp = {}
     comp_exclusions = norm_props["Full_Composition_Exclusion"]
     for char in canon_keys:
-        if True in map(lambda (lo, hi): lo <= char <= hi, comp_exclusions):
+        if any(lo <= char <= hi for lo, hi in comp_exclusions):
             continue
         decomp = canon[char]
         if len(decomp) == 2:
-            if not canon_comp.has_key(decomp[0]):
+            if decomp[0] not in canon_comp:
                 canon_comp[decomp[0]] = []
             canon_comp[decomp[0]].append( (decomp[1], char) )
-    canon_comp_keys = canon_comp.keys()
-    canon_comp_keys.sort()
+    canon_comp_keys = sorted(canon_comp.keys())
 
 if __name__ == "__main__":
     r = "tables.rs"
diff --git a/src/test/compile-fail/issue-36082.rs b/src/test/compile-fail/issue-36082.rs
index b46756bb8f5..1596d9cc84e 100644
--- a/src/test/compile-fail/issue-36082.rs
+++ b/src/test/compile-fail/issue-36082.rs
@@ -8,6 +8,9 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// revisions: ast mir
+//[mir]compile-flags: -Z emit-end-regions -Z borrowck-mir
+
 use std::cell::RefCell;
 
 fn main() {
@@ -16,10 +19,20 @@ fn main() {
     let x = RefCell::new((&mut r,s));
 
     let val: &_ = x.borrow().0;
-    //~^ ERROR borrowed value does not live long enough
-    //~| temporary value dropped here while still borrowed
-    //~| temporary value created here
-    //~| consider using a `let` binding to increase its lifetime
+    //[ast]~^ ERROR borrowed value does not live long enough [E0597]
+    //[ast]~| NOTE temporary value dropped here while still borrowed
+    //[ast]~| NOTE temporary value created here
+    //[ast]~| NOTE consider using a `let` binding to increase its lifetime
+    //[mir]~^^^^^ ERROR borrowed value does not live long enough (Ast) [E0597]
+    //[mir]~| NOTE temporary value dropped here while still borrowed
+    //[mir]~| NOTE temporary value created here
+    //[mir]~| NOTE consider using a `let` binding to increase its lifetime
+    //[mir]~| ERROR borrowed value does not live long enough (Mir) [E0597]
+    //[mir]~| NOTE temporary value dropped here while still borrowed
+    //[mir]~| NOTE temporary value created here
+    //[mir]~| NOTE consider using a `let` binding to increase its lifetime
     println!("{}", val);
 }
-//~^ temporary value needs to live until here
+//[ast]~^ NOTE temporary value needs to live until here
+//[mir]~^^ NOTE temporary value needs to live until here
+//[mir]~| NOTE temporary value needs to live until here
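The "consider using a `let` binding" note that both revisions expect points at the usual fix. A minimal sketch (not part of the patch; the `r` and `s` bindings are hypothetical stand-ins for the ones in the test) that compiles cleanly under both the AST and MIR borrow checkers:

use std::cell::RefCell;

fn main() {
    let mut r = 0;
    let s = 0;
    let x = RefCell::new((&mut r, s));

    // Binding the `Ref` guard returned by `borrow()` keeps the temporary
    // alive until the end of the block, so the borrow held by `val` no
    // longer outlives it.
    let guard = x.borrow();
    let val: &_ = guard.0;
    println!("{}", val);
}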
diff --git a/src/test/run-pass/lub-glb-with-unbound-infer-var.rs b/src/test/run-pass/lub-glb-with-unbound-infer-var.rs
new file mode 100644
index 00000000000..6b9bd67f9a5
--- /dev/null
+++ b/src/test/run-pass/lub-glb-with-unbound-infer-var.rs
@@ -0,0 +1,24 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test for a specific corner case: when we compute the LUB of two fn
+// types whose parameters are unbound inference variables. In that case,
+// we wind up relating those two variables. This was causing an ICE in an
+// in-progress PR.
+
+fn main() {
+    let a_f: fn(_) = |_| ();
+    let b_f: fn(_) = |_| ();
+    let c_f = match 22 {
+        0 => a_f,
+        _ => b_f,
+    };
+    c_f(4);
+}
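For contrast, a minimal sketch (not part of the patch) of the same match with the closure parameter type written out: no unbound inference variable is left in the fn-pointer types, so the LUB is computed between two identical types and the corner case never arises.

fn main() {
    // `fn(i32)` is fully determined, unlike `fn(_)` in the test above.
    let a_f: fn(i32) = |_| ();
    let b_f: fn(i32) = |_| ();
    let c_f = match 22 {
        0 => a_f,
        _ => b_f,
    };
    c_f(4);
}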
diff --git a/src/test/ui-fulldeps/proc-macro/auxiliary/three-equals.rs b/src/test/ui-fulldeps/proc-macro/auxiliary/three-equals.rs
index 6fca32fece1..2381c61b87b 100644
--- a/src/test/ui-fulldeps/proc-macro/auxiliary/three-equals.rs
+++ b/src/test/ui-fulldeps/proc-macro/auxiliary/three-equals.rs
@@ -18,7 +18,7 @@ use proc_macro::{TokenStream, TokenNode, Span, Diagnostic};
 
 fn parse(input: TokenStream) -> Result<(), Diagnostic> {
     let mut count = 0;
-    let mut last_span = Span::default();
+    let mut last_span = Span::def_site();
     for tree in input {
         let span = tree.span;
         if count >= 3 {
@@ -37,7 +37,7 @@ fn parse(input: TokenStream) -> Result<(), Diagnostic> {
     }
 
     if count < 3 {
-        return Err(Span::default()
+        return Err(Span::def_site()
                        .error(format!("found {} equal signs, need exactly 3", count))
                        .help("input must be: `===`"))
     }
diff --git a/src/test/ui/lub-glb/old-lub-glb-hr.rs b/src/test/ui/lub-glb/old-lub-glb-hr.rs
new file mode 100644
index 00000000000..85c90bb375f
--- /dev/null
+++ b/src/test/ui/lub-glb/old-lub-glb-hr.rs
@@ -0,0 +1,36 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that we give a note when the old LUB/GLB algorithm would have
+// succeeded but the new code (which is stricter) gives an error.
+
+fn foo(
+    x: fn(&u8, &u8),
+    y: for<'a> fn(&'a u8, &'a u8),
+) {
+    let z = match 22 {
+        0 => x,
+        _ => y,
+    };
+}
+
+fn bar(
+    x: fn(&u8, &u8),
+    y: for<'a> fn(&'a u8, &'a u8),
+) {
+    let z = match 22 {
+        // No error with an explicit cast:
+        0 => x as for<'a> fn(&'a u8, &'a u8),
+        _ => y,
+    };
+}
+
+fn main() {
+}
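A minimal sketch (not part of the patch; `takes_tied` and `callee` are hypothetical names) of the subtyping fact that makes the explicit cast in `bar` legal: a fn pointer accepting two independent lifetimes can be used where one with tied lifetimes is expected, so after the cast both match arms have the same type.

fn takes_tied(_f: for<'a> fn(&'a u8, &'a u8)) {}

// `callee` has type `for<'r, 's> fn(&'r u8, &'s u8)` once coerced to a fn
// pointer, which is a subtype of the tied-lifetime type expected above.
fn callee(_x: &u8, _y: &u8) {}

fn main() {
    takes_tied(callee);
}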
diff --git a/src/test/ui/lub-glb/old-lub-glb-hr.stderr b/src/test/ui/lub-glb/old-lub-glb-hr.stderr
new file mode 100644
index 00000000000..4a310a5e6b2
--- /dev/null
+++ b/src/test/ui/lub-glb/old-lub-glb-hr.stderr
@@ -0,0 +1,22 @@
+error[E0308]: match arms have incompatible types
+  --> $DIR/old-lub-glb-hr.rs:18:13
+   |
+18 |       let z = match 22 {
+   |  _____________^
+19 | |         0 => x,
+20 | |         _ => y,
+21 | |     };
+   | |_____^ expected bound lifetime parameter, found concrete lifetime
+   |
+   = note: expected type `for<'r, 's> fn(&'r u8, &'s u8)`
+              found type `for<'a> fn(&'a u8, &'a u8)`
+   = note: this was previously accepted by the compiler but has been phased out
+   = note: for more information, see https://github.com/rust-lang/rust/issues/45852
+note: match arm with an incompatible type
+  --> $DIR/old-lub-glb-hr.rs:20:14
+   |
+20 |         _ => y,
+   |              ^
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/lub-glb/old-lub-glb-object.rs b/src/test/ui/lub-glb/old-lub-glb-object.rs
new file mode 100644
index 00000000000..7cf89b68be1
--- /dev/null
+++ b/src/test/ui/lub-glb/old-lub-glb-object.rs
@@ -0,0 +1,38 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that we give a note when the old LUB/GLB algorithm would have
+// succeeded but the new code (which is stricter) gives an error.
+
+trait Foo<T, U> { }
+
+fn foo(
+    x: &for<'a, 'b> Foo<&'a u8, &'b u8>,
+    y: &for<'a> Foo<&'a u8, &'a u8>,
+) {
+    let z = match 22 {
+        0 => x,
+        _ => y,
+    };
+}
+
+fn bar(
+    x: &for<'a, 'b> Foo<&'a u8, &'b u8>,
+    y: &for<'a> Foo<&'a u8, &'a u8>,
+) {
+    // Accepted with an explicit cast:
+    let z = match 22 {
+        0 => x as &for<'a> Foo<&'a u8, &'a u8>,
+        _ => y,
+    };
+}
+
+fn main() {
+}
diff --git a/src/test/ui/lub-glb/old-lub-glb-object.stderr b/src/test/ui/lub-glb/old-lub-glb-object.stderr
new file mode 100644
index 00000000000..a1077f40bf5
--- /dev/null
+++ b/src/test/ui/lub-glb/old-lub-glb-object.stderr
@@ -0,0 +1,22 @@
+error[E0308]: match arms have incompatible types
+  --> $DIR/old-lub-glb-object.rs:20:13
+   |
+20 |       let z = match 22 {
+   |  _____________^
+21 | |         0 => x,
+22 | |         _ => y,
+23 | |     };
+   | |_____^ expected bound lifetime parameter 'a, found concrete lifetime
+   |
+   = note: expected type `&for<'a, 'b> Foo<&'a u8, &'b u8>`
+              found type `&for<'a> Foo<&'a u8, &'a u8>`
+   = note: this was previously accepted by the compiler but has been phased out
+   = note: for more information, see https://github.com/rust-lang/rust/issues/45852
+note: match arm with an incompatible type
+  --> $DIR/old-lub-glb-object.rs:22:14
+   |
+22 |         _ => y,
+   |              ^
+
+error: aborting due to previous error
+