Diffstat (limited to 'src')
-rw-r--r--  src/libproc_macro/lib.rs | 8
-rw-r--r--  src/librustc/hir/map/collector.rs | 4
-rw-r--r--  src/librustc/ich/caching_codemap_view.rs | 2
-rw-r--r--  src/librustc/ich/impls_syntax.rs | 16
-rw-r--r--  src/librustc/ty/query/on_disk_cache.rs | 10
-rw-r--r--  src/librustc_driver/pretty.rs | 2
-rw-r--r--  src/librustc_errors/emitter.rs | 2
-rw-r--r--  src/librustc_errors/lib.rs | 2
-rw-r--r--  src/librustc_metadata/cstore.rs | 4
-rw-r--r--  src/librustc_metadata/cstore_impl.rs | 8
-rw-r--r--  src/librustc_metadata/decoder.rs | 64
-rw-r--r--  src/librustc_metadata/encoder.rs | 34
-rw-r--r--  src/librustc_save_analysis/span_utils.rs | 2
-rw-r--r--  src/librustdoc/html/highlight.rs | 2
-rw-r--r--  src/libsyntax/codemap.rs | 88
-rw-r--r--  src/libsyntax/ext/expand.rs | 2
-rw-r--r--  src/libsyntax/ext/source_util.rs | 6
-rw-r--r--  src/libsyntax/json.rs | 4
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs | 8
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs | 38
-rw-r--r--  src/libsyntax/parse/mod.rs | 28
-rw-r--r--  src/libsyntax/test_snippet.rs | 2
-rw-r--r--  src/libsyntax/util/parser_testing.rs | 6
-rw-r--r--  src/libsyntax_pos/analyze_source_file.rs (renamed from src/libsyntax_pos/analyze_filemap.rs) | 70
-rw-r--r--  src/libsyntax_pos/lib.rs | 6
25 files changed, 209 insertions(+), 209 deletions(-)
diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs
index 50a613f86c2..08ae78f775b 100644
--- a/src/libproc_macro/lib.rs
+++ b/src/libproc_macro/lib.rs
@@ -440,9 +440,9 @@ impl SourceFile {
     /// [`is_real`]: #method.is_real
     #[unstable(feature = "proc_macro_span", issue = "38356")]
     pub fn path(&self) -> PathBuf {
-        match self.filemap.name {
+        match self.source_file.name {
             FileName::Real(ref path) => path.clone(),
-            _ => PathBuf::from(self.filemap.name.to_string())
+            _ => PathBuf::from(self.source_file.name.to_string())
         }
     }
 
@@ -453,7 +453,7 @@ impl SourceFile {
         // This is a hack until intercrate spans are implemented and we can have real source files
         // for spans generated in external macros.
         // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
-        self.filemap.is_real_file()
+        self.source_file.is_real_file()
     }
 }
 
@@ -471,7 +471,7 @@ impl fmt::Debug for SourceFile {
 #[unstable(feature = "proc_macro_span", issue = "38356")]
 impl PartialEq for SourceFile {
     fn eq(&self, other: &Self) -> bool {
-        Lrc::ptr_eq(&self.filemap, &other.filemap)
+        Lrc::ptr_eq(&self.source_file, &other.source_file)
     }
 }
 
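A minimal sketch of what the renamed accessor above does, using a stand-in FileName enum with only two variants (the real syntax_pos::FileName has more): real files return their path directly, synthesized sources fall back to the stringified name. This is an illustration, not the proc_macro implementation itself.

use std::path::PathBuf;

// Stand-in for syntax_pos::FileName; only the shape matters here.
enum FileName {
    Real(PathBuf),
    Custom(String),
}

// Mirrors SourceFile::path() in the hunk above.
fn path(name: &FileName) -> PathBuf {
    match name {
        FileName::Real(p) => p.clone(),
        FileName::Custom(s) => PathBuf::from(s.clone()),
    }
}

fn main() {
    assert_eq!(path(&FileName::Real(PathBuf::from("src/lib.rs"))),
               PathBuf::from("src/lib.rs"));
    assert_eq!(path(&FileName::Custom("<macro expansion>".into())),
               PathBuf::from("<macro expansion>"));
}
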
diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs
index ceeb31934e1..7a304603ada 100644
--- a/src/librustc/hir/map/collector.rs
+++ b/src/librustc/hir/map/collector.rs
@@ -158,8 +158,8 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
         let mut source_file_names: Vec<_> = codemap
             .files()
             .iter()
-            .filter(|filemap| CrateNum::from_u32(filemap.crate_of_origin) == LOCAL_CRATE)
-            .map(|filemap| filemap.name_hash)
+            .filter(|source_file| CrateNum::from_u32(source_file.crate_of_origin) == LOCAL_CRATE)
+            .map(|source_file| source_file.name_hash)
             .collect();
 
         source_file_names.sort_unstable();
diff --git a/src/librustc/ich/caching_codemap_view.rs b/src/librustc/ich/caching_codemap_view.rs
index d8999e1e00f..adfb9b6181a 100644
--- a/src/librustc/ich/caching_codemap_view.rs
+++ b/src/librustc/ich/caching_codemap_view.rs
@@ -79,7 +79,7 @@ impl<'cm> CachingCodemapView<'cm> {
         if pos < cache_entry.file.start_pos || pos >= cache_entry.file.end_pos {
             let file_valid;
             if self.codemap.files().len() > 0 {
-                let file_index = self.codemap.lookup_filemap_idx(pos);
+                let file_index = self.codemap.lookup_source_file_idx(pos);
                 let file = self.codemap.files()[file_index].clone();
 
                 if pos >= file.start_pos && pos < file.end_pos {
diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs
index fd35713cc6f..65b84ce4a82 100644
--- a/src/librustc/ich/impls_syntax.rs
+++ b/src/librustc/ich/impls_syntax.rs
@@ -458,13 +458,13 @@ impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {
 
         src_hash.hash_stable(hcx, hasher);
 
-        // We only hash the relative position within this filemap
+        // We only hash the relative position within this source_file
         lines.len().hash_stable(hcx, hasher);
         for &line in lines.iter() {
             stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
         }
 
-        // We only hash the relative position within this filemap
+        // We only hash the relative position within this source_file
         multibyte_chars.len().hash_stable(hcx, hasher);
         for &char_pos in multibyte_chars.iter() {
             stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
@@ -478,29 +478,29 @@ impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {
 }
 
 fn stable_byte_pos(pos: ::syntax_pos::BytePos,
-                   filemap_start: ::syntax_pos::BytePos)
+                   source_file_start: ::syntax_pos::BytePos)
                    -> u32 {
-    pos.0 - filemap_start.0
+    pos.0 - source_file_start.0
 }
 
 fn stable_multibyte_char(mbc: ::syntax_pos::MultiByteChar,
-                         filemap_start: ::syntax_pos::BytePos)
+                         source_file_start: ::syntax_pos::BytePos)
                          -> (u32, u32) {
     let ::syntax_pos::MultiByteChar {
         pos,
         bytes,
     } = mbc;
 
-    (pos.0 - filemap_start.0, bytes as u32)
+    (pos.0 - source_file_start.0, bytes as u32)
 }
 
 fn stable_non_narrow_char(swc: ::syntax_pos::NonNarrowChar,
-                          filemap_start: ::syntax_pos::BytePos)
+                          source_file_start: ::syntax_pos::BytePos)
                           -> (u32, u32) {
     let pos = swc.pos();
     let width = swc.width();
 
-    (pos.0 - filemap_start.0, width as u32)
+    (pos.0 - source_file_start.0, width as u32)
 }
 
 
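The hunks above hash line starts and multibyte-char positions relative to the file's start, so the stable hash does not change when the same file lands at a different global offset in the codemap. A minimal sketch of that idea, with a simplified BytePos:

#[derive(Clone, Copy)]
struct BytePos(u32);

// Only the offset within the file is fed to the hasher, never the global position.
fn stable_byte_pos(pos: BytePos, source_file_start: BytePos) -> u32 {
    pos.0 - source_file_start.0
}

fn main() {
    // The same line start, seen at two different global offsets, hashes identically.
    let run_a = stable_byte_pos(BytePos(1_040), BytePos(1_000));
    let run_b = stable_byte_pos(BytePos(9_040), BytePos(9_000));
    assert_eq!(run_a, run_b); // both 40
}
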
diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs
index 2e5be214604..0dcdf44d6e6 100644
--- a/src/librustc/ty/query/on_disk_cache.rs
+++ b/src/librustc/ty/query/on_disk_cache.rs
@@ -494,7 +494,7 @@ impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> {
 
         file_index_to_file.borrow_mut().entry(index).or_insert_with(|| {
             let stable_id = file_index_to_stable_id[&index];
-            codemap.filemap_by_stable_id(stable_id)
+            codemap.source_file_by_stable_id(stable_id)
                    .expect("Failed to lookup SourceFile in new context.")
         }).clone()
     }
@@ -777,8 +777,8 @@ struct CacheEncoder<'enc, 'a, 'tcx, E>
 impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E>
     where E: 'enc + ty_codec::TyEncoder
 {
-    fn filemap_index(&mut self, filemap: Lrc<SourceFile>) -> SourceFileIndex {
-        self.file_to_file_index[&(&*filemap as *const SourceFile)]
+    fn source_file_index(&mut self, source_file: Lrc<SourceFile>) -> SourceFileIndex {
+        self.file_to_file_index[&(&*source_file as *const SourceFile)]
     }
 
     /// Encode something with additional information that allows to do some
@@ -850,10 +850,10 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder<Span> for CacheEncoder<'enc, 'a, 'tcx
 
         let len = span_data.hi - span_data.lo;
 
-        let filemap_index = self.filemap_index(file_lo);
+        let source_file_index = self.source_file_index(file_lo);
 
         TAG_VALID_SPAN.encode(self)?;
-        filemap_index.encode(self)?;
+        source_file_index.encode(self)?;
         line_lo.encode(self)?;
         col_lo.encode(self)?;
         len.encode(self)?;
diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs
index a66392833f6..4d4198d34bc 100644
--- a/src/librustc_driver/pretty.rs
+++ b/src/librustc_driver/pretty.rs
@@ -916,7 +916,7 @@ pub fn fold_crate(sess: &Session, krate: ast::Crate, ppm: PpMode) -> ast::Crate
 fn get_source(input: &Input, sess: &Session) -> (Vec<u8>, FileName) {
     let src_name = driver::source_name(input);
     let src = sess.codemap()
-        .get_filemap(&src_name)
+        .get_source_file(&src_name)
         .unwrap()
         .src
         .as_ref()
diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs
index ce3a19677f2..b4034a6a529 100644
--- a/src/librustc_errors/emitter.rs
+++ b/src/librustc_errors/emitter.rs
@@ -1021,7 +1021,7 @@ impl EmitterWriter {
         // Print out the annotate source lines that correspond with the error
         for annotated_file in annotated_files {
             // we can't annotate anything if the source is unavailable.
-            if !cm.ensure_filemap_source_present(annotated_file.file.clone()) {
+            if !cm.ensure_source_file_source_present(annotated_file.file.clone()) {
                 continue;
             }
 
diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs
index 597b3216490..ae88a365cbe 100644
--- a/src/librustc_errors/lib.rs
+++ b/src/librustc_errors/lib.rs
@@ -120,7 +120,7 @@ pub trait SourceMapper {
     fn span_to_filename(&self, sp: Span) -> FileName;
     fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span>;
     fn call_span_if_macro(&self, sp: Span) -> Span;
-    fn ensure_filemap_source_present(&self, file_map: Lrc<SourceFile>) -> bool;
+    fn ensure_source_file_source_present(&self, file_map: Lrc<SourceFile>) -> bool;
     fn doctest_offset_line(&self, line: usize) -> usize;
 }
 
diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs
index 2c95bd82432..5c020b70e30 100644
--- a/src/librustc_metadata/cstore.rs
+++ b/src/librustc_metadata/cstore.rs
@@ -42,14 +42,14 @@ pub use rustc_data_structures::sync::MetadataRef;
 pub struct MetadataBlob(pub MetadataRef);
 
 /// Holds information about a syntax_pos::SourceFile imported from another crate.
-/// See `imported_filemaps()` for more information.
+/// See `imported_source_files()` for more information.
 pub struct ImportedSourceFile {
     /// This SourceFile's byte-offset within the codemap of its original crate
     pub original_start_pos: syntax_pos::BytePos,
     /// The end of this SourceFile within the codemap of its original crate
     pub original_end_pos: syntax_pos::BytePos,
     /// The imported SourceFile's representation within the local codemap
-    pub translated_filemap: Lrc<syntax_pos::SourceFile>,
+    pub translated_source_file: Lrc<syntax_pos::SourceFile>,
 }
 
 pub struct CrateMetadata {
diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs
index 4926da3b880..54431e669a8 100644
--- a/src/librustc_metadata/cstore_impl.rs
+++ b/src/librustc_metadata/cstore_impl.rs
@@ -40,7 +40,7 @@ use syntax::ast;
 use syntax::attr;
 use syntax::codemap;
 use syntax::edition::Edition;
-use syntax::parse::filemap_to_stream;
+use syntax::parse::source_file_to_stream;
 use syntax::symbol::Symbol;
 use syntax_pos::{Span, NO_EXPANSION, FileName};
 use rustc_data_structures::indexed_set::IdxSetBuf;
@@ -463,9 +463,9 @@ impl cstore::CStore {
         let (name, def) = data.get_macro(id.index);
         let source_name = FileName::Macros(name.to_string());
 
-        let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body);
-        let local_span = Span::new(filemap.start_pos, filemap.end_pos, NO_EXPANSION);
-        let body = filemap_to_stream(&sess.parse_sess, filemap, None);
+        let source_file = sess.parse_sess.codemap().new_source_file(source_name, def.body);
+        let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION);
+        let body = source_file_to_stream(&sess.parse_sess, source_file, None);
 
         // Mark the attrs as used
         let attrs = data.get_item_attrs(id.index, sess);
diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs
index 00ed71c8891..1efe6e50a24 100644
--- a/src/librustc_metadata/decoder.rs
+++ b/src/librustc_metadata/decoder.rs
@@ -50,8 +50,8 @@ pub struct DecodeContext<'a, 'tcx: 'a> {
     sess: Option<&'a Session>,
     tcx: Option<TyCtxt<'a, 'tcx, 'tcx>>,
 
-    // Cache the last used filemap for translating spans as an optimization.
-    last_filemap_index: usize,
+    // Cache the last used source_file for translating spans as an optimization.
+    last_source_file_index: usize,
 
     lazy_state: LazyState,
 
@@ -73,7 +73,7 @@ pub trait Metadata<'a, 'tcx>: Copy {
             cdata: self.cdata(),
             sess: self.sess().or(tcx.map(|tcx| tcx.sess)),
             tcx,
-            last_filemap_index: 0,
+            last_source_file_index: 0,
             lazy_state: LazyState::NoNode,
             alloc_decoding_session: self.cdata().map(|cdata| {
                 cdata.alloc_decoding_state.new_decoding_session()
@@ -314,43 +314,43 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
             bug!("Cannot decode Span without Session.")
         };
 
-        let imported_filemaps = self.cdata().imported_filemaps(&sess.codemap());
-        let filemap = {
+        let imported_source_files = self.cdata().imported_source_files(&sess.codemap());
+        let source_file = {
             // Optimize for the case that most spans within a translated item
-            // originate from the same filemap.
-            let last_filemap = &imported_filemaps[self.last_filemap_index];
+            // originate from the same source_file.
+            let last_source_file = &imported_source_files[self.last_source_file_index];
 
-            if lo >= last_filemap.original_start_pos &&
-               lo <= last_filemap.original_end_pos {
-                last_filemap
+            if lo >= last_source_file.original_start_pos &&
+               lo <= last_source_file.original_end_pos {
+                last_source_file
             } else {
                 let mut a = 0;
-                let mut b = imported_filemaps.len();
+                let mut b = imported_source_files.len();
 
                 while b - a > 1 {
                     let m = (a + b) / 2;
-                    if imported_filemaps[m].original_start_pos > lo {
+                    if imported_source_files[m].original_start_pos > lo {
                         b = m;
                     } else {
                         a = m;
                     }
                 }
 
-                self.last_filemap_index = a;
-                &imported_filemaps[a]
+                self.last_source_file_index = a;
+                &imported_source_files[a]
             }
         };
 
         // Make sure our binary search above is correct.
-        debug_assert!(lo >= filemap.original_start_pos &&
-                      lo <= filemap.original_end_pos);
+        debug_assert!(lo >= source_file.original_start_pos &&
+                      lo <= source_file.original_end_pos);
 
         // Make sure we correctly filtered out invalid spans during encoding
-        debug_assert!(hi >= filemap.original_start_pos &&
-                      hi <= filemap.original_end_pos);
+        debug_assert!(hi >= source_file.original_start_pos &&
+                      hi <= source_file.original_end_pos);
 
-        let lo = (lo + filemap.translated_filemap.start_pos) - filemap.original_start_pos;
-        let hi = (hi + filemap.translated_filemap.start_pos) - filemap.original_start_pos;
+        let lo = (lo + source_file.translated_source_file.start_pos) - source_file.original_start_pos;
+        let hi = (hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos;
 
         Ok(Span::new(lo, hi, NO_EXPANSION))
     }
@@ -1116,13 +1116,13 @@ impl<'a, 'tcx> CrateMetadata {
     /// file they represent, just information about length, line breaks, and
     /// multibyte characters. This information is enough to generate valid debuginfo
     /// for items inlined from other crates.
-    pub fn imported_filemaps(&'a self,
+    pub fn imported_source_files(&'a self,
                              local_codemap: &codemap::SourceMap)
                              -> ReadGuard<'a, Vec<cstore::ImportedSourceFile>> {
         {
-            let filemaps = self.codemap_import_info.borrow();
-            if !filemaps.is_empty() {
-                return filemaps;
+            let source_files = self.codemap_import_info.borrow();
+            if !source_files.is_empty() {
+                return source_files;
             }
         }
 
@@ -1136,7 +1136,7 @@ impl<'a, 'tcx> CrateMetadata {
 
         let external_codemap = self.root.codemap.decode(self);
 
-        let imported_filemaps = external_codemap.map(|filemap_to_import| {
+        let imported_source_files = external_codemap.map(|source_file_to_import| {
             // We can't reuse an existing SourceFile, so allocate a new one
             // containing the information we need.
             let syntax_pos::SourceFile { name,
@@ -1148,13 +1148,13 @@ impl<'a, 'tcx> CrateMetadata {
                                       mut multibyte_chars,
                                       mut non_narrow_chars,
                                       name_hash,
-                                      .. } = filemap_to_import;
+                                      .. } = source_file_to_import;
 
             let source_length = (end_pos - start_pos).to_usize();
 
             // Translate line-start positions and multibyte character
             // position into frame of reference local to file.
-            // `SourceMap::new_imported_filemap()` will then translate those
+            // `SourceMap::new_imported_source_file()` will then translate those
             // coordinates to their new global frame of reference when the
             // offset of the SourceFile is known.
             for pos in &mut lines {
@@ -1167,7 +1167,7 @@ impl<'a, 'tcx> CrateMetadata {
                 *swc = *swc - start_pos;
             }
 
-            let local_version = local_codemap.new_imported_filemap(name,
+            let local_version = local_codemap.new_imported_source_file(name,
                                                                    name_was_remapped,
                                                                    self.cnum.as_u32(),
                                                                    src_hash,
@@ -1176,8 +1176,8 @@ impl<'a, 'tcx> CrateMetadata {
                                                                    lines,
                                                                    multibyte_chars,
                                                                    non_narrow_chars);
-            debug!("CrateMetaData::imported_filemaps alloc \
-                    filemap {:?} original (start_pos {:?} end_pos {:?}) \
+            debug!("CrateMetaData::imported_source_files alloc \
+                    source_file {:?} original (start_pos {:?} end_pos {:?}) \
                     translated (start_pos {:?} end_pos {:?})",
                    local_version.name, start_pos, end_pos,
                    local_version.start_pos, local_version.end_pos);
@@ -1185,11 +1185,11 @@ impl<'a, 'tcx> CrateMetadata {
             cstore::ImportedSourceFile {
                 original_start_pos: start_pos,
                 original_end_pos: end_pos,
-                translated_filemap: local_version,
+                translated_source_file: local_version,
             }
         }).collect();
 
-        *codemap_import_info = imported_filemaps;
+        *codemap_import_info = imported_source_files;
         drop(codemap_import_info);
 
         // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref.
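
A rough sketch of the span translation the decoder performs above, with invented field names (translated_start_pos stands in for translated_source_file.start_pos): find the imported file whose original range contains the position, caching the last hit and falling back to binary search, then rebase into the local offset space.

struct ImportedSourceFile {
    original_start_pos: u32,
    original_end_pos: u32,
    translated_start_pos: u32, // stand-in for translated_source_file.start_pos
}

fn translate(lo: u32, files: &[ImportedSourceFile], last_index: &mut usize) -> u32 {
    let file = if lo >= files[*last_index].original_start_pos
        && lo <= files[*last_index].original_end_pos
    {
        // Fast path: same file as the previously translated span.
        &files[*last_index]
    } else {
        // Binary search over files sorted by original_start_pos.
        let (mut a, mut b) = (0, files.len());
        while b - a > 1 {
            let m = (a + b) / 2;
            if files[m].original_start_pos > lo { b = m } else { a = m }
        }
        *last_index = a;
        &files[a]
    };
    // Rebase from the foreign crate's offsets into the local codemap's offsets.
    (lo + file.translated_start_pos) - file.original_start_pos
}

fn main() {
    let files = [
        ImportedSourceFile { original_start_pos: 0,   original_end_pos: 99,  translated_start_pos: 500 },
        ImportedSourceFile { original_start_pos: 100, original_end_pos: 180, translated_start_pos: 600 },
    ];
    let mut last = 0;
    assert_eq!(translate(150, &files, &mut last), 650);
}
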
diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs
index 925d765ca31..2111cb363b2 100644
--- a/src/librustc_metadata/encoder.rs
+++ b/src/librustc_metadata/encoder.rs
@@ -62,7 +62,7 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
     interpret_allocs_inverse: Vec<interpret::AllocId>,
 
     // This is used to speed up Span encoding.
-    filemap_cache: Lrc<SourceFile>,
+    source_file_cache: Lrc<SourceFile>,
 }
 
 macro_rules! encoder_methods {
@@ -157,13 +157,13 @@ impl<'a, 'tcx> SpecializedEncoder<Span> for EncodeContext<'a, 'tcx> {
         // The Span infrastructure should make sure that this invariant holds:
         debug_assert!(span.lo <= span.hi);
 
-        if !self.filemap_cache.contains(span.lo) {
+        if !self.source_file_cache.contains(span.lo) {
             let codemap = self.tcx.sess.codemap();
-            let filemap_index = codemap.lookup_filemap_idx(span.lo);
-            self.filemap_cache = codemap.files()[filemap_index].clone();
+            let source_file_index = codemap.lookup_source_file_idx(span.lo);
+            self.source_file_cache = codemap.files()[source_file_index].clone();
         }
 
-        if !self.filemap_cache.contains(span.hi) {
+        if !self.source_file_cache.contains(span.hi) {
             // Unfortunately, macro expansion still sometimes generates Spans
             // that malformed in this way.
             return TAG_INVALID_SPAN.encode(self)
@@ -339,17 +339,17 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
 
     fn encode_codemap(&mut self) -> LazySeq<syntax_pos::SourceFile> {
         let codemap = self.tcx.sess.codemap();
-        let all_filemaps = codemap.files();
+        let all_source_files = codemap.files();
 
         let (working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir.clone();
 
-        let adapted = all_filemaps.iter()
-            .filter(|filemap| {
-                // No need to re-export imported filemaps, as any downstream
+        let adapted = all_source_files.iter()
+            .filter(|source_file| {
+                // No need to re-export imported source_files, as any downstream
                 // crate will import them from their original source.
-                !filemap.is_imported()
+                !source_file.is_imported()
             })
-            .map(|filemap| {
+            .map(|source_file| {
                 // When exporting SourceFiles, we expand all paths to absolute
                 // paths because any relative paths are potentially relative to
                 // a wrong directory.
@@ -357,16 +357,16 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
                 // `--remap-path-prefix` we assume the user has already set
                 // things up the way they want and don't touch the path values
                 // anymore.
-                match filemap.name {
+                match source_file.name {
                     FileName::Real(ref name) => {
-                        if filemap.name_was_remapped ||
+                        if source_file.name_was_remapped ||
                         (name.is_relative() && working_dir_was_remapped) {
                             // This path of this SourceFile has been modified by
                             // path-remapping, so we use it verbatim (and avoid cloning
                             // the whole map in the process).
-                            filemap.clone()
+                            source_file.clone()
                         } else {
-                            let mut adapted = (**filemap).clone();
+                            let mut adapted = (**source_file).clone();
                             adapted.name = Path::new(&working_dir).join(name).into();
                             adapted.name_hash = {
                                 let mut hasher: StableHasher<u128> = StableHasher::new();
@@ -377,7 +377,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
                         }
                     },
                     // expanded code, not from a file
-                    _ => filemap.clone(),
+                    _ => source_file.clone(),
                 }
             })
             .collect::<Vec<_>>();
@@ -1842,7 +1842,7 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             lazy_state: LazyState::NoNode,
             type_shorthands: Default::default(),
             predicate_shorthands: Default::default(),
-            filemap_cache: tcx.sess.codemap().files()[0].clone(),
+            source_file_cache: tcx.sess.codemap().files()[0].clone(),
             interpret_allocs: Default::default(),
             interpret_allocs_inverse: Default::default(),
         };
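
The name-adaptation step in encode_codemap above boils down to the following path logic; a sketch under the assumption that only the path handling matters (the real code also clones the SourceFile and recomputes name_hash).

use std::path::{Path, PathBuf};

// Paths already shaped by --remap-path-prefix are kept verbatim; everything
// else is resolved against the session's working directory.
fn adapt_name(name: &Path,
              name_was_remapped: bool,
              working_dir: &Path,
              working_dir_was_remapped: bool) -> PathBuf {
    if name_was_remapped || (name.is_relative() && working_dir_was_remapped) {
        name.to_path_buf()
    } else {
        // join() on an already-absolute path simply returns that path.
        working_dir.join(name)
    }
}

fn main() {
    let adapted = adapt_name(Path::new("src/lib.rs"), false,
                             Path::new("/work/project"), false);
    assert_eq!(adapted, PathBuf::from("/work/project/src/lib.rs"));
}
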
diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs
index 85dd2a3a206..f764042926a 100644
--- a/src/librustc_save_analysis/span_utils.rs
+++ b/src/librustc_save_analysis/span_utils.rs
@@ -276,7 +276,7 @@ impl<'a> SpanUtils<'a> {
             None => return true,
         };
 
-        //If the span comes from a fake filemap, filter it.
+        //If the span comes from a fake source_file, filter it.
         if !self.sess
             .codemap()
             .lookup_char_pos(parent.lo())
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index a9137009115..a3ad50b7079 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -33,7 +33,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>,
                                 tooltip: Option<(&str, &str)>) -> String {
     debug!("highlighting: ================\n{}\n==============", src);
     let sess = parse::ParseSess::new(FilePathMapping::empty());
-    let fm = sess.codemap().new_filemap(FileName::Custom("stdin".to_string()), src.to_string());
+    let fm = sess.codemap().new_source_file(FileName::Custom("stdin".to_string()), src.to_string());
 
     let mut out = Vec::new();
     if let Some((tooltip, class)) = tooltip {
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index c9a9fbf4d7b..34cd026f7a0 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -109,12 +109,12 @@ impl FileLoader for RealFileLoader {
 pub struct StableFilemapId(u128);
 
 impl StableFilemapId {
-    pub fn new(filemap: &SourceFile) -> StableFilemapId {
+    pub fn new(source_file: &SourceFile) -> StableFilemapId {
         let mut hasher = StableHasher::new();
 
-        filemap.name.hash(&mut hasher);
-        filemap.name_was_remapped.hash(&mut hasher);
-        filemap.unmapped_path.hash(&mut hasher);
+        source_file.name.hash(&mut hasher);
+        source_file.name_was_remapped.hash(&mut hasher);
+        source_file.unmapped_path.hash(&mut hasher);
 
         StableFilemapId(hasher.finish())
     }
@@ -126,7 +126,7 @@ impl StableFilemapId {
 
 pub(super) struct SourceMapFiles {
     pub(super) file_maps: Vec<Lrc<SourceFile>>,
-    stable_id_to_filemap: FxHashMap<StableFilemapId, Lrc<SourceFile>>
+    stable_id_to_source_file: FxHashMap<StableFilemapId, Lrc<SourceFile>>
 }
 
 pub struct SourceMap {
@@ -145,7 +145,7 @@ impl SourceMap {
         SourceMap {
             files: Lock::new(SourceMapFiles {
                 file_maps: Vec::new(),
-                stable_id_to_filemap: FxHashMap(),
+                stable_id_to_source_file: FxHashMap(),
             }),
             file_loader: Box::new(RealFileLoader),
             path_mapping,
@@ -168,7 +168,7 @@ impl SourceMap {
         SourceMap {
             files: Lock::new(SourceMapFiles {
                 file_maps: Vec::new(),
-                stable_id_to_filemap: FxHashMap(),
+                stable_id_to_source_file: FxHashMap(),
             }),
             file_loader: file_loader,
             path_mapping,
@@ -191,15 +191,15 @@ impl SourceMap {
         } else {
             path.to_owned().into()
         };
-        Ok(self.new_filemap(filename, src))
+        Ok(self.new_source_file(filename, src))
     }
 
     pub fn files(&self) -> LockGuard<Vec<Lrc<SourceFile>>> {
         LockGuard::map(self.files.borrow(), |files| &mut files.file_maps)
     }
 
-    pub fn filemap_by_stable_id(&self, stable_id: StableFilemapId) -> Option<Lrc<SourceFile>> {
-        self.files.borrow().stable_id_to_filemap.get(&stable_id).map(|fm| fm.clone())
+    pub fn source_file_by_stable_id(&self, stable_id: StableFilemapId) -> Option<Lrc<SourceFile>> {
+        self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|fm| fm.clone())
     }
 
     fn next_start_pos(&self) -> usize {
@@ -211,9 +211,9 @@ impl SourceMap {
         }
     }
 
-    /// Creates a new filemap.
+    /// Creates a new source_file.
     /// This does not ensure that only one SourceFile exists per file name.
-    pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
+    pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
         let start_pos = self.next_start_pos();
 
         // The path is used to determine the directory for loading submodules and
@@ -230,7 +230,7 @@ impl SourceMap {
             },
             other => (other, false),
         };
-        let filemap = Lrc::new(SourceFile::new(
+        let source_file = Lrc::new(SourceFile::new(
             filename,
             was_remapped,
             unmapped_path,
@@ -240,17 +240,17 @@ impl SourceMap {
 
         let mut files = self.files.borrow_mut();
 
-        files.file_maps.push(filemap.clone());
-        files.stable_id_to_filemap.insert(StableFilemapId::new(&filemap), filemap.clone());
+        files.file_maps.push(source_file.clone());
+        files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone());
 
-        filemap
+        source_file
     }
 
     /// Allocates a new SourceFile representing a source file from an external
-    /// crate. The source code of such an "imported filemap" is not available,
+    /// crate. The source code of such an "imported source_file" is not available,
     /// but we still know enough to generate accurate debuginfo location
     /// information for things inlined from other crates.
-    pub fn new_imported_filemap(&self,
+    pub fn new_imported_source_file(&self,
                                 filename: FileName,
                                 name_was_remapped: bool,
                                 crate_of_origin: u32,
@@ -278,7 +278,7 @@ impl SourceMap {
             *swc = *swc + start_pos;
         }
 
-        let filemap = Lrc::new(SourceFile {
+        let source_file = Lrc::new(SourceFile {
             name: filename,
             name_was_remapped,
             unmapped_path: None,
@@ -296,10 +296,10 @@ impl SourceMap {
 
         let mut files = self.files.borrow_mut();
 
-        files.file_maps.push(filemap.clone());
-        files.stable_id_to_filemap.insert(StableFilemapId::new(&filemap), filemap.clone());
+        files.file_maps.push(source_file.clone());
+        files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone());
 
-        filemap
+        source_file
     }
 
     pub fn mk_substr_filename(&self, sp: Span) -> String {
@@ -385,9 +385,9 @@ impl SourceMap {
         }
     }
 
-    // If the relevant filemap is empty, we don't return a line number.
+    // If the relevant source_file is empty, we don't return a line number.
     pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
-        let idx = self.lookup_filemap_idx(pos);
+        let idx = self.lookup_source_file_idx(pos);
 
         let f = (*self.files.borrow().file_maps)[idx].clone();
 
@@ -541,7 +541,7 @@ impl SourceMap {
                       local_end.fm.start_pos)
             }));
         } else {
-            self.ensure_filemap_source_present(local_begin.fm.clone());
+            self.ensure_source_file_source_present(local_begin.fm.clone());
 
             let start_index = local_begin.pos.to_usize();
             let end_index = local_end.pos.to_usize();
@@ -798,7 +798,7 @@ impl SourceMap {
         }
     }
 
-    pub fn get_filemap(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
+    pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
         for fm in self.files.borrow().file_maps.iter() {
             if *filename == fm.name {
                 return Some(fm.clone());
@@ -809,15 +809,15 @@ impl SourceMap {
 
     /// For a global BytePos compute the local offset within the containing SourceFile
     pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
-        let idx = self.lookup_filemap_idx(bpos);
+        let idx = self.lookup_source_file_idx(bpos);
         let fm = (*self.files.borrow().file_maps)[idx].clone();
         let offset = bpos - fm.start_pos;
         SourceFileAndBytePos {fm: fm, pos: offset}
     }
 
-    /// Converts an absolute BytePos to a CharPos relative to the filemap.
+    /// Converts an absolute BytePos to a CharPos relative to the source_file.
     pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
-        let idx = self.lookup_filemap_idx(bpos);
+        let idx = self.lookup_source_file_idx(bpos);
         let map = &(*self.files.borrow().file_maps)[idx];
 
         // The number of extra bytes due to multibyte chars in the SourceFile
@@ -841,13 +841,13 @@ impl SourceMap {
         CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize)
     }
 
-    // Return the index of the filemap (in self.files) which contains pos.
-    pub fn lookup_filemap_idx(&self, pos: BytePos) -> usize {
+    // Return the index of the source_file (in self.files) which contains pos.
+    pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize {
         let files = self.files.borrow();
         let files = &files.file_maps;
         let count = files.len();
 
-        // Binary search for the filemap.
+        // Binary search for the source_file.
         let mut a = 0;
         let mut b = count;
         while b - a > 1 {
@@ -966,7 +966,7 @@ impl SourceMapper for SourceMap {
         }
         sp
     }
-    fn ensure_filemap_source_present(&self, file_map: Lrc<SourceFile>) -> bool {
+    fn ensure_source_file_source_present(&self, file_map: Lrc<SourceFile>) -> bool {
         file_map.add_external_src(
             || match file_map.name {
                 FileName::Real(ref name) => self.file_loader.read_file(name).ok(),
@@ -1025,11 +1025,11 @@ mod tests {
 
     fn init_code_map() -> SourceMap {
         let cm = SourceMap::new(FilePathMapping::empty());
-        cm.new_filemap(PathBuf::from("blork.rs").into(),
+        cm.new_source_file(PathBuf::from("blork.rs").into(),
                        "first line.\nsecond line".to_string());
-        cm.new_filemap(PathBuf::from("empty.rs").into(),
+        cm.new_source_file(PathBuf::from("empty.rs").into(),
                        "".to_string());
-        cm.new_filemap(PathBuf::from("blork2.rs").into(),
+        cm.new_source_file(PathBuf::from("blork2.rs").into(),
                        "first line.\nsecond line".to_string());
         cm
     }
@@ -1066,7 +1066,7 @@ mod tests {
 
     #[test]
     fn t5() {
-        // Test zero-length filemaps.
+        // Test zero-length source_files.
         let cm = init_code_map();
 
         let loc1 = cm.lookup_char_pos(BytePos(22));
@@ -1083,9 +1083,9 @@ mod tests {
     fn init_code_map_mbc() -> SourceMap {
         let cm = SourceMap::new(FilePathMapping::empty());
         // € is a three byte utf8 char.
-        cm.new_filemap(PathBuf::from("blork.rs").into(),
+        cm.new_source_file(PathBuf::from("blork.rs").into(),
                        "fir€st €€€€ line.\nsecond line".to_string());
-        cm.new_filemap(PathBuf::from("blork2.rs").into(),
+        cm.new_source_file(PathBuf::from("blork2.rs").into(),
                        "first line€€.\n€ second line".to_string());
         cm
     }
@@ -1110,7 +1110,7 @@ mod tests {
 
     #[test]
     fn t7() {
-        // Test span_to_lines for a span ending at the end of filemap
+        // Test span_to_lines for a span ending at the end of source_file
         let cm = init_code_map();
         let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
         let file_lines = cm.span_to_lines(span).unwrap();
@@ -1138,7 +1138,7 @@ mod tests {
         let cm = SourceMap::new(FilePathMapping::empty());
         let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
         let selection = "     \n    ~~\n~~~\n~~~~~     \n   \n";
-        cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
+        cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
         let span = span_from_selection(inputtext, selection);
 
         // check that we are extracting the text we thought we were extracting
@@ -1156,7 +1156,7 @@ mod tests {
 
     #[test]
     fn t8() {
-        // Test span_to_snippet for a span ending at the end of filemap
+        // Test span_to_snippet for a span ending at the end of source_file
         let cm = init_code_map();
         let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
         let snippet = cm.span_to_snippet(span);
@@ -1166,7 +1166,7 @@ mod tests {
 
     #[test]
     fn t9() {
-        // Test span_to_str for a span ending at the end of filemap
+        // Test span_to_str for a span ending at the end of source_file
         let cm = init_code_map();
         let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
         let sstr =  cm.span_to_string(span);
@@ -1181,7 +1181,7 @@ mod tests {
         let inputtext  = "bbbb BB\ncc CCC\n";
         let selection1 = "     ~~\n      \n";
         let selection2 = "       \n   ~~~\n";
-        cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
+        cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
         let span1 = span_from_selection(inputtext, selection1);
         let span2 = span_from_selection(inputtext, selection2);
 
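StableFilemapId, kept as-is by this diff, identifies a SourceFile by hashing its name data instead of its index in the file list, which is what source_file_by_stable_id looks up. A simplified sketch; std's DefaultHasher stands in for rustc's StableHasher here, so unlike the real id it is not guaranteed stable across compiler versions.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(PartialEq, Eq, Hash, Debug)]
struct StableFileId(u64);

fn stable_file_id(name: &str, name_was_remapped: bool, unmapped_path: Option<&str>) -> StableFileId {
    let mut hasher = DefaultHasher::new();
    name.hash(&mut hasher);
    name_was_remapped.hash(&mut hasher);
    unmapped_path.hash(&mut hasher);
    StableFileId(hasher.finish())
}

fn main() {
    // The id depends only on the name data, not on registration order.
    let a = stable_file_id("src/lib.rs", false, None);
    let b = stable_file_id("src/lib.rs", false, None);
    assert_eq!(a, b);
}
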
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index ffa2730d686..6b41dfafd07 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -1563,7 +1563,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
 
                             // Add this input file to the code map to make it available as
                             // dependency information
-                            self.cx.codemap().new_filemap(filename.into(), src);
+                            self.cx.codemap().new_source_file(filename.into(), src);
 
                             let include_info = vec![
                                 dummy_spanned(ast::NestedMetaItemKind::MetaItem(
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 9b7e0fe1ae5..fdf9c33b6f4 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -63,7 +63,7 @@ pub fn expand_column_gated(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Token
 }
 
 /// file!(): expands to the current filename */
-/// The filemap (`loc.file`) contains a bunch more information we could spit
+/// The source_file (`loc.file`) contains a bunch more information we could spit
 /// out if we wanted.
 pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
                    -> Box<dyn base::MacResult+'static> {
@@ -154,7 +154,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
 
             // Add this input file to the code map to make it available as
             // dependency information
-            cx.codemap().new_filemap(file.into(), src);
+            cx.codemap().new_source_file(file.into(), src);
 
             base::MacEager::expr(cx.expr_str(sp, interned_src))
         }
@@ -184,7 +184,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
         Ok(..) => {
             // Add this input file to the code map to make it available as
             // dependency information, but don't enter it's contents
-            cx.codemap().new_filemap(file.into(), "".to_string());
+            cx.codemap().new_source_file(file.into(), "".to_string());
 
             base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
         }
diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs
index a090083f608..1ac51a68b62 100644
--- a/src/libsyntax/json.rs
+++ b/src/libsyntax/json.rs
@@ -340,7 +340,7 @@ impl DiagnosticSpan {
 }
 
 impl DiagnosticSpanLine {
-    fn line_from_filemap(fm: &syntax_pos::SourceFile,
+    fn line_from_source_file(fm: &syntax_pos::SourceFile,
                          index: usize,
                          h_start: usize,
                          h_end: usize)
@@ -362,7 +362,7 @@ impl DiagnosticSpanLine {
                  lines.lines
                       .iter()
                       .map(|line| {
-                          DiagnosticSpanLine::line_from_filemap(fm,
+                          DiagnosticSpanLine::line_from_source_file(fm,
                                                                 line.line_index,
                                                                 line.start_col.0 + 1,
                                                                 line.end_col.0 + 1)
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 2c227756f9a..f4d4635b61e 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -247,11 +247,11 @@ fn read_block_comment(rdr: &mut StringReader,
     let mut lines: Vec<String> = Vec::new();
 
     // Count the number of chars since the start of the line by rescanning.
-    let mut src_index = rdr.src_index(rdr.filemap.line_begin_pos(rdr.pos));
+    let mut src_index = rdr.src_index(rdr.source_file.line_begin_pos(rdr.pos));
     let end_src_index = rdr.src_index(rdr.pos);
     assert!(src_index <= end_src_index,
         "src_index={}, end_src_index={}, line_begin_pos={}",
-        src_index, end_src_index, rdr.filemap.line_begin_pos(rdr.pos).to_u32());
+        src_index, end_src_index, rdr.source_file.line_begin_pos(rdr.pos).to_u32());
     let mut n = 0;
 
     while src_index < end_src_index {
@@ -372,8 +372,8 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut
     let mut src = String::new();
     srdr.read_to_string(&mut src).unwrap();
     let cm = SourceMap::new(sess.codemap().path_mapping().clone());
-    let filemap = cm.new_filemap(path, src);
-    let mut rdr = lexer::StringReader::new_raw(sess, filemap, None);
+    let source_file = cm.new_source_file(path, src);
+    let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);
 
     let mut comments: Vec<Comment> = Vec::new();
     let mut literals: Vec<Literal> = Vec::new();
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 654ac692338..acec975d32a 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -49,7 +49,7 @@ pub struct StringReader<'a> {
     pub pos: BytePos,
     /// The current character (which has been read from self.pos)
     pub ch: Option<char>,
-    pub filemap: Lrc<syntax_pos::SourceFile>,
+    pub source_file: Lrc<syntax_pos::SourceFile>,
     /// Stop reading src at this index.
     pub end_src_index: usize,
     // cached:
@@ -58,7 +58,7 @@ pub struct StringReader<'a> {
     peek_span_src_raw: Span,
     fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
-    // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
+    // retrieve it via `self.source_file.src.as_ref().unwrap()` all the time.
     src: Lrc<String>,
     /// Stack of open delimiters and their spans. Used for error message.
     token: token::Token,
@@ -180,31 +180,31 @@ impl<'a> StringReader<'a> {
     }
 
     /// For comments.rs, which hackily pokes into next_pos and ch
-    fn new_raw(sess: &'a ParseSess, filemap: Lrc<syntax_pos::SourceFile>, override_span: Option<Span>)
+    fn new_raw(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>, override_span: Option<Span>)
         -> Self
     {
-        let mut sr = StringReader::new_raw_internal(sess, filemap, override_span);
+        let mut sr = StringReader::new_raw_internal(sess, source_file, override_span);
         sr.bump();
 
         sr
     }
 
-    fn new_raw_internal(sess: &'a ParseSess, filemap: Lrc<syntax_pos::SourceFile>,
+    fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
         override_span: Option<Span>) -> Self
     {
-        if filemap.src.is_none() {
-            sess.span_diagnostic.bug(&format!("Cannot lex filemap without source: {}",
-                                              filemap.name));
+        if source_file.src.is_none() {
+            sess.span_diagnostic.bug(&format!("Cannot lex source_file without source: {}",
+                                              source_file.name));
         }
 
-        let src = (*filemap.src.as_ref().unwrap()).clone();
+        let src = (*source_file.src.as_ref().unwrap()).clone();
 
         StringReader {
             sess,
-            next_pos: filemap.start_pos,
-            pos: filemap.start_pos,
+            next_pos: source_file.start_pos,
+            pos: source_file.start_pos,
             ch: Some('\n'),
-            filemap,
+            source_file,
             end_src_index: src.len(),
             // dummy values; not read
             peek_tok: token::Eof,
@@ -221,10 +221,10 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn new(sess: &'a ParseSess, filemap: Lrc<syntax_pos::SourceFile>, override_span: Option<Span>)
+    pub fn new(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>, override_span: Option<Span>)
         -> Self
     {
-        let mut sr = StringReader::new_raw(sess, filemap, override_span);
+        let mut sr = StringReader::new_raw(sess, source_file, override_span);
         if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
             FatalError.raise();
@@ -364,8 +364,8 @@ impl<'a> StringReader<'a> {
                 if self.is_eof() {
                     self.peek_tok = token::Eof;
                     let (real, raw) = self.mk_sp_and_raw(
-                        self.filemap.end_pos,
-                        self.filemap.end_pos,
+                        self.source_file.end_pos,
+                        self.source_file.end_pos,
                     );
                     self.peek_span = real;
                     self.peek_span_src_raw = raw;
@@ -384,7 +384,7 @@ impl<'a> StringReader<'a> {
 
     #[inline]
     fn src_index(&self, pos: BytePos) -> usize {
-        (pos - self.filemap.start_pos).to_usize()
+        (pos - self.source_file.start_pos).to_usize()
     }
 
     /// Calls `f` with a string slice of the source text spanning from `start`
@@ -623,7 +623,7 @@ impl<'a> StringReader<'a> {
                 // I guess this is the only way to figure out if
                 // we're at the beginning of the file...
                 let cmap = SourceMap::new(FilePathMapping::empty());
-                cmap.files.borrow_mut().file_maps.push(self.filemap.clone());
+                cmap.files.borrow_mut().file_maps.push(self.source_file.clone());
                 let loc = cmap.lookup_char_pos_adj(self.pos);
                 debug!("Skipping a shebang");
                 if loc.line == 1 && loc.col == CharPos(0) {
@@ -1861,7 +1861,7 @@ mod tests {
                  sess: &'a ParseSess,
                  teststr: String)
                  -> StringReader<'a> {
-        let fm = cm.new_filemap(PathBuf::from("zebra.rs").into(), teststr);
+        let fm = cm.new_source_file(PathBuf::from("zebra.rs").into(), teststr);
         StringReader::new(sess, fm, None)
     }
 
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 65bab94c6bc..07a9f44fe4a 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -171,13 +171,13 @@ crate fn parse_stmt_from_source_str(name: FileName, source: String, sess: &Parse
 pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess,
                                     override_span: Option<Span>)
                                     -> TokenStream {
-    filemap_to_stream(sess, sess.codemap().new_filemap(name, source), override_span)
+    source_file_to_stream(sess, sess.codemap().new_source_file(name, source), override_span)
 }
 
 // Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                       -> Parser {
-    let mut parser = filemap_to_parser(sess, sess.codemap().new_filemap(name, source));
+    let mut parser = source_file_to_parser(sess, sess.codemap().new_source_file(name, source));
     parser.recurse_into_file_modules = false;
     parser
 }
@@ -185,7 +185,7 @@ pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: Stri
 /// Create a new parser, handling errors as appropriate
 /// if the file doesn't exist
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
-    filemap_to_parser(sess, file_to_filemap(sess, path, None))
+    source_file_to_parser(sess, file_to_source_file(sess, path, None))
 }
 
 /// Given a session, a crate config, a path, and a span, add
@@ -196,16 +196,16 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
                                     directory_ownership: DirectoryOwnership,
                                     module_name: Option<String>,
                                     sp: Span) -> Parser<'a> {
-    let mut p = filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)));
+    let mut p = source_file_to_parser(sess, file_to_source_file(sess, path, Some(sp)));
     p.directory.ownership = directory_ownership;
     p.root_module_name = module_name;
     p
 }
 
-/// Given a filemap and config, return a parser
-fn filemap_to_parser(sess: & ParseSess, filemap: Lrc<SourceFile>) -> Parser {
-    let end_pos = filemap.end_pos;
-    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None));
+/// Given a source_file and config, return a parser
+fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
+    let end_pos = source_file.end_pos;
+    let mut parser = stream_to_parser(sess, source_file_to_stream(sess, source_file, None));
 
     if parser.token == token::Eof && parser.span.is_dummy() {
         parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
@@ -224,11 +224,11 @@ pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
 // base abstractions
 
 /// Given a session and a path and an optional span (for error reporting),
-/// add the path to the session's codemap and return the new filemap.
-fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
+/// add the path to the session's codemap and return the new source_file.
+fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                    -> Lrc<SourceFile> {
     match sess.codemap().load_file(path) {
-        Ok(filemap) => filemap,
+        Ok(source_file) => source_file,
         Err(e) => {
             let msg = format!("couldn't read {:?}: {}", path.display(), e);
             match spanopt {
@@ -239,10 +239,10 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     }
 }
 
-/// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_stream(sess: &ParseSess, filemap: Lrc<SourceFile>, override_span: Option<Span>)
+/// Given a source_file, produce a sequence of token-trees
+pub fn source_file_to_stream(sess: &ParseSess, source_file: Lrc<SourceFile>, override_span: Option<Span>)
                          -> TokenStream {
-    let mut srdr = lexer::StringReader::new(sess, filemap, override_span);
+    let mut srdr = lexer::StringReader::new(sess, source_file, override_span);
     srdr.real_token();
     panictry!(srdr.parse_all_token_trees())
 }
diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs
index 12f72a3979e..00dd79ffb00 100644
--- a/src/libsyntax/test_snippet.rs
+++ b/src/libsyntax/test_snippet.rs
@@ -51,7 +51,7 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
         let output = Arc::new(Mutex::new(Vec::new()));
 
         let code_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        code_map.new_filemap(Path::new("test.rs").to_owned().into(), file_text.to_owned());
+        code_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
 
         let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
         let mut msp = MultiSpan::from_span(primary_span);
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index 46b7f2d7bda..35dae1a4e67 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -10,7 +10,7 @@
 
 use ast::{self, Ident};
 use codemap::FilePathMapping;
-use parse::{ParseSess, PResult, filemap_to_stream};
+use parse::{ParseSess, PResult, source_file_to_stream};
 use parse::{lexer, new_parser_from_source_str};
 use parse::parser::Parser;
 use ptr::P;
@@ -21,8 +21,8 @@ use std::path::PathBuf;
 /// Map a string to tts, using a made-up filename:
 pub fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new(FilePathMapping::empty());
-    filemap_to_stream(&ps, ps.codemap()
-                             .new_filemap(PathBuf::from("bogofile").into(), source_str), None)
+    source_file_to_stream(&ps, ps.codemap()
+                             .new_source_file(PathBuf::from("bogofile").into(), source_str), None)
 }
 
 /// Map string to parser (via tts)
diff --git a/src/libsyntax_pos/analyze_filemap.rs b/src/libsyntax_pos/analyze_source_file.rs
index 6ae1ada51fd..e468aaac7a3 100644
--- a/src/libsyntax_pos/analyze_filemap.rs
+++ b/src/libsyntax_pos/analyze_source_file.rs
@@ -16,27 +16,27 @@ use super::*;
 ///
 /// This function will use an SSE2 enhanced implementation if hardware support
 /// is detected at runtime.
-pub fn analyze_filemap(
+pub fn analyze_source_file(
     src: &str,
-    filemap_start_pos: BytePos)
+    source_file_start_pos: BytePos)
     -> (Vec<BytePos>, Vec<MultiByteChar>, Vec<NonNarrowChar>)
 {
-    let mut lines = vec![filemap_start_pos];
+    let mut lines = vec![source_file_start_pos];
     let mut multi_byte_chars = vec![];
     let mut non_narrow_chars = vec![];
 
     // Calls the right implementation, depending on hardware support available.
-    analyze_filemap_dispatch(src,
-                             filemap_start_pos,
+    analyze_source_file_dispatch(src,
+                             source_file_start_pos,
                              &mut lines,
                              &mut multi_byte_chars,
                              &mut non_narrow_chars);
 
     // The code above optimistically registers a new line *after* each \n
-    // it encounters. If that point is already outside the filemap, remove
+    // it encounters. If that point is already outside the source_file, remove
     // it again.
     if let Some(&last_line_start) = lines.last() {
-        let file_map_end = filemap_start_pos + BytePos::from_usize(src.len());
+        let file_map_end = source_file_start_pos + BytePos::from_usize(src.len());
         assert!(file_map_end >= last_line_start);
         if last_line_start == file_map_end {
             lines.pop();
@@ -49,23 +49,23 @@ pub fn analyze_filemap(
 cfg_if! {
     if #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"),
                  not(stage0)))] {
-        fn analyze_filemap_dispatch(src: &str,
-                                    filemap_start_pos: BytePos,
+        fn analyze_source_file_dispatch(src: &str,
+                                    source_file_start_pos: BytePos,
                                     lines: &mut Vec<BytePos>,
                                     multi_byte_chars: &mut Vec<MultiByteChar>,
                                     non_narrow_chars: &mut Vec<NonNarrowChar>) {
             if is_x86_feature_detected!("sse2") {
                 unsafe {
-                    analyze_filemap_sse2(src,
-                                         filemap_start_pos,
+                    analyze_source_file_sse2(src,
+                                         source_file_start_pos,
                                          lines,
                                          multi_byte_chars,
                                          non_narrow_chars);
                 }
             } else {
-                analyze_filemap_generic(src,
+                analyze_source_file_generic(src,
                                         src.len(),
-                                        filemap_start_pos,
+                                        source_file_start_pos,
                                         lines,
                                         multi_byte_chars,
                                         non_narrow_chars);
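The dispatch function above follows the standard runtime-detection pattern: check for SSE2 once with is_x86_feature_detected!, and only then call the #[target_feature]-annotated function through an unsafe block. A stripped-down, x86-only sketch of the same shape (all names are illustrative and the SSE2 body is stubbed out):

    fn count_newlines(src: &str) -> usize {
        // The unsafe call is sound only because SSE2 support was just verified.
        if is_x86_feature_detected!("sse2") {
            unsafe { count_newlines_sse2(src) }
        } else {
            count_newlines_generic(src)
        }
    }

    #[target_feature(enable = "sse2")]
    unsafe fn count_newlines_sse2(src: &str) -> usize {
        // A real implementation would scan 16-byte chunks with core::arch
        // intrinsics; the sketch simply defers to the scalar loop.
        count_newlines_generic(src)
    }

    fn count_newlines_generic(src: &str) -> usize {
        src.bytes().filter(|&b| b == b'\n').count()
    }
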
@@ -78,7 +78,7 @@ cfg_if! {
         /// function falls back to the generic implementation. Otherwise it uses
         /// SSE2 intrinsics to quickly find all newlines.
         #[target_feature(enable = "sse2")]
-        unsafe fn analyze_filemap_sse2(src: &str,
+        unsafe fn analyze_source_file_sse2(src: &str,
                                        output_offset: BytePos,
                                        lines: &mut Vec<BytePos>,
                                        multi_byte_chars: &mut Vec<MultiByteChar>,
@@ -169,7 +169,7 @@ cfg_if! {
                 // The slow path.
                 // There are control chars in here, fallback to generic decoding.
                 let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset;
-                intra_chunk_offset = analyze_filemap_generic(
+                intra_chunk_offset = analyze_source_file_generic(
                     &src[scan_start .. ],
                     CHUNK_SIZE - intra_chunk_offset,
                     BytePos::from_usize(scan_start) + output_offset,
@@ -182,7 +182,7 @@ cfg_if! {
             // There might still be a tail left to analyze
             let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset;
             if tail_start < src.len() {
-                analyze_filemap_generic(&src[tail_start as usize ..],
+                analyze_source_file_generic(&src[tail_start as usize ..],
                                         src.len() - tail_start,
                                         output_offset + BytePos::from_usize(tail_start),
                                         lines,
@@ -193,14 +193,14 @@ cfg_if! {
     } else {
 
         // The target (or compiler version) does not support SSE2 ...
-        fn analyze_filemap_dispatch(src: &str,
-                                    filemap_start_pos: BytePos,
+        fn analyze_source_file_dispatch(src: &str,
+                                    source_file_start_pos: BytePos,
                                     lines: &mut Vec<BytePos>,
                                     multi_byte_chars: &mut Vec<MultiByteChar>,
                                     non_narrow_chars: &mut Vec<NonNarrowChar>) {
-            analyze_filemap_generic(src,
+            analyze_source_file_generic(src,
                                     src.len(),
-                                    filemap_start_pos,
+                                    source_file_start_pos,
                                     lines,
                                     multi_byte_chars,
                                     non_narrow_chars);
@@ -211,7 +211,7 @@ cfg_if! {
 // `scan_len` determines the number of bytes in `src` to scan. Note that the
 // function can read past `scan_len` if a multi-byte character start within the
 // range but extends past it. The overflow is returned by the function.
-fn analyze_filemap_generic(src: &str,
+fn analyze_source_file_generic(src: &str,
                            scan_len: usize,
                            output_offset: BytePos,
                            lines: &mut Vec<BytePos>,
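To make the overflow return concrete: if scan_len cuts a multi-byte character in half, the function finishes reading that character and reports how far past scan_len it went, and the chunked SSE2 caller feeds that value back in as the next intra_chunk_offset. A hedged in-module sketch (the concrete value is inferred from the comment and the scanning loop, not verified against the suite):

    // 'β' occupies 2 bytes but scan_len only covers the first one, so the scan
    // reads one byte past scan_len and returns that 1-byte overflow.
    let mut lines = vec![BytePos(0)];
    let mut multi_byte_chars = Vec::new();
    let mut non_narrow_chars = Vec::new();
    let overflow = analyze_source_file_generic("β234", 1, BytePos(0),
                                               &mut lines,
                                               &mut multi_byte_chars,
                                               &mut non_narrow_chars);
    assert_eq!(overflow, 1);
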
@@ -288,7 +288,7 @@ fn analyze_filemap_generic(src: &str,
 macro_rules! test {
     (case: $test_name:ident,
      text: $text:expr,
-     filemap_start_pos: $filemap_start_pos:expr,
+     source_file_start_pos: $source_file_start_pos:expr,
      lines: $lines:expr,
      multi_byte_chars: $multi_byte_chars:expr,
      non_narrow_chars: $non_narrow_chars:expr,) => (
@@ -297,7 +297,7 @@ macro_rules! test {
     fn $test_name() {
 
         let (lines, multi_byte_chars, non_narrow_chars) =
-            analyze_filemap($text, BytePos($filemap_start_pos));
+            analyze_source_file($text, BytePos($source_file_start_pos));
 
         let expected_lines: Vec<BytePos> = $lines
             .into_iter()
@@ -330,7 +330,7 @@ macro_rules! test {
 test!(
     case: empty_text,
     text: "",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![],
     multi_byte_chars: vec![],
     non_narrow_chars: vec![],
@@ -339,7 +339,7 @@ test!(
 test!(
     case: newlines_short,
     text: "a\nc",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0, 2],
     multi_byte_chars: vec![],
     non_narrow_chars: vec![],
@@ -348,7 +348,7 @@ test!(
 test!(
     case: newlines_long,
     text: "012345678\nabcdef012345678\na",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0, 10, 26],
     multi_byte_chars: vec![],
     non_narrow_chars: vec![],
@@ -357,7 +357,7 @@ test!(
 test!(
     case: newline_and_multi_byte_char_in_same_chunk,
     text: "01234β789\nbcdef0123456789abcdef",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0, 11],
     multi_byte_chars: vec![(5, 2)],
     non_narrow_chars: vec![],
@@ -366,7 +366,7 @@ test!(
 test!(
     case: newline_and_control_char_in_same_chunk,
     text: "01234\u{07}6789\nbcdef0123456789abcdef",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0, 11],
     multi_byte_chars: vec![],
     non_narrow_chars: vec![(5, 0)],
@@ -375,7 +375,7 @@ test!(
 test!(
     case: multi_byte_char_short,
     text: "aβc",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0],
     multi_byte_chars: vec![(1, 2)],
     non_narrow_chars: vec![],
@@ -384,7 +384,7 @@ test!(
 test!(
     case: multi_byte_char_long,
     text: "0123456789abcΔf012345β",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0],
     multi_byte_chars: vec![(13, 2), (22, 2)],
     non_narrow_chars: vec![],
@@ -393,7 +393,7 @@ test!(
 test!(
     case: multi_byte_char_across_chunk_boundary,
     text: "0123456789abcdeΔ123456789abcdef01234",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0],
     multi_byte_chars: vec![(15, 2)],
     non_narrow_chars: vec![],
@@ -402,7 +402,7 @@ test!(
 test!(
     case: multi_byte_char_across_chunk_boundary_tail,
     text: "0123456789abcdeΔ....",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0],
     multi_byte_chars: vec![(15, 2)],
     non_narrow_chars: vec![],
@@ -411,7 +411,7 @@ test!(
 test!(
     case: non_narrow_short,
     text: "0\t2",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0],
     multi_byte_chars: vec![],
     non_narrow_chars: vec![(1, 4)],
@@ -420,7 +420,7 @@ test!(
 test!(
     case: non_narrow_long,
     text: "01\t3456789abcdef01234567\u{07}9",
-    filemap_start_pos: 0,
+    source_file_start_pos: 0,
     lines: vec![0],
     multi_byte_chars: vec![],
     non_narrow_chars: vec![(2, 4), (24, 0)],
@@ -429,7 +429,7 @@ test!(
 test!(
     case: output_offset_all,
     text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
-    filemap_start_pos: 1000,
+    source_file_start_pos: 1000,
     lines: vec![0 + 1000, 7 + 1000, 27 + 1000],
     multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)],
     non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)],
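Reading the cases above: each multi_byte_chars tuple is (byte offset, UTF-8 length in bytes) and each non_narrow_chars tuple is (byte offset, display width), which is why '\t' appears with width 4 and '\u{07}' with width 0. For reference, a hedged sketch of roughly what one case expands to as a plain #[test]:

    #[test]
    fn newlines_short_expanded() {
        let (lines, multi_byte_chars, non_narrow_chars) =
            analyze_source_file("a\nc", BytePos(0));
        // Line starts at byte offsets 0 and 2; nothing multi-byte or non-narrow.
        assert_eq!(lines, vec![BytePos(0), BytePos(2)]);
        assert!(multi_byte_chars.is_empty());
        assert!(non_narrow_chars.is_empty());
    }
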
diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs
index 5ab44b025e2..f9c91dc8a97 100644
--- a/src/libsyntax_pos/lib.rs
+++ b/src/libsyntax_pos/lib.rs
@@ -63,7 +63,7 @@ pub use span_encoding::{Span, DUMMY_SP};
 
 pub mod symbol;
 
-mod analyze_filemap;
+mod analyze_source_file;
 
 pub struct Globals {
     symbol_interner: Lock<symbol::Interner>,
@@ -974,7 +974,7 @@ impl SourceFile {
         let end_pos = start_pos.to_usize() + src.len();
 
         let (lines, multibyte_chars, non_narrow_chars) =
-            analyze_filemap::analyze_filemap(&src[..], start_pos);
+            analyze_source_file::analyze_source_file(&src[..], start_pos);
 
         SourceFile {
             name,
@@ -1082,7 +1082,7 @@ impl SourceFile {
 
     /// Find the line containing the given position. The return value is the
     /// index into the `lines` array of this SourceFile, not the 1-based line
-    /// number. If the filemap is empty or the position is located before the
+    /// number. If the source file is empty or the position is located before the
     /// first line, None is returned.
     pub fn lookup_line(&self, pos: BytePos) -> Option<usize> {
         if self.lines.len() == 0 {
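The hunk above only touches the doc comment, but the contract it documents is easy to state on its own: lookup_line returns the 0-based index into the precomputed line-start table of the line containing pos, and None when the table is empty or pos precedes the first line start. A standalone sketch of that contract, using plain u32 offsets in place of BytePos:

    fn lookup_line(line_starts: &[u32], pos: u32) -> Option<usize> {
        // Count the line starts located at or before `pos`; zero means `pos`
        // precedes the first line (or the table is empty), otherwise the
        // containing line is the last such start.
        line_starts.partition_point(|&start| start <= pos).checked_sub(1)
    }

    // With line starts [0, 2] (e.g. for "a\nc"):
    //   lookup_line(&[0, 2], 1) == Some(0)   offset 1 is still on line index 0
    //   lookup_line(&[0, 2], 2) == Some(1)   offset 2 begins line index 1
    //   lookup_line(&[],     0) == None      empty table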