From c65547337831babea8d9052b960649309263df36 Mon Sep 17 00:00:00 2001 From: Donato Sciarra Date: Sat, 18 Aug 2018 12:13:35 +0200 Subject: mv CodeMap SourceMap --- src/libsyntax/parse/lexer/comments.rs | 4 ++-- src/libsyntax/parse/lexer/mod.rs | 36 +++++++++++++++++------------------ src/libsyntax/parse/mod.rs | 10 +++++----- src/libsyntax/parse/parser.rs | 4 ++-- 4 files changed, 27 insertions(+), 27 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 2c53dbdc402..2c227756f9a 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -11,7 +11,7 @@ pub use self::CommentStyle::*; use ast; -use codemap::CodeMap; +use codemap::SourceMap; use syntax_pos::{BytePos, CharPos, Pos, FileName}; use parse::lexer::{is_block_doc_comment, is_pattern_whitespace}; use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan}; @@ -371,7 +371,7 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut { let mut src = String::new(); srdr.read_to_string(&mut src).unwrap(); - let cm = CodeMap::new(sess.codemap().path_mapping().clone()); + let cm = SourceMap::new(sess.codemap().path_mapping().clone()); let filemap = cm.new_filemap(path, src); let mut rdr = lexer::StringReader::new_raw(sess, filemap, None); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index bdf25618f47..c1919434e37 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -10,7 +10,7 @@ use ast::{self, Ident}; use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION}; -use codemap::{CodeMap, FilePathMapping}; +use codemap::{SourceMap, FilePathMapping}; use errors::{Applicability, FatalError, DiagnosticBuilder}; use parse::{token, ParseSess}; use str::char_at; @@ -622,7 +622,7 @@ impl<'a> StringReader<'a> { // I guess this is the only way to figure out if // we're at the beginning of the 
file... - let cmap = CodeMap::new(FilePathMapping::empty()); + let cmap = SourceMap::new(FilePathMapping::empty()); cmap.files.borrow_mut().file_maps.push(self.filemap.clone()); let loc = cmap.lookup_char_pos_adj(self.pos); debug!("Skipping a shebang"); @@ -1827,7 +1827,7 @@ mod tests { use ast::{Ident, CrateConfig}; use symbol::Symbol; use syntax_pos::{BytePos, Span, NO_EXPANSION}; - use codemap::CodeMap; + use codemap::SourceMap; use errors; use feature_gate::UnstableFeatures; use parse::token; @@ -1837,7 +1837,7 @@ mod tests { use diagnostics::plugin::ErrorMap; use rustc_data_structures::sync::Lock; use with_globals; - fn mk_sess(cm: Lrc) -> ParseSess { + fn mk_sess(cm: Lrc) -> ParseSess { let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), Some(cm.clone()), false, @@ -1857,7 +1857,7 @@ mod tests { } // open a string reader for the given string - fn setup<'a>(cm: &CodeMap, + fn setup<'a>(cm: &SourceMap, sess: &'a ParseSess, teststr: String) -> StringReader<'a> { @@ -1868,7 +1868,7 @@ mod tests { #[test] fn t1() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); let mut string_reader = setup(&cm, &sh, @@ -1916,7 +1916,7 @@ mod tests { #[test] fn doublecolonparsing() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a b".to_string()), vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); @@ -1926,7 +1926,7 @@ mod tests { #[test] fn dcparsing_2() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a::b".to_string()), vec![mk_ident("a"), token::ModSep, mk_ident("b")]); @@ -1936,7 +1936,7 @@ mod tests { #[test] 
fn dcparsing_3() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a ::b".to_string()), vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); @@ -1946,7 +1946,7 @@ mod tests { #[test] fn dcparsing_4() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a:: b".to_string()), vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); @@ -1956,7 +1956,7 @@ mod tests { #[test] fn character_a() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, token::Literal(token::Char(Symbol::intern("a")), None)); @@ -1966,7 +1966,7 @@ mod tests { #[test] fn character_space() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, token::Literal(token::Char(Symbol::intern(" ")), None)); @@ -1976,7 +1976,7 @@ mod tests { #[test] fn character_escaped() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, token::Literal(token::Char(Symbol::intern("\\n")), None)); @@ -1986,7 +1986,7 @@ mod tests { #[test] fn lifetime_name() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); 
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, token::Lifetime(Ident::from_str("'abc"))); @@ -1996,7 +1996,7 @@ mod tests { #[test] fn raw_string() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) .next_token() @@ -2008,7 +2008,7 @@ mod tests { #[test] fn literal_suffixes() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); macro_rules! test { ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ @@ -2054,7 +2054,7 @@ mod tests { #[test] fn nested_block_comments() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); match lexer.next_token().tok { @@ -2069,7 +2069,7 @@ mod tests { #[test] fn crlf_comments() { with_globals(|| { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index d029509f0c1..d43cbf38064 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -13,7 +13,7 @@ use rustc_data_structures::sync::{Lrc, Lock}; use ast::{self, CrateConfig, NodeId}; use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId}; -use codemap::{CodeMap, FilePathMapping}; +use codemap::{SourceMap, FilePathMapping}; use syntax_pos::{Span, FileMap, FileName, MultiSpan}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use feature_gate::UnstableFeatures; @@ 
-57,13 +57,13 @@ pub struct ParseSess { pub non_modrs_mods: Lock>, /// Used to determine and report recursive mod inclusions included_mod_stack: Lock>, - code_map: Lrc, + code_map: Lrc, pub buffered_lints: Lock>, } impl ParseSess { pub fn new(file_path_mapping: FilePathMapping) -> Self { - let cm = Lrc::new(CodeMap::new(file_path_mapping)); + let cm = Lrc::new(SourceMap::new(file_path_mapping)); let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, @@ -71,7 +71,7 @@ impl ParseSess { ParseSess::with_span_handler(handler, cm) } - pub fn with_span_handler(handler: Handler, code_map: Lrc) -> ParseSess { + pub fn with_span_handler(handler: Handler, code_map: Lrc) -> ParseSess { ParseSess { span_diagnostic: handler, unstable_features: UnstableFeatures::from_environment(), @@ -86,7 +86,7 @@ impl ParseSess { } } - pub fn codemap(&self) -> &CodeMap { + pub fn codemap(&self) -> &SourceMap { &self.code_map } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 345464c6664..1e6c1eee483 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -42,7 +42,7 @@ use ast::{UseTree, UseTreeKind}; use ast::{BinOpKind, UnOp}; use ast::{RangeEnd, RangeSyntax}; use {ast, attr}; -use codemap::{self, CodeMap, Spanned, respan}; +use codemap::{self, SourceMap, Spanned, respan}; use syntax_pos::{self, Span, MultiSpan, BytePos, FileName, edition::Edition}; use errors::{self, Applicability, DiagnosticBuilder, DiagnosticId}; use parse::{self, SeqSep, classify, token}; @@ -6322,7 +6322,7 @@ impl<'a> Parser<'a> { id: ast::Ident, relative: Option, dir_path: &Path, - codemap: &CodeMap) -> ModulePath + codemap: &SourceMap) -> ModulePath { // If we're in a foo.rs file instead of a mod.rs file, // we need to look for submodules in -- cgit 1.4.1-3-g733a5 From d6dcbcd4e11a1b787a9db1fa43a49907e8bccecf Mon Sep 17 00:00:00 2001 From: Donato Sciarra Date: Sat, 18 Aug 2018 12:13:52 +0200 Subject: mv FileMap SourceFile --- 
src/libproc_macro/lib.rs | 6 +-- src/librustc/ich/caching_codemap_view.rs | 6 +-- src/librustc/ich/hcx.rs | 2 +- src/librustc/ich/impls_syntax.rs | 6 +-- src/librustc/ty/query/on_disk_cache.rs | 32 +++++++------- src/librustc_codegen_llvm/debuginfo/metadata.rs | 2 +- src/librustc_errors/emitter.rs | 8 ++-- src/librustc_errors/lib.rs | 4 +- src/librustc_metadata/cstore.rs | 14 +++---- src/librustc_metadata/decoder.rs | 22 +++++----- src/librustc_metadata/encoder.rs | 10 ++--- src/librustc_metadata/schema.rs | 2 +- src/librustdoc/clean/mod.rs | 2 +- src/libsyntax/codemap.rs | 56 ++++++++++++------------- src/libsyntax/ext/quote.rs | 2 +- src/libsyntax/json.rs | 2 +- src/libsyntax/parse/lexer/mod.rs | 10 ++--- src/libsyntax/parse/mod.rs | 8 ++-- src/libsyntax/test.rs | 2 +- src/libsyntax_pos/analyze_filemap.rs | 2 +- src/libsyntax_pos/lib.rs | 52 +++++++++++------------ src/test/incremental/remove_source_file/main.rs | 2 +- 22 files changed, 126 insertions(+), 126 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index b54054752ea..50a613f86c2 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -63,7 +63,7 @@ use syntax::errors::DiagnosticBuilder; use syntax::parse::{self, token}; use syntax::symbol::Symbol; use syntax::tokenstream; -use syntax_pos::{FileMap, Pos, FileName}; +use syntax_pos::{Pos, FileName}; /// The main type provided by this crate, representing an abstract stream of /// tokens, or, more specifically, a sequence of token trees. 
@@ -308,7 +308,7 @@ impl Span { #[unstable(feature = "proc_macro_span", issue = "38356")] pub fn source_file(&self) -> SourceFile { SourceFile { - filemap: __internal::lookup_char_pos(self.0.lo()).file, + source_file: __internal::lookup_char_pos(self.0.lo()).file, } } @@ -419,7 +419,7 @@ impl !Sync for LineColumn {} #[unstable(feature = "proc_macro_span", issue = "38356")] #[derive(Clone)] pub struct SourceFile { - filemap: Lrc, + source_file: Lrc, } #[unstable(feature = "proc_macro_span", issue = "38356")] diff --git a/src/librustc/ich/caching_codemap_view.rs b/src/librustc/ich/caching_codemap_view.rs index 769c4cfe9fa..d8999e1e00f 100644 --- a/src/librustc/ich/caching_codemap_view.rs +++ b/src/librustc/ich/caching_codemap_view.rs @@ -10,7 +10,7 @@ use rustc_data_structures::sync::Lrc; use syntax::codemap::SourceMap; -use syntax_pos::{BytePos, FileMap}; +use syntax_pos::{BytePos, SourceFile}; #[derive(Clone)] struct CacheEntry { @@ -18,7 +18,7 @@ struct CacheEntry { line_number: usize, line_start: BytePos, line_end: BytePos, - file: Lrc, + file: Lrc, file_index: usize, } @@ -51,7 +51,7 @@ impl<'cm> CachingCodemapView<'cm> { pub fn byte_pos_to_line_and_col(&mut self, pos: BytePos) - -> Option<(Lrc, usize, BytePos)> { + -> Option<(Lrc, usize, BytePos)> { self.time_stamp += 1; // Check if the position is in one of the cached lines diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index 91fbb1cc0fd..5260d94168f 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -309,7 +309,7 @@ impl<'a> HashStable> for Span { // Hash a span in a stable way. We can't directly hash the span's BytePos // fields (that would be similar to hashing pointers, since those are just // offsets into the SourceMap). Instead, we hash the (file name, line, column) - // triple, which stays the same even if the containing FileMap has moved + // triple, which stays the same even if the containing SourceFile has moved // within the SourceMap. 
// Also note that we are hashing byte offsets for the column, not unicode // codepoint offsets. For the purpose of the hash that's sufficient. diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index d086d3bd28d..fd35713cc6f 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -21,7 +21,7 @@ use syntax::feature_gate; use syntax::parse::token; use syntax::symbol::{InternedString, LocalInternedString}; use syntax::tokenstream; -use syntax_pos::FileMap; +use syntax_pos::SourceFile; use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX}; @@ -427,11 +427,11 @@ impl_stable_hash_for!(enum ::syntax_pos::FileName { Custom(s) }); -impl<'a> HashStable> for FileMap { +impl<'a> HashStable> for SourceFile { fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { - let FileMap { + let SourceFile { name: _, // We hash the smaller name_hash instead of this name_hash, name_was_remapped, diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 1f0f6bee777..2e5be214604 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -27,7 +27,7 @@ use session::{CrateDisambiguator, Session}; use std::mem; use syntax::ast::NodeId; use syntax::codemap::{SourceMap, StableFilemapId}; -use syntax_pos::{BytePos, Span, DUMMY_SP, FileMap}; +use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile}; use syntax_pos::hygiene::{Mark, SyntaxContext, ExpnInfo}; use ty; use ty::codec::{self as ty_codec, TyDecoder, TyEncoder}; @@ -63,10 +63,10 @@ pub struct OnDiskCache<'sess> { cnum_map: Once>>, codemap: &'sess SourceMap, - file_index_to_stable_id: FxHashMap, + file_index_to_stable_id: FxHashMap, // These two fields caches that are populated lazily during decoding. 
- file_index_to_file: Lock>>, + file_index_to_file: Lock>>, synthetic_expansion_infos: Lock>, // A map from dep-node to the position of the cached query result in @@ -83,7 +83,7 @@ pub struct OnDiskCache<'sess> { // This type is used only for (de-)serialization. #[derive(RustcEncodable, RustcDecodable)] struct Footer { - file_index_to_stable_id: FxHashMap, + file_index_to_stable_id: FxHashMap, prev_cnums: Vec<(u32, String, CrateDisambiguator)>, query_result_index: EncodedQueryResultIndex, diagnostics_index: EncodedQueryResultIndex, @@ -96,7 +96,7 @@ type EncodedDiagnosticsIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>; type EncodedDiagnostics = Vec; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] -struct FileMapIndex(u32); +struct SourceFileIndex(u32); #[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, RustcEncodable, RustcDecodable)] struct AbsoluteBytePos(u32); @@ -173,14 +173,14 @@ impl<'sess> OnDiskCache<'sess> { { // Serializing the DepGraph should not modify it: tcx.dep_graph.with_ignore(|| { - // Allocate FileMapIndices + // Allocate SourceFileIndices let (file_to_file_index, file_index_to_stable_id) = { let mut file_to_file_index = FxHashMap(); let mut file_index_to_stable_id = FxHashMap(); for (index, file) in tcx.sess.codemap().files().iter().enumerate() { - let index = FileMapIndex(index as u32); - let file_ptr: *const FileMap = &**file as *const _; + let index = SourceFileIndex(index as u32); + let file_ptr: *const SourceFile = &**file as *const _; file_to_file_index.insert(file_ptr, index); file_index_to_stable_id.insert(index, StableFilemapId::new(&file)); } @@ -478,13 +478,13 @@ struct CacheDecoder<'a, 'tcx: 'a, 'x> { codemap: &'x SourceMap, cnum_map: &'x IndexVec>, synthetic_expansion_infos: &'x Lock>, - file_index_to_file: &'x Lock>>, - file_index_to_stable_id: &'x FxHashMap, + file_index_to_file: &'x Lock>>, + file_index_to_stable_id: &'x FxHashMap, alloc_decoding_session: AllocDecodingSession<'x>, } 
impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { - fn file_index_to_file(&self, index: FileMapIndex) -> Lrc { + fn file_index_to_file(&self, index: SourceFileIndex) -> Lrc { let CacheDecoder { ref file_index_to_file, ref file_index_to_stable_id, @@ -495,7 +495,7 @@ impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { file_index_to_file.borrow_mut().entry(index).or_insert_with(|| { let stable_id = file_index_to_stable_id[&index]; codemap.filemap_by_stable_id(stable_id) - .expect("Failed to lookup FileMap in new context.") + .expect("Failed to lookup SourceFile in new context.") }).clone() } } @@ -617,7 +617,7 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { debug_assert_eq!(tag, TAG_VALID_SPAN); } - let file_lo_index = FileMapIndex::decode(self)?; + let file_lo_index = SourceFileIndex::decode(self)?; let line_lo = usize::decode(self)?; let col_lo = BytePos::decode(self)?; let len = BytePos::decode(self)?; @@ -771,14 +771,14 @@ struct CacheEncoder<'enc, 'a, 'tcx, E> interpret_allocs: FxHashMap, interpret_allocs_inverse: Vec, codemap: CachingCodemapView<'tcx>, - file_to_file_index: FxHashMap<*const FileMap, FileMapIndex>, + file_to_file_index: FxHashMap<*const SourceFile, SourceFileIndex>, } impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { - fn filemap_index(&mut self, filemap: Lrc) -> FileMapIndex { - self.file_to_file_index[&(&*filemap as *const FileMap)] + fn filemap_index(&mut self, filemap: Lrc) -> SourceFileIndex { + self.file_to_file_index[&(&*filemap as *const SourceFile)] } /// Encode something with additional information that allows to do some diff --git a/src/librustc_codegen_llvm/debuginfo/metadata.rs b/src/librustc_codegen_llvm/debuginfo/metadata.rs index 223fa75723c..cac2ae0302e 100644 --- a/src/librustc_codegen_llvm/debuginfo/metadata.rs +++ b/src/librustc_codegen_llvm/debuginfo/metadata.rs @@ -1745,7 +1745,7 @@ pub fn create_global_var_metadata( pub fn extend_scope_to_file( cx: 
&CodegenCx<'ll, '_>, scope_metadata: &'ll DIScope, - file: &syntax_pos::FileMap, + file: &syntax_pos::SourceFile, defining_crate: CrateNum, ) -> &'ll DILexicalBlock { let file_metadata = file_metadata(cx, &file.name, defining_crate); diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 0a3e4d3ad25..ce3a19677f2 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -10,7 +10,7 @@ use self::Destination::*; -use syntax_pos::{FileMap, Span, MultiSpan}; +use syntax_pos::{SourceFile, Span, MultiSpan}; use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, SourceMapperDyn, DiagnosticId}; use snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style}; @@ -127,7 +127,7 @@ pub struct EmitterWriter { } struct FileWithAnnotatedLines { - file: Lrc, + file: Lrc, lines: Vec, multiline_depth: usize, } @@ -177,7 +177,7 @@ impl EmitterWriter { fn preprocess_annotations(&mut self, msp: &MultiSpan) -> Vec { fn add_annotation_to_file(file_vec: &mut Vec, - file: Lrc, + file: Lrc, line_index: usize, ann: Annotation) { @@ -307,7 +307,7 @@ impl EmitterWriter { fn render_source_line(&self, buffer: &mut StyledBuffer, - file: Lrc, + file: Lrc, line: &Line, width_offset: usize, code_offset: usize) -> Vec<(usize, Style)> { diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 40d7a122d30..597b3216490 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -55,7 +55,7 @@ pub mod registry; mod styled_buffer; mod lock; -use syntax_pos::{BytePos, Loc, FileLinesResult, FileMap, FileName, MultiSpan, Span, NO_EXPANSION}; +use syntax_pos::{BytePos, Loc, FileLinesResult, SourceFile, FileName, MultiSpan, Span, NO_EXPANSION}; #[derive(Copy, Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] pub enum Applicability { @@ -120,7 +120,7 @@ pub trait SourceMapper { fn span_to_filename(&self, sp: Span) -> FileName; fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> 
Option; fn call_span_if_macro(&self, sp: Span) -> Span; - fn ensure_filemap_source_present(&self, file_map: Lrc) -> bool; + fn ensure_filemap_source_present(&self, file_map: Lrc) -> bool; fn doctest_offset_line(&self, line: usize) -> usize; } diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 2d3e3080c89..2c95bd82432 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -41,15 +41,15 @@ pub use rustc_data_structures::sync::MetadataRef; pub struct MetadataBlob(pub MetadataRef); -/// Holds information about a syntax_pos::FileMap imported from another crate. +/// Holds information about a syntax_pos::SourceFile imported from another crate. /// See `imported_filemaps()` for more information. -pub struct ImportedFileMap { - /// This FileMap's byte-offset within the codemap of its original crate +pub struct ImportedSourceFile { + /// This SourceFile's byte-offset within the codemap of its original crate pub original_start_pos: syntax_pos::BytePos, - /// The end of this FileMap within the codemap of its original crate + /// The end of this SourceFile within the codemap of its original crate pub original_end_pos: syntax_pos::BytePos, - /// The imported FileMap's representation within the local codemap - pub translated_filemap: Lrc, + /// The imported SourceFile's representation within the local codemap + pub translated_filemap: Lrc, } pub struct CrateMetadata { @@ -64,7 +64,7 @@ pub struct CrateMetadata { pub cnum_map: CrateNumMap, pub cnum: CrateNum, pub dependencies: Lock>, - pub codemap_import_info: RwLock>, + pub codemap_import_info: RwLock>, /// Used for decoding interpret::AllocIds in a cached & thread-safe manner. 
pub alloc_decoding_state: AllocDecodingState, diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 4a17c5845fd..00ed71c8891 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -1099,26 +1099,26 @@ impl<'a, 'tcx> CrateMetadata { /// /// The import algorithm works analogous to how AST items are inlined from an /// external crate's metadata: - /// For every FileMap in the external codemap an 'inline' copy is created in the + /// For every SourceFile in the external codemap an 'inline' copy is created in the /// local codemap. The correspondence relation between external and local - /// FileMaps is recorded in the `ImportedFileMap` objects returned from this + /// SourceFiles is recorded in the `ImportedSourceFile` objects returned from this /// function. When an item from an external crate is later inlined into this /// crate, this correspondence information is used to translate the span /// information of the inlined item so that it refers the correct positions in /// the local codemap (see `>`). /// - /// The import algorithm in the function below will reuse FileMaps already - /// existing in the local codemap. For example, even if the FileMap of some + /// The import algorithm in the function below will reuse SourceFiles already + /// existing in the local codemap. For example, even if the SourceFile of some /// source file of libstd gets imported many times, there will only ever be - /// one FileMap object for the corresponding file in the local codemap. + /// one SourceFile object for the corresponding file in the local codemap. /// - /// Note that imported FileMaps do not actually contain the source code of the + /// Note that imported SourceFiles do not actually contain the source code of the /// file they represent, just information about length, line breaks, and /// multibyte characters. This information is enough to generate valid debuginfo /// for items inlined from other crates. 
pub fn imported_filemaps(&'a self, local_codemap: &codemap::SourceMap) - -> ReadGuard<'a, Vec> { + -> ReadGuard<'a, Vec> { { let filemaps = self.codemap_import_info.borrow(); if !filemaps.is_empty() { @@ -1137,9 +1137,9 @@ impl<'a, 'tcx> CrateMetadata { let external_codemap = self.root.codemap.decode(self); let imported_filemaps = external_codemap.map(|filemap_to_import| { - // We can't reuse an existing FileMap, so allocate a new one + // We can't reuse an existing SourceFile, so allocate a new one // containing the information we need. - let syntax_pos::FileMap { name, + let syntax_pos::SourceFile { name, name_was_remapped, src_hash, start_pos, @@ -1156,7 +1156,7 @@ impl<'a, 'tcx> CrateMetadata { // position into frame of reference local to file. // `SourceMap::new_imported_filemap()` will then translate those // coordinates to their new global frame of reference when the - // offset of the FileMap is known. + // offset of the SourceFile is known. for pos in &mut lines { *pos = *pos - start_pos; } @@ -1182,7 +1182,7 @@ impl<'a, 'tcx> CrateMetadata { local_version.name, start_pos, end_pos, local_version.start_pos, local_version.end_pos); - cstore::ImportedFileMap { + cstore::ImportedSourceFile { original_start_pos: start_pos, original_end_pos: end_pos, translated_filemap: local_version, diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 02a41e68f68..925d765ca31 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -42,7 +42,7 @@ use syntax::ast::{self, CRATE_NODE_ID}; use syntax::attr; use syntax::codemap::Spanned; use syntax::symbol::keywords; -use syntax_pos::{self, hygiene, FileName, FileMap, Span}; +use syntax_pos::{self, hygiene, FileName, SourceFile, Span}; use rustc::hir::{self, PatKind}; use rustc::hir::itemlikevisit::ItemLikeVisitor; @@ -62,7 +62,7 @@ pub struct EncodeContext<'a, 'tcx: 'a> { interpret_allocs_inverse: Vec, // This is used to speed up Span encoding. 
- filemap_cache: Lrc, + filemap_cache: Lrc, } macro_rules! encoder_methods { @@ -337,7 +337,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.lazy(definitions.def_path_table()) } - fn encode_codemap(&mut self) -> LazySeq { + fn encode_codemap(&mut self) -> LazySeq { let codemap = self.tcx.sess.codemap(); let all_filemaps = codemap.files(); @@ -350,7 +350,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { !filemap.is_imported() }) .map(|filemap| { - // When exporting FileMaps, we expand all paths to absolute + // When exporting SourceFiles, we expand all paths to absolute // paths because any relative paths are potentially relative to // a wrong directory. // However, if a path has been modified via @@ -361,7 +361,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { FileName::Real(ref name) => { if filemap.name_was_remapped || (name.is_relative() && working_dir_was_remapped) { - // This path of this FileMap has been modified by + // This path of this SourceFile has been modified by // path-remapping, so we use it verbatim (and avoid cloning // the whole map in the process). filemap.clone() diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs index 781652e1985..520273487a9 100644 --- a/src/librustc_metadata/schema.rs +++ b/src/librustc_metadata/schema.rs @@ -204,7 +204,7 @@ pub struct CrateRoot { pub lang_items_missing: LazySeq, pub native_libraries: LazySeq, pub foreign_modules: LazySeq, - pub codemap: LazySeq, + pub codemap: LazySeq, pub def_path_table: Lazy, pub impls: LazySeq, pub exported_symbols: EncodedExportedSymbols, diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index ad774f98602..9e852a077bc 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -603,7 +603,7 @@ impl Clean for doctree::Module { // mod foo { ... 
} self.where_outer } else { - // mod foo; (and a separate FileMap for the contents) + // mod foo; (and a separate SourceFile for the contents) self.where_inner } }; diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 8175e2495c5..c9a9fbf4d7b 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -12,7 +12,7 @@ //! from integer byte positions to the original source code location. Each bit //! of source parsed during crate parsing (typically files, in-memory strings, //! or various bits of macro expansion) cover a continuous range of bytes in the -//! SourceMap and are represented by FileMaps. Byte positions are stored in +//! SourceMap and are represented by SourceFiles. Byte positions are stored in //! `spans` and used pervasively in the compiler. They are absolute positions //! within the SourceMap, which upon request can be converted to line and column //! information, source code snippets, etc. @@ -62,7 +62,7 @@ pub fn dummy_spanned(t: T) -> Spanned { } // _____________________________________________________________________________ -// FileMap, MultiByteChar, FileName, FileLines +// SourceFile, MultiByteChar, FileName, FileLines // /// An abstraction over the fs operations used by the Parser. @@ -102,14 +102,14 @@ impl FileLoader for RealFileLoader { } } -// This is a FileMap identifier that is used to correlate FileMaps between +// This is a SourceFile identifier that is used to correlate SourceFiles between // subsequent compilation sessions (which is something we need to do during // incremental compilation). 
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)] pub struct StableFilemapId(u128); impl StableFilemapId { - pub fn new(filemap: &FileMap) -> StableFilemapId { + pub fn new(filemap: &SourceFile) -> StableFilemapId { let mut hasher = StableHasher::new(); filemap.name.hash(&mut hasher); @@ -125,15 +125,15 @@ impl StableFilemapId { // pub(super) struct SourceMapFiles { - pub(super) file_maps: Vec>, - stable_id_to_filemap: FxHashMap> + pub(super) file_maps: Vec>, + stable_id_to_filemap: FxHashMap> } pub struct SourceMap { pub(super) files: Lock, file_loader: Box, // This is used to apply the file path remapping as specified via - // --remap-path-prefix to all FileMaps allocated within this SourceMap. + // --remap-path-prefix to all SourceFiles allocated within this SourceMap. path_mapping: FilePathMapping, /// In case we are in a doctest, replace all file names with the PathBuf, /// and add the given offsets to the line info @@ -184,7 +184,7 @@ impl SourceMap { self.file_loader.file_exists(path) } - pub fn load_file(&self, path: &Path) -> io::Result> { + pub fn load_file(&self, path: &Path) -> io::Result> { let src = self.file_loader.read_file(path)?; let filename = if let Some((ref name, _)) = self.doctest_offset { name.clone() @@ -194,11 +194,11 @@ impl SourceMap { Ok(self.new_filemap(filename, src)) } - pub fn files(&self) -> LockGuard>> { + pub fn files(&self) -> LockGuard>> { LockGuard::map(self.files.borrow(), |files| &mut files.file_maps) } - pub fn filemap_by_stable_id(&self, stable_id: StableFilemapId) -> Option> { + pub fn filemap_by_stable_id(&self, stable_id: StableFilemapId) -> Option> { self.files.borrow().stable_id_to_filemap.get(&stable_id).map(|fm| fm.clone()) } @@ -212,8 +212,8 @@ impl SourceMap { } /// Creates a new filemap. - /// This does not ensure that only one FileMap exists per file name. 
- pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc { + /// This does not ensure that only one SourceFile exists per file name. + pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc { let start_pos = self.next_start_pos(); // The path is used to determine the directory for loading submodules and @@ -230,7 +230,7 @@ impl SourceMap { }, other => (other, false), }; - let filemap = Lrc::new(FileMap::new( + let filemap = Lrc::new(SourceFile::new( filename, was_remapped, unmapped_path, @@ -246,7 +246,7 @@ impl SourceMap { filemap } - /// Allocates a new FileMap representing a source file from an external + /// Allocates a new SourceFile representing a source file from an external /// crate. The source code of such an "imported filemap" is not available, /// but we still know enough to generate accurate debuginfo location /// information for things inlined from other crates. @@ -260,7 +260,7 @@ impl SourceMap { mut file_local_lines: Vec, mut file_local_multibyte_chars: Vec, mut file_local_non_narrow_chars: Vec) - -> Lrc { + -> Lrc { let start_pos = self.next_start_pos(); let end_pos = Pos::from_usize(start_pos + source_len); @@ -278,7 +278,7 @@ impl SourceMap { *swc = *swc + start_pos; } - let filemap = Lrc::new(FileMap { + let filemap = Lrc::new(SourceFile { name: filename, name_was_remapped, unmapped_path: None, @@ -326,7 +326,7 @@ impl SourceMap { pub fn lookup_char_pos(&self, pos: BytePos) -> Loc { let chpos = self.bytepos_to_file_charpos(pos); match self.lookup_line(pos) { - Ok(FileMapAndLine { fm: f, line: a }) => { + Ok(SourceFileAndLine { fm: f, line: a }) => { let line = a + 1; // Line numbers start at 1 let linebpos = f.lines[a]; let linechpos = self.bytepos_to_file_charpos(linebpos); @@ -386,13 +386,13 @@ impl SourceMap { } // If the relevant filemap is empty, we don't return a line number. 
- pub fn lookup_line(&self, pos: BytePos) -> Result> { + pub fn lookup_line(&self, pos: BytePos) -> Result> { let idx = self.lookup_filemap_idx(pos); let f = (*self.files.borrow().file_maps)[idx].clone(); match f.lookup_line(pos) { - Some(line) => Ok(FileMapAndLine { fm: f, line: line }), + Some(line) => Ok(SourceFileAndLine { fm: f, line: line }), None => Err(f) } } @@ -463,7 +463,7 @@ impl SourceMap { pub fn span_to_unmapped_path(&self, sp: Span) -> FileName { self.lookup_char_pos(sp.lo()).file.unmapped_path.clone() - .expect("SourceMap::span_to_unmapped_path called for imported FileMap?") + .expect("SourceMap::span_to_unmapped_path called for imported SourceFile?") } pub fn is_multiline(&self, sp: Span) -> bool { @@ -798,7 +798,7 @@ impl SourceMap { } } - pub fn get_filemap(&self, filename: &FileName) -> Option> { + pub fn get_filemap(&self, filename: &FileName) -> Option> { for fm in self.files.borrow().file_maps.iter() { if *filename == fm.name { return Some(fm.clone()); @@ -807,12 +807,12 @@ impl SourceMap { None } - /// For a global BytePos compute the local offset within the containing FileMap - pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos { + /// For a global BytePos compute the local offset within the containing SourceFile + pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos { let idx = self.lookup_filemap_idx(bpos); let fm = (*self.files.borrow().file_maps)[idx].clone(); let offset = bpos - fm.start_pos; - FileMapAndBytePos {fm: fm, pos: offset} + SourceFileAndBytePos {fm: fm, pos: offset} } /// Converts an absolute BytePos to a CharPos relative to the filemap. 
@@ -820,7 +820,7 @@ impl SourceMap { let idx = self.lookup_filemap_idx(bpos); let map = &(*self.files.borrow().file_maps)[idx]; - // The number of extra bytes due to multibyte chars in the FileMap + // The number of extra bytes due to multibyte chars in the SourceFile let mut total_extra_bytes = 0; for mbc in map.multibyte_chars.iter() { @@ -966,7 +966,7 @@ impl SourceMapper for SourceMap { } sp } - fn ensure_filemap_source_present(&self, file_map: Lrc) -> bool { + fn ensure_filemap_source_present(&self, file_map: Lrc) -> bool { file_map.add_external_src( || match file_map.name { FileName::Real(ref name) => self.file_loader.read_file(name).ok(), @@ -1192,7 +1192,7 @@ mod tests { /// `substring` in `source_text`. trait SourceMapExtension { fn span_substr(&self, - file: &Lrc, + file: &Lrc, source_text: &str, substring: &str, n: usize) @@ -1201,7 +1201,7 @@ mod tests { impl SourceMapExtension for SourceMap { fn span_substr(&self, - file: &Lrc, + file: &Lrc, source_text: &str, substring: &str, n: usize) diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index bc891700fc1..a4b19681164 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -802,7 +802,7 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec { // they happen to have a compiler on hand). Over all, the phase distinction // just makes quotes "hard to attribute". Possibly this could be fixed // by recreating some of the original qq machinery in the tt regime - // (pushing fake FileMaps onto the parser to account for original sites + // (pushing fake SourceFiles onto the parser to account for original sites // of quotes, for example) but at this point it seems not likely to be // worth the hassle. 
diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index 22de184938f..a090083f608 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -340,7 +340,7 @@ impl DiagnosticSpan { } impl DiagnosticSpanLine { - fn line_from_filemap(fm: &syntax_pos::FileMap, + fn line_from_filemap(fm: &syntax_pos::SourceFile, index: usize, h_start: usize, h_end: usize) diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index c1919434e37..654ac692338 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -49,7 +49,7 @@ pub struct StringReader<'a> { pub pos: BytePos, /// The current character (which has been read from self.pos) pub ch: Option, - pub filemap: Lrc, + pub filemap: Lrc, /// Stop reading src at this index. pub end_src_index: usize, // cached: @@ -180,7 +180,7 @@ impl<'a> StringReader<'a> { } /// For comments.rs, which hackily pokes into next_pos and ch - fn new_raw(sess: &'a ParseSess, filemap: Lrc, override_span: Option) + fn new_raw(sess: &'a ParseSess, filemap: Lrc, override_span: Option) -> Self { let mut sr = StringReader::new_raw_internal(sess, filemap, override_span); @@ -189,7 +189,7 @@ impl<'a> StringReader<'a> { sr } - fn new_raw_internal(sess: &'a ParseSess, filemap: Lrc, + fn new_raw_internal(sess: &'a ParseSess, filemap: Lrc, override_span: Option) -> Self { if filemap.src.is_none() { @@ -221,7 +221,7 @@ impl<'a> StringReader<'a> { } } - pub fn new(sess: &'a ParseSess, filemap: Lrc, override_span: Option) + pub fn new(sess: &'a ParseSess, filemap: Lrc, override_span: Option) -> Self { let mut sr = StringReader::new_raw(sess, filemap, override_span); @@ -468,7 +468,7 @@ impl<'a> StringReader<'a> { } /// Advance the StringReader by one character. If a newline is - /// discovered, add it to the FileMap's list of line start offsets. + /// discovered, add it to the SourceFile's list of line start offsets. 
crate fn bump(&mut self) { let next_src_index = self.src_index(self.next_pos); if next_src_index < self.end_src_index { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index d43cbf38064..65bab94c6bc 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -14,7 +14,7 @@ use rustc_data_structures::sync::{Lrc, Lock}; use ast::{self, CrateConfig, NodeId}; use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId}; use codemap::{SourceMap, FilePathMapping}; -use syntax_pos::{Span, FileMap, FileName, MultiSpan}; +use syntax_pos::{Span, SourceFile, FileName, MultiSpan}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use feature_gate::UnstableFeatures; use parse::parser::Parser; @@ -203,7 +203,7 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, } /// Given a filemap and config, return a parser -fn filemap_to_parser(sess: & ParseSess, filemap: Lrc) -> Parser { +fn filemap_to_parser(sess: & ParseSess, filemap: Lrc) -> Parser { let end_pos = filemap.end_pos; let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None)); @@ -226,7 +226,7 @@ pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec) -> Parser { /// Given a session and a path and an optional span (for error reporting), /// add the path to the session's codemap and return the new filemap. 
fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) - -> Lrc { + -> Lrc { match sess.codemap().load_file(path) { Ok(filemap) => filemap, Err(e) => { @@ -240,7 +240,7 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) } /// Given a filemap, produce a sequence of token-trees -pub fn filemap_to_stream(sess: &ParseSess, filemap: Lrc, override_span: Option) +pub fn filemap_to_stream(sess: &ParseSess, filemap: Lrc, override_span: Option) -> TokenStream { let mut srdr = lexer::StringReader::new(sess, filemap, override_span); srdr.real_token(); diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 633de812a87..b85fbae587a 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -20,7 +20,7 @@ use std::slice; use std::mem; use std::vec; use attr::{self, HasAttrs}; -use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, FileMap, BytePos}; +use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos}; use codemap::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned}; use errors; diff --git a/src/libsyntax_pos/analyze_filemap.rs b/src/libsyntax_pos/analyze_filemap.rs index c7c0263e459..6ae1ada51fd 100644 --- a/src/libsyntax_pos/analyze_filemap.rs +++ b/src/libsyntax_pos/analyze_filemap.rs @@ -12,7 +12,7 @@ use unicode_width::UnicodeWidthChar; use super::*; /// Find all newlines, multi-byte characters, and non-narrow characters in a -/// FileMap. +/// SourceFile. /// /// This function will use an SSE2 enhanced implementation if hardware support /// is detected at runtime. diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index fec7551a208..5ab44b025e2 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -163,7 +163,7 @@ impl FileName { /// Spans represent a region of code, used for error reporting. Positions in spans /// are *absolute* positions from the beginning of the codemap, not positions -/// relative to FileMaps. 
Methods on the SourceMap can be used to relate spans back +/// relative to SourceFiles. Methods on the SourceMap can be used to relate spans back /// to the original source. /// You must be careful if the span crosses more than one file - you will not be /// able to use many of the functions on spans in codemap and you cannot assume @@ -675,7 +675,7 @@ impl From> for MultiSpan { pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty(); -/// Identifies an offset of a multi-byte character in a FileMap +/// Identifies an offset of a multi-byte character in a SourceFile #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)] pub struct MultiByteChar { /// The absolute offset of the character in the SourceMap @@ -684,7 +684,7 @@ pub struct MultiByteChar { pub bytes: u8, } -/// Identifies an offset of a non-narrow character in a FileMap +/// Identifies an offset of a non-narrow character in a SourceFile #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)] pub enum NonNarrowChar { /// Represents a zero-width character @@ -748,7 +748,7 @@ impl Sub for NonNarrowChar { } } -/// The state of the lazy external source loading mechanism of a FileMap. +/// The state of the lazy external source loading mechanism of a SourceFile. #[derive(PartialEq, Eq, Clone)] pub enum ExternalSource { /// The external source has been loaded already. @@ -757,7 +757,7 @@ pub enum ExternalSource { AbsentOk, /// A failed attempt has been made to load the external source. AbsentErr, - /// No external source has to be loaded, since the FileMap represents a local crate. + /// No external source has to be loaded, since the SourceFile represents a local crate. Unneeded, } @@ -779,7 +779,7 @@ impl ExternalSource { /// A single source in the SourceMap. 
#[derive(Clone)] -pub struct FileMap { +pub struct SourceFile { /// The name of the file that the source came from, source that doesn't /// originate from files has names between angle brackets by convention, /// e.g. `` @@ -787,9 +787,9 @@ pub struct FileMap { /// True if the `name` field above has been modified by --remap-path-prefix pub name_was_remapped: bool, /// The unmapped path of the file that the source came from. - /// Set to `None` if the FileMap was imported from an external crate. + /// Set to `None` if the SourceFile was imported from an external crate. pub unmapped_path: Option, - /// Indicates which crate this FileMap was imported from. + /// Indicates which crate this SourceFile was imported from. pub crate_of_origin: u32, /// The complete source code pub src: Option>, @@ -812,9 +812,9 @@ pub struct FileMap { pub name_hash: u128, } -impl Encodable for FileMap { +impl Encodable for SourceFile { fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_struct("FileMap", 8, |s| { + s.emit_struct("SourceFile", 8, |s| { s.emit_struct_field("name", 0, |s| self.name.encode(s))?; s.emit_struct_field("name_was_remapped", 1, |s| self.name_was_remapped.encode(s))?; s.emit_struct_field("src_hash", 2, |s| self.src_hash.encode(s))?; @@ -879,10 +879,10 @@ impl Encodable for FileMap { } } -impl Decodable for FileMap { - fn decode(d: &mut D) -> Result { +impl Decodable for SourceFile { + fn decode(d: &mut D) -> Result { - d.read_struct("FileMap", 8, |d| { + d.read_struct("SourceFile", 8, |d| { let name: FileName = d.read_struct_field("name", 0, |d| Decodable::decode(d))?; let name_was_remapped: bool = d.read_struct_field("name_was_remapped", 1, |d| Decodable::decode(d))?; @@ -925,7 +925,7 @@ impl Decodable for FileMap { d.read_struct_field("non_narrow_chars", 8, |d| Decodable::decode(d))?; let name_hash: u128 = d.read_struct_field("name_hash", 9, |d| Decodable::decode(d))?; - Ok(FileMap { + Ok(SourceFile { name, name_was_remapped, unmapped_path: None, @@ 
-947,18 +947,18 @@ impl Decodable for FileMap { } } -impl fmt::Debug for FileMap { +impl fmt::Debug for SourceFile { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "FileMap({})", self.name) + write!(fmt, "SourceFile({})", self.name) } } -impl FileMap { +impl SourceFile { pub fn new(name: FileName, name_was_remapped: bool, unmapped_path: FileName, mut src: String, - start_pos: BytePos) -> FileMap { + start_pos: BytePos) -> SourceFile { remove_bom(&mut src); let src_hash = { @@ -976,7 +976,7 @@ impl FileMap { let (lines, multibyte_chars, non_narrow_chars) = analyze_filemap::analyze_filemap(&src[..], start_pos); - FileMap { + SourceFile { name, name_was_remapped, unmapped_path: Some(unmapped_path), @@ -1081,7 +1081,7 @@ impl FileMap { } /// Find the line containing the given position. The return value is the - /// index into the `lines` array of this FileMap, not the 1-based line + /// index into the `lines` array of this SourceFile, not the 1-based line /// number. If the filemap is empty or the position is located before the /// first line, None is returned. pub fn lookup_line(&self, pos: BytePos) -> Option { @@ -1226,14 +1226,14 @@ impl Sub for CharPos { } // _____________________________________________________________________________ -// Loc, LocWithOpt, FileMapAndLine, FileMapAndBytePos +// Loc, LocWithOpt, SourceFileAndLine, SourceFileAndBytePos // /// A source code location used for error reporting #[derive(Debug, Clone)] pub struct Loc { /// Information about the original source - pub file: Lrc, + pub file: Lrc, /// The (1-based) line number pub line: usize, /// The (0-based) column offset @@ -1250,14 +1250,14 @@ pub struct LocWithOpt { pub filename: FileName, pub line: usize, pub col: CharPos, - pub file: Option>, + pub file: Option>, } // used to be structural records. Better names, anyone? 
#[derive(Debug)] -pub struct FileMapAndLine { pub fm: Lrc, pub line: usize } +pub struct SourceFileAndLine { pub fm: Lrc, pub line: usize } #[derive(Debug)] -pub struct FileMapAndBytePos { pub fm: Lrc, pub pos: BytePos } +pub struct SourceFileAndBytePos { pub fm: Lrc, pub pos: BytePos } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct LineInfo { @@ -1272,7 +1272,7 @@ pub struct LineInfo { } pub struct FileLines { - pub file: Lrc, + pub file: Lrc, pub lines: Vec } diff --git a/src/test/incremental/remove_source_file/main.rs b/src/test/incremental/remove_source_file/main.rs index a8c3f70f47b..75fe6d9511c 100644 --- a/src/test/incremental/remove_source_file/main.rs +++ b/src/test/incremental/remove_source_file/main.rs @@ -13,7 +13,7 @@ // revisions:cfail1 cfail2 -// Note that we specify -g so that the FileMaps actually get referenced by the +// Note that we specify -g so that the SourceFiles actually get referenced by the // incr. comp. cache: // compile-flags: -Z query-dep-graph -g // compile-pass -- cgit 1.4.1-3-g733a5 From cbd05957103926fa10d41474fde773167fe64dfb Mon Sep 17 00:00:00 2001 From: Donato Sciarra Date: Sat, 18 Aug 2018 12:13:56 +0200 Subject: mv filemap source_file --- src/libproc_macro/lib.rs | 8 +- src/librustc/hir/map/collector.rs | 4 +- src/librustc/ich/caching_codemap_view.rs | 2 +- src/librustc/ich/impls_syntax.rs | 16 +- src/librustc/ty/query/on_disk_cache.rs | 10 +- src/librustc_driver/pretty.rs | 2 +- src/librustc_errors/emitter.rs | 2 +- src/librustc_errors/lib.rs | 2 +- src/librustc_metadata/cstore.rs | 4 +- src/librustc_metadata/cstore_impl.rs | 8 +- src/librustc_metadata/decoder.rs | 64 ++--- src/librustc_metadata/encoder.rs | 34 +-- src/librustc_save_analysis/span_utils.rs | 2 +- src/librustdoc/html/highlight.rs | 2 +- src/libsyntax/codemap.rs | 88 +++---- src/libsyntax/ext/expand.rs | 2 +- src/libsyntax/ext/source_util.rs | 6 +- src/libsyntax/json.rs | 4 +- src/libsyntax/parse/lexer/comments.rs | 8 +- 
src/libsyntax/parse/lexer/mod.rs | 38 +-- src/libsyntax/parse/mod.rs | 28 +- src/libsyntax/test_snippet.rs | 2 +- src/libsyntax/util/parser_testing.rs | 6 +- src/libsyntax_pos/analyze_filemap.rs | 436 ------------------------------- src/libsyntax_pos/analyze_source_file.rs | 436 +++++++++++++++++++++++++++++++ src/libsyntax_pos/lib.rs | 6 +- 26 files changed, 610 insertions(+), 610 deletions(-) delete mode 100644 src/libsyntax_pos/analyze_filemap.rs create mode 100644 src/libsyntax_pos/analyze_source_file.rs (limited to 'src/libsyntax/parse') diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 50a613f86c2..08ae78f775b 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -440,9 +440,9 @@ impl SourceFile { /// [`is_real`]: #method.is_real #[unstable(feature = "proc_macro_span", issue = "38356")] pub fn path(&self) -> PathBuf { - match self.filemap.name { + match self.source_file.name { FileName::Real(ref path) => path.clone(), - _ => PathBuf::from(self.filemap.name.to_string()) + _ => PathBuf::from(self.source_file.name.to_string()) } } @@ -453,7 +453,7 @@ impl SourceFile { // This is a hack until intercrate spans are implemented and we can have real source files // for spans generated in external macros. 
// https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368 - self.filemap.is_real_file() + self.source_file.is_real_file() } } @@ -471,7 +471,7 @@ impl fmt::Debug for SourceFile { #[unstable(feature = "proc_macro_span", issue = "38356")] impl PartialEq for SourceFile { fn eq(&self, other: &Self) -> bool { - Lrc::ptr_eq(&self.filemap, &other.filemap) + Lrc::ptr_eq(&self.source_file, &other.source_file) } } diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index ceeb31934e1..7a304603ada 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -158,8 +158,8 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { let mut source_file_names: Vec<_> = codemap .files() .iter() - .filter(|filemap| CrateNum::from_u32(filemap.crate_of_origin) == LOCAL_CRATE) - .map(|filemap| filemap.name_hash) + .filter(|source_file| CrateNum::from_u32(source_file.crate_of_origin) == LOCAL_CRATE) + .map(|source_file| source_file.name_hash) .collect(); source_file_names.sort_unstable(); diff --git a/src/librustc/ich/caching_codemap_view.rs b/src/librustc/ich/caching_codemap_view.rs index d8999e1e00f..adfb9b6181a 100644 --- a/src/librustc/ich/caching_codemap_view.rs +++ b/src/librustc/ich/caching_codemap_view.rs @@ -79,7 +79,7 @@ impl<'cm> CachingCodemapView<'cm> { if pos < cache_entry.file.start_pos || pos >= cache_entry.file.end_pos { let file_valid; if self.codemap.files().len() > 0 { - let file_index = self.codemap.lookup_filemap_idx(pos); + let file_index = self.codemap.lookup_source_file_idx(pos); let file = self.codemap.files()[file_index].clone(); if pos >= file.start_pos && pos < file.end_pos { diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index fd35713cc6f..65b84ce4a82 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -458,13 +458,13 @@ impl<'a> HashStable> for SourceFile { src_hash.hash_stable(hcx, hasher); - // We only hash the relative position 
within this filemap + // We only hash the relative position within this source_file lines.len().hash_stable(hcx, hasher); for &line in lines.iter() { stable_byte_pos(line, start_pos).hash_stable(hcx, hasher); } - // We only hash the relative position within this filemap + // We only hash the relative position within this source_file multibyte_chars.len().hash_stable(hcx, hasher); for &char_pos in multibyte_chars.iter() { stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher); @@ -478,29 +478,29 @@ impl<'a> HashStable> for SourceFile { } fn stable_byte_pos(pos: ::syntax_pos::BytePos, - filemap_start: ::syntax_pos::BytePos) + source_file_start: ::syntax_pos::BytePos) -> u32 { - pos.0 - filemap_start.0 + pos.0 - source_file_start.0 } fn stable_multibyte_char(mbc: ::syntax_pos::MultiByteChar, - filemap_start: ::syntax_pos::BytePos) + source_file_start: ::syntax_pos::BytePos) -> (u32, u32) { let ::syntax_pos::MultiByteChar { pos, bytes, } = mbc; - (pos.0 - filemap_start.0, bytes as u32) + (pos.0 - source_file_start.0, bytes as u32) } fn stable_non_narrow_char(swc: ::syntax_pos::NonNarrowChar, - filemap_start: ::syntax_pos::BytePos) + source_file_start: ::syntax_pos::BytePos) -> (u32, u32) { let pos = swc.pos(); let width = swc.width(); - (pos.0 - filemap_start.0, width as u32) + (pos.0 - source_file_start.0, width as u32) } diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 2e5be214604..0dcdf44d6e6 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -494,7 +494,7 @@ impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { file_index_to_file.borrow_mut().entry(index).or_insert_with(|| { let stable_id = file_index_to_stable_id[&index]; - codemap.filemap_by_stable_id(stable_id) + codemap.source_file_by_stable_id(stable_id) .expect("Failed to lookup SourceFile in new context.") }).clone() } @@ -777,8 +777,8 @@ struct CacheEncoder<'enc, 'a, 'tcx, E> impl<'enc, 'a, 'tcx, E> 
CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { - fn filemap_index(&mut self, filemap: Lrc) -> SourceFileIndex { - self.file_to_file_index[&(&*filemap as *const SourceFile)] + fn source_file_index(&mut self, source_file: Lrc) -> SourceFileIndex { + self.file_to_file_index[&(&*source_file as *const SourceFile)] } /// Encode something with additional information that allows to do some @@ -850,10 +850,10 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx let len = span_data.hi - span_data.lo; - let filemap_index = self.filemap_index(file_lo); + let source_file_index = self.source_file_index(file_lo); TAG_VALID_SPAN.encode(self)?; - filemap_index.encode(self)?; + source_file_index.encode(self)?; line_lo.encode(self)?; col_lo.encode(self)?; len.encode(self)?; diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index a66392833f6..4d4198d34bc 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -916,7 +916,7 @@ pub fn fold_crate(sess: &Session, krate: ast::Crate, ppm: PpMode) -> ast::Crate fn get_source(input: &Input, sess: &Session) -> (Vec, FileName) { let src_name = driver::source_name(input); let src = sess.codemap() - .get_filemap(&src_name) + .get_source_file(&src_name) .unwrap() .src .as_ref() diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index ce3a19677f2..b4034a6a529 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -1021,7 +1021,7 @@ impl EmitterWriter { // Print out the annotate source lines that correspond with the error for annotated_file in annotated_files { // we can't annotate anything if the source is unavailable. 
- if !cm.ensure_filemap_source_present(annotated_file.file.clone()) { + if !cm.ensure_source_file_source_present(annotated_file.file.clone()) { continue; } diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 597b3216490..ae88a365cbe 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -120,7 +120,7 @@ pub trait SourceMapper { fn span_to_filename(&self, sp: Span) -> FileName; fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option; fn call_span_if_macro(&self, sp: Span) -> Span; - fn ensure_filemap_source_present(&self, file_map: Lrc) -> bool; + fn ensure_source_file_source_present(&self, file_map: Lrc) -> bool; fn doctest_offset_line(&self, line: usize) -> usize; } diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 2c95bd82432..5c020b70e30 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -42,14 +42,14 @@ pub use rustc_data_structures::sync::MetadataRef; pub struct MetadataBlob(pub MetadataRef); /// Holds information about a syntax_pos::SourceFile imported from another crate. -/// See `imported_filemaps()` for more information. +/// See `imported_source_files()` for more information. 
pub struct ImportedSourceFile { /// This SourceFile's byte-offset within the codemap of its original crate pub original_start_pos: syntax_pos::BytePos, /// The end of this SourceFile within the codemap of its original crate pub original_end_pos: syntax_pos::BytePos, /// The imported SourceFile's representation within the local codemap - pub translated_filemap: Lrc, + pub translated_source_file: Lrc, } pub struct CrateMetadata { diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 4926da3b880..54431e669a8 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -40,7 +40,7 @@ use syntax::ast; use syntax::attr; use syntax::codemap; use syntax::edition::Edition; -use syntax::parse::filemap_to_stream; +use syntax::parse::source_file_to_stream; use syntax::symbol::Symbol; use syntax_pos::{Span, NO_EXPANSION, FileName}; use rustc_data_structures::indexed_set::IdxSetBuf; @@ -463,9 +463,9 @@ impl cstore::CStore { let (name, def) = data.get_macro(id.index); let source_name = FileName::Macros(name.to_string()); - let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body); - let local_span = Span::new(filemap.start_pos, filemap.end_pos, NO_EXPANSION); - let body = filemap_to_stream(&sess.parse_sess, filemap, None); + let source_file = sess.parse_sess.codemap().new_source_file(source_name, def.body); + let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION); + let body = source_file_to_stream(&sess.parse_sess, source_file, None); // Mark the attrs as used let attrs = data.get_item_attrs(id.index, sess); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 00ed71c8891..1efe6e50a24 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -50,8 +50,8 @@ pub struct DecodeContext<'a, 'tcx: 'a> { sess: Option<&'a Session>, tcx: Option>, - // Cache the last used filemap for translating spans as 
an optimization. - last_filemap_index: usize, + // Cache the last used source_file for translating spans as an optimization. + last_source_file_index: usize, lazy_state: LazyState, @@ -73,7 +73,7 @@ pub trait Metadata<'a, 'tcx>: Copy { cdata: self.cdata(), sess: self.sess().or(tcx.map(|tcx| tcx.sess)), tcx, - last_filemap_index: 0, + last_source_file_index: 0, lazy_state: LazyState::NoNode, alloc_decoding_session: self.cdata().map(|cdata| { cdata.alloc_decoding_state.new_decoding_session() @@ -314,43 +314,43 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { bug!("Cannot decode Span without Session.") }; - let imported_filemaps = self.cdata().imported_filemaps(&sess.codemap()); - let filemap = { + let imported_source_files = self.cdata().imported_source_files(&sess.codemap()); + let source_file = { // Optimize for the case that most spans within a translated item - // originate from the same filemap. - let last_filemap = &imported_filemaps[self.last_filemap_index]; + // originate from the same source_file. + let last_source_file = &imported_source_files[self.last_source_file_index]; - if lo >= last_filemap.original_start_pos && - lo <= last_filemap.original_end_pos { - last_filemap + if lo >= last_source_file.original_start_pos && + lo <= last_source_file.original_end_pos { + last_source_file } else { let mut a = 0; - let mut b = imported_filemaps.len(); + let mut b = imported_source_files.len(); while b - a > 1 { let m = (a + b) / 2; - if imported_filemaps[m].original_start_pos > lo { + if imported_source_files[m].original_start_pos > lo { b = m; } else { a = m; } } - self.last_filemap_index = a; - &imported_filemaps[a] + self.last_source_file_index = a; + &imported_source_files[a] } }; // Make sure our binary search above is correct. 
- debug_assert!(lo >= filemap.original_start_pos && - lo <= filemap.original_end_pos); + debug_assert!(lo >= source_file.original_start_pos && + lo <= source_file.original_end_pos); // Make sure we correctly filtered out invalid spans during encoding - debug_assert!(hi >= filemap.original_start_pos && - hi <= filemap.original_end_pos); + debug_assert!(hi >= source_file.original_start_pos && + hi <= source_file.original_end_pos); - let lo = (lo + filemap.translated_filemap.start_pos) - filemap.original_start_pos; - let hi = (hi + filemap.translated_filemap.start_pos) - filemap.original_start_pos; + let lo = (lo + source_file.translated_source_file.start_pos) - source_file.original_start_pos; + let hi = (hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos; Ok(Span::new(lo, hi, NO_EXPANSION)) } @@ -1116,13 +1116,13 @@ impl<'a, 'tcx> CrateMetadata { /// file they represent, just information about length, line breaks, and /// multibyte characters. This information is enough to generate valid debuginfo /// for items inlined from other crates. - pub fn imported_filemaps(&'a self, + pub fn imported_source_files(&'a self, local_codemap: &codemap::SourceMap) -> ReadGuard<'a, Vec> { { - let filemaps = self.codemap_import_info.borrow(); - if !filemaps.is_empty() { - return filemaps; + let source_files = self.codemap_import_info.borrow(); + if !source_files.is_empty() { + return source_files; } } @@ -1136,7 +1136,7 @@ impl<'a, 'tcx> CrateMetadata { let external_codemap = self.root.codemap.decode(self); - let imported_filemaps = external_codemap.map(|filemap_to_import| { + let imported_source_files = external_codemap.map(|source_file_to_import| { // We can't reuse an existing SourceFile, so allocate a new one // containing the information we need. let syntax_pos::SourceFile { name, @@ -1148,13 +1148,13 @@ impl<'a, 'tcx> CrateMetadata { mut multibyte_chars, mut non_narrow_chars, name_hash, - .. } = filemap_to_import; + .. 
} = source_file_to_import; let source_length = (end_pos - start_pos).to_usize(); // Translate line-start positions and multibyte character // position into frame of reference local to file. - // `SourceMap::new_imported_filemap()` will then translate those + // `SourceMap::new_imported_source_file()` will then translate those // coordinates to their new global frame of reference when the // offset of the SourceFile is known. for pos in &mut lines { @@ -1167,7 +1167,7 @@ impl<'a, 'tcx> CrateMetadata { *swc = *swc - start_pos; } - let local_version = local_codemap.new_imported_filemap(name, + let local_version = local_codemap.new_imported_source_file(name, name_was_remapped, self.cnum.as_u32(), src_hash, @@ -1176,8 +1176,8 @@ impl<'a, 'tcx> CrateMetadata { lines, multibyte_chars, non_narrow_chars); - debug!("CrateMetaData::imported_filemaps alloc \ - filemap {:?} original (start_pos {:?} end_pos {:?}) \ + debug!("CrateMetaData::imported_source_files alloc \ + source_file {:?} original (start_pos {:?} end_pos {:?}) \ translated (start_pos {:?} end_pos {:?})", local_version.name, start_pos, end_pos, local_version.start_pos, local_version.end_pos); @@ -1185,11 +1185,11 @@ impl<'a, 'tcx> CrateMetadata { cstore::ImportedSourceFile { original_start_pos: start_pos, original_end_pos: end_pos, - translated_filemap: local_version, + translated_source_file: local_version, } }).collect(); - *codemap_import_info = imported_filemaps; + *codemap_import_info = imported_source_files; drop(codemap_import_info); // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref. diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 925d765ca31..2111cb363b2 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -62,7 +62,7 @@ pub struct EncodeContext<'a, 'tcx: 'a> { interpret_allocs_inverse: Vec, // This is used to speed up Span encoding. - filemap_cache: Lrc, + source_file_cache: Lrc, } macro_rules! 
encoder_methods { @@ -157,13 +157,13 @@ impl<'a, 'tcx> SpecializedEncoder for EncodeContext<'a, 'tcx> { // The Span infrastructure should make sure that this invariant holds: debug_assert!(span.lo <= span.hi); - if !self.filemap_cache.contains(span.lo) { + if !self.source_file_cache.contains(span.lo) { let codemap = self.tcx.sess.codemap(); - let filemap_index = codemap.lookup_filemap_idx(span.lo); - self.filemap_cache = codemap.files()[filemap_index].clone(); + let source_file_index = codemap.lookup_source_file_idx(span.lo); + self.source_file_cache = codemap.files()[source_file_index].clone(); } - if !self.filemap_cache.contains(span.hi) { + if !self.source_file_cache.contains(span.hi) { // Unfortunately, macro expansion still sometimes generates Spans // that malformed in this way. return TAG_INVALID_SPAN.encode(self) @@ -339,17 +339,17 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { fn encode_codemap(&mut self) -> LazySeq { let codemap = self.tcx.sess.codemap(); - let all_filemaps = codemap.files(); + let all_source_files = codemap.files(); let (working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir.clone(); - let adapted = all_filemaps.iter() - .filter(|filemap| { - // No need to re-export imported filemaps, as any downstream + let adapted = all_source_files.iter() + .filter(|source_file| { + // No need to re-export imported source_files, as any downstream // crate will import them from their original source. - !filemap.is_imported() + !source_file.is_imported() }) - .map(|filemap| { + .map(|source_file| { // When exporting SourceFiles, we expand all paths to absolute // paths because any relative paths are potentially relative to // a wrong directory. @@ -357,16 +357,16 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { // `--remap-path-prefix` we assume the user has already set // things up the way they want and don't touch the path values // anymore. 
- match filemap.name { + match source_file.name { FileName::Real(ref name) => { - if filemap.name_was_remapped || + if source_file.name_was_remapped || (name.is_relative() && working_dir_was_remapped) { // This path of this SourceFile has been modified by // path-remapping, so we use it verbatim (and avoid cloning // the whole map in the process). - filemap.clone() + source_file.clone() } else { - let mut adapted = (**filemap).clone(); + let mut adapted = (**source_file).clone(); adapted.name = Path::new(&working_dir).join(name).into(); adapted.name_hash = { let mut hasher: StableHasher = StableHasher::new(); @@ -377,7 +377,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } }, // expanded code, not from a file - _ => filemap.clone(), + _ => source_file.clone(), } }) .collect::>(); @@ -1842,7 +1842,7 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, lazy_state: LazyState::NoNode, type_shorthands: Default::default(), predicate_shorthands: Default::default(), - filemap_cache: tcx.sess.codemap().files()[0].clone(), + source_file_cache: tcx.sess.codemap().files()[0].clone(), interpret_allocs: Default::default(), interpret_allocs_inverse: Default::default(), }; diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index 85dd2a3a206..f764042926a 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -276,7 +276,7 @@ impl<'a> SpanUtils<'a> { None => return true, }; - //If the span comes from a fake filemap, filter it. + //If the span comes from a fake source_file, filter it. 
if !self.sess .codemap() .lookup_char_pos(parent.lo()) diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index a9137009115..a3ad50b7079 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -33,7 +33,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, tooltip: Option<(&str, &str)>) -> String { debug!("highlighting: ================\n{}\n==============", src); let sess = parse::ParseSess::new(FilePathMapping::empty()); - let fm = sess.codemap().new_filemap(FileName::Custom("stdin".to_string()), src.to_string()); + let fm = sess.codemap().new_source_file(FileName::Custom("stdin".to_string()), src.to_string()); let mut out = Vec::new(); if let Some((tooltip, class)) = tooltip { diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index c9a9fbf4d7b..34cd026f7a0 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -109,12 +109,12 @@ impl FileLoader for RealFileLoader { pub struct StableFilemapId(u128); impl StableFilemapId { - pub fn new(filemap: &SourceFile) -> StableFilemapId { + pub fn new(source_file: &SourceFile) -> StableFilemapId { let mut hasher = StableHasher::new(); - filemap.name.hash(&mut hasher); - filemap.name_was_remapped.hash(&mut hasher); - filemap.unmapped_path.hash(&mut hasher); + source_file.name.hash(&mut hasher); + source_file.name_was_remapped.hash(&mut hasher); + source_file.unmapped_path.hash(&mut hasher); StableFilemapId(hasher.finish()) } @@ -126,7 +126,7 @@ impl StableFilemapId { pub(super) struct SourceMapFiles { pub(super) file_maps: Vec>, - stable_id_to_filemap: FxHashMap> + stable_id_to_source_file: FxHashMap> } pub struct SourceMap { @@ -145,7 +145,7 @@ impl SourceMap { SourceMap { files: Lock::new(SourceMapFiles { file_maps: Vec::new(), - stable_id_to_filemap: FxHashMap(), + stable_id_to_source_file: FxHashMap(), }), file_loader: Box::new(RealFileLoader), path_mapping, @@ -168,7 +168,7 @@ impl SourceMap { SourceMap { files: 
Lock::new(SourceMapFiles { file_maps: Vec::new(), - stable_id_to_filemap: FxHashMap(), + stable_id_to_source_file: FxHashMap(), }), file_loader: file_loader, path_mapping, @@ -191,15 +191,15 @@ impl SourceMap { } else { path.to_owned().into() }; - Ok(self.new_filemap(filename, src)) + Ok(self.new_source_file(filename, src)) } pub fn files(&self) -> LockGuard>> { LockGuard::map(self.files.borrow(), |files| &mut files.file_maps) } - pub fn filemap_by_stable_id(&self, stable_id: StableFilemapId) -> Option> { - self.files.borrow().stable_id_to_filemap.get(&stable_id).map(|fm| fm.clone()) + pub fn source_file_by_stable_id(&self, stable_id: StableFilemapId) -> Option> { + self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|fm| fm.clone()) } fn next_start_pos(&self) -> usize { @@ -211,9 +211,9 @@ impl SourceMap { } } - /// Creates a new filemap. + /// Creates a new source_file. /// This does not ensure that only one SourceFile exists per file name. - pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc { + pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc { let start_pos = self.next_start_pos(); // The path is used to determine the directory for loading submodules and @@ -230,7 +230,7 @@ impl SourceMap { }, other => (other, false), }; - let filemap = Lrc::new(SourceFile::new( + let source_file = Lrc::new(SourceFile::new( filename, was_remapped, unmapped_path, @@ -240,17 +240,17 @@ impl SourceMap { let mut files = self.files.borrow_mut(); - files.file_maps.push(filemap.clone()); - files.stable_id_to_filemap.insert(StableFilemapId::new(&filemap), filemap.clone()); + files.file_maps.push(source_file.clone()); + files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone()); - filemap + source_file } /// Allocates a new SourceFile representing a source file from an external - /// crate. The source code of such an "imported filemap" is not available, + /// crate. 
The source code of such an "imported source_file" is not available, /// but we still know enough to generate accurate debuginfo location /// information for things inlined from other crates. - pub fn new_imported_filemap(&self, + pub fn new_imported_source_file(&self, filename: FileName, name_was_remapped: bool, crate_of_origin: u32, @@ -278,7 +278,7 @@ impl SourceMap { *swc = *swc + start_pos; } - let filemap = Lrc::new(SourceFile { + let source_file = Lrc::new(SourceFile { name: filename, name_was_remapped, unmapped_path: None, @@ -296,10 +296,10 @@ impl SourceMap { let mut files = self.files.borrow_mut(); - files.file_maps.push(filemap.clone()); - files.stable_id_to_filemap.insert(StableFilemapId::new(&filemap), filemap.clone()); + files.file_maps.push(source_file.clone()); + files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone()); - filemap + source_file } pub fn mk_substr_filename(&self, sp: Span) -> String { @@ -385,9 +385,9 @@ impl SourceMap { } } - // If the relevant filemap is empty, we don't return a line number. + // If the relevant source_file is empty, we don't return a line number. 
pub fn lookup_line(&self, pos: BytePos) -> Result> { - let idx = self.lookup_filemap_idx(pos); + let idx = self.lookup_source_file_idx(pos); let f = (*self.files.borrow().file_maps)[idx].clone(); @@ -541,7 +541,7 @@ impl SourceMap { local_end.fm.start_pos) })); } else { - self.ensure_filemap_source_present(local_begin.fm.clone()); + self.ensure_source_file_source_present(local_begin.fm.clone()); let start_index = local_begin.pos.to_usize(); let end_index = local_end.pos.to_usize(); @@ -798,7 +798,7 @@ impl SourceMap { } } - pub fn get_filemap(&self, filename: &FileName) -> Option> { + pub fn get_source_file(&self, filename: &FileName) -> Option> { for fm in self.files.borrow().file_maps.iter() { if *filename == fm.name { return Some(fm.clone()); @@ -809,15 +809,15 @@ impl SourceMap { /// For a global BytePos compute the local offset within the containing SourceFile pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos { - let idx = self.lookup_filemap_idx(bpos); + let idx = self.lookup_source_file_idx(bpos); let fm = (*self.files.borrow().file_maps)[idx].clone(); let offset = bpos - fm.start_pos; SourceFileAndBytePos {fm: fm, pos: offset} } - /// Converts an absolute BytePos to a CharPos relative to the filemap. + /// Converts an absolute BytePos to a CharPos relative to the source_file. pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { - let idx = self.lookup_filemap_idx(bpos); + let idx = self.lookup_source_file_idx(bpos); let map = &(*self.files.borrow().file_maps)[idx]; // The number of extra bytes due to multibyte chars in the SourceFile @@ -841,13 +841,13 @@ impl SourceMap { CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize) } - // Return the index of the filemap (in self.files) which contains pos. - pub fn lookup_filemap_idx(&self, pos: BytePos) -> usize { + // Return the index of the source_file (in self.files) which contains pos. 
+ pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize { let files = self.files.borrow(); let files = &files.file_maps; let count = files.len(); - // Binary search for the filemap. + // Binary search for the source_file. let mut a = 0; let mut b = count; while b - a > 1 { @@ -966,7 +966,7 @@ impl SourceMapper for SourceMap { } sp } - fn ensure_filemap_source_present(&self, file_map: Lrc) -> bool { + fn ensure_source_file_source_present(&self, file_map: Lrc) -> bool { file_map.add_external_src( || match file_map.name { FileName::Real(ref name) => self.file_loader.read_file(name).ok(), @@ -1025,11 +1025,11 @@ mod tests { fn init_code_map() -> SourceMap { let cm = SourceMap::new(FilePathMapping::empty()); - cm.new_filemap(PathBuf::from("blork.rs").into(), + cm.new_source_file(PathBuf::from("blork.rs").into(), "first line.\nsecond line".to_string()); - cm.new_filemap(PathBuf::from("empty.rs").into(), + cm.new_source_file(PathBuf::from("empty.rs").into(), "".to_string()); - cm.new_filemap(PathBuf::from("blork2.rs").into(), + cm.new_source_file(PathBuf::from("blork2.rs").into(), "first line.\nsecond line".to_string()); cm } @@ -1066,7 +1066,7 @@ mod tests { #[test] fn t5() { - // Test zero-length filemaps. + // Test zero-length source_files. let cm = init_code_map(); let loc1 = cm.lookup_char_pos(BytePos(22)); @@ -1083,9 +1083,9 @@ mod tests { fn init_code_map_mbc() -> SourceMap { let cm = SourceMap::new(FilePathMapping::empty()); // € is a three byte utf8 char. 
- cm.new_filemap(PathBuf::from("blork.rs").into(), + cm.new_source_file(PathBuf::from("blork.rs").into(), "fir€st €€€€ line.\nsecond line".to_string()); - cm.new_filemap(PathBuf::from("blork2.rs").into(), + cm.new_source_file(PathBuf::from("blork2.rs").into(), "first line€€.\n€ second line".to_string()); cm } @@ -1110,7 +1110,7 @@ mod tests { #[test] fn t7() { - // Test span_to_lines for a span ending at the end of filemap + // Test span_to_lines for a span ending at the end of source_file let cm = init_code_map(); let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let file_lines = cm.span_to_lines(span).unwrap(); @@ -1138,7 +1138,7 @@ mod tests { let cm = SourceMap::new(FilePathMapping::empty()); let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; let selection = " \n ~~\n~~~\n~~~~~ \n \n"; - cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_string()); + cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string()); let span = span_from_selection(inputtext, selection); // check that we are extracting the text we thought we were extracting @@ -1156,7 +1156,7 @@ mod tests { #[test] fn t8() { - // Test span_to_snippet for a span ending at the end of filemap + // Test span_to_snippet for a span ending at the end of source_file let cm = init_code_map(); let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let snippet = cm.span_to_snippet(span); @@ -1166,7 +1166,7 @@ mod tests { #[test] fn t9() { - // Test span_to_str for a span ending at the end of filemap + // Test span_to_str for a span ending at the end of source_file let cm = init_code_map(); let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let sstr = cm.span_to_string(span); @@ -1181,7 +1181,7 @@ mod tests { let inputtext = "bbbb BB\ncc CCC\n"; let selection1 = " ~~\n \n"; let selection2 = " \n ~~~\n"; - cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_owned()); + 
cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned()); let span1 = span_from_selection(inputtext, selection1); let span2 = span_from_selection(inputtext, selection2); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index ffa2730d686..6b41dfafd07 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -1563,7 +1563,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { // Add this input file to the code map to make it available as // dependency information - self.cx.codemap().new_filemap(filename.into(), src); + self.cx.codemap().new_source_file(filename.into(), src); let include_info = vec![ dummy_spanned(ast::NestedMetaItemKind::MetaItem( diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 9b7e0fe1ae5..fdf9c33b6f4 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -63,7 +63,7 @@ pub fn expand_column_gated(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Token } /// file!(): expands to the current filename */ -/// The filemap (`loc.file`) contains a bunch more information we could spit +/// The source_file (`loc.file`) contains a bunch more information we could spit /// out if we wanted. pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { @@ -154,7 +154,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT // Add this input file to the code map to make it available as // dependency information - cx.codemap().new_filemap(file.into(), src); + cx.codemap().new_source_file(file.into(), src); base::MacEager::expr(cx.expr_str(sp, interned_src)) } @@ -184,7 +184,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke Ok(..) 
=> { // Add this input file to the code map to make it available as // dependency information, but don't enter it's contents - cx.codemap().new_filemap(file.into(), "".to_string()); + cx.codemap().new_source_file(file.into(), "".to_string()); base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes)))) } diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index a090083f608..1ac51a68b62 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -340,7 +340,7 @@ impl DiagnosticSpan { } impl DiagnosticSpanLine { - fn line_from_filemap(fm: &syntax_pos::SourceFile, + fn line_from_source_file(fm: &syntax_pos::SourceFile, index: usize, h_start: usize, h_end: usize) @@ -362,7 +362,7 @@ impl DiagnosticSpanLine { lines.lines .iter() .map(|line| { - DiagnosticSpanLine::line_from_filemap(fm, + DiagnosticSpanLine::line_from_source_file(fm, line.line_index, line.start_col.0 + 1, line.end_col.0 + 1) diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 2c227756f9a..f4d4635b61e 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -247,11 +247,11 @@ fn read_block_comment(rdr: &mut StringReader, let mut lines: Vec = Vec::new(); // Count the number of chars since the start of the line by rescanning. 
- let mut src_index = rdr.src_index(rdr.filemap.line_begin_pos(rdr.pos)); + let mut src_index = rdr.src_index(rdr.source_file.line_begin_pos(rdr.pos)); let end_src_index = rdr.src_index(rdr.pos); assert!(src_index <= end_src_index, "src_index={}, end_src_index={}, line_begin_pos={}", - src_index, end_src_index, rdr.filemap.line_begin_pos(rdr.pos).to_u32()); + src_index, end_src_index, rdr.source_file.line_begin_pos(rdr.pos).to_u32()); let mut n = 0; while src_index < end_src_index { @@ -372,8 +372,8 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut let mut src = String::new(); srdr.read_to_string(&mut src).unwrap(); let cm = SourceMap::new(sess.codemap().path_mapping().clone()); - let filemap = cm.new_filemap(path, src); - let mut rdr = lexer::StringReader::new_raw(sess, filemap, None); + let source_file = cm.new_source_file(path, src); + let mut rdr = lexer::StringReader::new_raw(sess, source_file, None); let mut comments: Vec = Vec::new(); let mut literals: Vec = Vec::new(); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 654ac692338..acec975d32a 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -49,7 +49,7 @@ pub struct StringReader<'a> { pub pos: BytePos, /// The current character (which has been read from self.pos) pub ch: Option, - pub filemap: Lrc, + pub source_file: Lrc, /// Stop reading src at this index. pub end_src_index: usize, // cached: @@ -58,7 +58,7 @@ pub struct StringReader<'a> { peek_span_src_raw: Span, fatal_errs: Vec>, // cache a direct reference to the source text, so that we don't have to - // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time. + // retrieve it via `self.source_file.src.as_ref().unwrap()` all the time. src: Lrc, /// Stack of open delimiters and their spans. Used for error message. 
token: token::Token, @@ -180,31 +180,31 @@ impl<'a> StringReader<'a> { } /// For comments.rs, which hackily pokes into next_pos and ch - fn new_raw(sess: &'a ParseSess, filemap: Lrc, override_span: Option) + fn new_raw(sess: &'a ParseSess, source_file: Lrc, override_span: Option) -> Self { - let mut sr = StringReader::new_raw_internal(sess, filemap, override_span); + let mut sr = StringReader::new_raw_internal(sess, source_file, override_span); sr.bump(); sr } - fn new_raw_internal(sess: &'a ParseSess, filemap: Lrc, + fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc, override_span: Option) -> Self { - if filemap.src.is_none() { - sess.span_diagnostic.bug(&format!("Cannot lex filemap without source: {}", - filemap.name)); + if source_file.src.is_none() { + sess.span_diagnostic.bug(&format!("Cannot lex source_file without source: {}", + source_file.name)); } - let src = (*filemap.src.as_ref().unwrap()).clone(); + let src = (*source_file.src.as_ref().unwrap()).clone(); StringReader { sess, - next_pos: filemap.start_pos, - pos: filemap.start_pos, + next_pos: source_file.start_pos, + pos: source_file.start_pos, ch: Some('\n'), - filemap, + source_file, end_src_index: src.len(), // dummy values; not read peek_tok: token::Eof, @@ -221,10 +221,10 @@ impl<'a> StringReader<'a> { } } - pub fn new(sess: &'a ParseSess, filemap: Lrc, override_span: Option) + pub fn new(sess: &'a ParseSess, source_file: Lrc, override_span: Option) -> Self { - let mut sr = StringReader::new_raw(sess, filemap, override_span); + let mut sr = StringReader::new_raw(sess, source_file, override_span); if sr.advance_token().is_err() { sr.emit_fatal_errors(); FatalError.raise(); @@ -364,8 +364,8 @@ impl<'a> StringReader<'a> { if self.is_eof() { self.peek_tok = token::Eof; let (real, raw) = self.mk_sp_and_raw( - self.filemap.end_pos, - self.filemap.end_pos, + self.source_file.end_pos, + self.source_file.end_pos, ); self.peek_span = real; self.peek_span_src_raw = raw; @@ -384,7 +384,7 @@ impl<'a> 
StringReader<'a> { #[inline] fn src_index(&self, pos: BytePos) -> usize { - (pos - self.filemap.start_pos).to_usize() + (pos - self.source_file.start_pos).to_usize() } /// Calls `f` with a string slice of the source text spanning from `start` @@ -623,7 +623,7 @@ impl<'a> StringReader<'a> { // I guess this is the only way to figure out if // we're at the beginning of the file... let cmap = SourceMap::new(FilePathMapping::empty()); - cmap.files.borrow_mut().file_maps.push(self.filemap.clone()); + cmap.files.borrow_mut().file_maps.push(self.source_file.clone()); let loc = cmap.lookup_char_pos_adj(self.pos); debug!("Skipping a shebang"); if loc.line == 1 && loc.col == CharPos(0) { @@ -1861,7 +1861,7 @@ mod tests { sess: &'a ParseSess, teststr: String) -> StringReader<'a> { - let fm = cm.new_filemap(PathBuf::from("zebra.rs").into(), teststr); + let fm = cm.new_source_file(PathBuf::from("zebra.rs").into(), teststr); StringReader::new(sess, fm, None) } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 65bab94c6bc..07a9f44fe4a 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -171,13 +171,13 @@ crate fn parse_stmt_from_source_str(name: FileName, source: String, sess: &Parse pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess, override_span: Option) -> TokenStream { - filemap_to_stream(sess, sess.codemap().new_filemap(name, source), override_span) + source_file_to_stream(sess, sess.codemap().new_source_file(name, source), override_span) } // Create a new parser from a source string pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser { - let mut parser = filemap_to_parser(sess, sess.codemap().new_filemap(name, source)); + let mut parser = source_file_to_parser(sess, sess.codemap().new_source_file(name, source)); parser.recurse_into_file_modules = false; parser } @@ -185,7 +185,7 @@ pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, 
source: Stri /// Create a new parser, handling errors as appropriate /// if the file doesn't exist pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> { - filemap_to_parser(sess, file_to_filemap(sess, path, None)) + source_file_to_parser(sess, file_to_source_file(sess, path, None)) } /// Given a session, a crate config, a path, and a span, add @@ -196,16 +196,16 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, directory_ownership: DirectoryOwnership, module_name: Option, sp: Span) -> Parser<'a> { - let mut p = filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp))); + let mut p = source_file_to_parser(sess, file_to_source_file(sess, path, Some(sp))); p.directory.ownership = directory_ownership; p.root_module_name = module_name; p } -/// Given a filemap and config, return a parser -fn filemap_to_parser(sess: & ParseSess, filemap: Lrc) -> Parser { - let end_pos = filemap.end_pos; - let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None)); +/// Given a source_file and config, return a parser +fn source_file_to_parser(sess: & ParseSess, source_file: Lrc) -> Parser { + let end_pos = source_file.end_pos; + let mut parser = stream_to_parser(sess, source_file_to_stream(sess, source_file, None)); if parser.token == token::Eof && parser.span.is_dummy() { parser.span = Span::new(end_pos, end_pos, parser.span.ctxt()); @@ -224,11 +224,11 @@ pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec) -> Parser { // base abstractions /// Given a session and a path and an optional span (for error reporting), -/// add the path to the session's codemap and return the new filemap. -fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) +/// add the path to the session's codemap and return the new source_file. 
+fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option) -> Lrc { match sess.codemap().load_file(path) { - Ok(filemap) => filemap, + Ok(source_file) => source_file, Err(e) => { let msg = format!("couldn't read {:?}: {}", path.display(), e); match spanopt { @@ -239,10 +239,10 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) } } -/// Given a filemap, produce a sequence of token-trees -pub fn filemap_to_stream(sess: &ParseSess, filemap: Lrc, override_span: Option) +/// Given a source_file, produce a sequence of token-trees +pub fn source_file_to_stream(sess: &ParseSess, source_file: Lrc, override_span: Option) -> TokenStream { - let mut srdr = lexer::StringReader::new(sess, filemap, override_span); + let mut srdr = lexer::StringReader::new(sess, source_file, override_span); srdr.real_token(); panictry!(srdr.parse_all_token_trees()) } diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs index 12f72a3979e..00dd79ffb00 100644 --- a/src/libsyntax/test_snippet.rs +++ b/src/libsyntax/test_snippet.rs @@ -51,7 +51,7 @@ fn test_harness(file_text: &str, span_labels: Vec, expected_output: & let output = Arc::new(Mutex::new(Vec::new())); let code_map = Lrc::new(SourceMap::new(FilePathMapping::empty())); - code_map.new_filemap(Path::new("test.rs").to_owned().into(), file_text.to_owned()); + code_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned()); let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end); let mut msp = MultiSpan::from_span(primary_span); diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 46b7f2d7bda..35dae1a4e67 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -10,7 +10,7 @@ use ast::{self, Ident}; use codemap::FilePathMapping; -use parse::{ParseSess, PResult, filemap_to_stream}; +use parse::{ParseSess, PResult, source_file_to_stream}; use parse::{lexer, 
new_parser_from_source_str}; use parse::parser::Parser; use ptr::P; @@ -21,8 +21,8 @@ use std::path::PathBuf; /// Map a string to tts, using a made-up filename: pub fn string_to_stream(source_str: String) -> TokenStream { let ps = ParseSess::new(FilePathMapping::empty()); - filemap_to_stream(&ps, ps.codemap() - .new_filemap(PathBuf::from("bogofile").into(), source_str), None) + source_file_to_stream(&ps, ps.codemap() + .new_source_file(PathBuf::from("bogofile").into(), source_str), None) } /// Map string to parser (via tts) diff --git a/src/libsyntax_pos/analyze_filemap.rs b/src/libsyntax_pos/analyze_filemap.rs deleted file mode 100644 index 6ae1ada51fd..00000000000 --- a/src/libsyntax_pos/analyze_filemap.rs +++ /dev/null @@ -1,436 +0,0 @@ -// Copyright 2018 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use unicode_width::UnicodeWidthChar; -use super::*; - -/// Find all newlines, multi-byte characters, and non-narrow characters in a -/// SourceFile. -/// -/// This function will use an SSE2 enhanced implementation if hardware support -/// is detected at runtime. -pub fn analyze_filemap( - src: &str, - filemap_start_pos: BytePos) - -> (Vec, Vec, Vec) -{ - let mut lines = vec![filemap_start_pos]; - let mut multi_byte_chars = vec![]; - let mut non_narrow_chars = vec![]; - - // Calls the right implementation, depending on hardware support available. - analyze_filemap_dispatch(src, - filemap_start_pos, - &mut lines, - &mut multi_byte_chars, - &mut non_narrow_chars); - - // The code above optimistically registers a new line *after* each \n - // it encounters. If that point is already outside the filemap, remove - // it again. 
- if let Some(&last_line_start) = lines.last() { - let file_map_end = filemap_start_pos + BytePos::from_usize(src.len()); - assert!(file_map_end >= last_line_start); - if last_line_start == file_map_end { - lines.pop(); - } - } - - (lines, multi_byte_chars, non_narrow_chars) -} - -cfg_if! { - if #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), - not(stage0)))] { - fn analyze_filemap_dispatch(src: &str, - filemap_start_pos: BytePos, - lines: &mut Vec, - multi_byte_chars: &mut Vec, - non_narrow_chars: &mut Vec) { - if is_x86_feature_detected!("sse2") { - unsafe { - analyze_filemap_sse2(src, - filemap_start_pos, - lines, - multi_byte_chars, - non_narrow_chars); - } - } else { - analyze_filemap_generic(src, - src.len(), - filemap_start_pos, - lines, - multi_byte_chars, - non_narrow_chars); - - } - } - - /// Check 16 byte chunks of text at a time. If the chunk contains - /// something other than printable ASCII characters and newlines, the - /// function falls back to the generic implementation. Otherwise it uses - /// SSE2 intrinsics to quickly find all newlines. - #[target_feature(enable = "sse2")] - unsafe fn analyze_filemap_sse2(src: &str, - output_offset: BytePos, - lines: &mut Vec, - multi_byte_chars: &mut Vec, - non_narrow_chars: &mut Vec) { - #[cfg(target_arch = "x86")] - use std::arch::x86::*; - #[cfg(target_arch = "x86_64")] - use std::arch::x86_64::*; - - const CHUNK_SIZE: usize = 16; - - let src_bytes = src.as_bytes(); - - let chunk_count = src.len() / CHUNK_SIZE; - - // This variable keeps track of where we should start decoding a - // chunk. If a multi-byte character spans across chunk boundaries, - // we need to skip that part in the next chunk because we already - // handled it. - let mut intra_chunk_offset = 0; - - for chunk_index in 0 .. chunk_count { - let ptr = src_bytes.as_ptr() as *const __m128i; - // We don't know if the pointer is aligned to 16 bytes, so we - // use `loadu`, which supports unaligned loading. 
- let chunk = _mm_loadu_si128(ptr.offset(chunk_index as isize)); - - // For character in the chunk, see if its byte value is < 0, which - // indicates that it's part of a UTF-8 char. - let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0)); - // Create a bit mask from the comparison results. - let multibyte_mask = _mm_movemask_epi8(multibyte_test); - - // If the bit mask is all zero, we only have ASCII chars here: - if multibyte_mask == 0 { - assert!(intra_chunk_offset == 0); - - // Check if there are any control characters in the chunk. All - // control characters that we can encounter at this point have a - // byte value less than 32 or ... - let control_char_test0 = _mm_cmplt_epi8(chunk, _mm_set1_epi8(32)); - let control_char_mask0 = _mm_movemask_epi8(control_char_test0); - - // ... it's the ASCII 'DEL' character with a value of 127. - let control_char_test1 = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(127)); - let control_char_mask1 = _mm_movemask_epi8(control_char_test1); - - let control_char_mask = control_char_mask0 | control_char_mask1; - - if control_char_mask != 0 { - // Check for newlines in the chunk - let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8)); - let newlines_mask = _mm_movemask_epi8(newlines_test); - - if control_char_mask == newlines_mask { - // All control characters are newlines, record them - let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32; - let output_offset = output_offset + - BytePos::from_usize(chunk_index * CHUNK_SIZE + 1); - - loop { - let index = newlines_mask.trailing_zeros(); - - if index >= CHUNK_SIZE as u32 { - // We have arrived at the end of the chunk. - break - } - - lines.push(BytePos(index) + output_offset); - - // Clear the bit, so we can find the next one. - newlines_mask &= (!1) << index; - } - - // We are done for this chunk. All control characters were - // newlines and we took care of those. 
- continue - } else { - // Some of the control characters are not newlines, - // fall through to the slow path below. - } - } else { - // No control characters, nothing to record for this chunk - continue - } - } - - // The slow path. - // There are control chars in here, fallback to generic decoding. - let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset; - intra_chunk_offset = analyze_filemap_generic( - &src[scan_start .. ], - CHUNK_SIZE - intra_chunk_offset, - BytePos::from_usize(scan_start) + output_offset, - lines, - multi_byte_chars, - non_narrow_chars - ); - } - - // There might still be a tail left to analyze - let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset; - if tail_start < src.len() { - analyze_filemap_generic(&src[tail_start as usize ..], - src.len() - tail_start, - output_offset + BytePos::from_usize(tail_start), - lines, - multi_byte_chars, - non_narrow_chars); - } - } - } else { - - // The target (or compiler version) does not support SSE2 ... - fn analyze_filemap_dispatch(src: &str, - filemap_start_pos: BytePos, - lines: &mut Vec, - multi_byte_chars: &mut Vec, - non_narrow_chars: &mut Vec) { - analyze_filemap_generic(src, - src.len(), - filemap_start_pos, - lines, - multi_byte_chars, - non_narrow_chars); - } - } -} - -// `scan_len` determines the number of bytes in `src` to scan. Note that the -// function can read past `scan_len` if a multi-byte character start within the -// range but extends past it. The overflow is returned by the function. -fn analyze_filemap_generic(src: &str, - scan_len: usize, - output_offset: BytePos, - lines: &mut Vec, - multi_byte_chars: &mut Vec, - non_narrow_chars: &mut Vec) - -> usize -{ - assert!(src.len() >= scan_len); - let mut i = 0; - let src_bytes = src.as_bytes(); - - while i < scan_len { - let byte = unsafe { - // We verified that i < scan_len <= src.len() - *src_bytes.get_unchecked(i as usize) - }; - - // How much to advance in order to get to the next UTF-8 char in the - // string. 
- let mut char_len = 1; - - if byte < 32 { - // This is an ASCII control character, it could be one of the cases - // that are interesting to us. - - let pos = BytePos::from_usize(i) + output_offset; - - match byte { - b'\n' => { - lines.push(pos + BytePos(1)); - } - b'\t' => { - non_narrow_chars.push(NonNarrowChar::Tab(pos)); - } - _ => { - non_narrow_chars.push(NonNarrowChar::ZeroWidth(pos)); - } - } - } else if byte >= 127 { - // The slow path: - // This is either ASCII control character "DEL" or the beginning of - // a multibyte char. Just decode to `char`. - let c = (&src[i..]).chars().next().unwrap(); - char_len = c.len_utf8(); - - let pos = BytePos::from_usize(i) + output_offset; - - if char_len > 1 { - assert!(char_len >=2 && char_len <= 4); - let mbc = MultiByteChar { - pos, - bytes: char_len as u8, - }; - multi_byte_chars.push(mbc); - } - - // Assume control characters are zero width. - // FIXME: How can we decide between `width` and `width_cjk`? - let char_width = UnicodeWidthChar::width(c).unwrap_or(0); - - if char_width != 1 { - non_narrow_chars.push(NonNarrowChar::new(pos, char_width)); - } - } - - i += char_len; - } - - i - scan_len -} - - - -macro_rules! 
test { - (case: $test_name:ident, - text: $text:expr, - filemap_start_pos: $filemap_start_pos:expr, - lines: $lines:expr, - multi_byte_chars: $multi_byte_chars:expr, - non_narrow_chars: $non_narrow_chars:expr,) => ( - - #[test] - fn $test_name() { - - let (lines, multi_byte_chars, non_narrow_chars) = - analyze_filemap($text, BytePos($filemap_start_pos)); - - let expected_lines: Vec = $lines - .into_iter() - .map(|pos| BytePos(pos)) - .collect(); - - assert_eq!(lines, expected_lines); - - let expected_mbcs: Vec = $multi_byte_chars - .into_iter() - .map(|(pos, bytes)| MultiByteChar { - pos: BytePos(pos), - bytes, - }) - .collect(); - - assert_eq!(multi_byte_chars, expected_mbcs); - - let expected_nncs: Vec = $non_narrow_chars - .into_iter() - .map(|(pos, width)| { - NonNarrowChar::new(BytePos(pos), width) - }) - .collect(); - - assert_eq!(non_narrow_chars, expected_nncs); - }) -} - -test!( - case: empty_text, - text: "", - filemap_start_pos: 0, - lines: vec![], - multi_byte_chars: vec![], - non_narrow_chars: vec![], -); - -test!( - case: newlines_short, - text: "a\nc", - filemap_start_pos: 0, - lines: vec![0, 2], - multi_byte_chars: vec![], - non_narrow_chars: vec![], -); - -test!( - case: newlines_long, - text: "012345678\nabcdef012345678\na", - filemap_start_pos: 0, - lines: vec![0, 10, 26], - multi_byte_chars: vec![], - non_narrow_chars: vec![], -); - -test!( - case: newline_and_multi_byte_char_in_same_chunk, - text: "01234β789\nbcdef0123456789abcdef", - filemap_start_pos: 0, - lines: vec![0, 11], - multi_byte_chars: vec![(5, 2)], - non_narrow_chars: vec![], -); - -test!( - case: newline_and_control_char_in_same_chunk, - text: "01234\u{07}6789\nbcdef0123456789abcdef", - filemap_start_pos: 0, - lines: vec![0, 11], - multi_byte_chars: vec![], - non_narrow_chars: vec![(5, 0)], -); - -test!( - case: multi_byte_char_short, - text: "aβc", - filemap_start_pos: 0, - lines: vec![0], - multi_byte_chars: vec![(1, 2)], - non_narrow_chars: vec![], -); - -test!( - case: 
multi_byte_char_long, - text: "0123456789abcΔf012345β", - filemap_start_pos: 0, - lines: vec![0], - multi_byte_chars: vec![(13, 2), (22, 2)], - non_narrow_chars: vec![], -); - -test!( - case: multi_byte_char_across_chunk_boundary, - text: "0123456789abcdeΔ123456789abcdef01234", - filemap_start_pos: 0, - lines: vec![0], - multi_byte_chars: vec![(15, 2)], - non_narrow_chars: vec![], -); - -test!( - case: multi_byte_char_across_chunk_boundary_tail, - text: "0123456789abcdeΔ....", - filemap_start_pos: 0, - lines: vec![0], - multi_byte_chars: vec![(15, 2)], - non_narrow_chars: vec![], -); - -test!( - case: non_narrow_short, - text: "0\t2", - filemap_start_pos: 0, - lines: vec![0], - multi_byte_chars: vec![], - non_narrow_chars: vec![(1, 4)], -); - -test!( - case: non_narrow_long, - text: "01\t3456789abcdef01234567\u{07}9", - filemap_start_pos: 0, - lines: vec![0], - multi_byte_chars: vec![], - non_narrow_chars: vec![(2, 4), (24, 0)], -); - -test!( - case: output_offset_all, - text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf", - filemap_start_pos: 1000, - lines: vec![0 + 1000, 7 + 1000, 27 + 1000], - multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)], - non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)], -); diff --git a/src/libsyntax_pos/analyze_source_file.rs b/src/libsyntax_pos/analyze_source_file.rs new file mode 100644 index 00000000000..e468aaac7a3 --- /dev/null +++ b/src/libsyntax_pos/analyze_source_file.rs @@ -0,0 +1,436 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use unicode_width::UnicodeWidthChar; +use super::*; + +/// Find all newlines, multi-byte characters, and non-narrow characters in a +/// SourceFile. 
+/// +/// This function will use an SSE2 enhanced implementation if hardware support +/// is detected at runtime. +pub fn analyze_source_file( + src: &str, + source_file_start_pos: BytePos) + -> (Vec, Vec, Vec) +{ + let mut lines = vec![source_file_start_pos]; + let mut multi_byte_chars = vec![]; + let mut non_narrow_chars = vec![]; + + // Calls the right implementation, depending on hardware support available. + analyze_source_file_dispatch(src, + source_file_start_pos, + &mut lines, + &mut multi_byte_chars, + &mut non_narrow_chars); + + // The code above optimistically registers a new line *after* each \n + // it encounters. If that point is already outside the source_file, remove + // it again. + if let Some(&last_line_start) = lines.last() { + let file_map_end = source_file_start_pos + BytePos::from_usize(src.len()); + assert!(file_map_end >= last_line_start); + if last_line_start == file_map_end { + lines.pop(); + } + } + + (lines, multi_byte_chars, non_narrow_chars) +} + +cfg_if! { + if #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), + not(stage0)))] { + fn analyze_source_file_dispatch(src: &str, + source_file_start_pos: BytePos, + lines: &mut Vec, + multi_byte_chars: &mut Vec, + non_narrow_chars: &mut Vec) { + if is_x86_feature_detected!("sse2") { + unsafe { + analyze_source_file_sse2(src, + source_file_start_pos, + lines, + multi_byte_chars, + non_narrow_chars); + } + } else { + analyze_source_file_generic(src, + src.len(), + source_file_start_pos, + lines, + multi_byte_chars, + non_narrow_chars); + + } + } + + /// Check 16 byte chunks of text at a time. If the chunk contains + /// something other than printable ASCII characters and newlines, the + /// function falls back to the generic implementation. Otherwise it uses + /// SSE2 intrinsics to quickly find all newlines. 
+ #[target_feature(enable = "sse2")] + unsafe fn analyze_source_file_sse2(src: &str, + output_offset: BytePos, + lines: &mut Vec, + multi_byte_chars: &mut Vec, + non_narrow_chars: &mut Vec) { + #[cfg(target_arch = "x86")] + use std::arch::x86::*; + #[cfg(target_arch = "x86_64")] + use std::arch::x86_64::*; + + const CHUNK_SIZE: usize = 16; + + let src_bytes = src.as_bytes(); + + let chunk_count = src.len() / CHUNK_SIZE; + + // This variable keeps track of where we should start decoding a + // chunk. If a multi-byte character spans across chunk boundaries, + // we need to skip that part in the next chunk because we already + // handled it. + let mut intra_chunk_offset = 0; + + for chunk_index in 0 .. chunk_count { + let ptr = src_bytes.as_ptr() as *const __m128i; + // We don't know if the pointer is aligned to 16 bytes, so we + // use `loadu`, which supports unaligned loading. + let chunk = _mm_loadu_si128(ptr.offset(chunk_index as isize)); + + // For character in the chunk, see if its byte value is < 0, which + // indicates that it's part of a UTF-8 char. + let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0)); + // Create a bit mask from the comparison results. + let multibyte_mask = _mm_movemask_epi8(multibyte_test); + + // If the bit mask is all zero, we only have ASCII chars here: + if multibyte_mask == 0 { + assert!(intra_chunk_offset == 0); + + // Check if there are any control characters in the chunk. All + // control characters that we can encounter at this point have a + // byte value less than 32 or ... + let control_char_test0 = _mm_cmplt_epi8(chunk, _mm_set1_epi8(32)); + let control_char_mask0 = _mm_movemask_epi8(control_char_test0); + + // ... it's the ASCII 'DEL' character with a value of 127. 
+ let control_char_test1 = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(127)); + let control_char_mask1 = _mm_movemask_epi8(control_char_test1); + + let control_char_mask = control_char_mask0 | control_char_mask1; + + if control_char_mask != 0 { + // Check for newlines in the chunk + let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8)); + let newlines_mask = _mm_movemask_epi8(newlines_test); + + if control_char_mask == newlines_mask { + // All control characters are newlines, record them + let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32; + let output_offset = output_offset + + BytePos::from_usize(chunk_index * CHUNK_SIZE + 1); + + loop { + let index = newlines_mask.trailing_zeros(); + + if index >= CHUNK_SIZE as u32 { + // We have arrived at the end of the chunk. + break + } + + lines.push(BytePos(index) + output_offset); + + // Clear the bit, so we can find the next one. + newlines_mask &= (!1) << index; + } + + // We are done for this chunk. All control characters were + // newlines and we took care of those. + continue + } else { + // Some of the control characters are not newlines, + // fall through to the slow path below. + } + } else { + // No control characters, nothing to record for this chunk + continue + } + } + + // The slow path. + // There are control chars in here, fallback to generic decoding. + let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset; + intra_chunk_offset = analyze_source_file_generic( + &src[scan_start .. 
], + CHUNK_SIZE - intra_chunk_offset, + BytePos::from_usize(scan_start) + output_offset, + lines, + multi_byte_chars, + non_narrow_chars + ); + } + + // There might still be a tail left to analyze + let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset; + if tail_start < src.len() { + analyze_source_file_generic(&src[tail_start as usize ..], + src.len() - tail_start, + output_offset + BytePos::from_usize(tail_start), + lines, + multi_byte_chars, + non_narrow_chars); + } + } + } else { + + // The target (or compiler version) does not support SSE2 ... + fn analyze_source_file_dispatch(src: &str, + source_file_start_pos: BytePos, + lines: &mut Vec, + multi_byte_chars: &mut Vec, + non_narrow_chars: &mut Vec) { + analyze_source_file_generic(src, + src.len(), + source_file_start_pos, + lines, + multi_byte_chars, + non_narrow_chars); + } + } +} + +// `scan_len` determines the number of bytes in `src` to scan. Note that the +// function can read past `scan_len` if a multi-byte character start within the +// range but extends past it. The overflow is returned by the function. +fn analyze_source_file_generic(src: &str, + scan_len: usize, + output_offset: BytePos, + lines: &mut Vec, + multi_byte_chars: &mut Vec, + non_narrow_chars: &mut Vec) + -> usize +{ + assert!(src.len() >= scan_len); + let mut i = 0; + let src_bytes = src.as_bytes(); + + while i < scan_len { + let byte = unsafe { + // We verified that i < scan_len <= src.len() + *src_bytes.get_unchecked(i as usize) + }; + + // How much to advance in order to get to the next UTF-8 char in the + // string. + let mut char_len = 1; + + if byte < 32 { + // This is an ASCII control character, it could be one of the cases + // that are interesting to us. 
+ + let pos = BytePos::from_usize(i) + output_offset; + + match byte { + b'\n' => { + lines.push(pos + BytePos(1)); + } + b'\t' => { + non_narrow_chars.push(NonNarrowChar::Tab(pos)); + } + _ => { + non_narrow_chars.push(NonNarrowChar::ZeroWidth(pos)); + } + } + } else if byte >= 127 { + // The slow path: + // This is either ASCII control character "DEL" or the beginning of + // a multibyte char. Just decode to `char`. + let c = (&src[i..]).chars().next().unwrap(); + char_len = c.len_utf8(); + + let pos = BytePos::from_usize(i) + output_offset; + + if char_len > 1 { + assert!(char_len >=2 && char_len <= 4); + let mbc = MultiByteChar { + pos, + bytes: char_len as u8, + }; + multi_byte_chars.push(mbc); + } + + // Assume control characters are zero width. + // FIXME: How can we decide between `width` and `width_cjk`? + let char_width = UnicodeWidthChar::width(c).unwrap_or(0); + + if char_width != 1 { + non_narrow_chars.push(NonNarrowChar::new(pos, char_width)); + } + } + + i += char_len; + } + + i - scan_len +} + + + +macro_rules! 
test { + (case: $test_name:ident, + text: $text:expr, + source_file_start_pos: $source_file_start_pos:expr, + lines: $lines:expr, + multi_byte_chars: $multi_byte_chars:expr, + non_narrow_chars: $non_narrow_chars:expr,) => ( + + #[test] + fn $test_name() { + + let (lines, multi_byte_chars, non_narrow_chars) = + analyze_source_file($text, BytePos($source_file_start_pos)); + + let expected_lines: Vec = $lines + .into_iter() + .map(|pos| BytePos(pos)) + .collect(); + + assert_eq!(lines, expected_lines); + + let expected_mbcs: Vec = $multi_byte_chars + .into_iter() + .map(|(pos, bytes)| MultiByteChar { + pos: BytePos(pos), + bytes, + }) + .collect(); + + assert_eq!(multi_byte_chars, expected_mbcs); + + let expected_nncs: Vec = $non_narrow_chars + .into_iter() + .map(|(pos, width)| { + NonNarrowChar::new(BytePos(pos), width) + }) + .collect(); + + assert_eq!(non_narrow_chars, expected_nncs); + }) +} + +test!( + case: empty_text, + text: "", + source_file_start_pos: 0, + lines: vec![], + multi_byte_chars: vec![], + non_narrow_chars: vec![], +); + +test!( + case: newlines_short, + text: "a\nc", + source_file_start_pos: 0, + lines: vec![0, 2], + multi_byte_chars: vec![], + non_narrow_chars: vec![], +); + +test!( + case: newlines_long, + text: "012345678\nabcdef012345678\na", + source_file_start_pos: 0, + lines: vec![0, 10, 26], + multi_byte_chars: vec![], + non_narrow_chars: vec![], +); + +test!( + case: newline_and_multi_byte_char_in_same_chunk, + text: "01234β789\nbcdef0123456789abcdef", + source_file_start_pos: 0, + lines: vec![0, 11], + multi_byte_chars: vec![(5, 2)], + non_narrow_chars: vec![], +); + +test!( + case: newline_and_control_char_in_same_chunk, + text: "01234\u{07}6789\nbcdef0123456789abcdef", + source_file_start_pos: 0, + lines: vec![0, 11], + multi_byte_chars: vec![], + non_narrow_chars: vec![(5, 0)], +); + +test!( + case: multi_byte_char_short, + text: "aβc", + source_file_start_pos: 0, + lines: vec![0], + multi_byte_chars: vec![(1, 2)], + 
non_narrow_chars: vec![], +); + +test!( + case: multi_byte_char_long, + text: "0123456789abcΔf012345β", + source_file_start_pos: 0, + lines: vec![0], + multi_byte_chars: vec![(13, 2), (22, 2)], + non_narrow_chars: vec![], +); + +test!( + case: multi_byte_char_across_chunk_boundary, + text: "0123456789abcdeΔ123456789abcdef01234", + source_file_start_pos: 0, + lines: vec![0], + multi_byte_chars: vec![(15, 2)], + non_narrow_chars: vec![], +); + +test!( + case: multi_byte_char_across_chunk_boundary_tail, + text: "0123456789abcdeΔ....", + source_file_start_pos: 0, + lines: vec![0], + multi_byte_chars: vec![(15, 2)], + non_narrow_chars: vec![], +); + +test!( + case: non_narrow_short, + text: "0\t2", + source_file_start_pos: 0, + lines: vec![0], + multi_byte_chars: vec![], + non_narrow_chars: vec![(1, 4)], +); + +test!( + case: non_narrow_long, + text: "01\t3456789abcdef01234567\u{07}9", + source_file_start_pos: 0, + lines: vec![0], + multi_byte_chars: vec![], + non_narrow_chars: vec![(2, 4), (24, 0)], +); + +test!( + case: output_offset_all, + text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf", + source_file_start_pos: 1000, + lines: vec![0 + 1000, 7 + 1000, 27 + 1000], + multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)], + non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)], +); diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 5ab44b025e2..f9c91dc8a97 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -63,7 +63,7 @@ pub use span_encoding::{Span, DUMMY_SP}; pub mod symbol; -mod analyze_filemap; +mod analyze_source_file; pub struct Globals { symbol_interner: Lock, @@ -974,7 +974,7 @@ impl SourceFile { let end_pos = start_pos.to_usize() + src.len(); let (lines, multibyte_chars, non_narrow_chars) = - analyze_filemap::analyze_filemap(&src[..], start_pos); + analyze_source_file::analyze_source_file(&src[..], start_pos); SourceFile { name, @@ -1082,7 +1082,7 @@ impl SourceFile { /// Find the line containing the given position. 
The return value is the /// index into the `lines` array of this SourceFile, not the 1-based line - /// number. If the filemap is empty or the position is located before the + /// number. If the source_file is empty or the position is located before the /// first line, None is returned. pub fn lookup_line(&self, pos: BytePos) -> Option { if self.lines.len() == 0 { -- cgit 1.4.1-3-g733a5 From 82607d2cf3866c7cc31050548f2fbfa39207e319 Mon Sep 17 00:00:00 2001 From: Donato Sciarra Date: Sat, 18 Aug 2018 12:14:03 +0200 Subject: mv (mod) codemap source_map --- src/librustc/hir/lowering.rs | 8 +- src/librustc/hir/map/collector.rs | 2 +- src/librustc/hir/map/mod.rs | 2 +- src/librustc/hir/mod.rs | 4 +- src/librustc/hir/print.rs | 2 +- src/librustc/ich/caching_codemap_view.rs | 2 +- src/librustc/ich/hcx.rs | 2 +- src/librustc/infer/canonical/mod.rs | 2 +- .../infer/error_reporting/need_type_info.rs | 2 +- .../infer/error_reporting/nice_region_error/mod.rs | 2 +- src/librustc/lint/builtin.rs | 2 +- src/librustc/lint/levels.rs | 2 +- src/librustc/lint/mod.rs | 2 +- src/librustc/macros.rs | 2 +- src/librustc/middle/dead.rs | 4 +- src/librustc/middle/region.rs | 16 +- src/librustc/session/config.rs | 2 +- src/librustc/session/mod.rs | 10 +- src/librustc/traits/query/dropck_outlives.rs | 2 +- src/librustc/traits/query/outlives_bounds.rs | 2 +- src/librustc/traits/query/type_op/custom.rs | 2 +- src/librustc/ty/context.rs | 2 +- src/librustc/ty/query/on_disk_cache.rs | 2 +- src/librustc/ty/query/plumbing.rs | 2 +- src/librustc_allocator/expand.rs | 2 +- src/librustc_codegen_llvm/mir/constant.rs | 2 +- src/librustc_driver/lib.rs | 2 +- src/librustc_driver/test.rs | 2 +- src/librustc_lint/builtin.rs | 2 +- src/librustc_lint/types.rs | 4 +- src/librustc_metadata/cstore_impl.rs | 4 +- src/librustc_metadata/decoder.rs | 4 +- src/librustc_metadata/encoder.rs | 2 +- src/librustc_metadata/native_libs.rs | 2 +- .../nll/region_infer/error_reporting/var_name.rs | 2 +- 
src/librustc_mir/interpret/const_eval.rs | 4 +- src/librustc_mir/interpret/eval_context.rs | 6 +- src/librustc_mir/interpret/machine.rs | 2 +- src/librustc_mir/interpret/terminator/drop.rs | 2 +- src/librustc_mir/interpret/terminator/mod.rs | 2 +- src/librustc_mir/monomorphize/item.rs | 2 +- src/librustc_mir/transform/const_prop.rs | 2 +- src/librustc_passes/ast_validation.rs | 2 +- src/librustc_resolve/lib.rs | 2 +- src/librustc_save_analysis/dump_visitor.rs | 2 +- src/librustc_save_analysis/lib.rs | 4 +- src/librustc_traits/dropck_outlives.rs | 2 +- src/librustc_traits/evaluate_obligation.rs | 2 +- src/librustc_traits/implied_outlives_bounds.rs | 2 +- src/librustc_typeck/check/_match.rs | 2 +- src/librustc_typeck/check/closure.rs | 2 +- src/librustc_typeck/check/mod.rs | 2 +- src/librustc_typeck/collect.rs | 2 +- src/librustdoc/clean/cfg.rs | 2 +- src/librustdoc/clean/mod.rs | 2 +- src/librustdoc/core.rs | 8 +- src/librustdoc/doctree.rs | 2 +- src/librustdoc/html/highlight.rs | 2 +- src/librustdoc/html/render.rs | 2 +- src/librustdoc/markdown.rs | 2 +- src/librustdoc/test.rs | 2 +- src/librustdoc/visit_ast.rs | 2 +- src/libsyntax/ast.rs | 2 +- src/libsyntax/attr/mod.rs | 2 +- src/libsyntax/codemap.rs | 1235 -------------------- src/libsyntax/config.rs | 2 +- src/libsyntax/diagnostics/plugin.rs | 4 +- src/libsyntax/ext/base.rs | 4 +- src/libsyntax/ext/build.rs | 2 +- src/libsyntax/ext/derive.rs | 2 +- src/libsyntax/ext/expand.rs | 2 +- src/libsyntax/ext/placeholders.rs | 2 +- src/libsyntax/ext/quote.rs | 6 +- src/libsyntax/feature_gate.rs | 2 +- src/libsyntax/fold.rs | 2 +- src/libsyntax/json.rs | 2 +- src/libsyntax/lib.rs | 2 +- src/libsyntax/parse/attr.rs | 2 +- src/libsyntax/parse/lexer/comments.rs | 2 +- src/libsyntax/parse/lexer/mod.rs | 4 +- src/libsyntax/parse/mod.rs | 2 +- src/libsyntax/parse/parser.rs | 20 +- src/libsyntax/print/pprust.rs | 16 +- src/libsyntax/source_map.rs | 1235 ++++++++++++++++++++ src/libsyntax/std_inject.rs | 2 +- 
src/libsyntax/test.rs | 6 +- src/libsyntax/test_snippet.rs | 2 +- src/libsyntax/util/parser_testing.rs | 2 +- src/libsyntax_ext/assert.rs | 2 +- src/libsyntax_ext/deriving/custom.rs | 2 +- src/libsyntax_ext/deriving/generic/mod.rs | 4 +- src/libsyntax_ext/deriving/generic/ty.rs | 2 +- src/libsyntax_ext/global_asm.rs | 2 +- src/libsyntax_ext/proc_macro_impl.rs | 2 +- src/libsyntax_ext/proc_macro_registrar.rs | 2 +- src/test/compile-fail-fulldeps/qquote.rs | 2 +- src/test/run-fail-fulldeps/qquote.rs | 4 +- src/test/run-make-fulldeps/issue-19371/foo.rs | 2 +- src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs | 2 +- .../auxiliary/custom_derive_partial_eq.rs | 2 +- .../auxiliary/macro_crate_test.rs | 2 +- .../mod_dir_path_canonicalized.rs | 2 +- .../run-pass-fulldeps/pprust-expr-roundtrip.rs | 4 +- .../proc-macro/auxiliary/issue-40001-plugin.rs | 2 +- src/test/run-pass-fulldeps/qquote.rs | 2 +- 105 files changed, 1392 insertions(+), 1388 deletions(-) delete mode 100644 src/libsyntax/codemap.rs create mode 100644 src/libsyntax/source_map.rs (limited to 'src/libsyntax/parse') diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 09f76552279..e677906b058 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -68,7 +68,7 @@ use syntax::errors; use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::print::pprust; use syntax::ptr::P; -use syntax::codemap::{self, respan, CompilerDesugaringKind, Spanned}; +use syntax::source_map::{self, respan, CompilerDesugaringKind, Spanned}; use syntax::std_inject; use syntax::symbol::{keywords, Symbol}; use syntax::tokenstream::{Delimited, TokenStream, TokenTree}; @@ -614,14 +614,14 @@ impl<'a> LoweringContext<'a> { fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span { let mark = Mark::fresh(Mark::root()); - mark.set_expn_info(codemap::ExpnInfo { + mark.set_expn_info(source_map::ExpnInfo { call_site: span, def_site: Some(span), - format: 
codemap::CompilerDesugaring(reason), + format: source_map::CompilerDesugaring(reason), allow_internal_unstable: true, allow_internal_unsafe: false, local_inner_macros: false, - edition: codemap::hygiene::default_edition(), + edition: source_map::hygiene::default_edition(), }); span.with_ctxt(SyntaxContext::empty().apply_mark(mark)) } diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index 7a304603ada..bea9d80639c 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -18,7 +18,7 @@ use middle::cstore::CrateStore; use session::CrateDisambiguator; use std::iter::repeat; use syntax::ast::{NodeId, CRATE_NODE_ID}; -use syntax::codemap::SourceMap; +use syntax::source_map::SourceMap; use syntax_pos::Span; use ich::StableHashingContext; diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 81897322b6f..5bc80e537a3 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -24,7 +24,7 @@ use middle::cstore::CrateStore; use rustc_target::spec::abi::Abi; use rustc_data_structures::svh::Svh; use syntax::ast::{self, Name, NodeId, CRATE_NODE_ID}; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax::ext::base::MacroKind; use syntax_pos::{Span, DUMMY_SP}; diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 589f3c9d87c..795dba93abd 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -24,7 +24,7 @@ use util::nodemap::{NodeMap, FxHashSet}; use mir::mono::Linkage; use syntax_pos::{Span, DUMMY_SP, symbol::InternedString}; -use syntax::codemap::{self, Spanned}; +use syntax::source_map::{self, Spanned}; use rustc_target::spec::abi::Abi; use syntax::ast::{self, CrateSugar, Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; @@ -1100,7 +1100,7 @@ pub type Stmt = Spanned; impl fmt::Debug for StmtKind { fn fmt(&self, f: &mut fmt::Formatter) -> 
fmt::Result { // Sadness. - let spanned = codemap::dummy_spanned(self.clone()); + let spanned = source_map::dummy_spanned(self.clone()); write!(f, "stmt({}: {})", spanned.node.id(), diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 512f78763b2..9fe462e65a2 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -12,7 +12,7 @@ pub use self::AnnNode::*; use rustc_target::spec::abi::Abi; use syntax::ast; -use syntax::codemap::{SourceMap, Spanned}; +use syntax::source_map::{SourceMap, Spanned}; use syntax::parse::ParseSess; use syntax::parse::lexer::comments; use syntax::print::pp::{self, Breaks}; diff --git a/src/librustc/ich/caching_codemap_view.rs b/src/librustc/ich/caching_codemap_view.rs index adfb9b6181a..c219bbcb621 100644 --- a/src/librustc/ich/caching_codemap_view.rs +++ b/src/librustc/ich/caching_codemap_view.rs @@ -9,7 +9,7 @@ // except according to those terms. use rustc_data_structures::sync::Lrc; -use syntax::codemap::SourceMap; +use syntax::source_map::SourceMap; use syntax_pos::{BytePos, SourceFile}; #[derive(Clone)] diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index 5260d94168f..30ae1fbfb30 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -25,7 +25,7 @@ use std::cell::RefCell; use syntax::ast; -use syntax::codemap::SourceMap; +use syntax::source_map::SourceMap; use syntax::ext::hygiene::SyntaxContext; use syntax::symbol::Symbol; use syntax_pos::{Span, DUMMY_SP}; diff --git a/src/librustc/infer/canonical/mod.rs b/src/librustc/infer/canonical/mod.rs index 958b3391060..9b25727112c 100644 --- a/src/librustc/infer/canonical/mod.rs +++ b/src/librustc/infer/canonical/mod.rs @@ -37,7 +37,7 @@ use rustc_data_structures::small_vec::SmallVec; use rustc_data_structures::sync::Lrc; use serialize::UseSpecializedDecodable; use std::ops::Index; -use syntax::codemap::Span; +use syntax::source_map::Span; use ty::fold::TypeFoldable; use ty::subst::Kind; use ty::{self, CanonicalVar, Lift, 
Region, Slice, TyCtxt}; diff --git a/src/librustc/infer/error_reporting/need_type_info.rs b/src/librustc/infer/error_reporting/need_type_info.rs index b71c886a896..bd2c81c7fb6 100644 --- a/src/librustc/infer/error_reporting/need_type_info.rs +++ b/src/librustc/infer/error_reporting/need_type_info.rs @@ -13,7 +13,7 @@ use hir::intravisit::{self, Visitor, NestedVisitorMap}; use infer::InferCtxt; use infer::type_variable::TypeVariableOrigin; use ty::{self, Ty, TyInfer, TyVar}; -use syntax::codemap::CompilerDesugaringKind; +use syntax::source_map::CompilerDesugaringKind; use syntax_pos::Span; use errors::DiagnosticBuilder; diff --git a/src/librustc/infer/error_reporting/nice_region_error/mod.rs b/src/librustc/infer/error_reporting/nice_region_error/mod.rs index ddeb291a13a..0ff5630f63e 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/mod.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/mod.rs @@ -11,7 +11,7 @@ use infer::InferCtxt; use infer::lexical_region_resolve::RegionResolutionError; use infer::lexical_region_resolve::RegionResolutionError::*; -use syntax::codemap::Span; +use syntax::source_map::Span; use ty::{self, TyCtxt}; use util::common::ErrorReported; diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 590b59568c4..32687cb4884 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -18,7 +18,7 @@ use errors::{Applicability, DiagnosticBuilder}; use lint::{LintPass, LateLintPass, LintArray}; use session::Session; use syntax::ast; -use syntax::codemap::Span; +use syntax::source_map::Span; declare_lint! 
{ pub EXCEEDING_BITSHIFTS, diff --git a/src/librustc/lint/levels.rs b/src/librustc/lint/levels.rs index 483e2ea8a96..5b9ddabf21c 100644 --- a/src/librustc/lint/levels.rs +++ b/src/librustc/lint/levels.rs @@ -21,7 +21,7 @@ use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, use session::Session; use syntax::ast; use syntax::attr; -use syntax::codemap::MultiSpan; +use syntax::source_map::MultiSpan; use syntax::feature_gate; use syntax::symbol::Symbol; use util::nodemap::FxHashMap; diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index c36d674566a..62a582a3b93 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -42,7 +42,7 @@ use lint::builtin::parser::QUESTION_MARK_MACRO_SEP; use session::{Session, DiagnosticMessageId}; use std::{hash, ptr}; use syntax::ast; -use syntax::codemap::{MultiSpan, ExpnFormat}; +use syntax::source_map::{MultiSpan, ExpnFormat}; use syntax::early_buffered_lints::BufferedEarlyLintId; use syntax::edition::Edition; use syntax::symbol::Symbol; diff --git a/src/librustc/macros.rs b/src/librustc/macros.rs index e599b0704f9..759ac1a7952 100644 --- a/src/librustc/macros.rs +++ b/src/librustc/macros.rs @@ -147,7 +147,7 @@ macro_rules! impl_stable_hash_for { macro_rules! 
impl_stable_hash_for_spanned { ($T:path) => ( - impl<'a, 'tcx> HashStable> for ::syntax::codemap::Spanned<$T> + impl<'a, 'tcx> HashStable> for ::syntax::source_map::Spanned<$T> { #[inline] fn hash_stable(&self, diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index d0e3ae2b9fc..2090950932b 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -24,7 +24,7 @@ use middle::privacy; use ty::{self, TyCtxt}; use util::nodemap::FxHashSet; -use syntax::{ast, codemap}; +use syntax::{ast, source_map}; use syntax::attr; use syntax_pos; @@ -115,7 +115,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { } fn handle_field_pattern_match(&mut self, lhs: &hir::Pat, def: Def, - pats: &[codemap::Spanned]) { + pats: &[source_map::Spanned]) { let variant = match self.tables.node_id_to_type(lhs.hir_id).sty { ty::TyAdt(adt, _) => adt.variant_of_def(def), _ => span_bug!(lhs.span, "non-ADT in struct pattern") diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index ebdc9c922b1..e8429c78dc6 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -23,7 +23,7 @@ use ty; use std::fmt; use std::mem; use rustc_data_structures::sync::Lrc; -use syntax::codemap; +use syntax::source_map; use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; use ty::TyCtxt; @@ -943,11 +943,15 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: // scopes, meaning that temporaries cannot outlive them. // This ensures fixed size stacks. - hir::ExprKind::Binary(codemap::Spanned { node: hir::BinOpKind::And, .. }, _, ref r) | - hir::ExprKind::Binary(codemap::Spanned { node: hir::BinOpKind::Or, .. }, _, ref r) => { - // For shortcircuiting operators, mark the RHS as a terminating - // scope since it only executes conditionally. - terminating(r.hir_id.local_id); + hir::ExprKind::Binary( + source_map::Spanned { node: hir::BinOpKind::And, .. 
}, + _, ref r) | + hir::ExprKind::Binary( + source_map::Spanned { node: hir::BinOpKind::Or, .. }, + _, ref r) => { + // For shortcircuiting operators, mark the RHS as a terminating + // scope since it only executes conditionally. + terminating(r.hir_id.local_id); } hir::ExprKind::If(ref expr, ref then, Some(ref otherwise)) => { diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 3926ebedd37..d5946786252 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -22,7 +22,7 @@ use lint; use middle::cstore; use syntax::ast::{self, IntTy, UintTy}; -use syntax::codemap::{FileName, FilePathMapping}; +use syntax::source_map::{FileName, FilePathMapping}; use syntax::edition::{Edition, EDITION_NAME_LIST, DEFAULT_EDITION}; use syntax::parse::token; use syntax::parse; diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 05f8e3cfb00..27d6a1f1791 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -35,7 +35,7 @@ use syntax::json::JsonEmitter; use syntax::feature_gate; use syntax::parse; use syntax::parse::ParseSess; -use syntax::{ast, codemap}; +use syntax::{ast, source_map}; use syntax::feature_gate::AttributeType; use syntax_pos::{MultiSpan, Span}; use util::profiling::SelfProfiler; @@ -484,7 +484,7 @@ impl Session { ); } - pub fn codemap<'a>(&'a self) -> &'a codemap::SourceMap { + pub fn codemap<'a>(&'a self) -> &'a source_map::SourceMap { self.parse_sess.codemap() } pub fn verbose(&self) -> bool { @@ -984,7 +984,7 @@ pub fn build_session( sopts, local_crate_source_file, registry, - Lrc::new(codemap::SourceMap::new(file_path_mapping)), + Lrc::new(source_map::SourceMap::new(file_path_mapping)), None, ) } @@ -993,7 +993,7 @@ pub fn build_session_with_codemap( sopts: config::Options, local_crate_source_file: Option, registry: errors::registry::Registry, - codemap: Lrc, + codemap: Lrc, emitter_dest: Option>, ) -> Session { // FIXME: This is not general enough to make the 
warning lint completely override @@ -1070,7 +1070,7 @@ pub fn build_session_( sopts: config::Options, local_crate_source_file: Option, span_diagnostic: errors::Handler, - codemap: Lrc, + codemap: Lrc, ) -> Session { let host_triple = TargetTriple::from_triple(config::host_triple()); let host = match Target::search(&host_triple) { diff --git a/src/librustc/traits/query/dropck_outlives.rs b/src/librustc/traits/query/dropck_outlives.rs index e41ed0824b4..56484e17777 100644 --- a/src/librustc/traits/query/dropck_outlives.rs +++ b/src/librustc/traits/query/dropck_outlives.rs @@ -12,7 +12,7 @@ use infer::at::At; use infer::InferOk; use rustc_data_structures::small_vec::SmallVec; use std::iter::FromIterator; -use syntax::codemap::Span; +use syntax::source_map::Span; use ty::subst::Kind; use ty::{self, Ty, TyCtxt}; diff --git a/src/librustc/traits/query/outlives_bounds.rs b/src/librustc/traits/query/outlives_bounds.rs index 0127ae423da..868b8dfc885 100644 --- a/src/librustc/traits/query/outlives_bounds.rs +++ b/src/librustc/traits/query/outlives_bounds.rs @@ -10,7 +10,7 @@ use infer::InferCtxt; use syntax::ast; -use syntax::codemap::Span; +use syntax::source_map::Span; use rustc_data_structures::small_vec::SmallVec; use traits::{FulfillmentContext, ObligationCause, TraitEngine, TraitEngineExt}; use traits::query::NoSolution; diff --git a/src/librustc/traits/query/type_op/custom.rs b/src/librustc/traits/query/type_op/custom.rs index cc752d21ab2..737388ad411 100644 --- a/src/librustc/traits/query/type_op/custom.rs +++ b/src/librustc/traits/query/type_op/custom.rs @@ -15,7 +15,7 @@ use traits::query::Fallible; use infer::canonical::query_result; use infer::canonical::QueryRegionConstraint; use std::rc::Rc; -use syntax::codemap::DUMMY_SP; +use syntax::source_map::DUMMY_SP; use traits::{ObligationCause, TraitEngine, TraitEngineExt}; pub struct CustomTypeOp { diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 42948a3f5f1..921b3b1793a 100644 --- 
a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -73,7 +73,7 @@ use std::sync::Arc; use rustc_target::spec::abi; use syntax::ast::{self, NodeId}; use syntax::attr; -use syntax::codemap::MultiSpan; +use syntax::source_map::MultiSpan; use syntax::edition::Edition; use syntax::feature_gate; use syntax::symbol::{Symbol, keywords, InternedString}; diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 0dcdf44d6e6..c1f69d4a0f1 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -26,7 +26,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, use session::{CrateDisambiguator, Session}; use std::mem; use syntax::ast::NodeId; -use syntax::codemap::{SourceMap, StableFilemapId}; +use syntax::source_map::{SourceMap, StableFilemapId}; use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile}; use syntax_pos::hygiene::{Mark, SyntaxContext, ExpnInfo}; use ty; diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs index f59e48cb351..450fbc0edbb 100644 --- a/src/librustc/ty/query/plumbing.rs +++ b/src/librustc/ty/query/plumbing.rs @@ -32,7 +32,7 @@ use std::mem; use std::ptr; use std::collections::hash_map::Entry; use syntax_pos::Span; -use syntax::codemap::DUMMY_SP; +use syntax::source_map::DUMMY_SP; pub struct QueryCache<'tcx, D: QueryConfig<'tcx> + ?Sized> { pub(super) results: FxHashMap>, diff --git a/src/librustc_allocator/expand.rs b/src/librustc_allocator/expand.rs index 676dbeeeeb0..da60f41ee68 100644 --- a/src/librustc_allocator/expand.rs +++ b/src/librustc_allocator/expand.rs @@ -17,7 +17,7 @@ use syntax::{ Mac, Mod, Mutability, Ty, TyKind, Unsafety, VisibilityKind, }, attr, - codemap::{ + source_map::{ respan, ExpnInfo, MacroAttribute, }, ext::{ diff --git a/src/librustc_codegen_llvm/mir/constant.rs b/src/librustc_codegen_llvm/mir/constant.rs index a6e14a99f3c..47fd92682fd 100644 --- 
a/src/librustc_codegen_llvm/mir/constant.rs +++ b/src/librustc_codegen_llvm/mir/constant.rs @@ -25,7 +25,7 @@ use consts; use type_of::LayoutLlvmExt; use type_::Type; use syntax::ast::Mutability; -use syntax::codemap::Span; +use syntax::source_map::Span; use value::Value; use super::super::callee; diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index ed6e9db5e4e..07d9ab4e497 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -107,7 +107,7 @@ use std::sync::{Once, ONCE_INIT}; use std::thread; use syntax::ast; -use syntax::codemap::{SourceMap, FileLoader, RealFileLoader}; +use syntax::source_map::{SourceMap, FileLoader, RealFileLoader}; use syntax::feature_gate::{GatedCfg, UnstableFeatures}; use syntax::parse::{self, PResult}; use syntax_pos::{DUMMY_SP, MultiSpan, FileName}; diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index a4bba31b669..284111756a6 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -32,7 +32,7 @@ use rustc_data_structures::sync::{self, Lrc}; use syntax; use syntax::ast; use rustc_target::spec::abi::Abi; -use syntax::codemap::{SourceMap, FilePathMapping, FileName}; +use syntax::source_map::{SourceMap, FilePathMapping, FileName}; use errors; use errors::emitter::Emitter; use errors::{Level, DiagnosticBuilder}; diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 3a449b6a68e..cbcaa53b280 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -45,7 +45,7 @@ use rustc::util::nodemap::FxHashSet; use syntax::tokenstream::{TokenTree, TokenStream}; use syntax::ast; use syntax::attr; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax::edition::Edition; use syntax::feature_gate::{AttributeGate, AttributeType, Stability, deprecated_attributes}; use syntax_pos::{BytePos, Span, SyntaxContext}; diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index f1636c4dcb0..b8e765d9290 
100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -25,7 +25,7 @@ use syntax::{ast, attr}; use syntax::errors::Applicability; use rustc_target::spec::abi::Abi; use syntax_pos::Span; -use syntax::codemap; +use syntax::source_map; use rustc::hir; @@ -208,7 +208,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeLimits { } fn rev_binop(binop: hir::BinOp) -> hir::BinOp { - codemap::respan(binop.span, + source_map::respan(binop.span, match binop.node { hir::BinOpKind::Lt => hir::BinOpKind::Gt, hir::BinOpKind::Le => hir::BinOpKind::Ge, diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 54431e669a8..da971e18261 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -38,7 +38,7 @@ use std::sync::Arc; use syntax::ast; use syntax::attr; -use syntax::codemap; +use syntax::source_map; use syntax::edition::Edition; use syntax::parse::source_file_to_stream; use syntax::symbol::Symbol; @@ -487,7 +487,7 @@ impl cstore::CStore { tokens: body.into(), legacy: def.legacy, }), - vis: codemap::respan(local_span.shrink_to_lo(), ast::VisibilityKind::Inherited), + vis: source_map::respan(local_span.shrink_to_lo(), ast::VisibilityKind::Inherited), tokens: None, }) } diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 1efe6e50a24..29d21337726 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -39,7 +39,7 @@ use std::u32; use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque}; use syntax::attr; use syntax::ast::{self, Ident}; -use syntax::codemap; +use syntax::source_map; use syntax::symbol::InternedString; use syntax::ext::base::MacroKind; use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, NO_EXPANSION}; @@ -1117,7 +1117,7 @@ impl<'a, 'tcx> CrateMetadata { /// multibyte characters. This information is enough to generate valid debuginfo /// for items inlined from other crates. 
pub fn imported_source_files(&'a self, - local_codemap: &codemap::SourceMap) + local_codemap: &source_map::SourceMap) -> ReadGuard<'a, Vec> { { let source_files = self.codemap_import_info.borrow(); diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 2111cb363b2..fe9ea00a488 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -40,7 +40,7 @@ use rustc_data_structures::sync::Lrc; use std::u32; use syntax::ast::{self, CRATE_NODE_ID}; use syntax::attr; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax::symbol::keywords; use syntax_pos::{self, hygiene, FileName, SourceFile, Span}; diff --git a/src/librustc_metadata/native_libs.rs b/src/librustc_metadata/native_libs.rs index 078295c99bd..70eda895bd0 100644 --- a/src/librustc_metadata/native_libs.rs +++ b/src/librustc_metadata/native_libs.rs @@ -16,7 +16,7 @@ use rustc::ty::TyCtxt; use rustc::util::nodemap::FxHashSet; use rustc_target::spec::abi::Abi; use syntax::attr; -use syntax::codemap::Span; +use syntax::source_map::Span; use syntax::feature_gate::{self, GateIssue}; use syntax::symbol::Symbol; diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs index f1c3a7489ee..57ff0f4c10a 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs @@ -13,7 +13,7 @@ use borrow_check::nll::ToRegionVid; use rustc::mir::{Local, Mir}; use rustc::ty::{RegionVid, TyCtxt}; use rustc_data_structures::indexed_vec::Idx; -use syntax::codemap::Span; +use syntax::source_map::Span; use syntax_pos::symbol::Symbol; impl<'tcx> RegionInferenceContext<'tcx> { diff --git a/src/librustc_mir/interpret/const_eval.rs b/src/librustc_mir/interpret/const_eval.rs index 9d66a0b396b..dd298d9becb 100644 --- a/src/librustc_mir/interpret/const_eval.rs 
+++ b/src/librustc_mir/interpret/const_eval.rs @@ -10,8 +10,8 @@ use rustc::ty::subst::Subst; use rustc_data_structures::indexed_vec::IndexVec; use syntax::ast::Mutability; -use syntax::codemap::Span; -use syntax::codemap::DUMMY_SP; +use syntax::source_map::Span; +use syntax::source_map::DUMMY_SP; use rustc::mir::interpret::{ EvalResult, EvalError, EvalErrorKind, GlobalId, diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs index 52305be5fac..3329a47759b 100644 --- a/src/librustc_mir/interpret/eval_context.rs +++ b/src/librustc_mir/interpret/eval_context.rs @@ -18,7 +18,7 @@ use rustc::mir::interpret::{ ScalarMaybeUndef, }; -use syntax::codemap::{self, Span}; +use syntax::source_map::{self, Span}; use syntax::ast::Mutability; use super::{Place, PlaceExtra, Memory, @@ -91,7 +91,7 @@ pub struct Frame<'mir, 'tcx: 'mir> { pub instance: ty::Instance<'tcx>, /// The span of the call site. - pub span: codemap::Span, + pub span: source_map::Span, //////////////////////////////////////////////////////////////////////////////// // Return place and locals @@ -545,7 +545,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M pub fn push_stack_frame( &mut self, instance: ty::Instance<'tcx>, - span: codemap::Span, + span: source_map::Span, mir: &'mir mir::Mir<'tcx>, return_place: Place, return_to_block: StackPopCleanup, diff --git a/src/librustc_mir/interpret/machine.rs b/src/librustc_mir/interpret/machine.rs index e2086c57c7c..112d8759c68 100644 --- a/src/librustc_mir/interpret/machine.rs +++ b/src/librustc_mir/interpret/machine.rs @@ -10,7 +10,7 @@ use super::{EvalContext, Place, ValTy, Memory}; use rustc::mir; use rustc::ty::{self, Ty}; use rustc::ty::layout::Size; -use syntax::codemap::Span; +use syntax::source_map::Span; use syntax::ast::Mutability; /// Methods of this trait signifies a point where CTFE evaluation would fail diff --git a/src/librustc_mir/interpret/terminator/drop.rs 
b/src/librustc_mir/interpret/terminator/drop.rs index fe8071897c3..f86c0e89954 100644 --- a/src/librustc_mir/interpret/terminator/drop.rs +++ b/src/librustc_mir/interpret/terminator/drop.rs @@ -1,6 +1,6 @@ use rustc::mir::BasicBlock; use rustc::ty::{self, Ty}; -use syntax::codemap::Span; +use syntax::source_map::Span; use rustc::mir::interpret::{EvalResult, Value}; use interpret::{Machine, ValTy, EvalContext, Place, PlaceExtra}; diff --git a/src/librustc_mir/interpret/terminator/mod.rs b/src/librustc_mir/interpret/terminator/mod.rs index 682e384da39..3a772559d6d 100644 --- a/src/librustc_mir/interpret/terminator/mod.rs +++ b/src/librustc_mir/interpret/terminator/mod.rs @@ -1,7 +1,7 @@ use rustc::mir; use rustc::ty::{self, Ty}; use rustc::ty::layout::{LayoutOf, Size}; -use syntax::codemap::Span; +use syntax::source_map::Span; use rustc_target::spec::abi::Abi; use rustc::mir::interpret::{EvalResult, Scalar, Value}; diff --git a/src/librustc_mir/monomorphize/item.rs b/src/librustc_mir/monomorphize/item.rs index 95968c0ea4f..f73c9d43699 100644 --- a/src/librustc_mir/monomorphize/item.rs +++ b/src/librustc_mir/monomorphize/item.rs @@ -26,7 +26,7 @@ use std::fmt::{self, Write}; use std::iter; use rustc::mir::mono::Linkage; use syntax_pos::symbol::Symbol; -use syntax::codemap::Span; +use syntax::source_map::Span; pub use rustc::mir::mono::MonoItem; /// Describes how a monomorphization will be instantiated in object files. 
diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs index 5912eee758d..47c45adb85f 100644 --- a/src/librustc_mir/transform/const_prop.rs +++ b/src/librustc_mir/transform/const_prop.rs @@ -24,7 +24,7 @@ use interpret::EvalContext; use interpret::CompileTimeEvaluator; use interpret::{eval_promoted, mk_borrowck_eval_cx, ValTy}; use transform::{MirPass, MirSource}; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax::source_map::{Span, DUMMY_SP}; use rustc::ty::subst::Substs; use rustc_data_structures::indexed_vec::IndexVec; use rustc::ty::ParamEnv; diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 0ea90e74531..99ea62e8029 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -20,7 +20,7 @@ use rustc::lint; use rustc::session::Session; use syntax::ast::*; use syntax::attr; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax_pos::Span; diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 0356bdae7dc..7b6011cc6f1 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -49,7 +49,7 @@ use rustc::util::nodemap::{NodeMap, NodeSet, FxHashMap, FxHashSet, DefIdMap}; use rustc_metadata::creader::CrateLoader; use rustc_metadata::cstore::CStore; -use syntax::codemap::SourceMap; +use syntax::source_map::SourceMap; use syntax::ext::hygiene::{Mark, Transparency, SyntaxContext}; use syntax::ast::{self, Name, NodeId, Ident, FloatTy, IntTy, UintTy}; use syntax::ext::base::SyntaxExtension; diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index 04a4bca4ffb..05b7ed96251 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -41,7 +41,7 @@ use syntax::print::pprust::{ ty_to_string }; use syntax::ptr::P; -use 
syntax::codemap::{Spanned, DUMMY_SP, respan}; +use syntax::source_map::{Spanned, DUMMY_SP, respan}; use syntax_pos::*; use {escape, generated_code, lower_attributes, PathCollector, SaveContext}; diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 50aa48e8c52..1eb8bacb75a 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -56,14 +56,14 @@ use std::fs::File; use std::path::{Path, PathBuf}; use syntax::ast::{self, Attribute, NodeId, PatKind}; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax::parse::lexer::comments::strip_doc_comment_decoration; use syntax::parse::token; use syntax::print::pprust; use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{arg_to_string, ty_to_string}; -use syntax::codemap::MacroAttribute; +use syntax::source_map::MacroAttribute; use syntax_pos::*; use json_dumper::JsonDumper; diff --git a/src/librustc_traits/dropck_outlives.rs b/src/librustc_traits/dropck_outlives.rs index 5f9060b3623..8bffecfc7c9 100644 --- a/src/librustc_traits/dropck_outlives.rs +++ b/src/librustc_traits/dropck_outlives.rs @@ -18,7 +18,7 @@ use rustc::ty::subst::{Subst, Substs}; use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt}; use rustc::util::nodemap::FxHashSet; use rustc_data_structures::sync::Lrc; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax::source_map::{Span, DUMMY_SP}; crate fn provide(p: &mut Providers) { *p = Providers { diff --git a/src/librustc_traits/evaluate_obligation.rs b/src/librustc_traits/evaluate_obligation.rs index e8a3447902f..b5ee346569a 100644 --- a/src/librustc_traits/evaluate_obligation.rs +++ b/src/librustc_traits/evaluate_obligation.rs @@ -13,7 +13,7 @@ use rustc::traits::{EvaluationResult, Obligation, ObligationCause, use rustc::traits::query::CanonicalPredicateGoal; use rustc::ty::query::Providers; use rustc::ty::{ParamEnvAnd, TyCtxt}; -use syntax::codemap::DUMMY_SP; +use 
syntax::source_map::DUMMY_SP; crate fn provide(p: &mut Providers) { *p = Providers { diff --git a/src/librustc_traits/implied_outlives_bounds.rs b/src/librustc_traits/implied_outlives_bounds.rs index b6560fae110..7b31518c07b 100644 --- a/src/librustc_traits/implied_outlives_bounds.rs +++ b/src/librustc_traits/implied_outlives_bounds.rs @@ -21,7 +21,7 @@ use rustc::ty::outlives::Component; use rustc::ty::query::Providers; use rustc::ty::wf; use syntax::ast::DUMMY_NODE_ID; -use syntax::codemap::DUMMY_SP; +use syntax::source_map::DUMMY_SP; use rustc::traits::FulfillmentContext; use rustc_data_structures::sync::Lrc; diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index c9b5fd525dd..93c9764202f 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -22,7 +22,7 @@ use util::nodemap::FxHashMap; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::cmp; use syntax::ast; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax::ptr::P; use syntax_pos::Span; diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs index f2745d06390..242b69294a5 100644 --- a/src/librustc_typeck/check/closure.rs +++ b/src/librustc_typeck/check/closure.rs @@ -24,7 +24,7 @@ use rustc::ty::subst::Substs; use std::cmp; use std::iter; use rustc_target::spec::abi::Abi; -use syntax::codemap::Span; +use syntax::source_map::Span; use rustc::hir; /// What signature do we *expect* the closure to have from context? 
diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 27b427f7f89..5fa1cd168a2 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -122,7 +122,7 @@ use std::ops::{self, Deref}; use rustc_target::spec::abi::Abi; use syntax::ast; use syntax::attr; -use syntax::codemap::original_sp; +use syntax::source_map::original_sp; use syntax::feature_gate::{GateIssue, emit_feature_err}; use syntax::ptr::P; use syntax::symbol::{Symbol, LocalInternedString, keywords}; diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 02f431dccca..956d542ab7d 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -43,7 +43,7 @@ use rustc_target::spec::abi; use syntax::ast; use syntax::ast::MetaItemKind; use syntax::attr::{InlineAttr, list_contains_name, mark_used}; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax::symbol::{Symbol, keywords}; use syntax::feature_gate; use syntax_pos::{Span, DUMMY_SP}; diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index dceb04a7daa..d5aab1fb098 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -417,7 +417,7 @@ mod test { use syntax::symbol::Symbol; use syntax::ast::*; - use syntax::codemap::dummy_spanned; + use syntax::source_map::dummy_spanned; use syntax_pos::DUMMY_SP; use syntax::with_globals; diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 9e852a077bc..784ef3f256b 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -21,7 +21,7 @@ pub use self::Visibility::{Public, Inherited}; use rustc_target::spec::abi::Abi; use syntax::ast::{self, AttrStyle, Ident}; use syntax::attr; -use syntax::codemap::{dummy_spanned, Spanned}; +use syntax::source_map::{dummy_spanned, Spanned}; use syntax::ptr::P; use syntax::symbol::keywords::{self, Keyword}; use syntax::symbol::InternedString; diff --git a/src/librustdoc/core.rs 
b/src/librustdoc/core.rs index 6b2cb53b8f1..bd37a87b1e1 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -27,7 +27,7 @@ use rustc_metadata::cstore::CStore; use rustc_target::spec::TargetTriple; use syntax::ast::{self, Ident}; -use syntax::codemap; +use syntax::source_map; use syntax::edition::Edition; use syntax::feature_gate::UnstableFeatures; use syntax::json::JsonEmitter; @@ -260,7 +260,7 @@ impl DocAccessLevels for AccessLevels { /// /// If the given `error_format` is `ErrorOutputType::Json` and no `SourceMap` is given, a new one /// will be created for the handler. -pub fn new_handler(error_format: ErrorOutputType, codemap: Option>) +pub fn new_handler(error_format: ErrorOutputType, codemap: Option>) -> errors::Handler { // rustdoc doesn't override (or allow to override) anything from this that is relevant here, so @@ -277,7 +277,7 @@ pub fn new_handler(error_format: ErrorOutputType, codemap: Option { let codemap = codemap.unwrap_or_else( - || Lrc::new(codemap::SourceMap::new(sessopts.file_path_mapping()))); + || Lrc::new(source_map::SourceMap::new(sessopts.file_path_mapping()))); Box::new( JsonEmitter::stderr( None, @@ -387,7 +387,7 @@ pub fn run_core(search_paths: SearchPaths, ..Options::default() }; driver::spawn_thread_pool(sessopts, move |sessopts| { - let codemap = Lrc::new(codemap::SourceMap::new(sessopts.file_path_mapping())); + let codemap = Lrc::new(source_map::SourceMap::new(sessopts.file_path_mapping())); let diagnostic_handler = new_handler(error_format, Some(codemap.clone())); let mut sess = session::build_session_( diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index d6e8f3d938e..dd1e1e99957 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -16,7 +16,7 @@ use syntax::ast; use syntax::ast::{Name, NodeId}; use syntax::attr; use syntax::ptr::P; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax_pos::{self, Span}; use rustc::hir; diff --git 
a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index a3ad50b7079..4e78275f26b 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -21,7 +21,7 @@ use std::fmt::Display; use std::io; use std::io::prelude::*; -use syntax::codemap::{SourceMap, FilePathMapping}; +use syntax::source_map::{SourceMap, FilePathMapping}; use syntax::parse::lexer::{self, TokenAndSpan}; use syntax::parse::token; use syntax::parse; diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 33b3934e3a4..44a9710e30f 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -56,7 +56,7 @@ use externalfiles::ExternalHtml; use serialize::json::{ToJson, Json, as_json}; use syntax::ast; -use syntax::codemap::FileName; +use syntax::source_map::FileName; use syntax::feature_gate::UnstableFeatures; use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId}; use rustc::middle::privacy::AccessLevels; diff --git a/src/librustdoc/markdown.rs b/src/librustdoc/markdown.rs index 05661dc6856..a3ae953e6ee 100644 --- a/src/librustdoc/markdown.rs +++ b/src/librustdoc/markdown.rs @@ -19,7 +19,7 @@ use getopts; use testing; use rustc::session::search_paths::SearchPaths; use rustc::session::config::{Externs, CodegenOptions}; -use syntax::codemap::DUMMY_SP; +use syntax::source_map::DUMMY_SP; use syntax::feature_gate::UnstableFeatures; use syntax::edition::Edition; diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 605b08219a1..9854b919f5c 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -33,7 +33,7 @@ use rustc_driver::driver::phase_2_configure_and_expand; use rustc_metadata::cstore::CStore; use rustc_resolve::MakeGlobMap; use syntax::ast; -use syntax::codemap::SourceMap; +use syntax::source_map::SourceMap; use syntax::edition::Edition; use syntax::feature_gate::UnstableFeatures; use syntax::with_globals; diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 
e2c935e2f69..68ddf72da06 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -15,7 +15,7 @@ use std::mem; use syntax::ast; use syntax::attr; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax_pos::{self, Span}; use rustc::hir::map as hir_map; diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index e53f3ea9036..ec6ac86ba6b 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -16,7 +16,7 @@ pub use symbol::{Ident, Symbol as Name}; pub use util::parser::ExprPrecedence; use syntax_pos::{Span, DUMMY_SP}; -use codemap::{dummy_spanned, respan, Spanned}; +use source_map::{dummy_spanned, respan, Spanned}; use rustc_target::spec::abi::Abi; use ext::hygiene::{Mark, SyntaxContext}; use print::pprust; diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 5857bd282f0..cd9d7682210 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -25,7 +25,7 @@ use ast; use ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam}; -use codemap::{BytePos, Spanned, respan, dummy_spanned}; +use source_map::{BytePos, Spanned, respan, dummy_spanned}; use syntax_pos::{FileName, Span}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::parser::Parser; diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs deleted file mode 100644 index 34cd026f7a0..00000000000 --- a/src/libsyntax/codemap.rs +++ /dev/null @@ -1,1235 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! 
The SourceMap tracks all the source code used within a single crate, mapping -//! from integer byte positions to the original source code location. Each bit -//! of source parsed during crate parsing (typically files, in-memory strings, -//! or various bits of macro expansion) cover a continuous range of bytes in the -//! SourceMap and are represented by SourceFiles. Byte positions are stored in -//! `spans` and used pervasively in the compiler. They are absolute positions -//! within the SourceMap, which upon request can be converted to line and column -//! information, source code snippets, etc. - - -pub use syntax_pos::*; -pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo}; -pub use self::ExpnFormat::*; - -use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::stable_hasher::StableHasher; -use rustc_data_structures::sync::{Lrc, Lock, LockGuard}; -use std::cmp; -use std::hash::Hash; -use std::path::{Path, PathBuf}; - -use std::env; -use std::fs; -use std::io::{self, Read}; -use errors::SourceMapper; - -/// Return the span itself if it doesn't come from a macro expansion, -/// otherwise return the call site span up to the `enclosing_sp` by -/// following the `expn_info` chain. 
-pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { - let call_site1 = sp.ctxt().outer().expn_info().map(|ei| ei.call_site); - let call_site2 = enclosing_sp.ctxt().outer().expn_info().map(|ei| ei.call_site); - match (call_site1, call_site2) { - (None, _) => sp, - (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, - (Some(call_site1), _) => original_sp(call_site1, enclosing_sp), - } -} - -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] -pub struct Spanned { - pub node: T, - pub span: Span, -} - -pub fn respan(sp: Span, t: T) -> Spanned { - Spanned {node: t, span: sp} -} - -pub fn dummy_spanned(t: T) -> Spanned { - respan(DUMMY_SP, t) -} - -// _____________________________________________________________________________ -// SourceFile, MultiByteChar, FileName, FileLines -// - -/// An abstraction over the fs operations used by the Parser. -pub trait FileLoader { - /// Query the existence of a file. - fn file_exists(&self, path: &Path) -> bool; - - /// Return an absolute path to a file, if possible. - fn abs_path(&self, path: &Path) -> Option; - - /// Read the contents of an UTF-8 file into memory. - fn read_file(&self, path: &Path) -> io::Result; -} - -/// A FileLoader that uses std::fs to load real files. -pub struct RealFileLoader; - -impl FileLoader for RealFileLoader { - fn file_exists(&self, path: &Path) -> bool { - fs::metadata(path).is_ok() - } - - fn abs_path(&self, path: &Path) -> Option { - if path.is_absolute() { - Some(path.to_path_buf()) - } else { - env::current_dir() - .ok() - .map(|cwd| cwd.join(path)) - } - } - - fn read_file(&self, path: &Path) -> io::Result { - let mut src = String::new(); - fs::File::open(path)?.read_to_string(&mut src)?; - Ok(src) - } -} - -// This is a SourceFile identifier that is used to correlate SourceFiles between -// subsequent compilation sessions (which is something we need to do during -// incremental compilation). 
-#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)] -pub struct StableFilemapId(u128); - -impl StableFilemapId { - pub fn new(source_file: &SourceFile) -> StableFilemapId { - let mut hasher = StableHasher::new(); - - source_file.name.hash(&mut hasher); - source_file.name_was_remapped.hash(&mut hasher); - source_file.unmapped_path.hash(&mut hasher); - - StableFilemapId(hasher.finish()) - } -} - -// _____________________________________________________________________________ -// SourceMap -// - -pub(super) struct SourceMapFiles { - pub(super) file_maps: Vec>, - stable_id_to_source_file: FxHashMap> -} - -pub struct SourceMap { - pub(super) files: Lock, - file_loader: Box, - // This is used to apply the file path remapping as specified via - // --remap-path-prefix to all SourceFiles allocated within this SourceMap. - path_mapping: FilePathMapping, - /// In case we are in a doctest, replace all file names with the PathBuf, - /// and add the given offsets to the line info - doctest_offset: Option<(FileName, isize)>, -} - -impl SourceMap { - pub fn new(path_mapping: FilePathMapping) -> SourceMap { - SourceMap { - files: Lock::new(SourceMapFiles { - file_maps: Vec::new(), - stable_id_to_source_file: FxHashMap(), - }), - file_loader: Box::new(RealFileLoader), - path_mapping, - doctest_offset: None, - } - } - - pub fn new_doctest(path_mapping: FilePathMapping, - file: FileName, line: isize) -> SourceMap { - SourceMap { - doctest_offset: Some((file, line)), - ..SourceMap::new(path_mapping) - } - - } - - pub fn with_file_loader(file_loader: Box, - path_mapping: FilePathMapping) - -> SourceMap { - SourceMap { - files: Lock::new(SourceMapFiles { - file_maps: Vec::new(), - stable_id_to_source_file: FxHashMap(), - }), - file_loader: file_loader, - path_mapping, - doctest_offset: None, - } - } - - pub fn path_mapping(&self) -> &FilePathMapping { - &self.path_mapping - } - - pub fn file_exists(&self, path: &Path) -> bool { - 
self.file_loader.file_exists(path) - } - - pub fn load_file(&self, path: &Path) -> io::Result> { - let src = self.file_loader.read_file(path)?; - let filename = if let Some((ref name, _)) = self.doctest_offset { - name.clone() - } else { - path.to_owned().into() - }; - Ok(self.new_source_file(filename, src)) - } - - pub fn files(&self) -> LockGuard>> { - LockGuard::map(self.files.borrow(), |files| &mut files.file_maps) - } - - pub fn source_file_by_stable_id(&self, stable_id: StableFilemapId) -> Option> { - self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|fm| fm.clone()) - } - - fn next_start_pos(&self) -> usize { - match self.files.borrow().file_maps.last() { - None => 0, - // Add one so there is some space between files. This lets us distinguish - // positions in the codemap, even in the presence of zero-length files. - Some(last) => last.end_pos.to_usize() + 1, - } - } - - /// Creates a new source_file. - /// This does not ensure that only one SourceFile exists per file name. - pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc { - let start_pos = self.next_start_pos(); - - // The path is used to determine the directory for loading submodules and - // include files, so it must be before remapping. - // Note that filename may not be a valid path, eg it may be `` etc, - // but this is okay because the directory determined by `path.pop()` will - // be empty, so the working directory will be used. 
- let unmapped_path = filename.clone(); - - let (filename, was_remapped) = match filename { - FileName::Real(filename) => { - let (filename, was_remapped) = self.path_mapping.map_prefix(filename); - (FileName::Real(filename), was_remapped) - }, - other => (other, false), - }; - let source_file = Lrc::new(SourceFile::new( - filename, - was_remapped, - unmapped_path, - src, - Pos::from_usize(start_pos), - )); - - let mut files = self.files.borrow_mut(); - - files.file_maps.push(source_file.clone()); - files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone()); - - source_file - } - - /// Allocates a new SourceFile representing a source file from an external - /// crate. The source code of such an "imported source_file" is not available, - /// but we still know enough to generate accurate debuginfo location - /// information for things inlined from other crates. - pub fn new_imported_source_file(&self, - filename: FileName, - name_was_remapped: bool, - crate_of_origin: u32, - src_hash: u128, - name_hash: u128, - source_len: usize, - mut file_local_lines: Vec, - mut file_local_multibyte_chars: Vec, - mut file_local_non_narrow_chars: Vec) - -> Lrc { - let start_pos = self.next_start_pos(); - - let end_pos = Pos::from_usize(start_pos + source_len); - let start_pos = Pos::from_usize(start_pos); - - for pos in &mut file_local_lines { - *pos = *pos + start_pos; - } - - for mbc in &mut file_local_multibyte_chars { - mbc.pos = mbc.pos + start_pos; - } - - for swc in &mut file_local_non_narrow_chars { - *swc = *swc + start_pos; - } - - let source_file = Lrc::new(SourceFile { - name: filename, - name_was_remapped, - unmapped_path: None, - crate_of_origin, - src: None, - src_hash, - external_src: Lock::new(ExternalSource::AbsentOk), - start_pos, - end_pos, - lines: file_local_lines, - multibyte_chars: file_local_multibyte_chars, - non_narrow_chars: file_local_non_narrow_chars, - name_hash, - }); - - let mut files = self.files.borrow_mut(); - - 
files.file_maps.push(source_file.clone()); - files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone()); - - source_file - } - - pub fn mk_substr_filename(&self, sp: Span) -> String { - let pos = self.lookup_char_pos(sp.lo()); - format!("<{}:{}:{}>", - pos.file.name, - pos.line, - pos.col.to_usize() + 1) - } - - // If there is a doctest_offset, apply it to the line - pub fn doctest_offset_line(&self, mut orig: usize) -> usize { - if let Some((_, line)) = self.doctest_offset { - if line >= 0 { - orig = orig + line as usize; - } else { - orig = orig - (-line) as usize; - } - } - orig - } - - /// Lookup source information about a BytePos - pub fn lookup_char_pos(&self, pos: BytePos) -> Loc { - let chpos = self.bytepos_to_file_charpos(pos); - match self.lookup_line(pos) { - Ok(SourceFileAndLine { fm: f, line: a }) => { - let line = a + 1; // Line numbers start at 1 - let linebpos = f.lines[a]; - let linechpos = self.bytepos_to_file_charpos(linebpos); - let col = chpos - linechpos; - - let col_display = { - let start_width_idx = f - .non_narrow_chars - .binary_search_by_key(&linebpos, |x| x.pos()) - .unwrap_or_else(|x| x); - let end_width_idx = f - .non_narrow_chars - .binary_search_by_key(&pos, |x| x.pos()) - .unwrap_or_else(|x| x); - let special_chars = end_width_idx - start_width_idx; - let non_narrow: usize = f - .non_narrow_chars[start_width_idx..end_width_idx] - .into_iter() - .map(|x| x.width()) - .sum(); - col.0 - special_chars + non_narrow - }; - debug!("byte pos {:?} is on the line at byte pos {:?}", - pos, linebpos); - debug!("char pos {:?} is on the line at char pos {:?}", - chpos, linechpos); - debug!("byte is on line: {}", line); - assert!(chpos >= linechpos); - Loc { - file: f, - line, - col, - col_display, - } - } - Err(f) => { - let col_display = { - let end_width_idx = f - .non_narrow_chars - .binary_search_by_key(&pos, |x| x.pos()) - .unwrap_or_else(|x| x); - let non_narrow: usize = f - 
.non_narrow_chars[0..end_width_idx] - .into_iter() - .map(|x| x.width()) - .sum(); - chpos.0 - end_width_idx + non_narrow - }; - Loc { - file: f, - line: 0, - col: chpos, - col_display, - } - } - } - } - - // If the relevant source_file is empty, we don't return a line number. - pub fn lookup_line(&self, pos: BytePos) -> Result> { - let idx = self.lookup_source_file_idx(pos); - - let f = (*self.files.borrow().file_maps)[idx].clone(); - - match f.lookup_line(pos) { - Some(line) => Ok(SourceFileAndLine { fm: f, line: line }), - None => Err(f) - } - } - - pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt { - let loc = self.lookup_char_pos(pos); - LocWithOpt { - filename: loc.file.name.clone(), - line: loc.line, - col: loc.col, - file: Some(loc.file) - } - } - - /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If - /// there are gaps between lhs and rhs, the resulting union will cross these gaps. - /// For this to work, the spans have to be: - /// - /// * the ctxt of both spans much match - /// * the lhs span needs to end on the same line the rhs span begins - /// * the lhs span must start at or before the rhs span - pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { - // make sure we're at the same expansion id - if sp_lhs.ctxt() != sp_rhs.ctxt() { - return None; - } - - let lhs_end = match self.lookup_line(sp_lhs.hi()) { - Ok(x) => x, - Err(_) => return None - }; - let rhs_begin = match self.lookup_line(sp_rhs.lo()) { - Ok(x) => x, - Err(_) => return None - }; - - // if we must cross lines to merge, don't merge - if lhs_end.line != rhs_begin.line { - return None; - } - - // ensure these follow the expected order and we don't overlap - if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) { - Some(sp_lhs.to(sp_rhs)) - } else { - None - } - } - - pub fn span_to_string(&self, sp: Span) -> String { - if self.files.borrow().file_maps.is_empty() && sp.is_dummy() { - return 
"no-location".to_string(); - } - - let lo = self.lookup_char_pos_adj(sp.lo()); - let hi = self.lookup_char_pos_adj(sp.hi()); - format!("{}:{}:{}: {}:{}", - lo.filename, - lo.line, - lo.col.to_usize() + 1, - hi.line, - hi.col.to_usize() + 1) - } - - pub fn span_to_filename(&self, sp: Span) -> FileName { - self.lookup_char_pos(sp.lo()).file.name.clone() - } - - pub fn span_to_unmapped_path(&self, sp: Span) -> FileName { - self.lookup_char_pos(sp.lo()).file.unmapped_path.clone() - .expect("SourceMap::span_to_unmapped_path called for imported SourceFile?") - } - - pub fn is_multiline(&self, sp: Span) -> bool { - let lo = self.lookup_char_pos(sp.lo()); - let hi = self.lookup_char_pos(sp.hi()); - lo.line != hi.line - } - - pub fn span_to_lines(&self, sp: Span) -> FileLinesResult { - debug!("span_to_lines(sp={:?})", sp); - - if sp.lo() > sp.hi() { - return Err(SpanLinesError::IllFormedSpan(sp)); - } - - let lo = self.lookup_char_pos(sp.lo()); - debug!("span_to_lines: lo={:?}", lo); - let hi = self.lookup_char_pos(sp.hi()); - debug!("span_to_lines: hi={:?}", hi); - - if lo.file.start_pos != hi.file.start_pos { - return Err(SpanLinesError::DistinctSources(DistinctSources { - begin: (lo.file.name.clone(), lo.file.start_pos), - end: (hi.file.name.clone(), hi.file.start_pos), - })); - } - assert!(hi.line >= lo.line); - - let mut lines = Vec::with_capacity(hi.line - lo.line + 1); - - // The span starts partway through the first line, - // but after that it starts from offset 0. - let mut start_col = lo.col; - - // For every line but the last, it extends from `start_col` - // and to the end of the line. Be careful because the line - // numbers in Loc are 1-based, so we subtract 1 to get 0-based - // lines. - for line_index in lo.line-1 .. 
hi.line-1 { - let line_len = lo.file.get_line(line_index) - .map(|s| s.chars().count()) - .unwrap_or(0); - lines.push(LineInfo { line_index, - start_col, - end_col: CharPos::from_usize(line_len) }); - start_col = CharPos::from_usize(0); - } - - // For the last line, it extends from `start_col` to `hi.col`: - lines.push(LineInfo { line_index: hi.line - 1, - start_col, - end_col: hi.col }); - - Ok(FileLines {file: lo.file, lines: lines}) - } - - /// Extract the source surrounding the given `Span` using the `extract_source` function. The - /// extract function takes three arguments: a string slice containing the source, an index in - /// the slice for the beginning of the span and an index in the slice for the end of the span. - fn span_to_source(&self, sp: Span, extract_source: F) -> Result - where F: Fn(&str, usize, usize) -> String - { - if sp.lo() > sp.hi() { - return Err(SpanSnippetError::IllFormedSpan(sp)); - } - - let local_begin = self.lookup_byte_offset(sp.lo()); - let local_end = self.lookup_byte_offset(sp.hi()); - - if local_begin.fm.start_pos != local_end.fm.start_pos { - return Err(SpanSnippetError::DistinctSources(DistinctSources { - begin: (local_begin.fm.name.clone(), - local_begin.fm.start_pos), - end: (local_end.fm.name.clone(), - local_end.fm.start_pos) - })); - } else { - self.ensure_source_file_source_present(local_begin.fm.clone()); - - let start_index = local_begin.pos.to_usize(); - let end_index = local_end.pos.to_usize(); - let source_len = (local_begin.fm.end_pos - - local_begin.fm.start_pos).to_usize(); - - if start_index > end_index || end_index > source_len { - return Err(SpanSnippetError::MalformedForCodemap( - MalformedCodemapPositions { - name: local_begin.fm.name.clone(), - source_len, - begin_pos: local_begin.pos, - end_pos: local_end.pos, - })); - } - - if let Some(ref src) = local_begin.fm.src { - return Ok(extract_source(src, start_index, end_index)); - } else if let Some(src) = local_begin.fm.external_src.borrow().get_source() { - 
return Ok(extract_source(src, start_index, end_index)); - } else { - return Err(SpanSnippetError::SourceNotAvailable { - filename: local_begin.fm.name.clone() - }); - } - } - } - - /// Return the source snippet as `String` corresponding to the given `Span` - pub fn span_to_snippet(&self, sp: Span) -> Result { - self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index] - .to_string()) - } - - /// Return the source snippet as `String` before the given `Span` - pub fn span_to_prev_source(&self, sp: Span) -> Result { - self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string()) - } - - /// Extend the given `Span` to just after the previous occurrence of `c`. Return the same span - /// if no character could be found or if an error occurred while retrieving the code snippet. - pub fn span_extend_to_prev_char(&self, sp: Span, c: char) -> Span { - if let Ok(prev_source) = self.span_to_prev_source(sp) { - let prev_source = prev_source.rsplit(c).nth(0).unwrap_or("").trim_left(); - if !prev_source.is_empty() && !prev_source.contains('\n') { - return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32)); - } - } - - sp - } - - /// Extend the given `Span` to just after the previous occurrence of `pat` when surrounded by - /// whitespace. Return the same span if no character could be found or if an error occurred - /// while retrieving the code snippet. 
- pub fn span_extend_to_prev_str(&self, sp: Span, pat: &str, accept_newlines: bool) -> Span { - // assure that the pattern is delimited, to avoid the following - // fn my_fn() - // ^^^^ returned span without the check - // ---------- correct span - for ws in &[" ", "\t", "\n"] { - let pat = pat.to_owned() + ws; - if let Ok(prev_source) = self.span_to_prev_source(sp) { - let prev_source = prev_source.rsplit(&pat).nth(0).unwrap_or("").trim_left(); - if !prev_source.is_empty() && (!prev_source.contains('\n') || accept_newlines) { - return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32)); - } - } - } - - sp - } - - /// Given a `Span`, try to get a shorter span ending before the first occurrence of `c` `char` - pub fn span_until_char(&self, sp: Span, c: char) -> Span { - match self.span_to_snippet(sp) { - Ok(snippet) => { - let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right(); - if !snippet.is_empty() && !snippet.contains('\n') { - sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32)) - } else { - sp - } - } - _ => sp, - } - } - - /// Given a `Span`, try to get a shorter span ending just after the first occurrence of `char` - /// `c`. - pub fn span_through_char(&self, sp: Span, c: char) -> Span { - if let Ok(snippet) = self.span_to_snippet(sp) { - if let Some(offset) = snippet.find(c) { - return sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32)); - } - } - sp - } - - /// Given a `Span`, get a new `Span` covering the first token and all its trailing whitespace or - /// the original `Span`. - /// - /// If `sp` points to `"let mut x"`, then a span pointing at `"let "` will be returned. 
- pub fn span_until_non_whitespace(&self, sp: Span) -> Span { - let mut whitespace_found = false; - - self.span_take_while(sp, |c| { - if !whitespace_found && c.is_whitespace() { - whitespace_found = true; - } - - if whitespace_found && !c.is_whitespace() { - false - } else { - true - } - }) - } - - /// Given a `Span`, get a new `Span` covering the first token without its trailing whitespace or - /// the original `Span` in case of error. - /// - /// If `sp` points to `"let mut x"`, then a span pointing at `"let"` will be returned. - pub fn span_until_whitespace(&self, sp: Span) -> Span { - self.span_take_while(sp, |c| !c.is_whitespace()) - } - - /// Given a `Span`, get a shorter one until `predicate` yields false. - pub fn span_take_while

(&self, sp: Span, predicate: P) -> Span - where P: for <'r> FnMut(&'r char) -> bool - { - if let Ok(snippet) = self.span_to_snippet(sp) { - let offset = snippet.chars() - .take_while(predicate) - .map(|c| c.len_utf8()) - .sum::(); - - sp.with_hi(BytePos(sp.lo().0 + (offset as u32))) - } else { - sp - } - } - - pub fn def_span(&self, sp: Span) -> Span { - self.span_until_char(sp, '{') - } - - /// Returns a new span representing just the start-point of this span - pub fn start_point(&self, sp: Span) -> Span { - let pos = sp.lo().0; - let width = self.find_width_of_character_at_span(sp, false); - let corrected_start_position = pos.checked_add(width).unwrap_or(pos); - let end_point = BytePos(cmp::max(corrected_start_position, sp.lo().0)); - sp.with_hi(end_point) - } - - /// Returns a new span representing just the end-point of this span - pub fn end_point(&self, sp: Span) -> Span { - let pos = sp.hi().0; - - let width = self.find_width_of_character_at_span(sp, false); - let corrected_end_position = pos.checked_sub(width).unwrap_or(pos); - - let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0)); - sp.with_lo(end_point) - } - - /// Returns a new span representing the next character after the end-point of this span - pub fn next_point(&self, sp: Span) -> Span { - let start_of_next_point = sp.hi().0; - - let width = self.find_width_of_character_at_span(sp, true); - // If the width is 1, then the next span should point to the same `lo` and `hi`. However, - // in the case of a multibyte character, where the width != 1, the next span should - // span multiple bytes to include the whole character. - let end_of_next_point = start_of_next_point.checked_add( - width - 1).unwrap_or(start_of_next_point); - - let end_of_next_point = BytePos(cmp::max(sp.lo().0 + 1, end_of_next_point)); - Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt()) - } - - /// Finds the width of a character, either before or after the provided span. 
- fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 { - // Disregard malformed spans and assume a one-byte wide character. - if sp.lo() >= sp.hi() { - debug!("find_width_of_character_at_span: early return malformed span"); - return 1; - } - - let local_begin = self.lookup_byte_offset(sp.lo()); - let local_end = self.lookup_byte_offset(sp.hi()); - debug!("find_width_of_character_at_span: local_begin=`{:?}`, local_end=`{:?}`", - local_begin, local_end); - - let start_index = local_begin.pos.to_usize(); - let end_index = local_end.pos.to_usize(); - debug!("find_width_of_character_at_span: start_index=`{:?}`, end_index=`{:?}`", - start_index, end_index); - - // Disregard indexes that are at the start or end of their spans, they can't fit bigger - // characters. - if (!forwards && end_index == usize::min_value()) || - (forwards && start_index == usize::max_value()) { - debug!("find_width_of_character_at_span: start or end of span, cannot be multibyte"); - return 1; - } - - let source_len = (local_begin.fm.end_pos - local_begin.fm.start_pos).to_usize(); - debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len); - // Ensure indexes are also not malformed. - if start_index > end_index || end_index > source_len { - debug!("find_width_of_character_at_span: source indexes are malformed"); - return 1; - } - - let src = local_begin.fm.external_src.borrow(); - - // We need to extend the snippet to the end of the src rather than to end_index so when - // searching forwards for boundaries we've got somewhere to search. 
- let snippet = if let Some(ref src) = local_begin.fm.src { - let len = src.len(); - (&src[start_index..len]) - } else if let Some(src) = src.get_source() { - let len = src.len(); - (&src[start_index..len]) - } else { - return 1; - }; - debug!("find_width_of_character_at_span: snippet=`{:?}`", snippet); - - let mut target = if forwards { end_index + 1 } else { end_index - 1 }; - debug!("find_width_of_character_at_span: initial target=`{:?}`", target); - - while !snippet.is_char_boundary(target - start_index) && target < source_len { - target = if forwards { - target + 1 - } else { - match target.checked_sub(1) { - Some(target) => target, - None => { - break; - } - } - }; - debug!("find_width_of_character_at_span: target=`{:?}`", target); - } - debug!("find_width_of_character_at_span: final target=`{:?}`", target); - - if forwards { - (target - end_index) as u32 - } else { - (end_index - target) as u32 - } - } - - pub fn get_source_file(&self, filename: &FileName) -> Option> { - for fm in self.files.borrow().file_maps.iter() { - if *filename == fm.name { - return Some(fm.clone()); - } - } - None - } - - /// For a global BytePos compute the local offset within the containing SourceFile - pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos { - let idx = self.lookup_source_file_idx(bpos); - let fm = (*self.files.borrow().file_maps)[idx].clone(); - let offset = bpos - fm.start_pos; - SourceFileAndBytePos {fm: fm, pos: offset} - } - - /// Converts an absolute BytePos to a CharPos relative to the source_file. 
- pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { - let idx = self.lookup_source_file_idx(bpos); - let map = &(*self.files.borrow().file_maps)[idx]; - - // The number of extra bytes due to multibyte chars in the SourceFile - let mut total_extra_bytes = 0; - - for mbc in map.multibyte_chars.iter() { - debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); - if mbc.pos < bpos { - // every character is at least one byte, so we only - // count the actual extra bytes. - total_extra_bytes += mbc.bytes as u32 - 1; - // We should never see a byte position in the middle of a - // character - assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32); - } else { - break; - } - } - - assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32()); - CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize) - } - - // Return the index of the source_file (in self.files) which contains pos. - pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize { - let files = self.files.borrow(); - let files = &files.file_maps; - let count = files.len(); - - // Binary search for the source_file. 
- let mut a = 0; - let mut b = count; - while b - a > 1 { - let m = (a + b) / 2; - if files[m].start_pos > pos { - b = m; - } else { - a = m; - } - } - - assert!(a < count, "position {} does not resolve to a source location", pos.to_usize()); - - return a; - } - - pub fn count_lines(&self) -> usize { - self.files().iter().fold(0, |a, f| a + f.count_lines()) - } - - - pub fn generate_fn_name_span(&self, span: Span) -> Option { - let prev_span = self.span_extend_to_prev_str(span, "fn", true); - self.span_to_snippet(prev_span).map(|snippet| { - let len = snippet.find(|c: char| !c.is_alphanumeric() && c != '_') - .expect("no label after fn"); - prev_span.with_hi(BytePos(prev_span.lo().0 + len as u32)) - }).ok() - } - - /// Take the span of a type parameter in a function signature and try to generate a span for the - /// function name (with generics) and a new snippet for this span with the pointed type - /// parameter as a new local type parameter. - /// - /// For instance: - /// ```rust,ignore (pseudo-Rust) - /// // Given span - /// fn my_function(param: T) - /// // ^ Original span - /// - /// // Result - /// fn my_function(param: T) - /// // ^^^^^^^^^^^ Generated span with snippet `my_function` - /// ``` - /// - /// Attention: The method used is very fragile since it essentially duplicates the work of the - /// parser. If you need to use this function or something similar, please consider updating the - /// codemap functions and this function to something more robust. 
- pub fn generate_local_type_param_snippet(&self, span: Span) -> Option<(Span, String)> { - // Try to extend the span to the previous "fn" keyword to retrieve the function - // signature - let sugg_span = self.span_extend_to_prev_str(span, "fn", false); - if sugg_span != span { - if let Ok(snippet) = self.span_to_snippet(sugg_span) { - // Consume the function name - let mut offset = snippet.find(|c: char| !c.is_alphanumeric() && c != '_') - .expect("no label after fn"); - - // Consume the generics part of the function signature - let mut bracket_counter = 0; - let mut last_char = None; - for c in snippet[offset..].chars() { - match c { - '<' => bracket_counter += 1, - '>' => bracket_counter -= 1, - '(' => if bracket_counter == 0 { break; } - _ => {} - } - offset += c.len_utf8(); - last_char = Some(c); - } - - // Adjust the suggestion span to encompass the function name with its generics - let sugg_span = sugg_span.with_hi(BytePos(sugg_span.lo().0 + offset as u32)); - - // Prepare the new suggested snippet to append the type parameter that triggered - // the error in the generics of the function signature - let mut new_snippet = if last_char == Some('>') { - format!("{}, ", &snippet[..(offset - '>'.len_utf8())]) - } else { - format!("{}<", &snippet[..offset]) - }; - new_snippet.push_str(&self.span_to_snippet(span).unwrap_or("T".to_string())); - new_snippet.push('>'); - - return Some((sugg_span, new_snippet)); - } - } - - None - } -} - -impl SourceMapper for SourceMap { - fn lookup_char_pos(&self, pos: BytePos) -> Loc { - self.lookup_char_pos(pos) - } - fn span_to_lines(&self, sp: Span) -> FileLinesResult { - self.span_to_lines(sp) - } - fn span_to_string(&self, sp: Span) -> String { - self.span_to_string(sp) - } - fn span_to_filename(&self, sp: Span) -> FileName { - self.span_to_filename(sp) - } - fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { - self.merge_spans(sp_lhs, sp_rhs) - } - fn call_span_if_macro(&self, sp: Span) -> Span { - if 
self.span_to_filename(sp.clone()).is_macros() { - let v = sp.macro_backtrace(); - if let Some(use_site) = v.last() { - return use_site.call_site; - } - } - sp - } - fn ensure_source_file_source_present(&self, file_map: Lrc) -> bool { - file_map.add_external_src( - || match file_map.name { - FileName::Real(ref name) => self.file_loader.read_file(name).ok(), - _ => None, - } - ) - } - fn doctest_offset_line(&self, line: usize) -> usize { - self.doctest_offset_line(line) - } -} - -#[derive(Clone)] -pub struct FilePathMapping { - mapping: Vec<(PathBuf, PathBuf)>, -} - -impl FilePathMapping { - pub fn empty() -> FilePathMapping { - FilePathMapping { - mapping: vec![] - } - } - - pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping { - FilePathMapping { - mapping, - } - } - - /// Applies any path prefix substitution as defined by the mapping. - /// The return value is the remapped path and a boolean indicating whether - /// the path was affected by the mapping. - pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) { - // NOTE: We are iterating over the mapping entries from last to first - // because entries specified later on the command line should - // take precedence. 
- for &(ref from, ref to) in self.mapping.iter().rev() { - if let Ok(rest) = path.strip_prefix(from) { - return (to.join(rest), true); - } - } - - (path, false) - } -} - -// _____________________________________________________________________________ -// Tests -// - -#[cfg(test)] -mod tests { - use super::*; - use rustc_data_structures::sync::Lrc; - - fn init_code_map() -> SourceMap { - let cm = SourceMap::new(FilePathMapping::empty()); - cm.new_source_file(PathBuf::from("blork.rs").into(), - "first line.\nsecond line".to_string()); - cm.new_source_file(PathBuf::from("empty.rs").into(), - "".to_string()); - cm.new_source_file(PathBuf::from("blork2.rs").into(), - "first line.\nsecond line".to_string()); - cm - } - - #[test] - fn t3() { - // Test lookup_byte_offset - let cm = init_code_map(); - - let fmabp1 = cm.lookup_byte_offset(BytePos(23)); - assert_eq!(fmabp1.fm.name, PathBuf::from("blork.rs").into()); - assert_eq!(fmabp1.pos, BytePos(23)); - - let fmabp1 = cm.lookup_byte_offset(BytePos(24)); - assert_eq!(fmabp1.fm.name, PathBuf::from("empty.rs").into()); - assert_eq!(fmabp1.pos, BytePos(0)); - - let fmabp2 = cm.lookup_byte_offset(BytePos(25)); - assert_eq!(fmabp2.fm.name, PathBuf::from("blork2.rs").into()); - assert_eq!(fmabp2.pos, BytePos(0)); - } - - #[test] - fn t4() { - // Test bytepos_to_file_charpos - let cm = init_code_map(); - - let cp1 = cm.bytepos_to_file_charpos(BytePos(22)); - assert_eq!(cp1, CharPos(22)); - - let cp2 = cm.bytepos_to_file_charpos(BytePos(25)); - assert_eq!(cp2, CharPos(0)); - } - - #[test] - fn t5() { - // Test zero-length source_files. 
- let cm = init_code_map(); - - let loc1 = cm.lookup_char_pos(BytePos(22)); - assert_eq!(loc1.file.name, PathBuf::from("blork.rs").into()); - assert_eq!(loc1.line, 2); - assert_eq!(loc1.col, CharPos(10)); - - let loc2 = cm.lookup_char_pos(BytePos(25)); - assert_eq!(loc2.file.name, PathBuf::from("blork2.rs").into()); - assert_eq!(loc2.line, 1); - assert_eq!(loc2.col, CharPos(0)); - } - - fn init_code_map_mbc() -> SourceMap { - let cm = SourceMap::new(FilePathMapping::empty()); - // € is a three byte utf8 char. - cm.new_source_file(PathBuf::from("blork.rs").into(), - "fir€st €€€€ line.\nsecond line".to_string()); - cm.new_source_file(PathBuf::from("blork2.rs").into(), - "first line€€.\n€ second line".to_string()); - cm - } - - #[test] - fn t6() { - // Test bytepos_to_file_charpos in the presence of multi-byte chars - let cm = init_code_map_mbc(); - - let cp1 = cm.bytepos_to_file_charpos(BytePos(3)); - assert_eq!(cp1, CharPos(3)); - - let cp2 = cm.bytepos_to_file_charpos(BytePos(6)); - assert_eq!(cp2, CharPos(4)); - - let cp3 = cm.bytepos_to_file_charpos(BytePos(56)); - assert_eq!(cp3, CharPos(12)); - - let cp4 = cm.bytepos_to_file_charpos(BytePos(61)); - assert_eq!(cp4, CharPos(15)); - } - - #[test] - fn t7() { - // Test span_to_lines for a span ending at the end of source_file - let cm = init_code_map(); - let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); - let file_lines = cm.span_to_lines(span).unwrap(); - - assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into()); - assert_eq!(file_lines.lines.len(), 1); - assert_eq!(file_lines.lines[0].line_index, 1); - } - - /// Given a string like " ~~~~~~~~~~~~ ", produces a span - /// converting that range. The idea is that the string has the same - /// length as the input, and we uncover the byte positions. Note - /// that this can span lines and so on. 
- fn span_from_selection(input: &str, selection: &str) -> Span { - assert_eq!(input.len(), selection.len()); - let left_index = selection.find('~').unwrap() as u32; - let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); - Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION) - } - - /// Test span_to_snippet and span_to_lines for a span converting 3 - /// lines in the middle of a file. - #[test] - fn span_to_snippet_and_lines_spanning_multiple_lines() { - let cm = SourceMap::new(FilePathMapping::empty()); - let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; - let selection = " \n ~~\n~~~\n~~~~~ \n \n"; - cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string()); - let span = span_from_selection(inputtext, selection); - - // check that we are extracting the text we thought we were extracting - assert_eq!(&cm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD"); - - // check that span_to_lines gives us the complete result with the lines/cols we expected - let lines = cm.span_to_lines(span).unwrap(); - let expected = vec![ - LineInfo { line_index: 1, start_col: CharPos(4), end_col: CharPos(6) }, - LineInfo { line_index: 2, start_col: CharPos(0), end_col: CharPos(3) }, - LineInfo { line_index: 3, start_col: CharPos(0), end_col: CharPos(5) } - ]; - assert_eq!(lines.lines, expected); - } - - #[test] - fn t8() { - // Test span_to_snippet for a span ending at the end of source_file - let cm = init_code_map(); - let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); - let snippet = cm.span_to_snippet(span); - - assert_eq!(snippet, Ok("second line".to_string())); - } - - #[test] - fn t9() { - // Test span_to_str for a span ending at the end of source_file - let cm = init_code_map(); - let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); - let sstr = cm.span_to_string(span); - - assert_eq!(sstr, "blork.rs:2:1: 2:12"); - } - - /// Test failing to merge two spans on different lines - #[test] - 
fn span_merging_fail() { - let cm = SourceMap::new(FilePathMapping::empty()); - let inputtext = "bbbb BB\ncc CCC\n"; - let selection1 = " ~~\n \n"; - let selection2 = " \n ~~~\n"; - cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned()); - let span1 = span_from_selection(inputtext, selection1); - let span2 = span_from_selection(inputtext, selection2); - - assert!(cm.merge_spans(span1, span2).is_none()); - } - - /// Returns the span corresponding to the `n`th occurrence of - /// `substring` in `source_text`. - trait SourceMapExtension { - fn span_substr(&self, - file: &Lrc, - source_text: &str, - substring: &str, - n: usize) - -> Span; - } - - impl SourceMapExtension for SourceMap { - fn span_substr(&self, - file: &Lrc, - source_text: &str, - substring: &str, - n: usize) - -> Span - { - println!("span_substr(file={:?}/{:?}, substring={:?}, n={})", - file.name, file.start_pos, substring, n); - let mut i = 0; - let mut hi = 0; - loop { - let offset = source_text[hi..].find(substring).unwrap_or_else(|| { - panic!("source_text `{}` does not have {} occurrences of `{}`, only {}", - source_text, n, substring, i); - }); - let lo = hi + offset; - hi = lo + substring.len(); - if i == n { - let span = Span::new( - BytePos(lo as u32 + file.start_pos.0), - BytePos(hi as u32 + file.start_pos.0), - NO_EXPANSION, - ); - assert_eq!(&self.span_to_snippet(span).unwrap()[..], - substring); - return span; - } - i += 1; - } - } - } -} diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index b4e35a9d564..0e52434ec01 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -12,7 +12,7 @@ use attr::HasAttrs; use feature_gate::{feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features, GateIssue}; use {fold, attr}; use ast; -use codemap::Spanned; +use source_map::Spanned; use edition::Edition; use parse::{token, ParseSess}; use OneVector; diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 
6a5a2a5e500..d044efa3c38 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -13,7 +13,7 @@ use std::env; use ast; use ast::{Ident, Name}; -use codemap; +use source_map; use syntax_pos::Span; use ext::base::{ExtCtxt, MacEager, MacResult}; use ext::build::AstBuilder; @@ -223,7 +223,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, ty, expr, ), - vis: codemap::respan(span.shrink_to_lo(), ast::VisibilityKind::Public), + vis: source_map::respan(span.shrink_to_lo(), ast::VisibilityKind::Public), span, tokens: None, }) diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 8ae4f9c1aa4..c17874cf6f8 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -12,7 +12,7 @@ pub use self::SyntaxExtension::*; use ast::{self, Attribute, Name, PatKind, MetaItem}; use attr::HasAttrs; -use codemap::{self, SourceMap, Spanned, respan}; +use source_map::{self, SourceMap, Spanned, respan}; use syntax_pos::{Span, MultiSpan, DUMMY_SP}; use edition::Edition; use errors::{DiagnosticBuilder, DiagnosticId}; @@ -481,7 +481,7 @@ impl DummyResult { pub fn raw_expr(sp: Span) -> P { P(ast::Expr { id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Lit(P(codemap::respan(sp, ast::LitKind::Bool(false)))), + node: ast::ExprKind::Lit(P(source_map::respan(sp, ast::LitKind::Bool(false)))), span: sp, attrs: ThinVec::new(), }) diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 1a17aa3e8fb..1378a669e68 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -12,7 +12,7 @@ use rustc_target::spec::abi::Abi; use ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind}; use attr; use syntax_pos::{Pos, Span, DUMMY_SP}; -use codemap::{dummy_spanned, respan, Spanned}; +use source_map::{dummy_spanned, respan, Spanned}; use ext::base::ExtCtxt; use ptr::P; use symbol::{Symbol, keywords}; diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 
32ace937ac0..80bbc618932 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -10,7 +10,7 @@ use attr::HasAttrs; use ast; -use codemap::{hygiene, ExpnInfo, ExpnFormat}; +use source_map::{hygiene, ExpnInfo, ExpnFormat}; use ext::base::ExtCtxt; use ext::build::AstBuilder; use parse::parser::PathStyle; diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 6b41dfafd07..54b56874d6a 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -11,7 +11,7 @@ use ast::{self, Block, Ident, NodeId, PatKind, Path}; use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; -use codemap::{ExpnInfo, MacroBang, MacroAttribute, dummy_spanned, respan}; +use source_map::{ExpnInfo, MacroBang, MacroAttribute, dummy_spanned, respan}; use config::{is_test_or_bench, StripUnconfigured}; use errors::{Applicability, FatalError}; use ext::base::*; diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index 1dc9bae8848..18b4119fde8 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -9,7 +9,7 @@ // except according to those terms. use ast::{self, NodeId}; -use codemap::{DUMMY_SP, dummy_spanned}; +use source_map::{DUMMY_SP, dummy_spanned}; use ext::base::ExtCtxt; use ext::expand::{AstFragment, AstFragmentKind}; use ext::hygiene::Mark; diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index a4b19681164..13a139deea4 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -9,7 +9,7 @@ // except according to those terms. 
use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty}; -use codemap::respan; +use source_map::respan; use syntax_pos::Span; use ext::base::ExtCtxt; use ext::base; @@ -28,7 +28,7 @@ use tokenstream::{TokenStream, TokenTree}; pub mod rt { use ast; - use codemap::Spanned; + use source_map::Spanned; use ext::base::ExtCtxt; use parse::{self, classify}; use parse::token::{self, Token}; @@ -40,7 +40,7 @@ pub mod rt { pub use parse::new_parser_from_tts; pub use syntax_pos::{BytePos, Span, DUMMY_SP, FileName}; - pub use codemap::{dummy_spanned}; + pub use source_map::{dummy_spanned}; pub trait ToTokens { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec; diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index e8245a553eb..ac542948314 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -29,7 +29,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_target::spec::abi::Abi; use ast::{self, NodeId, PatKind, RangeEnd}; use attr; -use codemap::Spanned; +use source_map::Spanned; use edition::{ALL_EDITIONS, Edition}; use syntax_pos::{Span, DUMMY_SP}; use errors::{DiagnosticBuilder, Handler}; diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 3209939d9b1..50a49e2f548 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -21,7 +21,7 @@ use ast::*; use ast; use syntax_pos::Span; -use codemap::{Spanned, respan}; +use source_map::{Spanned, respan}; use parse::token::{self, Token}; use ptr::P; use OneVector; diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index 1ac51a68b62..b0cf29e9f63 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -19,7 +19,7 @@ // FIXME spec the JSON output properly. 
-use codemap::{SourceMap, FilePathMapping}; +use source_map::{SourceMap, FilePathMapping}; use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan}; use errors::registry::Registry; use errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, SourceMapper}; diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 0a42325d2b6..289f023cefa 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -145,7 +145,7 @@ pub mod syntax { pub mod ast; pub mod attr; -pub mod codemap; +pub mod source_map; #[macro_use] pub mod config; pub mod entry; diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index b0136c3e18b..a240604bfe0 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -10,7 +10,7 @@ use attr; use ast; -use codemap::respan; +use source_map::respan; use parse::{SeqSep, PResult}; use parse::token::{self, Nonterminal, DelimToken}; use parse::parser::{Parser, TokenType, PathStyle}; diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index f4d4635b61e..67bc3d5e435 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -11,7 +11,7 @@ pub use self::CommentStyle::*; use ast; -use codemap::SourceMap; +use source_map::SourceMap; use syntax_pos::{BytePos, CharPos, Pos, FileName}; use parse::lexer::{is_block_doc_comment, is_pattern_whitespace}; use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan}; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index acec975d32a..7a8c46ad343 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -10,7 +10,7 @@ use ast::{self, Ident}; use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION}; -use codemap::{SourceMap, FilePathMapping}; +use source_map::{SourceMap, FilePathMapping}; use errors::{Applicability, FatalError, DiagnosticBuilder}; use parse::{token, ParseSess}; use str::char_at; @@ -1827,7 +1827,7 @@ mod 
tests { use ast::{Ident, CrateConfig}; use symbol::Symbol; use syntax_pos::{BytePos, Span, NO_EXPANSION}; - use codemap::SourceMap; + use source_map::SourceMap; use errors; use feature_gate::UnstableFeatures; use parse::token; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 07a9f44fe4a..d1f6191cf75 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -13,7 +13,7 @@ use rustc_data_structures::sync::{Lrc, Lock}; use ast::{self, CrateConfig, NodeId}; use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId}; -use codemap::{SourceMap, FilePathMapping}; +use source_map::{SourceMap, FilePathMapping}; use syntax_pos::{Span, SourceFile, FileName, MultiSpan}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use feature_gate::UnstableFeatures; diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 1e6c1eee483..4c6034fdfce 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -42,7 +42,7 @@ use ast::{UseTree, UseTreeKind}; use ast::{BinOpKind, UnOp}; use ast::{RangeEnd, RangeSyntax}; use {ast, attr}; -use codemap::{self, SourceMap, Spanned, respan}; +use source_map::{self, SourceMap, Spanned, respan}; use syntax_pos::{self, Span, MultiSpan, BytePos, FileName, edition::Edition}; use errors::{self, Applicability, DiagnosticBuilder, DiagnosticId}; use parse::{self, SeqSep, classify, token}; @@ -1879,7 +1879,7 @@ impl<'a> Parser<'a> { let lit = self.parse_lit_token()?; lit }; - Ok(codemap::Spanned { node: lit, span: lo.to(self.prev_span) }) + Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) }) } /// matches '-' lit | lit (cf. 
ast_validation::AstValidator::check_expr_within_pat) @@ -2185,7 +2185,7 @@ impl<'a> Parser<'a> { pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec) -> P { P(Expr { id: ast::DUMMY_NODE_ID, - node: ExprKind::Mac(codemap::Spanned {node: m, span: span}), + node: ExprKind::Mac(source_map::Spanned {node: m, span: span}), span, attrs, }) @@ -3074,7 +3074,7 @@ impl<'a> Parser<'a> { AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual | AssocOp::Greater | AssocOp::GreaterEqual => { let ast_op = op.to_ast_binop().unwrap(); - let binary = self.mk_binary(codemap::respan(cur_op_span, ast_op), lhs, rhs); + let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs); self.mk_expr(span, binary, ThinVec::new()) } AssocOp::Assign => @@ -3094,7 +3094,7 @@ impl<'a> Parser<'a> { token::Shl => BinOpKind::Shl, token::Shr => BinOpKind::Shr, }; - let aopexpr = self.mk_assign_op(codemap::respan(cur_op_span, aop), lhs, rhs); + let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs); self.mk_expr(span, aopexpr, ThinVec::new()) } AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => { @@ -3736,7 +3736,7 @@ impl<'a> Parser<'a> { &mut self, lo: Span, attrs: Vec - ) -> PResult<'a, codemap::Spanned> { + ) -> PResult<'a, source_map::Spanned> { // Check if a colon exists one ahead. This means we're parsing a fieldname. 
let hi; let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) { @@ -3779,7 +3779,7 @@ impl<'a> Parser<'a> { (subpat, fieldname, true) }; - Ok(codemap::Spanned { + Ok(source_map::Spanned { span: lo.to(hi), node: ast::FieldPat { ident: fieldname, @@ -3791,7 +3791,7 @@ impl<'a> Parser<'a> { } /// Parse the fields of a struct-like pattern - fn parse_pat_fields(&mut self) -> PResult<'a, (Vec>, bool)> { + fn parse_pat_fields(&mut self) -> PResult<'a, (Vec>, bool)> { let mut fields = Vec::new(); let mut etc = false; let mut ate_comma = true; @@ -4518,7 +4518,7 @@ impl<'a> Parser<'a> { } } else if let Some(macro_def) = self.eat_macro_def( &attrs, - &codemap::respan(lo, VisibilityKind::Inherited), + &source_map::respan(lo, VisibilityKind::Inherited), lo, )? { Stmt { @@ -5398,7 +5398,7 @@ impl<'a> Parser<'a> { _ => return Ok(None), }; - let eself = codemap::respan(eself_lo.to(eself_hi), eself); + let eself = source_map::respan(eself_lo.to(eself_hi), eself); Ok(Some(Arg::from_self(eself, eself_ident))) } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 2646d52b739..3065e795ed8 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -16,7 +16,7 @@ use ast::{SelfKind, GenericBound, TraitBoundModifier}; use ast::{Attribute, MacDelimiter, GenericArg}; use util::parser::{self, AssocOp, Fixity}; use attr; -use codemap::{self, SourceMap, Spanned}; +use source_map::{self, SourceMap, Spanned}; use syntax_pos::{self, BytePos}; use syntax_pos::hygiene::{Mark, SyntaxContext}; use parse::token::{self, BinOpToken, Token}; @@ -380,7 +380,7 @@ pub fn fun_to_string(decl: &ast::FnDecl, to_string(|s| { s.head("")?; s.print_fn(decl, header, Some(name), - generics, &codemap::dummy_spanned(ast::VisibilityKind::Inherited))?; + generics, &source_map::dummy_spanned(ast::VisibilityKind::Inherited))?; s.end()?; // Close the head box s.end() // Close the outer box }) @@ -1606,7 +1606,7 @@ impl<'a> State<'a> { 
ti.ident, ty, default.as_ref().map(|expr| &**expr), - &codemap::respan(ti.span.shrink_to_lo(), ast::VisibilityKind::Inherited), + &source_map::respan(ti.span.shrink_to_lo(), ast::VisibilityKind::Inherited), )?; } ast::TraitItemKind::Method(ref sig, ref body) => { @@ -1617,7 +1617,7 @@ impl<'a> State<'a> { ti.ident, &ti.generics, sig, - &codemap::respan(ti.span.shrink_to_lo(), ast::VisibilityKind::Inherited), + &source_map::respan(ti.span.shrink_to_lo(), ast::VisibilityKind::Inherited), )?; if let Some(ref body) = *body { self.nbsp()?; @@ -3085,7 +3085,7 @@ impl<'a> State<'a> { ast::FnHeader { unsafety, abi, ..ast::FnHeader::default() }, name, &generics, - &codemap::dummy_spanned(ast::VisibilityKind::Inherited))?; + &source_map::dummy_spanned(ast::VisibilityKind::Inherited))?; self.end() } @@ -3185,7 +3185,7 @@ mod tests { use super::*; use ast; - use codemap; + use source_map; use syntax_pos; use with_globals; @@ -3205,7 +3205,7 @@ mod tests { &decl, ast::FnHeader { unsafety: ast::Unsafety::Normal, - constness: codemap::dummy_spanned(ast::Constness::NotConst), + constness: source_map::dummy_spanned(ast::Constness::NotConst), asyncness: ast::IsAsync::NotAsync, abi: Abi::Rust, }, @@ -3222,7 +3222,7 @@ mod tests { with_globals(|| { let ident = ast::Ident::from_str("principal_skinner"); - let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ { + let var = source_map::respan(syntax_pos::DUMMY_SP, ast::Variant_ { ident, attrs: Vec::new(), // making this up as I go.... ? diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs new file mode 100644 index 00000000000..34cd026f7a0 --- /dev/null +++ b/src/libsyntax/source_map.rs @@ -0,0 +1,1235 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +//! The SourceMap tracks all the source code used within a single crate, mapping +//! from integer byte positions to the original source code location. Each bit +//! of source parsed during crate parsing (typically files, in-memory strings, +//! or various bits of macro expansion) cover a continuous range of bytes in the +//! SourceMap and are represented by SourceFiles. Byte positions are stored in +//! `spans` and used pervasively in the compiler. They are absolute positions +//! within the SourceMap, which upon request can be converted to line and column +//! information, source code snippets, etc. + + +pub use syntax_pos::*; +pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo}; +pub use self::ExpnFormat::*; + +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::stable_hasher::StableHasher; +use rustc_data_structures::sync::{Lrc, Lock, LockGuard}; +use std::cmp; +use std::hash::Hash; +use std::path::{Path, PathBuf}; + +use std::env; +use std::fs; +use std::io::{self, Read}; +use errors::SourceMapper; + +/// Return the span itself if it doesn't come from a macro expansion, +/// otherwise return the call site span up to the `enclosing_sp` by +/// following the `expn_info` chain. 
+pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { + let call_site1 = sp.ctxt().outer().expn_info().map(|ei| ei.call_site); + let call_site2 = enclosing_sp.ctxt().outer().expn_info().map(|ei| ei.call_site); + match (call_site1, call_site2) { + (None, _) => sp, + (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, + (Some(call_site1), _) => original_sp(call_site1, enclosing_sp), + } +} + +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] +pub struct Spanned { + pub node: T, + pub span: Span, +} + +pub fn respan(sp: Span, t: T) -> Spanned { + Spanned {node: t, span: sp} +} + +pub fn dummy_spanned(t: T) -> Spanned { + respan(DUMMY_SP, t) +} + +// _____________________________________________________________________________ +// SourceFile, MultiByteChar, FileName, FileLines +// + +/// An abstraction over the fs operations used by the Parser. +pub trait FileLoader { + /// Query the existence of a file. + fn file_exists(&self, path: &Path) -> bool; + + /// Return an absolute path to a file, if possible. + fn abs_path(&self, path: &Path) -> Option; + + /// Read the contents of an UTF-8 file into memory. + fn read_file(&self, path: &Path) -> io::Result; +} + +/// A FileLoader that uses std::fs to load real files. +pub struct RealFileLoader; + +impl FileLoader for RealFileLoader { + fn file_exists(&self, path: &Path) -> bool { + fs::metadata(path).is_ok() + } + + fn abs_path(&self, path: &Path) -> Option { + if path.is_absolute() { + Some(path.to_path_buf()) + } else { + env::current_dir() + .ok() + .map(|cwd| cwd.join(path)) + } + } + + fn read_file(&self, path: &Path) -> io::Result { + let mut src = String::new(); + fs::File::open(path)?.read_to_string(&mut src)?; + Ok(src) + } +} + +// This is a SourceFile identifier that is used to correlate SourceFiles between +// subsequent compilation sessions (which is something we need to do during +// incremental compilation). 
+#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)] +pub struct StableFilemapId(u128); + +impl StableFilemapId { + pub fn new(source_file: &SourceFile) -> StableFilemapId { + let mut hasher = StableHasher::new(); + + source_file.name.hash(&mut hasher); + source_file.name_was_remapped.hash(&mut hasher); + source_file.unmapped_path.hash(&mut hasher); + + StableFilemapId(hasher.finish()) + } +} + +// _____________________________________________________________________________ +// SourceMap +// + +pub(super) struct SourceMapFiles { + pub(super) file_maps: Vec>, + stable_id_to_source_file: FxHashMap> +} + +pub struct SourceMap { + pub(super) files: Lock, + file_loader: Box, + // This is used to apply the file path remapping as specified via + // --remap-path-prefix to all SourceFiles allocated within this SourceMap. + path_mapping: FilePathMapping, + /// In case we are in a doctest, replace all file names with the PathBuf, + /// and add the given offsets to the line info + doctest_offset: Option<(FileName, isize)>, +} + +impl SourceMap { + pub fn new(path_mapping: FilePathMapping) -> SourceMap { + SourceMap { + files: Lock::new(SourceMapFiles { + file_maps: Vec::new(), + stable_id_to_source_file: FxHashMap(), + }), + file_loader: Box::new(RealFileLoader), + path_mapping, + doctest_offset: None, + } + } + + pub fn new_doctest(path_mapping: FilePathMapping, + file: FileName, line: isize) -> SourceMap { + SourceMap { + doctest_offset: Some((file, line)), + ..SourceMap::new(path_mapping) + } + + } + + pub fn with_file_loader(file_loader: Box, + path_mapping: FilePathMapping) + -> SourceMap { + SourceMap { + files: Lock::new(SourceMapFiles { + file_maps: Vec::new(), + stable_id_to_source_file: FxHashMap(), + }), + file_loader: file_loader, + path_mapping, + doctest_offset: None, + } + } + + pub fn path_mapping(&self) -> &FilePathMapping { + &self.path_mapping + } + + pub fn file_exists(&self, path: &Path) -> bool { + 
self.file_loader.file_exists(path) + } + + pub fn load_file(&self, path: &Path) -> io::Result> { + let src = self.file_loader.read_file(path)?; + let filename = if let Some((ref name, _)) = self.doctest_offset { + name.clone() + } else { + path.to_owned().into() + }; + Ok(self.new_source_file(filename, src)) + } + + pub fn files(&self) -> LockGuard>> { + LockGuard::map(self.files.borrow(), |files| &mut files.file_maps) + } + + pub fn source_file_by_stable_id(&self, stable_id: StableFilemapId) -> Option> { + self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|fm| fm.clone()) + } + + fn next_start_pos(&self) -> usize { + match self.files.borrow().file_maps.last() { + None => 0, + // Add one so there is some space between files. This lets us distinguish + // positions in the codemap, even in the presence of zero-length files. + Some(last) => last.end_pos.to_usize() + 1, + } + } + + /// Creates a new source_file. + /// This does not ensure that only one SourceFile exists per file name. + pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc { + let start_pos = self.next_start_pos(); + + // The path is used to determine the directory for loading submodules and + // include files, so it must be before remapping. + // Note that filename may not be a valid path, eg it may be `` etc, + // but this is okay because the directory determined by `path.pop()` will + // be empty, so the working directory will be used. 
+ let unmapped_path = filename.clone(); + + let (filename, was_remapped) = match filename { + FileName::Real(filename) => { + let (filename, was_remapped) = self.path_mapping.map_prefix(filename); + (FileName::Real(filename), was_remapped) + }, + other => (other, false), + }; + let source_file = Lrc::new(SourceFile::new( + filename, + was_remapped, + unmapped_path, + src, + Pos::from_usize(start_pos), + )); + + let mut files = self.files.borrow_mut(); + + files.file_maps.push(source_file.clone()); + files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone()); + + source_file + } + + /// Allocates a new SourceFile representing a source file from an external + /// crate. The source code of such an "imported source_file" is not available, + /// but we still know enough to generate accurate debuginfo location + /// information for things inlined from other crates. + pub fn new_imported_source_file(&self, + filename: FileName, + name_was_remapped: bool, + crate_of_origin: u32, + src_hash: u128, + name_hash: u128, + source_len: usize, + mut file_local_lines: Vec, + mut file_local_multibyte_chars: Vec, + mut file_local_non_narrow_chars: Vec) + -> Lrc { + let start_pos = self.next_start_pos(); + + let end_pos = Pos::from_usize(start_pos + source_len); + let start_pos = Pos::from_usize(start_pos); + + for pos in &mut file_local_lines { + *pos = *pos + start_pos; + } + + for mbc in &mut file_local_multibyte_chars { + mbc.pos = mbc.pos + start_pos; + } + + for swc in &mut file_local_non_narrow_chars { + *swc = *swc + start_pos; + } + + let source_file = Lrc::new(SourceFile { + name: filename, + name_was_remapped, + unmapped_path: None, + crate_of_origin, + src: None, + src_hash, + external_src: Lock::new(ExternalSource::AbsentOk), + start_pos, + end_pos, + lines: file_local_lines, + multibyte_chars: file_local_multibyte_chars, + non_narrow_chars: file_local_non_narrow_chars, + name_hash, + }); + + let mut files = self.files.borrow_mut(); + + 
files.file_maps.push(source_file.clone()); + files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone()); + + source_file + } + + pub fn mk_substr_filename(&self, sp: Span) -> String { + let pos = self.lookup_char_pos(sp.lo()); + format!("<{}:{}:{}>", + pos.file.name, + pos.line, + pos.col.to_usize() + 1) + } + + // If there is a doctest_offset, apply it to the line + pub fn doctest_offset_line(&self, mut orig: usize) -> usize { + if let Some((_, line)) = self.doctest_offset { + if line >= 0 { + orig = orig + line as usize; + } else { + orig = orig - (-line) as usize; + } + } + orig + } + + /// Lookup source information about a BytePos + pub fn lookup_char_pos(&self, pos: BytePos) -> Loc { + let chpos = self.bytepos_to_file_charpos(pos); + match self.lookup_line(pos) { + Ok(SourceFileAndLine { fm: f, line: a }) => { + let line = a + 1; // Line numbers start at 1 + let linebpos = f.lines[a]; + let linechpos = self.bytepos_to_file_charpos(linebpos); + let col = chpos - linechpos; + + let col_display = { + let start_width_idx = f + .non_narrow_chars + .binary_search_by_key(&linebpos, |x| x.pos()) + .unwrap_or_else(|x| x); + let end_width_idx = f + .non_narrow_chars + .binary_search_by_key(&pos, |x| x.pos()) + .unwrap_or_else(|x| x); + let special_chars = end_width_idx - start_width_idx; + let non_narrow: usize = f + .non_narrow_chars[start_width_idx..end_width_idx] + .into_iter() + .map(|x| x.width()) + .sum(); + col.0 - special_chars + non_narrow + }; + debug!("byte pos {:?} is on the line at byte pos {:?}", + pos, linebpos); + debug!("char pos {:?} is on the line at char pos {:?}", + chpos, linechpos); + debug!("byte is on line: {}", line); + assert!(chpos >= linechpos); + Loc { + file: f, + line, + col, + col_display, + } + } + Err(f) => { + let col_display = { + let end_width_idx = f + .non_narrow_chars + .binary_search_by_key(&pos, |x| x.pos()) + .unwrap_or_else(|x| x); + let non_narrow: usize = f + 
.non_narrow_chars[0..end_width_idx] + .into_iter() + .map(|x| x.width()) + .sum(); + chpos.0 - end_width_idx + non_narrow + }; + Loc { + file: f, + line: 0, + col: chpos, + col_display, + } + } + } + } + + // If the relevant source_file is empty, we don't return a line number. + pub fn lookup_line(&self, pos: BytePos) -> Result> { + let idx = self.lookup_source_file_idx(pos); + + let f = (*self.files.borrow().file_maps)[idx].clone(); + + match f.lookup_line(pos) { + Some(line) => Ok(SourceFileAndLine { fm: f, line: line }), + None => Err(f) + } + } + + pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt { + let loc = self.lookup_char_pos(pos); + LocWithOpt { + filename: loc.file.name.clone(), + line: loc.line, + col: loc.col, + file: Some(loc.file) + } + } + + /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If + /// there are gaps between lhs and rhs, the resulting union will cross these gaps. + /// For this to work, the spans have to be: + /// + /// * the ctxt of both spans much match + /// * the lhs span needs to end on the same line the rhs span begins + /// * the lhs span must start at or before the rhs span + pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { + // make sure we're at the same expansion id + if sp_lhs.ctxt() != sp_rhs.ctxt() { + return None; + } + + let lhs_end = match self.lookup_line(sp_lhs.hi()) { + Ok(x) => x, + Err(_) => return None + }; + let rhs_begin = match self.lookup_line(sp_rhs.lo()) { + Ok(x) => x, + Err(_) => return None + }; + + // if we must cross lines to merge, don't merge + if lhs_end.line != rhs_begin.line { + return None; + } + + // ensure these follow the expected order and we don't overlap + if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) { + Some(sp_lhs.to(sp_rhs)) + } else { + None + } + } + + pub fn span_to_string(&self, sp: Span) -> String { + if self.files.borrow().file_maps.is_empty() && sp.is_dummy() { + return 
"no-location".to_string(); + } + + let lo = self.lookup_char_pos_adj(sp.lo()); + let hi = self.lookup_char_pos_adj(sp.hi()); + format!("{}:{}:{}: {}:{}", + lo.filename, + lo.line, + lo.col.to_usize() + 1, + hi.line, + hi.col.to_usize() + 1) + } + + pub fn span_to_filename(&self, sp: Span) -> FileName { + self.lookup_char_pos(sp.lo()).file.name.clone() + } + + pub fn span_to_unmapped_path(&self, sp: Span) -> FileName { + self.lookup_char_pos(sp.lo()).file.unmapped_path.clone() + .expect("SourceMap::span_to_unmapped_path called for imported SourceFile?") + } + + pub fn is_multiline(&self, sp: Span) -> bool { + let lo = self.lookup_char_pos(sp.lo()); + let hi = self.lookup_char_pos(sp.hi()); + lo.line != hi.line + } + + pub fn span_to_lines(&self, sp: Span) -> FileLinesResult { + debug!("span_to_lines(sp={:?})", sp); + + if sp.lo() > sp.hi() { + return Err(SpanLinesError::IllFormedSpan(sp)); + } + + let lo = self.lookup_char_pos(sp.lo()); + debug!("span_to_lines: lo={:?}", lo); + let hi = self.lookup_char_pos(sp.hi()); + debug!("span_to_lines: hi={:?}", hi); + + if lo.file.start_pos != hi.file.start_pos { + return Err(SpanLinesError::DistinctSources(DistinctSources { + begin: (lo.file.name.clone(), lo.file.start_pos), + end: (hi.file.name.clone(), hi.file.start_pos), + })); + } + assert!(hi.line >= lo.line); + + let mut lines = Vec::with_capacity(hi.line - lo.line + 1); + + // The span starts partway through the first line, + // but after that it starts from offset 0. + let mut start_col = lo.col; + + // For every line but the last, it extends from `start_col` + // and to the end of the line. Be careful because the line + // numbers in Loc are 1-based, so we subtract 1 to get 0-based + // lines. + for line_index in lo.line-1 .. 
hi.line-1 { + let line_len = lo.file.get_line(line_index) + .map(|s| s.chars().count()) + .unwrap_or(0); + lines.push(LineInfo { line_index, + start_col, + end_col: CharPos::from_usize(line_len) }); + start_col = CharPos::from_usize(0); + } + + // For the last line, it extends from `start_col` to `hi.col`: + lines.push(LineInfo { line_index: hi.line - 1, + start_col, + end_col: hi.col }); + + Ok(FileLines {file: lo.file, lines: lines}) + } + + /// Extract the source surrounding the given `Span` using the `extract_source` function. The + /// extract function takes three arguments: a string slice containing the source, an index in + /// the slice for the beginning of the span and an index in the slice for the end of the span. + fn span_to_source(&self, sp: Span, extract_source: F) -> Result + where F: Fn(&str, usize, usize) -> String + { + if sp.lo() > sp.hi() { + return Err(SpanSnippetError::IllFormedSpan(sp)); + } + + let local_begin = self.lookup_byte_offset(sp.lo()); + let local_end = self.lookup_byte_offset(sp.hi()); + + if local_begin.fm.start_pos != local_end.fm.start_pos { + return Err(SpanSnippetError::DistinctSources(DistinctSources { + begin: (local_begin.fm.name.clone(), + local_begin.fm.start_pos), + end: (local_end.fm.name.clone(), + local_end.fm.start_pos) + })); + } else { + self.ensure_source_file_source_present(local_begin.fm.clone()); + + let start_index = local_begin.pos.to_usize(); + let end_index = local_end.pos.to_usize(); + let source_len = (local_begin.fm.end_pos - + local_begin.fm.start_pos).to_usize(); + + if start_index > end_index || end_index > source_len { + return Err(SpanSnippetError::MalformedForCodemap( + MalformedCodemapPositions { + name: local_begin.fm.name.clone(), + source_len, + begin_pos: local_begin.pos, + end_pos: local_end.pos, + })); + } + + if let Some(ref src) = local_begin.fm.src { + return Ok(extract_source(src, start_index, end_index)); + } else if let Some(src) = local_begin.fm.external_src.borrow().get_source() { + 
return Ok(extract_source(src, start_index, end_index)); + } else { + return Err(SpanSnippetError::SourceNotAvailable { + filename: local_begin.fm.name.clone() + }); + } + } + } + + /// Return the source snippet as `String` corresponding to the given `Span` + pub fn span_to_snippet(&self, sp: Span) -> Result { + self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index] + .to_string()) + } + + /// Return the source snippet as `String` before the given `Span` + pub fn span_to_prev_source(&self, sp: Span) -> Result { + self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string()) + } + + /// Extend the given `Span` to just after the previous occurrence of `c`. Return the same span + /// if no character could be found or if an error occurred while retrieving the code snippet. + pub fn span_extend_to_prev_char(&self, sp: Span, c: char) -> Span { + if let Ok(prev_source) = self.span_to_prev_source(sp) { + let prev_source = prev_source.rsplit(c).nth(0).unwrap_or("").trim_left(); + if !prev_source.is_empty() && !prev_source.contains('\n') { + return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32)); + } + } + + sp + } + + /// Extend the given `Span` to just after the previous occurrence of `pat` when surrounded by + /// whitespace. Return the same span if no character could be found or if an error occurred + /// while retrieving the code snippet. 
+ pub fn span_extend_to_prev_str(&self, sp: Span, pat: &str, accept_newlines: bool) -> Span { + // assure that the pattern is delimited, to avoid the following + // fn my_fn() + // ^^^^ returned span without the check + // ---------- correct span + for ws in &[" ", "\t", "\n"] { + let pat = pat.to_owned() + ws; + if let Ok(prev_source) = self.span_to_prev_source(sp) { + let prev_source = prev_source.rsplit(&pat).nth(0).unwrap_or("").trim_left(); + if !prev_source.is_empty() && (!prev_source.contains('\n') || accept_newlines) { + return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32)); + } + } + } + + sp + } + + /// Given a `Span`, try to get a shorter span ending before the first occurrence of `c` `char` + pub fn span_until_char(&self, sp: Span, c: char) -> Span { + match self.span_to_snippet(sp) { + Ok(snippet) => { + let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right(); + if !snippet.is_empty() && !snippet.contains('\n') { + sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32)) + } else { + sp + } + } + _ => sp, + } + } + + /// Given a `Span`, try to get a shorter span ending just after the first occurrence of `char` + /// `c`. + pub fn span_through_char(&self, sp: Span, c: char) -> Span { + if let Ok(snippet) = self.span_to_snippet(sp) { + if let Some(offset) = snippet.find(c) { + return sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32)); + } + } + sp + } + + /// Given a `Span`, get a new `Span` covering the first token and all its trailing whitespace or + /// the original `Span`. + /// + /// If `sp` points to `"let mut x"`, then a span pointing at `"let "` will be returned. 
+ pub fn span_until_non_whitespace(&self, sp: Span) -> Span { + let mut whitespace_found = false; + + self.span_take_while(sp, |c| { + if !whitespace_found && c.is_whitespace() { + whitespace_found = true; + } + + if whitespace_found && !c.is_whitespace() { + false + } else { + true + } + }) + } + + /// Given a `Span`, get a new `Span` covering the first token without its trailing whitespace or + /// the original `Span` in case of error. + /// + /// If `sp` points to `"let mut x"`, then a span pointing at `"let"` will be returned. + pub fn span_until_whitespace(&self, sp: Span) -> Span { + self.span_take_while(sp, |c| !c.is_whitespace()) + } + + /// Given a `Span`, get a shorter one until `predicate` yields false. + pub fn span_take_while

(&self, sp: Span, predicate: P) -> Span + where P: for <'r> FnMut(&'r char) -> bool + { + if let Ok(snippet) = self.span_to_snippet(sp) { + let offset = snippet.chars() + .take_while(predicate) + .map(|c| c.len_utf8()) + .sum::(); + + sp.with_hi(BytePos(sp.lo().0 + (offset as u32))) + } else { + sp + } + } + + pub fn def_span(&self, sp: Span) -> Span { + self.span_until_char(sp, '{') + } + + /// Returns a new span representing just the start-point of this span + pub fn start_point(&self, sp: Span) -> Span { + let pos = sp.lo().0; + let width = self.find_width_of_character_at_span(sp, false); + let corrected_start_position = pos.checked_add(width).unwrap_or(pos); + let end_point = BytePos(cmp::max(corrected_start_position, sp.lo().0)); + sp.with_hi(end_point) + } + + /// Returns a new span representing just the end-point of this span + pub fn end_point(&self, sp: Span) -> Span { + let pos = sp.hi().0; + + let width = self.find_width_of_character_at_span(sp, false); + let corrected_end_position = pos.checked_sub(width).unwrap_or(pos); + + let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0)); + sp.with_lo(end_point) + } + + /// Returns a new span representing the next character after the end-point of this span + pub fn next_point(&self, sp: Span) -> Span { + let start_of_next_point = sp.hi().0; + + let width = self.find_width_of_character_at_span(sp, true); + // If the width is 1, then the next span should point to the same `lo` and `hi`. However, + // in the case of a multibyte character, where the width != 1, the next span should + // span multiple bytes to include the whole character. + let end_of_next_point = start_of_next_point.checked_add( + width - 1).unwrap_or(start_of_next_point); + + let end_of_next_point = BytePos(cmp::max(sp.lo().0 + 1, end_of_next_point)); + Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt()) + } + + /// Finds the width of a character, either before or after the provided span. 
+ fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 { + // Disregard malformed spans and assume a one-byte wide character. + if sp.lo() >= sp.hi() { + debug!("find_width_of_character_at_span: early return malformed span"); + return 1; + } + + let local_begin = self.lookup_byte_offset(sp.lo()); + let local_end = self.lookup_byte_offset(sp.hi()); + debug!("find_width_of_character_at_span: local_begin=`{:?}`, local_end=`{:?}`", + local_begin, local_end); + + let start_index = local_begin.pos.to_usize(); + let end_index = local_end.pos.to_usize(); + debug!("find_width_of_character_at_span: start_index=`{:?}`, end_index=`{:?}`", + start_index, end_index); + + // Disregard indexes that are at the start or end of their spans, they can't fit bigger + // characters. + if (!forwards && end_index == usize::min_value()) || + (forwards && start_index == usize::max_value()) { + debug!("find_width_of_character_at_span: start or end of span, cannot be multibyte"); + return 1; + } + + let source_len = (local_begin.fm.end_pos - local_begin.fm.start_pos).to_usize(); + debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len); + // Ensure indexes are also not malformed. + if start_index > end_index || end_index > source_len { + debug!("find_width_of_character_at_span: source indexes are malformed"); + return 1; + } + + let src = local_begin.fm.external_src.borrow(); + + // We need to extend the snippet to the end of the src rather than to end_index so when + // searching forwards for boundaries we've got somewhere to search. 
+ let snippet = if let Some(ref src) = local_begin.fm.src { + let len = src.len(); + (&src[start_index..len]) + } else if let Some(src) = src.get_source() { + let len = src.len(); + (&src[start_index..len]) + } else { + return 1; + }; + debug!("find_width_of_character_at_span: snippet=`{:?}`", snippet); + + let mut target = if forwards { end_index + 1 } else { end_index - 1 }; + debug!("find_width_of_character_at_span: initial target=`{:?}`", target); + + while !snippet.is_char_boundary(target - start_index) && target < source_len { + target = if forwards { + target + 1 + } else { + match target.checked_sub(1) { + Some(target) => target, + None => { + break; + } + } + }; + debug!("find_width_of_character_at_span: target=`{:?}`", target); + } + debug!("find_width_of_character_at_span: final target=`{:?}`", target); + + if forwards { + (target - end_index) as u32 + } else { + (end_index - target) as u32 + } + } + + pub fn get_source_file(&self, filename: &FileName) -> Option> { + for fm in self.files.borrow().file_maps.iter() { + if *filename == fm.name { + return Some(fm.clone()); + } + } + None + } + + /// For a global BytePos compute the local offset within the containing SourceFile + pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos { + let idx = self.lookup_source_file_idx(bpos); + let fm = (*self.files.borrow().file_maps)[idx].clone(); + let offset = bpos - fm.start_pos; + SourceFileAndBytePos {fm: fm, pos: offset} + } + + /// Converts an absolute BytePos to a CharPos relative to the source_file. 
+ pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { + let idx = self.lookup_source_file_idx(bpos); + let map = &(*self.files.borrow().file_maps)[idx]; + + // The number of extra bytes due to multibyte chars in the SourceFile + let mut total_extra_bytes = 0; + + for mbc in map.multibyte_chars.iter() { + debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); + if mbc.pos < bpos { + // every character is at least one byte, so we only + // count the actual extra bytes. + total_extra_bytes += mbc.bytes as u32 - 1; + // We should never see a byte position in the middle of a + // character + assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32); + } else { + break; + } + } + + assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32()); + CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize) + } + + // Return the index of the source_file (in self.files) which contains pos. + pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize { + let files = self.files.borrow(); + let files = &files.file_maps; + let count = files.len(); + + // Binary search for the source_file. 
+ let mut a = 0; + let mut b = count; + while b - a > 1 { + let m = (a + b) / 2; + if files[m].start_pos > pos { + b = m; + } else { + a = m; + } + } + + assert!(a < count, "position {} does not resolve to a source location", pos.to_usize()); + + return a; + } + + pub fn count_lines(&self) -> usize { + self.files().iter().fold(0, |a, f| a + f.count_lines()) + } + + + pub fn generate_fn_name_span(&self, span: Span) -> Option { + let prev_span = self.span_extend_to_prev_str(span, "fn", true); + self.span_to_snippet(prev_span).map(|snippet| { + let len = snippet.find(|c: char| !c.is_alphanumeric() && c != '_') + .expect("no label after fn"); + prev_span.with_hi(BytePos(prev_span.lo().0 + len as u32)) + }).ok() + } + + /// Take the span of a type parameter in a function signature and try to generate a span for the + /// function name (with generics) and a new snippet for this span with the pointed type + /// parameter as a new local type parameter. + /// + /// For instance: + /// ```rust,ignore (pseudo-Rust) + /// // Given span + /// fn my_function(param: T) + /// // ^ Original span + /// + /// // Result + /// fn my_function(param: T) + /// // ^^^^^^^^^^^ Generated span with snippet `my_function` + /// ``` + /// + /// Attention: The method used is very fragile since it essentially duplicates the work of the + /// parser. If you need to use this function or something similar, please consider updating the + /// codemap functions and this function to something more robust. 
+ pub fn generate_local_type_param_snippet(&self, span: Span) -> Option<(Span, String)> { + // Try to extend the span to the previous "fn" keyword to retrieve the function + // signature + let sugg_span = self.span_extend_to_prev_str(span, "fn", false); + if sugg_span != span { + if let Ok(snippet) = self.span_to_snippet(sugg_span) { + // Consume the function name + let mut offset = snippet.find(|c: char| !c.is_alphanumeric() && c != '_') + .expect("no label after fn"); + + // Consume the generics part of the function signature + let mut bracket_counter = 0; + let mut last_char = None; + for c in snippet[offset..].chars() { + match c { + '<' => bracket_counter += 1, + '>' => bracket_counter -= 1, + '(' => if bracket_counter == 0 { break; } + _ => {} + } + offset += c.len_utf8(); + last_char = Some(c); + } + + // Adjust the suggestion span to encompass the function name with its generics + let sugg_span = sugg_span.with_hi(BytePos(sugg_span.lo().0 + offset as u32)); + + // Prepare the new suggested snippet to append the type parameter that triggered + // the error in the generics of the function signature + let mut new_snippet = if last_char == Some('>') { + format!("{}, ", &snippet[..(offset - '>'.len_utf8())]) + } else { + format!("{}<", &snippet[..offset]) + }; + new_snippet.push_str(&self.span_to_snippet(span).unwrap_or("T".to_string())); + new_snippet.push('>'); + + return Some((sugg_span, new_snippet)); + } + } + + None + } +} + +impl SourceMapper for SourceMap { + fn lookup_char_pos(&self, pos: BytePos) -> Loc { + self.lookup_char_pos(pos) + } + fn span_to_lines(&self, sp: Span) -> FileLinesResult { + self.span_to_lines(sp) + } + fn span_to_string(&self, sp: Span) -> String { + self.span_to_string(sp) + } + fn span_to_filename(&self, sp: Span) -> FileName { + self.span_to_filename(sp) + } + fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { + self.merge_spans(sp_lhs, sp_rhs) + } + fn call_span_if_macro(&self, sp: Span) -> Span { + if 
self.span_to_filename(sp.clone()).is_macros() { + let v = sp.macro_backtrace(); + if let Some(use_site) = v.last() { + return use_site.call_site; + } + } + sp + } + fn ensure_source_file_source_present(&self, file_map: Lrc) -> bool { + file_map.add_external_src( + || match file_map.name { + FileName::Real(ref name) => self.file_loader.read_file(name).ok(), + _ => None, + } + ) + } + fn doctest_offset_line(&self, line: usize) -> usize { + self.doctest_offset_line(line) + } +} + +#[derive(Clone)] +pub struct FilePathMapping { + mapping: Vec<(PathBuf, PathBuf)>, +} + +impl FilePathMapping { + pub fn empty() -> FilePathMapping { + FilePathMapping { + mapping: vec![] + } + } + + pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping { + FilePathMapping { + mapping, + } + } + + /// Applies any path prefix substitution as defined by the mapping. + /// The return value is the remapped path and a boolean indicating whether + /// the path was affected by the mapping. + pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) { + // NOTE: We are iterating over the mapping entries from last to first + // because entries specified later on the command line should + // take precedence. 
+ for &(ref from, ref to) in self.mapping.iter().rev() { + if let Ok(rest) = path.strip_prefix(from) { + return (to.join(rest), true); + } + } + + (path, false) + } +} + +// _____________________________________________________________________________ +// Tests +// + +#[cfg(test)] +mod tests { + use super::*; + use rustc_data_structures::sync::Lrc; + + fn init_code_map() -> SourceMap { + let cm = SourceMap::new(FilePathMapping::empty()); + cm.new_source_file(PathBuf::from("blork.rs").into(), + "first line.\nsecond line".to_string()); + cm.new_source_file(PathBuf::from("empty.rs").into(), + "".to_string()); + cm.new_source_file(PathBuf::from("blork2.rs").into(), + "first line.\nsecond line".to_string()); + cm + } + + #[test] + fn t3() { + // Test lookup_byte_offset + let cm = init_code_map(); + + let fmabp1 = cm.lookup_byte_offset(BytePos(23)); + assert_eq!(fmabp1.fm.name, PathBuf::from("blork.rs").into()); + assert_eq!(fmabp1.pos, BytePos(23)); + + let fmabp1 = cm.lookup_byte_offset(BytePos(24)); + assert_eq!(fmabp1.fm.name, PathBuf::from("empty.rs").into()); + assert_eq!(fmabp1.pos, BytePos(0)); + + let fmabp2 = cm.lookup_byte_offset(BytePos(25)); + assert_eq!(fmabp2.fm.name, PathBuf::from("blork2.rs").into()); + assert_eq!(fmabp2.pos, BytePos(0)); + } + + #[test] + fn t4() { + // Test bytepos_to_file_charpos + let cm = init_code_map(); + + let cp1 = cm.bytepos_to_file_charpos(BytePos(22)); + assert_eq!(cp1, CharPos(22)); + + let cp2 = cm.bytepos_to_file_charpos(BytePos(25)); + assert_eq!(cp2, CharPos(0)); + } + + #[test] + fn t5() { + // Test zero-length source_files. 
+ let cm = init_code_map(); + + let loc1 = cm.lookup_char_pos(BytePos(22)); + assert_eq!(loc1.file.name, PathBuf::from("blork.rs").into()); + assert_eq!(loc1.line, 2); + assert_eq!(loc1.col, CharPos(10)); + + let loc2 = cm.lookup_char_pos(BytePos(25)); + assert_eq!(loc2.file.name, PathBuf::from("blork2.rs").into()); + assert_eq!(loc2.line, 1); + assert_eq!(loc2.col, CharPos(0)); + } + + fn init_code_map_mbc() -> SourceMap { + let cm = SourceMap::new(FilePathMapping::empty()); + // € is a three byte utf8 char. + cm.new_source_file(PathBuf::from("blork.rs").into(), + "fir€st €€€€ line.\nsecond line".to_string()); + cm.new_source_file(PathBuf::from("blork2.rs").into(), + "first line€€.\n€ second line".to_string()); + cm + } + + #[test] + fn t6() { + // Test bytepos_to_file_charpos in the presence of multi-byte chars + let cm = init_code_map_mbc(); + + let cp1 = cm.bytepos_to_file_charpos(BytePos(3)); + assert_eq!(cp1, CharPos(3)); + + let cp2 = cm.bytepos_to_file_charpos(BytePos(6)); + assert_eq!(cp2, CharPos(4)); + + let cp3 = cm.bytepos_to_file_charpos(BytePos(56)); + assert_eq!(cp3, CharPos(12)); + + let cp4 = cm.bytepos_to_file_charpos(BytePos(61)); + assert_eq!(cp4, CharPos(15)); + } + + #[test] + fn t7() { + // Test span_to_lines for a span ending at the end of source_file + let cm = init_code_map(); + let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); + let file_lines = cm.span_to_lines(span).unwrap(); + + assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into()); + assert_eq!(file_lines.lines.len(), 1); + assert_eq!(file_lines.lines[0].line_index, 1); + } + + /// Given a string like " ~~~~~~~~~~~~ ", produces a span + /// converting that range. The idea is that the string has the same + /// length as the input, and we uncover the byte positions. Note + /// that this can span lines and so on. 
+ fn span_from_selection(input: &str, selection: &str) -> Span { + assert_eq!(input.len(), selection.len()); + let left_index = selection.find('~').unwrap() as u32; + let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); + Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION) + } + + /// Test span_to_snippet and span_to_lines for a span converting 3 + /// lines in the middle of a file. + #[test] + fn span_to_snippet_and_lines_spanning_multiple_lines() { + let cm = SourceMap::new(FilePathMapping::empty()); + let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; + let selection = " \n ~~\n~~~\n~~~~~ \n \n"; + cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string()); + let span = span_from_selection(inputtext, selection); + + // check that we are extracting the text we thought we were extracting + assert_eq!(&cm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD"); + + // check that span_to_lines gives us the complete result with the lines/cols we expected + let lines = cm.span_to_lines(span).unwrap(); + let expected = vec![ + LineInfo { line_index: 1, start_col: CharPos(4), end_col: CharPos(6) }, + LineInfo { line_index: 2, start_col: CharPos(0), end_col: CharPos(3) }, + LineInfo { line_index: 3, start_col: CharPos(0), end_col: CharPos(5) } + ]; + assert_eq!(lines.lines, expected); + } + + #[test] + fn t8() { + // Test span_to_snippet for a span ending at the end of source_file + let cm = init_code_map(); + let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); + let snippet = cm.span_to_snippet(span); + + assert_eq!(snippet, Ok("second line".to_string())); + } + + #[test] + fn t9() { + // Test span_to_str for a span ending at the end of source_file + let cm = init_code_map(); + let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); + let sstr = cm.span_to_string(span); + + assert_eq!(sstr, "blork.rs:2:1: 2:12"); + } + + /// Test failing to merge two spans on different lines + #[test] + 
fn span_merging_fail() { + let cm = SourceMap::new(FilePathMapping::empty()); + let inputtext = "bbbb BB\ncc CCC\n"; + let selection1 = " ~~\n \n"; + let selection2 = " \n ~~~\n"; + cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned()); + let span1 = span_from_selection(inputtext, selection1); + let span2 = span_from_selection(inputtext, selection2); + + assert!(cm.merge_spans(span1, span2).is_none()); + } + + /// Returns the span corresponding to the `n`th occurrence of + /// `substring` in `source_text`. + trait SourceMapExtension { + fn span_substr(&self, + file: &Lrc, + source_text: &str, + substring: &str, + n: usize) + -> Span; + } + + impl SourceMapExtension for SourceMap { + fn span_substr(&self, + file: &Lrc, + source_text: &str, + substring: &str, + n: usize) + -> Span + { + println!("span_substr(file={:?}/{:?}, substring={:?}, n={})", + file.name, file.start_pos, substring, n); + let mut i = 0; + let mut hi = 0; + loop { + let offset = source_text[hi..].find(substring).unwrap_or_else(|| { + panic!("source_text `{}` does not have {} occurrences of `{}`, only {}", + source_text, n, substring, i); + }); + let lo = hi + offset; + hi = lo + substring.len(); + if i == n { + let span = Span::new( + BytePos(lo as u32 + file.start_pos.0), + BytePos(hi as u32 + file.start_pos.0), + NO_EXPANSION, + ); + assert_eq!(&self.span_to_snippet(span).unwrap()[..], + substring); + return span; + } + i += 1; + } + } + } +} diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 626a610017d..0db24c3b482 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -16,7 +16,7 @@ use edition::Edition; use ext::hygiene::{Mark, SyntaxContext}; use symbol::{Symbol, keywords}; use syntax_pos::{DUMMY_SP, Span}; -use codemap::{ExpnInfo, MacroAttribute, dummy_spanned, hygiene, respan}; +use source_map::{ExpnInfo, MacroAttribute, dummy_spanned, hygiene, respan}; use ptr::P; use tokenstream::TokenStream; diff --git 
a/src/libsyntax/test.rs b/src/libsyntax/test.rs index b85fbae587a..393989711de 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -22,7 +22,7 @@ use std::vec; use attr::{self, HasAttrs}; use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos}; -use codemap::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned}; +use source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned}; use errors; use config; use entry::{self, EntryPointType}; @@ -616,8 +616,8 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P, Option>) { (item, reexport) } -fn nospan(t: T) -> codemap::Spanned { - codemap::Spanned { node: t, span: DUMMY_SP } +fn nospan(t: T) -> source_map::Spanned { + source_map::Spanned { node: t, span: DUMMY_SP } } fn path_node(ids: Vec) -> ast::Path { diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs index 00dd79ffb00..d49965fd936 100644 --- a/src/libsyntax/test_snippet.rs +++ b/src/libsyntax/test_snippet.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use codemap::{SourceMap, FilePathMapping}; +use source_map::{SourceMap, FilePathMapping}; use errors::Handler; use errors::emitter::EmitterWriter; use std::io; diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 35dae1a4e67..b43d22d8855 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -9,7 +9,7 @@ // except according to those terms. use ast::{self, Ident}; -use codemap::FilePathMapping; +use source_map::FilePathMapping; use parse::{ParseSess, PResult, source_file_to_stream}; use parse::{lexer, new_parser_from_source_str}; use parse::parser::Parser; diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs index 8d0a04831fc..e3bd2ca0131 100644 --- a/src/libsyntax_ext/assert.rs +++ b/src/libsyntax_ext/assert.rs @@ -9,7 +9,7 @@ // except according to those terms. 
use syntax::ast::*; -use syntax::codemap::Spanned; +use syntax::source_map::Spanned; use syntax::ext::base::*; use syntax::ext::build::AstBuilder; use syntax::parse::token; diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index c3958bb58dd..973ad631b83 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -14,7 +14,7 @@ use errors::FatalError; use proc_macro::{TokenStream, __internal}; use syntax::ast::{self, ItemKind, Attribute, Mac}; use syntax::attr::{mark_used, mark_known}; -use syntax::codemap::Span; +use syntax::source_map::Span; use syntax::ext::base::*; use syntax::visit::Visitor; diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 2e0ba65dc65..28a2c11ceb1 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -198,7 +198,7 @@ use syntax::ast::{VariantData, GenericParamKind, GenericArg}; use syntax::attr; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::codemap::{self, respan}; +use syntax::source_map::{self, respan}; use syntax::util::move_map::MoveMap; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; @@ -1619,7 +1619,7 @@ impl<'a> TraitDef<'a> { if ident.is_none() { cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); } - codemap::Spanned { + source_map::Spanned { span: pat.span.with_ctxt(self.span.ctxt()), node: ast::FieldPat { ident: ident.unwrap(), diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs index a0845e0982d..fa284f4ab0e 100644 --- a/src/libsyntax_ext/deriving/generic/ty.rs +++ b/src/libsyntax_ext/deriving/generic/ty.rs @@ -18,7 +18,7 @@ use syntax::ast; use syntax::ast::{Expr, GenericParamKind, Generics, Ident, SelfKind, GenericArg}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; -use syntax::codemap::{respan, DUMMY_SP}; 
+use syntax::source_map::{respan, DUMMY_SP}; use syntax::ptr::P; use syntax_pos::Span; use syntax_pos::symbol::keywords; diff --git a/src/libsyntax_ext/global_asm.rs b/src/libsyntax_ext/global_asm.rs index 7290b701e4d..56f28d04e9d 100644 --- a/src/libsyntax_ext/global_asm.rs +++ b/src/libsyntax_ext/global_asm.rs @@ -21,7 +21,7 @@ use rustc_data_structures::small_vec::OneVector; use syntax::ast; -use syntax::codemap::respan; +use syntax::source_map::respan; use syntax::ext::base; use syntax::ext::base::*; use syntax::feature_gate; diff --git a/src/libsyntax_ext/proc_macro_impl.rs b/src/libsyntax_ext/proc_macro_impl.rs index 12400e363f4..ff60262055b 100644 --- a/src/libsyntax_ext/proc_macro_impl.rs +++ b/src/libsyntax_ext/proc_macro_impl.rs @@ -12,7 +12,7 @@ use std::panic; use errors::FatalError; -use syntax::codemap::Span; +use syntax::source_map::Span; use syntax::ext::base::*; use syntax::tokenstream::TokenStream; use syntax::ext::base; diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index 5031182d484..65e175f95df 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -14,7 +14,7 @@ use errors; use syntax::ast::{self, Ident}; use syntax::attr; -use syntax::codemap::{ExpnInfo, MacroAttribute, hygiene, respan}; +use syntax::source_map::{ExpnInfo, MacroAttribute, hygiene, respan}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::expand::ExpansionConfig; diff --git a/src/test/compile-fail-fulldeps/qquote.rs b/src/test/compile-fail-fulldeps/qquote.rs index 3e2829adeb5..4b0bc8f02b4 100644 --- a/src/test/compile-fail-fulldeps/qquote.rs +++ b/src/test/compile-fail-fulldeps/qquote.rs @@ -16,7 +16,7 @@ extern crate syntax; extern crate syntax_pos; use syntax::ast; -use syntax::codemap::FilePathMapping; +use syntax::source_map::FilePathMapping; use syntax::print::pprust; use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; diff --git 
a/src/test/run-fail-fulldeps/qquote.rs b/src/test/run-fail-fulldeps/qquote.rs index c8c80b7759c..d757dd97e94 100644 --- a/src/test/run-fail-fulldeps/qquote.rs +++ b/src/test/run-fail-fulldeps/qquote.rs @@ -18,7 +18,7 @@ extern crate syntax; extern crate syntax_pos; use syntax::ast; -use syntax::codemap; +use syntax::source_map; use syntax::print::pprust; use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; @@ -28,7 +28,7 @@ fn main() { } fn run() { - let ps = syntax::parse::ParseSess::new(codemap::FilePathMapping::empty()); + let ps = syntax::parse::ParseSess::new(source_map::FilePathMapping::empty()); let mut resolver = syntax::ext::base::DummyResolver; let mut cx = syntax::ext::base::ExtCtxt::new( &ps, diff --git a/src/test/run-make-fulldeps/issue-19371/foo.rs b/src/test/run-make-fulldeps/issue-19371/foo.rs index d83b21e6441..4dfecb33c14 100644 --- a/src/test/run-make-fulldeps/issue-19371/foo.rs +++ b/src/test/run-make-fulldeps/issue-19371/foo.rs @@ -24,7 +24,7 @@ use rustc::session::config::{Input, Options, use rustc_driver::driver::{self, compile_input, CompileController}; use rustc_metadata::cstore::CStore; use rustc_errors::registry::Registry; -use syntax::codemap::FileName; +use syntax::source_map::FileName; use rustc_codegen_utils::codegen_backend::CodegenBackend; use std::path::PathBuf; diff --git a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs index 2ef7eba6c05..6a706bdb9b2 100644 --- a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs +++ b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs @@ -17,7 +17,7 @@ extern crate syntax; use syntax::ast::*; use syntax::attr::*; use syntax::ast; -use syntax::codemap::{FilePathMapping, FileName}; +use syntax::source_map::{FilePathMapping, FileName}; use syntax::parse; use syntax::parse::{ParseSess, PResult}; use syntax::parse::new_parser_from_source_str; diff --git a/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs 
b/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs index 6d5e82c68cc..985f31296fb 100644 --- a/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs +++ b/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs @@ -22,7 +22,7 @@ use deriving::generic::ty::*; use rustc_plugin::Registry; use syntax::ast::*; -use syntax::codemap::Span; +use syntax::source_map::Span; use syntax::ext::base::*; use syntax::ext::build::AstBuilder; use syntax::symbol::Symbol; diff --git a/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs index 64fdd7f9a95..14e9dbf3a37 100644 --- a/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs +++ b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs @@ -18,7 +18,7 @@ extern crate rustc_plugin; extern crate syntax_pos; use syntax::ast::{self, Item, MetaItem, ItemKind}; -use syntax::codemap::DUMMY_SP; +use syntax::source_map::DUMMY_SP; use syntax::ext::base::*; use syntax::ext::quote::rt::ToTokens; use syntax::parse::{self, token}; diff --git a/src/test/run-pass-fulldeps/mod_dir_path_canonicalized.rs b/src/test/run-pass-fulldeps/mod_dir_path_canonicalized.rs index 3bf50652113..ee424b31636 100644 --- a/src/test/run-pass-fulldeps/mod_dir_path_canonicalized.rs +++ b/src/test/run-pass-fulldeps/mod_dir_path_canonicalized.rs @@ -16,7 +16,7 @@ extern crate syntax; use std::path::Path; -use syntax::codemap::FilePathMapping; +use syntax::source_map::FilePathMapping; use syntax::parse::{self, ParseSess}; #[path = "mod_dir_simple/test.rs"] diff --git a/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs b/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs index 3da50f16965..e944ef2b620 100644 --- a/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs +++ b/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs @@ -35,8 +35,8 @@ extern crate syntax; use rustc_data_structures::thin_vec::ThinVec; use syntax::ast::*; -use syntax::codemap::{Spanned, DUMMY_SP, 
FileName}; -use syntax::codemap::FilePathMapping; +use syntax::source_map::{Spanned, DUMMY_SP, FileName}; +use syntax::source_map::FilePathMapping; use syntax::fold::{self, Folder}; use syntax::parse::{self, ParseSess}; use syntax::print::pprust; diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-40001-plugin.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-40001-plugin.rs index 56c163b8ce3..5214d7db5cc 100644 --- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-40001-plugin.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-40001-plugin.rs @@ -29,7 +29,7 @@ use rustc::hir::intravisit; use rustc::hir::map as hir_map; use rustc::lint::{LateContext, LintPass, LintArray, LateLintPass, LintContext}; use rustc::ty; -use syntax::{ast, codemap}; +use syntax::{ast, source_map}; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index c597360c042..55fed8693a0 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -15,7 +15,7 @@ extern crate syntax; extern crate syntax_pos; -use syntax::codemap::FilePathMapping; +use syntax::source_map::FilePathMapping; use syntax::print::pprust::*; use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; -- cgit 1.4.1-3-g733a5 From d3fe97f3d32b4cef1c22b6a5ba5326b1b195e262 Mon Sep 17 00:00:00 2001 From: Donato Sciarra Date: Sat, 18 Aug 2018 12:14:09 +0200 Subject: mv codemap() source_map() --- src/libproc_macro/lib.rs | 2 +- src/librustc/hir/lowering.rs | 2 +- src/librustc/hir/map/mod.rs | 2 +- src/librustc/ich/hcx.rs | 6 ++-- src/librustc/infer/error_reporting/mod.rs | 14 ++++---- .../nice_region_error/static_impl_trait.rs | 2 +- src/librustc/lint/builtin.rs | 6 ++-- src/librustc/lint/mod.rs | 2 +- src/librustc/middle/dead.rs | 4 +-- src/librustc/middle/liveness.rs | 2 +- src/librustc/middle/region.rs | 2 +- src/librustc/middle/resolve_lifetime.rs | 2 +- 
src/librustc/middle/stability.rs | 2 +- src/librustc/session/mod.rs | 4 +-- src/librustc/traits/error_reporting.rs | 32 +++++++++--------- src/librustc/traits/mod.rs | 2 +- src/librustc/traits/specialize/mod.rs | 4 +-- src/librustc/ty/context.rs | 2 +- src/librustc/ty/error.rs | 2 +- src/librustc/ty/item_path.rs | 2 +- src/librustc/ty/query/on_disk_cache.rs | 6 ++-- src/librustc/ty/query/plumbing.rs | 4 +-- src/librustc_borrowck/borrowck/check_loans.rs | 2 +- .../borrowck/gather_loans/move_error.rs | 2 +- src/librustc_borrowck/borrowck/mod.rs | 14 ++++---- src/librustc_borrowck/borrowck/unused.rs | 2 +- src/librustc_codegen_llvm/debuginfo/source_loc.rs | 2 +- src/librustc_codegen_llvm/debuginfo/utils.rs | 2 +- src/librustc_codegen_llvm/mir/block.rs | 2 +- src/librustc_codegen_llvm/mir/mod.rs | 2 +- src/librustc_driver/driver.rs | 4 +-- src/librustc_driver/pretty.rs | 10 +++--- src/librustc_driver/test.rs | 2 +- src/librustc_incremental/persist/load.rs | 4 +-- src/librustc_lint/builtin.rs | 14 ++++---- src/librustc_lint/types.rs | 2 +- src/librustc_metadata/cstore_impl.rs | 2 +- src/librustc_metadata/decoder.rs | 2 +- src/librustc_metadata/encoder.rs | 10 +++--- src/librustc_mir/borrow_check/mod.rs | 4 +-- src/librustc_mir/borrow_check/move_errors.rs | 4 +-- src/librustc_mir/borrow_check/mutability_errors.rs | 8 ++--- .../region_infer/error_reporting/region_name.rs | 6 ++-- src/librustc_mir/build/scope.rs | 2 +- src/librustc_mir/transform/check_unsafety.rs | 4 +-- src/librustc_mir/util/mod.rs | 2 +- src/librustc_mir/util/pretty.rs | 2 +- src/librustc_resolve/check_unused.rs | 2 +- src/librustc_resolve/lib.rs | 12 +++---- src/librustc_resolve/resolve_imports.rs | 4 +-- src/librustc_save_analysis/dump_visitor.rs | 2 +- src/librustc_save_analysis/lib.rs | 6 ++-- src/librustc_save_analysis/span_utils.rs | 6 ++-- src/librustc_typeck/check/_match.rs | 2 +- src/librustc_typeck/check/cast.rs | 6 ++-- src/librustc_typeck/check/compare_method.rs | 16 ++++----- 
src/librustc_typeck/check/demand.rs | 4 +-- src/librustc_typeck/check/method/suggest.rs | 16 ++++----- src/librustc_typeck/check/mod.rs | 16 ++++----- src/librustc_typeck/check/op.rs | 4 +-- src/librustc_typeck/check_unused.rs | 2 +- src/librustc_typeck/coherence/mod.rs | 4 +-- src/librustc_typeck/coherence/orphan.rs | 2 +- src/librustc_typeck/structured_errors.rs | 2 +- src/librustdoc/clean/mod.rs | 8 ++--- src/librustdoc/html/highlight.rs | 4 +-- src/libsyntax/diagnostics/metadata.rs | 2 +- src/libsyntax/ext/base.rs | 2 +- src/libsyntax/ext/build.rs | 2 +- src/libsyntax/ext/expand.rs | 6 ++-- src/libsyntax/ext/source_util.rs | 12 +++---- src/libsyntax/feature_gate.rs | 2 +- src/libsyntax/parse/lexer/comments.rs | 2 +- src/libsyntax/parse/lexer/mod.rs | 4 +-- src/libsyntax/parse/mod.rs | 10 +++--- src/libsyntax/parse/parser.rs | 38 +++++++++++----------- src/libsyntax/util/parser_testing.rs | 2 +- src/libsyntax_ext/format.rs | 2 +- 78 files changed, 209 insertions(+), 209 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 08ae78f775b..31aad29d083 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -1186,7 +1186,7 @@ pub mod __internal { use super::{TokenStream, LexError, Span}; pub fn lookup_char_pos(pos: BytePos) -> Loc { - with_sess(|sess, _| sess.codemap().lookup_char_pos(pos)) + with_sess(|sess, _| sess.source_map().lookup_char_pos(pos)) } pub fn new_token_stream(item: P) -> TokenStream { diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index e677906b058..b5f5f4d5c11 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -3621,7 +3621,7 @@ impl<'a> LoweringContext<'a> { let tail = block.expr.take().map_or_else( || { let LoweredNodeId { node_id, hir_id } = this.next_id(); - let span = this.sess.codemap().end_point(unstable_span); + let span = this.sess.source_map().end_point(unstable_span); hir::Expr { id: node_id, span, diff 
--git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 5bc80e537a3..ebda91cb7b0 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -1202,7 +1202,7 @@ pub fn map_crate<'hir>(sess: &::session::Session, let cmdline_args = sess.opts.dep_tracking_hash(); collector.finalize_and_compute_crate_hash(crate_disambiguator, cstore, - sess.codemap(), + sess.source_map(), cmdline_args) }; diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index 30ae1fbfb30..799887df05d 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -101,7 +101,7 @@ impl<'a> StableHashingContext<'a> { definitions, cstore, caching_codemap: None, - raw_codemap: sess.codemap(), + raw_codemap: sess.source_map(), hash_spans: hash_spans_initial, hash_bodies: true, node_id_hashing_mode: NodeIdHashingMode::HashDefPath, @@ -169,7 +169,7 @@ impl<'a> StableHashingContext<'a> { } #[inline] - pub fn codemap(&mut self) -> &mut CachingCodemapView<'a> { + pub fn source_map(&mut self) -> &mut CachingCodemapView<'a> { match self.caching_codemap { Some(ref mut cm) => { cm @@ -340,7 +340,7 @@ impl<'a> HashStable> for Span { return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher); } - let (file_lo, line_lo, col_lo) = match hcx.codemap() + let (file_lo, line_lo, col_lo) = match hcx.source_map() .byte_pos_to_line_and_col(span.lo) { Some(pos) => pos, None => { diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs index 212821cac2e..d2987119e29 100644 --- a/src/librustc/infer/error_reporting/mod.rs +++ b/src/librustc/infer/error_reporting/mod.rs @@ -189,7 +189,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self, region: ty::Region<'tcx>, ) -> (String, Option) { - let cm = self.sess.codemap(); + let cm = self.sess.source_map(); let scope = region.free_region_binding_scope(self); let node = self.hir.as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID); @@ -286,7 +286,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } 
fn explain_span(self, heading: &str, span: Span) -> (String, Option) { - let lo = self.sess.codemap().lookup_char_pos_adj(span.lo()); + let lo = self.sess.source_map().lookup_char_pos_adj(span.lo()); ( format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize() + 1), Some(span), @@ -502,14 +502,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { ObligationCauseCode::MatchExpressionArm { arm_span, source } => match source { hir::MatchSource::IfLetDesugar { .. } => { let msg = "`if let` arm with an incompatible type"; - if self.tcx.sess.codemap().is_multiline(arm_span) { + if self.tcx.sess.source_map().is_multiline(arm_span) { err.span_note(arm_span, msg); } else { err.span_label(arm_span, msg); } }, hir::MatchSource::TryDesugar => { // Issue #51632 - if let Ok(try_snippet) = self.tcx.sess.codemap().span_to_snippet(arm_span) { + if let Ok(try_snippet) = self.tcx.sess.source_map().span_to_snippet(arm_span) { err.span_suggestion_with_applicability( arm_span, "try wrapping with a success variant", @@ -520,7 +520,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { }, _ => { let msg = "match arm with an incompatible type"; - if self.tcx.sess.codemap().is_multiline(arm_span) { + if self.tcx.sess.source_map().is_multiline(arm_span) { err.span_note(arm_span, msg); } else { err.span_label(arm_span, msg); @@ -1136,8 +1136,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let sp = if has_bounds { sp.to(self.tcx .sess - .codemap() - .next_point(self.tcx.sess.codemap().next_point(sp))) + .source_map() + .next_point(self.tcx.sess.source_map().next_point(sp))) } else { sp }; diff --git a/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs b/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs index 193f86a3827..d25dcd5b045 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs @@ -60,7 +60,7 @@ impl<'a, 'gcx, 'tcx> 
NiceRegionError<'a, 'gcx, 'tcx> { }) => name.to_string(), _ => "'_".to_owned(), }; - if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(return_sp) { + if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(return_sp) { err.span_suggestion( return_sp, &format!( diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 32687cb4884..015f755e97d 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -420,7 +420,7 @@ impl BuiltinLintDiagnostics { match self { BuiltinLintDiagnostics::Normal => (), BuiltinLintDiagnostics::BareTraitObject(span, is_global) => { - let (sugg, app) = match sess.codemap().span_to_snippet(span) { + let (sugg, app) = match sess.source_map().span_to_snippet(span) { Ok(ref s) if is_global => (format!("dyn ({})", s), Applicability::MachineApplicable), Ok(s) => (format!("dyn {}", s), Applicability::MachineApplicable), @@ -429,7 +429,7 @@ impl BuiltinLintDiagnostics { db.span_suggestion_with_applicability(span, "use `dyn`", sugg, app); } BuiltinLintDiagnostics::AbsPathWithModule(span) => { - let (sugg, app) = match sess.codemap().span_to_snippet(span) { + let (sugg, app) = match sess.source_map().span_to_snippet(span) { Ok(ref s) => { // FIXME(Manishearth) ideally the emitting code // can tell us whether or not this is global @@ -462,7 +462,7 @@ impl BuiltinLintDiagnostics { // When possible, prefer a suggestion that replaces the whole // `Path` expression with `Path<'_, T>`, rather than inserting `'_, ` // at a point (which makes for an ugly/confusing label) - if let Ok(snippet) = sess.codemap().span_to_snippet(path_span) { + if let Ok(snippet) = sess.source_map().span_to_snippet(path_span) { // But our spans can get out of whack due to macros; if the place we think // we want to insert `'_` isn't even within the path expression's span, we // should bail out of making any suggestion rather than panicking on a diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index 
62a582a3b93..5872440c362 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -754,7 +754,7 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool { None => return true, }; - match sess.codemap().span_to_snippet(def_site) { + match sess.source_map().span_to_snippet(def_site) { Ok(code) => !code.starts_with("macro_rules"), // no snippet = external macro or compiler-builtin expansion Err(_) => true, diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index 2090950932b..8c4c54ec954 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -551,7 +551,7 @@ impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> { hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) | hir::ItemKind::Trait(..) | - hir::ItemKind::Impl(..) => self.tcx.sess.codemap().def_span(item.span), + hir::ItemKind::Impl(..) => self.tcx.sess.source_map().def_span(item.span), _ => item.span, }; let participle = match item.node { @@ -612,7 +612,7 @@ impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> { } hir::ImplItemKind::Method(_, body_id) => { if !self.symbol_is_live(impl_item.id, None) { - let span = self.tcx.sess.codemap().def_span(impl_item.span); + let span = self.tcx.sess.source_map().def_span(impl_item.span); self.warn_dead_code(impl_item.id, span, impl_item.ident.name, "method", "used"); } self.visit_nested_body(body_id) diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index b828b1bd30a..7d9590ee578 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -157,7 +157,7 @@ enum LiveNodeKind { } fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt) -> String { - let cm = tcx.sess.codemap(); + let cm = tcx.sess.source_map(); match lnk { FreeVarNode(s) => { format!("Free var node [{}]", cm.span_to_string(s)) diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index e8429c78dc6..be1d93dbad1 100644 --- a/src/librustc/middle/region.rs +++ 
b/src/librustc/middle/region.rs @@ -1314,7 +1314,7 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> { debug!("visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})", owner_id, - self.tcx.sess.codemap().span_to_string(body.value.span), + self.tcx.sess.source_map().span_to_string(body.value.span), body_id, self.cx.parent); diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 6ae027dac7e..379f4df11fa 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -2457,7 +2457,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { "insert_lifetime: {} resolved to {:?} span={:?}", self.tcx.hir.node_to_string(lifetime_ref.id), def, - self.tcx.sess.codemap().span_to_string(lifetime_ref.span) + self.tcx.sess.source_map().span_to_string(lifetime_ref.span) ); self.map.defs.insert(lifetime_ref.id, def); diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 8af8d463b11..efd568b03ab 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -685,7 +685,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }; let msp: MultiSpan = span.into(); - let cm = &self.sess.parse_sess.codemap(); + let cm = &self.sess.parse_sess.source_map(); let span_key = msp.primary_span().and_then(|sp: Span| if !sp.is_dummy() { let file = cm.lookup_char_pos(sp.lo()).file; diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 27d6a1f1791..b92f55f4284 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -484,8 +484,8 @@ impl Session { ); } - pub fn codemap<'a>(&'a self) -> &'a source_map::SourceMap { - self.parse_sess.codemap() + pub fn source_map<'a>(&'a self) -> &'a source_map::SourceMap { + self.parse_sess.source_map() } pub fn verbose(&self) -> bool { self.opts.debugging_opts.verbose diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 
92c66ef39ea..5d45badf49f 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -528,12 +528,12 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { -> DiagnosticBuilder<'tcx> { let msg = "impl has stricter requirements than trait"; - let sp = self.tcx.sess.codemap().def_span(error_span); + let sp = self.tcx.sess.source_map().def_span(error_span); let mut err = struct_span_err!(self.tcx.sess, sp, E0276, "{}", msg); if let Some(trait_item_span) = self.tcx.hir.span_if_local(trait_item_def_id) { - let span = self.tcx.sess.codemap().def_span(trait_item_span); + let span = self.tcx.sess.source_map().def_span(trait_item_span); err.span_label(span, format!("definition of `{}` from trait", item_name)); } @@ -715,7 +715,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) => { let found_kind = self.closure_kind(closure_def_id, closure_substs).unwrap(); - let closure_span = self.tcx.sess.codemap() + let closure_span = self.tcx.sess.source_map() .def_span(self.tcx.hir.span_if_local(closure_def_id).unwrap()); let node_id = self.tcx.hir.as_local_node_id(closure_def_id).unwrap(); let mut err = struct_span_err!( @@ -792,7 +792,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { }; let found_span = found_did.and_then(|did| { self.tcx.hir.span_if_local(did) - }).map(|sp| self.tcx.sess.codemap().def_span(sp)); // the sp could be an fn def + }).map(|sp| self.tcx.sess.source_map().def_span(sp)); // the sp could be an fn def let found = match found_trait_ref.skip_binder().substs.type_at(1).sty { ty::TyTuple(ref tys) => tys.iter() @@ -867,7 +867,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { if let Some(hir::map::NodeLocal(ref local)) = self.tcx.hir.find(parent_node) { if let Some(ref expr) = local.init { if let hir::ExprKind::Index(_, _) = expr.node { - if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(expr.span) { + if let Ok(snippet) = 
self.tcx.sess.source_map().span_to_snippet(expr.span) { err.span_suggestion_with_applicability( expr.span, "consider borrowing here", @@ -890,7 +890,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let trait_ref = trait_ref.skip_binder(); let span = obligation.cause.span; - if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) { + if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { let refs_number = snippet.chars() .filter(|c| !c.is_whitespace()) .take_while(|c| *c == '&') @@ -909,7 +909,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { new_trait_ref.to_predicate()); if self.predicate_may_hold(&new_obligation) { - let sp = self.tcx.sess.codemap() + let sp = self.tcx.sess.source_map() .span_take_while(span, |c| c.is_whitespace() || *c == '&'); let remove_refs = refs_remaining + 1; @@ -938,7 +938,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { node: hir::ExprKind::Closure(_, ref _decl, id, span, _), .. }) => { - (self.tcx.sess.codemap().def_span(span), self.tcx.hir.body(id).arguments.iter() + (self.tcx.sess.source_map().def_span(span), self.tcx.hir.body(id).arguments.iter() .map(|arg| { if let hir::Pat { node: hir::PatKind::Tuple(args, _), @@ -948,13 +948,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { ArgKind::Tuple( Some(span), args.iter().map(|pat| { - let snippet = self.tcx.sess.codemap() + let snippet = self.tcx.sess.source_map() .span_to_snippet(pat.span).unwrap(); (snippet, "_".to_owned()) }).collect::>(), ) } else { - let name = self.tcx.sess.codemap() + let name = self.tcx.sess.source_map() .span_to_snippet(arg.pat.span).unwrap(); ArgKind::Arg(name, "_".to_owned()) } @@ -976,7 +976,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { node: hir::TraitItemKind::Method(hir::MethodSig { ref decl, .. }, _), .. 
}) => { - (self.tcx.sess.codemap().def_span(span), decl.inputs.iter() + (self.tcx.sess.source_map().def_span(span), decl.inputs.iter() .map(|arg| match arg.clone().node { hir::TyKind::Tup(ref tys) => ArgKind::Tuple( Some(arg.span), @@ -995,13 +995,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { }, .. }) => { - (self.tcx.sess.codemap().def_span(span), + (self.tcx.sess.source_map().def_span(span), fields.iter().map(|field| { ArgKind::Arg(field.ident.to_string(), "_".to_string()) }).collect::>()) } hir::map::NodeStructCtor(ref variant_data) => { - (self.tcx.sess.codemap().def_span(self.tcx.hir.span(variant_data.id())), + (self.tcx.sess.source_map().def_span(self.tcx.hir.span(variant_data.id())), variant_data.fields() .iter().map(|_| ArgKind::Arg("_".to_owned(), "_".to_owned())) .collect()) @@ -1192,7 +1192,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { { assert!(type_def_id.is_local()); let span = self.hir.span_if_local(type_def_id).unwrap(); - let span = self.sess.codemap().def_span(span); + let span = self.sess.source_map().def_span(span); let mut err = struct_span_err!(self.sess, span, E0072, "recursive type `{}` has infinite size", self.item_path_str(type_def_id)); @@ -1210,7 +1210,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { -> DiagnosticBuilder<'tcx> { let trait_str = self.item_path_str(trait_def_id); - let span = self.sess.codemap().def_span(span); + let span = self.sess.source_map().def_span(span); let mut err = struct_span_err!( self.sess, span, E0038, "the trait `{}` cannot be made into an object", @@ -1438,7 +1438,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let item_name = tcx.item_path_str(item_def_id); let msg = format!("required by `{}`", item_name); if let Some(sp) = tcx.hir.span_if_local(item_def_id) { - let sp = tcx.sess.codemap().def_span(sp); + let sp = tcx.sess.source_map().def_span(sp); err.span_note(sp, &msg); } else { err.note(&msg); diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 
ef14d6d05c2..0ba94b5275c 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -142,7 +142,7 @@ impl<'tcx> ObligationCause<'tcx> { ObligationCauseCode::CompareImplMethodObligation { .. } | ObligationCauseCode::MainFunctionType | ObligationCauseCode::StartFunctionType => { - tcx.sess.codemap().def_span(self.span) + tcx.sess.source_map().def_span(self.span) } _ => self.span, } diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index 06f9b446146..caebcbec390 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -344,7 +344,7 @@ pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx }), if used_to_be_allowed { " (E0119)" } else { "" } ); - let impl_span = tcx.sess.codemap().def_span( + let impl_span = tcx.sess.source_map().def_span( tcx.span_of_impl(impl_def_id).unwrap() ); let mut err = if used_to_be_allowed { @@ -363,7 +363,7 @@ pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx match tcx.span_of_impl(overlap.with_impl) { Ok(span) => { - err.span_label(tcx.sess.codemap().def_span(span), + err.span_label(tcx.sess.source_map().def_span(span), "first implementation here".to_string()); err.span_label(impl_span, format!("conflicting implementation{}", diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 921b3b1793a..ef363459108 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1818,7 +1818,7 @@ pub mod tls { /// in librustc otherwise fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result { with(|tcx| { - write!(f, "{}", tcx.sess.codemap().span_to_string(span)) + write!(f, "{}", tcx.sess.source_map().span_to_string(span)) }) } diff --git a/src/librustc/ty/error.rs b/src/librustc/ty/error.rs index 49fffaa375b..e23034bde76 100644 --- a/src/librustc/ty/error.rs +++ b/src/librustc/ty/error.rs @@ -252,7 +252,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 
'gcx, 'tcx> { } match (&values.found.sty, &values.expected.sty) { // Issue #53280 (ty::TyInfer(ty::IntVar(_)), ty::TyFloat(_)) => { - if let Ok(snippet) = self.sess.codemap().span_to_snippet(sp) { + if let Ok(snippet) = self.sess.source_map().span_to_snippet(sp) { if snippet.chars().all(|c| c.is_digit(10) || c == '-' || c == '_') { db.span_suggestion_with_applicability( sp, diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index c44b7327a08..593470b5d1e 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -336,7 +336,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.push_item_path(buffer, parent_def_id); let node_id = self.hir.as_local_node_id(impl_def_id).unwrap(); let item = self.hir.expect_item(node_id); - let span_str = self.sess.codemap().span_to_string(item.span); + let span_str = self.sess.source_map().span_to_string(item.span); buffer.push(&format!("", span_str)); } diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index c1f69d4a0f1..f7876ee035c 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -140,7 +140,7 @@ impl<'sess> OnDiskCache<'sess> { file_index_to_file: Lock::new(FxHashMap()), prev_cnums: footer.prev_cnums, cnum_map: Once::new(), - codemap: sess.codemap(), + codemap: sess.source_map(), current_diagnostics: Lock::new(FxHashMap()), query_result_index: footer.query_result_index.into_iter().collect(), prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(), @@ -178,7 +178,7 @@ impl<'sess> OnDiskCache<'sess> { let mut file_to_file_index = FxHashMap(); let mut file_index_to_stable_id = FxHashMap(); - for (index, file) in tcx.sess.codemap().files().iter().enumerate() { + for (index, file) in tcx.sess.source_map().files().iter().enumerate() { let index = SourceFileIndex(index as u32); let file_ptr: *const SourceFile = &**file as *const _; file_to_file_index.insert(file_ptr, index); @@ -196,7 
+196,7 @@ impl<'sess> OnDiskCache<'sess> { expn_info_shorthands: FxHashMap(), interpret_allocs: FxHashMap(), interpret_allocs_inverse: Vec::new(), - codemap: CachingCodemapView::new(tcx.sess.codemap()), + codemap: CachingCodemapView::new(tcx.sess.source_map()), file_to_file_index, }; diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs index 450fbc0edbb..bf721ddd13f 100644 --- a/src/librustc/ty/query/plumbing.rs +++ b/src/librustc/ty/query/plumbing.rs @@ -251,7 +251,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { assert!(!stack.is_empty()); let fix_span = |span: Span, query: &Query<'gcx>| { - self.sess.codemap().def_span(query.default_span(self, span)) + self.sess.source_map().def_span(query.default_span(self, span)) }; // Disable naming impls with types in this path, since that @@ -299,7 +299,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { i, query.info.query.name(), query.info.query.describe(icx.tcx))); - db.set_span(icx.tcx.sess.codemap().def_span(query.info.span)); + db.set_span(icx.tcx.sess.source_map().def_span(query.info.span)); icx.tcx.sess.diagnostic().force_print_db(db); current_query = query.parent.clone(); diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index 709590f649b..dab60ef1661 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -594,7 +594,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { // 3. Where does old loan expire. 
let previous_end_span = - Some(self.tcx().sess.codemap().end_point( + Some(self.tcx().sess.source_map().end_point( old_loan.kill_scope.span(self.tcx(), &self.bccx.region_scope_tree))); let mut err = match (new_loan.kind, old_loan.kind) { diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs index b217e6a8564..e1c3ac83990 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs @@ -79,7 +79,7 @@ fn report_move_errors<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, errors: &[MoveErr // see `get_pattern_source()` for details let initializer = e.init.as_ref().expect("should have an initializer to get an error"); - if let Ok(snippet) = bccx.tcx.sess.codemap().span_to_snippet(initializer.span) { + if let Ok(snippet) = bccx.tcx.sess.source_map().span_to_snippet(initializer.span) { err.span_suggestion(initializer.span, "consider using a reference instead", format!("&{}", snippet)); diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 5b08400eb11..7208e1ca362 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -848,7 +848,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { let sp = self.tcx.hir.span(node_id); let fn_closure_msg = "`Fn` closures cannot capture their enclosing \ environment for modifications"; - match (self.tcx.sess.codemap().span_to_snippet(sp), &err.cmt.cat) { + match (self.tcx.sess.source_map().span_to_snippet(sp), &err.cmt.cat) { (_, &Categorization::Upvar(mc::Upvar { kind: ty::ClosureKind::Fn, .. 
})) => { @@ -1160,13 +1160,13 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { ref ty }) = pty.node { // Account for existing lifetimes when generating the message - let pointee_snippet = match self.tcx.sess.codemap().span_to_snippet(ty.span) { + let pointee_snippet = match self.tcx.sess.source_map().span_to_snippet(ty.span) { Ok(snippet) => snippet, _ => return None }; let lifetime_snippet = if !lifetime.is_elided() { - format!("{} ", match self.tcx.sess.codemap().span_to_snippet(lifetime.span) { + format!("{} ", match self.tcx.sess.source_map().span_to_snippet(lifetime.span) { Ok(lifetime_snippet) => lifetime_snippet, _ => return None }) @@ -1277,7 +1277,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { binding_node_id: ast::NodeId) { let let_span = self.tcx.hir.span(binding_node_id); if let ty::BindByValue(..) = self.local_binding_mode(binding_node_id) { - if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(let_span) { + if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(let_span) { let (ty, is_implicit_self) = self.local_ty(binding_node_id); if is_implicit_self && snippet != "self" { // avoid suggesting `mut &self`. 
@@ -1315,7 +1315,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { let cmt_path_or_string = self.cmt_to_path_or_string(&err.cmt); let suggestion = - match self.tcx.sess.codemap().span_to_snippet(err.span) { + match self.tcx.sess.source_map().span_to_snippet(err.span) { Ok(string) => format!("move {}", string), Err(_) => "move || ".to_string() }; @@ -1337,7 +1337,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { fn region_end_span(&self, region: ty::Region<'tcx>) -> Option { match *region { ty::ReScope(scope) => { - Some(self.tcx.sess.codemap().end_point( + Some(self.tcx.sess.source_map().end_point( scope.span(self.tcx, &self.region_scope_tree))) } _ => None @@ -1368,7 +1368,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { db.span_label(*error_span, "cannot borrow as mutable"); } else if let Categorization::Local(local_id) = err.cmt.cat { let span = self.tcx.hir.span(local_id); - if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) { + if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { if snippet.starts_with("ref mut ") || snippet.starts_with("&mut ") { db.span_label(*error_span, "cannot reborrow mutably"); db.span_label(*error_span, "try removing `&mut` here"); diff --git a/src/librustc_borrowck/borrowck/unused.rs b/src/librustc_borrowck/borrowck/unused.rs index 88545c12415..c5571ecb97c 100644 --- a/src/librustc_borrowck/borrowck/unused.rs +++ b/src/librustc_borrowck/borrowck/unused.rs @@ -75,7 +75,7 @@ impl<'a, 'tcx> UnusedMutCx<'a, 'tcx> { } let (hir_id, span) = ids[0]; - let mut_span = tcx.sess.codemap().span_until_non_whitespace(span); + let mut_span = tcx.sess.source_map().span_until_non_whitespace(span); // Ok, every name wasn't used mutably, so issue a warning that this // didn't need to be mutable. 
diff --git a/src/librustc_codegen_llvm/debuginfo/source_loc.rs b/src/librustc_codegen_llvm/debuginfo/source_loc.rs index 55cf1393943..c59b5e2b8f5 100644 --- a/src/librustc_codegen_llvm/debuginfo/source_loc.rs +++ b/src/librustc_codegen_llvm/debuginfo/source_loc.rs @@ -40,7 +40,7 @@ pub fn set_source_location( }; let dbg_loc = if function_debug_context.source_locations_enabled.get() { - debug!("set_source_location: {}", bx.sess().codemap().span_to_string(span)); + debug!("set_source_location: {}", bx.sess().source_map().span_to_string(span)); let loc = span_start(bx.cx, span); InternalDebugLocation::new(scope.unwrap(), loc.line, loc.col.to_usize()) } else { diff --git a/src/librustc_codegen_llvm/debuginfo/utils.rs b/src/librustc_codegen_llvm/debuginfo/utils.rs index 9f4a555082a..19bc4ac39d3 100644 --- a/src/librustc_codegen_llvm/debuginfo/utils.rs +++ b/src/librustc_codegen_llvm/debuginfo/utils.rs @@ -47,7 +47,7 @@ pub fn create_DIArray( /// Return syntax_pos::Loc corresponding to the beginning of the span pub fn span_start(cx: &CodegenCx, span: Span) -> syntax_pos::Loc { - cx.sess().codemap().lookup_char_pos(span.lo()) + cx.sess().source_map().lookup_char_pos(span.lo()) } #[inline] diff --git a/src/librustc_codegen_llvm/mir/block.rs b/src/librustc_codegen_llvm/mir/block.rs index 5f718ae456c..1104408c1c8 100644 --- a/src/librustc_codegen_llvm/mir/block.rs +++ b/src/librustc_codegen_llvm/mir/block.rs @@ -359,7 +359,7 @@ impl FunctionCx<'a, 'll, 'tcx> { self.set_debug_loc(&bx, terminator.source_info); // Get the location information. 
- let loc = bx.sess().codemap().lookup_char_pos(span.lo()); + let loc = bx.sess().source_map().lookup_char_pos(span.lo()); let filename = Symbol::intern(&loc.file.name.to_string()).as_str(); let filename = C_str_slice(bx.cx, filename); let line = C_u32(bx.cx, loc.line as u32); diff --git a/src/librustc_codegen_llvm/mir/mod.rs b/src/librustc_codegen_llvm/mir/mod.rs index 5991bb80c30..d3cb97f65ce 100644 --- a/src/librustc_codegen_llvm/mir/mod.rs +++ b/src/librustc_codegen_llvm/mir/mod.rs @@ -166,7 +166,7 @@ impl FunctionCx<'a, 'll, 'tcx> { let scope_metadata = self.scopes[scope_id].scope_metadata; if pos < self.scopes[scope_id].file_start_pos || pos >= self.scopes[scope_id].file_end_pos { - let cm = self.cx.sess().codemap(); + let cm = self.cx.sess().source_map(); let defining_crate = self.debug_context.get_ref(DUMMY_SP).defining_crate; Some(debuginfo::extend_scope_to_file(self.cx, scope_metadata.unwrap(), diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index e5042a4a020..1c2c0ad73a8 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -695,7 +695,7 @@ pub fn phase_1_parse_input<'a>( if sess.opts.debugging_opts.input_stats { println!( "Lines of code: {}", - sess.codemap().count_lines() + sess.source_map().count_lines() ); println!("Pre-expansion node count: {}", count_nodes(&krate)); } @@ -1462,7 +1462,7 @@ fn write_out_deps(sess: &Session, outputs: &OutputFilenames, out_filenames: &[Pa let result = (|| -> io::Result<()> { // Build a list of files used to compile the output and // write Makefile-compatible dependency rules - let files: Vec = sess.codemap() + let files: Vec = sess.source_map() .files() .iter() .filter(|fmap| fmap.is_real_file()) diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 4d4198d34bc..65cbee821e8 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -915,7 +915,7 @@ pub fn fold_crate(sess: &Session, krate: ast::Crate, ppm: PpMode) 
-> ast::Crate fn get_source(input: &Input, sess: &Session) -> (Vec, FileName) { let src_name = driver::source_name(input); - let src = sess.codemap() + let src = sess.source_map() .get_source_file(&src_name) .unwrap() .src @@ -954,7 +954,7 @@ pub fn print_after_parsing(sess: &Session, s.call_with_pp_support(sess, None, move |annotation| { debug!("pretty printing source code {:?}", s); let sess = annotation.sess(); - pprust::print_crate(sess.codemap(), + pprust::print_crate(sess.source_map(), &sess.parse_sess, krate, src_name, @@ -1011,7 +1011,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, s.call_with_pp_support(sess, Some(hir_map), move |annotation| { debug!("pretty printing source code {:?}", s); let sess = annotation.sess(); - pprust::print_crate(sess.codemap(), + pprust::print_crate(sess.source_map(), &sess.parse_sess, krate, src_name, @@ -1035,7 +1035,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, move |annotation, krate| { debug!("pretty printing source code {:?}", s); let sess = annotation.sess(); - pprust_hir::print_crate(sess.codemap(), + pprust_hir::print_crate(sess.source_map(), &sess.parse_sess, krate, src_name, @@ -1076,7 +1076,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, debug!("pretty printing source code {:?}", s); let sess = annotation.sess(); let hir_map = annotation.hir_map().expect("-Z unpretty missing HIR map"); - let mut pp_state = pprust_hir::State::new_from_input(sess.codemap(), + let mut pp_state = pprust_hir::State::new_from_input(sess.source_map(), &sess.parse_sess, src_name, &mut rdr, diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 284111756a6..175422975e0 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -162,7 +162,7 @@ fn test_env_with_pool( &arenas, resolutions, hir_map, - OnDiskCache::new_empty(sess.codemap()), + OnDiskCache::new_empty(sess.source_map()), "test_crate", tx, &outputs, diff --git 
a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs index 9ee3b216dcf..fbc3bf03599 100644 --- a/src/librustc_incremental/persist/load.rs +++ b/src/librustc_incremental/persist/load.rs @@ -207,11 +207,11 @@ pub fn load_dep_graph(sess: &Session) -> pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess> { if sess.opts.incremental.is_none() || !sess.opts.debugging_opts.incremental_queries { - return OnDiskCache::new_empty(sess.codemap()); + return OnDiskCache::new_empty(sess.source_map()); } match load_data(sess.opts.debugging_opts.incremental_info, &query_cache_path(sess)) { LoadResult::Ok{ data: (bytes, start_pos) } => OnDiskCache::new(sess, bytes, start_pos), - _ => OnDiskCache::new_empty(sess.codemap()) + _ => OnDiskCache::new_empty(sess.source_map()) } } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index cbcaa53b280..c346d3e76cd 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -82,7 +82,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for WhileTrue { if let ast::LitKind::Bool(true) = lit.node { if lit.span.ctxt() == SyntaxContext::empty() { let msg = "denote infinite loops with `loop { ... 
}`"; - let condition_span = cx.tcx.sess.codemap().def_span(e.span); + let condition_span = cx.tcx.sess.source_map().def_span(e.span); let mut err = cx.struct_span_lint(WHILE_TRUE, condition_span, msg); err.span_suggestion_short_with_applicability( condition_span, @@ -195,7 +195,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns { let mut err = cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, &format!("the `{}:` in this pattern is redundant", ident)); - let subspan = cx.tcx.sess.codemap().span_through_char(fieldpat.span, ':'); + let subspan = cx.tcx.sess.source_map().span_through_char(fieldpat.span, ':'); err.span_suggestion_short_with_applicability( subspan, "remove this", @@ -367,7 +367,7 @@ impl MissingDoc { let has_doc = attrs.iter().any(|a| has_doc(a)); if !has_doc { cx.span_lint(MISSING_DOCS, - cx.tcx.sess.codemap().def_span(sp), + cx.tcx.sess.source_map().def_span(sp), &format!("missing documentation for {}", desc)); } } @@ -651,7 +651,7 @@ impl EarlyLintPass for AnonymousParameters { if ident.name == keywords::Invalid.name() { let ty_snip = cx .sess - .codemap() + .source_map() .span_to_snippet(arg.ty.span); let (ty_snip, appl) = if let Ok(snip) = ty_snip { @@ -958,7 +958,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnconditionalRecursion { // no break */ }`) shouldn't be linted unless it actually // recurs. 
if !reached_exit_without_self_call && !self_call_spans.is_empty() { - let sp = cx.tcx.sess.codemap().def_span(sp); + let sp = cx.tcx.sess.source_map().def_span(sp); let mut db = cx.struct_span_lint(UNCONDITIONAL_RECURSION, sp, "function cannot return without recurring"); @@ -1278,7 +1278,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidNoMangleItems { let mut err = cx.struct_span_lint(NO_MANGLE_CONST_ITEMS, it.span, msg); // account for "pub const" (#45562) - let start = cx.tcx.sess.codemap().span_to_snippet(it.span) + let start = cx.tcx.sess.source_map().span_to_snippet(it.span) .map(|snippet| snippet.find("const").unwrap_or(0)) .unwrap_or(0) as u32; // `const` is 5 chars @@ -1440,7 +1440,7 @@ impl UnreachablePub { if span.ctxt().outer().expn_info().is_some() { applicability = Applicability::MaybeIncorrect; } - let def_span = cx.tcx.sess.codemap().def_span(span); + let def_span = cx.tcx.sess.source_map().def_span(span); let mut err = cx.struct_span_lint(UNREACHABLE_PUB, def_span, &format!("unreachable `pub` {}", what)); let replacement = if cx.tcx.features().crate_visibility_modifier { diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index b8e765d9290..46b33255b45 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -300,7 +300,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeLimits { } fn get_bin_hex_repr(cx: &LateContext, lit: &ast::Lit) -> Option { - let src = cx.sess().codemap().span_to_snippet(lit.span).ok()?; + let src = cx.sess().source_map().span_to_snippet(lit.span).ok()?; let firstch = src.chars().next()?; if firstch == '0' { diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index da971e18261..c51bebd2e3b 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -463,7 +463,7 @@ impl cstore::CStore { let (name, def) = data.get_macro(id.index); let source_name = FileName::Macros(name.to_string()); - let source_file = 
sess.parse_sess.codemap().new_source_file(source_name, def.body); + let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body); let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION); let body = source_file_to_stream(&sess.parse_sess, source_file, None); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 29d21337726..ceccdea6587 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -314,7 +314,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { bug!("Cannot decode Span without Session.") }; - let imported_source_files = self.cdata().imported_source_files(&sess.codemap()); + let imported_source_files = self.cdata().imported_source_files(&sess.source_map()); let source_file = { // Optimize for the case that most spans within a translated item // originate from the same source_file. diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index fe9ea00a488..8f3bed6bdbe 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -158,7 +158,7 @@ impl<'a, 'tcx> SpecializedEncoder for EncodeContext<'a, 'tcx> { debug_assert!(span.lo <= span.hi); if !self.source_file_cache.contains(span.lo) { - let codemap = self.tcx.sess.codemap(); + let codemap = self.tcx.sess.source_map(); let source_file_index = codemap.lookup_source_file_idx(span.lo); self.source_file_cache = codemap.files()[source_file_index].clone(); } @@ -337,8 +337,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.lazy(definitions.def_path_table()) } - fn encode_codemap(&mut self) -> LazySeq { - let codemap = self.tcx.sess.codemap(); + fn encode_source_map(&mut self) -> LazySeq { + let codemap = self.tcx.sess.source_map(); let all_source_files = codemap.files(); let (working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir.clone(); @@ -420,7 +420,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { // Encode codemap i 
= self.position(); - let codemap = self.encode_codemap(); + let codemap = self.encode_source_map(); let codemap_bytes = self.position() - i; // Encode DefPathTable @@ -1842,7 +1842,7 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, lazy_state: LazyState::NoNode, type_shorthands: Default::default(), predicate_shorthands: Default::default(), - source_file_cache: tcx.sess.codemap().files()[0].clone(), + source_file_cache: tcx.sess.source_map().files()[0].clone(), interpret_allocs: Default::default(), interpret_allocs_inverse: Default::default(), }; diff --git a/src/librustc_mir/borrow_check/mod.rs b/src/librustc_mir/borrow_check/mod.rs index ce0e76a636d..82d3d31a52e 100644 --- a/src/librustc_mir/borrow_check/mod.rs +++ b/src/librustc_mir/borrow_check/mod.rs @@ -316,7 +316,7 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>( } let span = local_decl.source_info.span; - let mut_span = tcx.sess.codemap().span_until_non_whitespace(span); + let mut_span = tcx.sess.source_map().span_until_non_whitespace(span); let mut err = tcx.struct_span_lint_node( UNUSED_MUT, @@ -1509,7 +1509,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { debug!("check_for_invalidation_at_exit({:?}): INVALID", place); // FIXME: should be talking about the region lifetime instead // of just a span here. 
- let span = self.tcx.sess.codemap().end_point(span); + let span = self.tcx.sess.source_map().end_point(span); self.report_borrowed_value_does_not_live_long_enough( context, borrow, diff --git a/src/librustc_mir/borrow_check/move_errors.rs b/src/librustc_mir/borrow_check/move_errors.rs index 56ca4db098f..5ef647585c3 100644 --- a/src/librustc_mir/borrow_check/move_errors.rs +++ b/src/librustc_mir/borrow_check/move_errors.rs @@ -332,7 +332,7 @@ impl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> { err: &mut DiagnosticBuilder<'a>, span: Span, ) { - let snippet = self.tcx.sess.codemap().span_to_snippet(span).unwrap(); + let snippet = self.tcx.sess.source_map().span_to_snippet(span).unwrap(); match error { GroupedMoveError::MovesFromPlace { mut binds_to, @@ -394,7 +394,7 @@ impl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> { })) ) = bind_to.is_user_variable { let pat_snippet = self - .tcx.sess.codemap() + .tcx.sess.source_map() .span_to_snippet(pat_span) .unwrap(); if pat_snippet.starts_with('&') { diff --git a/src/librustc_mir/borrow_check/mutability_errors.rs b/src/librustc_mir/borrow_check/mutability_errors.rs index f11135fc026..05d6f49d97c 100644 --- a/src/librustc_mir/borrow_check/mutability_errors.rs +++ b/src/librustc_mir/borrow_check/mutability_errors.rs @@ -268,7 +268,7 @@ impl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> { // a local variable, then just suggest the user remove it. 
Place::Local(_) if { - if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) { + if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { snippet.starts_with("&mut ") } else { false @@ -406,7 +406,7 @@ fn suggest_ampmut_self<'cx, 'gcx, 'tcx>( local_decl: &mir::LocalDecl<'tcx>, ) -> (Span, String) { let sp = local_decl.source_info.span; - (sp, match tcx.sess.codemap().span_to_snippet(sp) { + (sp, match tcx.sess.source_map().span_to_snippet(sp) { Ok(snippet) => { let lt_pos = snippet.find('\''); if let Some(lt_pos) = lt_pos { @@ -444,7 +444,7 @@ fn suggest_ampmut<'cx, 'gcx, 'tcx>( let locations = mir.find_assignments(local); if locations.len() > 0 { let assignment_rhs_span = mir.source_info(locations[0]).span; - if let Ok(src) = tcx.sess.codemap().span_to_snippet(assignment_rhs_span) { + if let Ok(src) = tcx.sess.source_map().span_to_snippet(assignment_rhs_span) { if let (true, Some(ws_pos)) = ( src.starts_with("&'"), src.find(|c: char| -> bool { c.is_whitespace() }), @@ -469,7 +469,7 @@ fn suggest_ampmut<'cx, 'gcx, 'tcx>( None => local_decl.source_info.span, }; - if let Ok(src) = tcx.sess.codemap().span_to_snippet(highlight_span) { + if let Ok(src) = tcx.sess.source_map().span_to_snippet(highlight_span) { if let (true, Some(ws_pos)) = ( src.starts_with("&'"), src.find(|c: char| -> bool { c.is_whitespace() }), diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs index fbecab2dd1f..9724d9deead 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs @@ -189,7 +189,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { name: &InternedString, diag: &mut DiagnosticBuilder<'_>, ) { - let cm = tcx.sess.codemap(); + let cm = tcx.sess.source_map(); let scope = error_region.free_region_binding_scope(tcx); let node = 
tcx.hir.as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID); @@ -383,7 +383,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { let region_name = self.synthesize_region_name(counter); // Just grab the first character, the `&`. - let codemap = tcx.sess.codemap(); + let codemap = tcx.sess.source_map(); let ampersand_span = codemap.start_point(hir_ty.span); diag.span_label( @@ -593,7 +593,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { = tcx.hir.expect_expr(mir_node_id).node { ( - tcx.sess.codemap().end_point(span), + tcx.sess.source_map().end_point(span), if gen_move.is_some() { " of generator" } else { " of closure" } ) } else { diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 2dc5138c6f0..8e99a45c87f 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -732,7 +732,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let region_scope_span = region_scope.span(self.hir.tcx(), &self.hir.region_scope_tree); // Attribute scope exit drops to scope's closing brace. 
- let scope_end = self.hir.tcx().sess.codemap().end_point(region_scope_span); + let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span); scope.drops.push(DropData { span: scope_end, diff --git a/src/librustc_mir/transform/check_unsafety.rs b/src/librustc_mir/transform/check_unsafety.rs index aba39966110..80e484d5957 100644 --- a/src/librustc_mir/transform/check_unsafety.rs +++ b/src/librustc_mir/transform/check_unsafety.rs @@ -424,12 +424,12 @@ fn is_enclosed(tcx: TyCtxt, } fn report_unused_unsafe(tcx: TyCtxt, used_unsafe: &FxHashSet, id: ast::NodeId) { - let span = tcx.sess.codemap().def_span(tcx.hir.span(id)); + let span = tcx.sess.source_map().def_span(tcx.hir.span(id)); let msg = "unnecessary `unsafe` block"; let mut db = tcx.struct_span_lint_node(UNUSED_UNSAFE, id, span, msg); db.span_label(span, msg); if let Some((kind, id)) = is_enclosed(tcx, used_unsafe, id) { - db.span_label(tcx.sess.codemap().def_span(tcx.hir.span(id)), + db.span_label(tcx.sess.source_map().def_span(tcx.hir.span(id)), format!("because it's nested under this `unsafe` {}", kind)); } db.emit(); diff --git a/src/librustc_mir/util/mod.rs b/src/librustc_mir/util/mod.rs index fe6fefe89fd..1e624081bcf 100644 --- a/src/librustc_mir/util/mod.rs +++ b/src/librustc_mir/util/mod.rs @@ -33,7 +33,7 @@ pub fn suggest_ref_mut<'cx, 'gcx, 'tcx>( tcx: ty::TyCtxt<'cx, 'gcx, 'tcx>, binding_span: Span, ) -> Option<(String)> { - let hi_src = tcx.sess.codemap().span_to_snippet(binding_span).unwrap(); + let hi_src = tcx.sess.source_map().span_to_snippet(binding_span).unwrap(); if hi_src.starts_with("ref") && hi_src["ref".len()..].starts_with(Pattern_White_Space) { diff --git a/src/librustc_mir/util/pretty.rs b/src/librustc_mir/util/pretty.rs index 01ad85cf668..df975998edc 100644 --- a/src/librustc_mir/util/pretty.rs +++ b/src/librustc_mir/util/pretty.rs @@ -441,7 +441,7 @@ fn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String { format!( "scope {} at {}", scope.index(), - 
tcx.sess.codemap().span_to_string(span) + tcx.sess.source_map().span_to_string(span) ) } diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs index cafacf99c3d..de9481579e2 100644 --- a/src/librustc_resolve/check_unused.rs +++ b/src/librustc_resolve/check_unused.rs @@ -175,7 +175,7 @@ pub fn check_crate(resolver: &mut Resolver, krate: &ast::Crate) { let ms = MultiSpan::from_spans(spans.clone()); let mut span_snippets = spans.iter() .filter_map(|s| { - match visitor.session.codemap().span_to_snippet(*s) { + match visitor.session.source_map().span_to_snippet(*s) { Ok(s) => Some(format!("`{}`", s)), _ => None, } diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 7b6011cc6f1..6dd0c32bb3e 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -195,7 +195,7 @@ fn resolve_struct_error<'sess, 'a>(resolver: &'sess Resolver, "can't use type parameters from outer function"); err.span_label(span, "use of type variable from outer function"); - let cm = resolver.session.codemap(); + let cm = resolver.session.source_map(); match outer_def { Def::SelfTy(_, maybe_impl_defid) => { if let Some(impl_span) = maybe_impl_defid.map_or(None, @@ -3085,7 +3085,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> { // parser issue where a struct literal is being used on an expression // where a brace being opened means a block is being started. Look // ahead for the next text to see if `span` is followed by a `{`. 
- let cm = this.session.codemap(); + let cm = this.session.source_map(); let mut sp = span; loop { sp = cm.next_point(sp); @@ -3212,7 +3212,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> { err: &mut DiagnosticBuilder, base_span: Span) { debug!("type_ascription_suggetion {:?}", base_span); - let cm = self.session.codemap(); + let cm = self.session.source_map(); debug!("self.current_type_ascription {:?}", self.current_type_ascription); if let Some(sp) = self.current_type_ascription.last() { let mut sp = *sp; @@ -4527,7 +4527,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> { false => "defined", }; - let (name, span) = (ident.name, self.session.codemap().def_span(new_binding.span)); + let (name, span) = (ident.name, self.session.source_map().def_span(new_binding.span)); if let Some(s) = self.name_already_seen.get(&name) { if s == &span { @@ -4566,7 +4566,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> { err.span_label(span, format!("`{}` re{} here", name, new_participle)); if !old_binding.span.is_dummy() { - err.span_label(self.session.codemap().def_span(old_binding.span), + err.span_label(self.session.source_map().def_span(old_binding.span), format!("previous {} of the {} `{}` here", old_noun, old_kind, name)); } @@ -4578,7 +4578,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> { old_binding }; - let cm = self.session.codemap(); + let cm = self.session.source_map(); let rename_msg = "You can use `as` to change the binding name of the import"; if let (Ok(snippet), false) = (cm.span_to_snippet(binding.span), diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index e16039bc239..58390c22886 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -1205,8 +1205,8 @@ impl<'a, 'b:'a, 'c: 'b> ImportResolver<'a, 'b, 'c> { let enum_span = enum_resolution.borrow() .binding.expect("binding should exist") .span; - let enum_def_span = 
self.session.codemap().def_span(enum_span); - let enum_def_snippet = self.session.codemap() + let enum_def_span = self.session.source_map().def_span(enum_span); + let enum_def_snippet = self.session.source_map() .span_to_snippet(enum_def_span).expect("snippet should exist"); // potentially need to strip extant `crate`/`pub(path)` for suggestion let after_vis_index = enum_def_snippet.find("enum") diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index 05b7ed96251..c6cec2ecca6 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -1368,7 +1368,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> Visitor<'l> for DumpVisitor<'l, 'tc let qualname = format!("::{}", self.tcx.node_path_str(id)); - let cm = self.tcx.sess.codemap(); + let cm = self.tcx.sess.source_map(); let filename = cm.span_to_filename(span); let data_id = ::id_from_node_id(id, &self.save_ctxt); let children = m.items diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 1eb8bacb75a..b2d78850cba 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -95,7 +95,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { fn span_from_span(&self, span: Span) -> SpanData { use rls_span::{Column, Row}; - let cm = self.tcx.sess.codemap(); + let cm = self.tcx.sess.source_map(); let start = cm.lookup_char_pos(span.lo()); let end = cm.lookup_char_pos(span.hi()); @@ -122,7 +122,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { continue; } }; - let lo_loc = self.span_utils.sess.codemap().lookup_char_pos(span.lo()); + let lo_loc = self.span_utils.sess.source_map().lookup_char_pos(span.lo()); result.push(ExternalCrateData { // FIXME: change file_name field to PathBuf in rls-data // https://github.com/nrc/rls-data/issues/7 @@ -268,7 +268,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { ast::ItemKind::Mod(ref m) => { let qualname = format!("::{}", 
self.tcx.node_path_str(item.id)); - let cm = self.tcx.sess.codemap(); + let cm = self.tcx.sess.source_map(); let filename = cm.span_to_filename(m.inner); let sub_span = self.span_utils diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index f764042926a..e1a3829cd75 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -47,7 +47,7 @@ impl<'a> SpanUtils<'a> { } pub fn snippet(&self, span: Span) -> String { - match self.sess.codemap().span_to_snippet(span) { + match self.sess.source_map().span_to_snippet(span) { Ok(s) => s, Err(_) => String::new(), } @@ -151,7 +151,7 @@ impl<'a> SpanUtils<'a> { } #[cfg(debug_assertions)] { if angle_count != 0 || bracket_count != 0 { - let loc = self.sess.codemap().lookup_char_pos(span.lo()); + let loc = self.sess.source_map().lookup_char_pos(span.lo()); span_bug!( span, "Mis-counted brackets when breaking path? Parsing '{}' \ @@ -278,7 +278,7 @@ impl<'a> SpanUtils<'a> { //If the span comes from a fake source_file, filter it. if !self.sess - .codemap() + .source_map() .lookup_char_pos(parent.lo()) .file .is_real_file() diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 93c9764202f..c14a07b7cf5 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -350,7 +350,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if let Some(mut err) = err { if is_arg { if let PatKind::Binding(..) 
= inner.node { - if let Ok(snippet) = tcx.sess.codemap() + if let Ok(snippet) = tcx.sess.source_map() .span_to_snippet(pat.span) { err.help(&format!("did you mean `{}: &{}`?", diff --git a/src/librustc_typeck/check/cast.rs b/src/librustc_typeck/check/cast.rs index 07e19c84a95..29d2fe2c7b6 100644 --- a/src/librustc_typeck/check/cast.rs +++ b/src/librustc_typeck/check/cast.rs @@ -221,7 +221,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { format!("cannot cast `{}` as `{}`", fcx.ty_to_string(self.expr_ty), cast_ty)); - if let Ok(snippet) = fcx.sess().codemap().span_to_snippet(self.expr.span) { + if let Ok(snippet) = fcx.sess().source_map().span_to_snippet(self.expr.span) { err.span_help(self.expr.span, &format!("did you mean `*{}`?", snippet)); } @@ -325,7 +325,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { hir::MutImmutable => "", }; if self.cast_ty.is_trait() { - match fcx.tcx.sess.codemap().span_to_snippet(self.cast_span) { + match fcx.tcx.sess.source_map().span_to_snippet(self.cast_span) { Ok(s) => { err.span_suggestion(self.cast_span, "try casting to a reference instead", @@ -344,7 +344,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { } } ty::TyAdt(def, ..) 
if def.is_box() => { - match fcx.tcx.sess.codemap().span_to_snippet(self.cast_span) { + match fcx.tcx.sess.source_map().span_to_snippet(self.cast_span) { Ok(s) => { err.span_suggestion(self.cast_span, "try casting to a `Box` instead", diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs index 546553ac245..044621dabb4 100644 --- a/src/librustc_typeck/check/compare_method.rs +++ b/src/librustc_typeck/check/compare_method.rs @@ -40,7 +40,7 @@ pub fn compare_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, debug!("compare_impl_method(impl_trait_ref={:?})", impl_trait_ref); - let impl_m_span = tcx.sess.codemap().def_span(impl_m_span); + let impl_m_span = tcx.sess.source_map().def_span(impl_m_span); if let Err(ErrorReported) = compare_self_type(tcx, impl_m, @@ -319,7 +319,7 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_m.ident); if let TypeError::Mutability = terr { if let Some(trait_err_span) = trait_err_span { - if let Ok(trait_err_str) = tcx.sess.codemap().span_to_snippet(trait_err_span) { + if let Ok(trait_err_str) = tcx.sess.source_map().span_to_snippet(trait_err_span) { diag.span_suggestion( impl_err_span, "consider change the type to match the mutability in trait", @@ -386,7 +386,7 @@ fn check_region_bounds_on_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // are zero. Since I don't quite know how to phrase things at // the moment, give a kind of vague error message. 
if trait_params != impl_params { - let def_span = tcx.sess.codemap().def_span(span); + let def_span = tcx.sess.source_map().def_span(span); let span = tcx.hir.get_generics_span(impl_m.def_id).unwrap_or(def_span); let mut err = struct_span_err!( tcx.sess, @@ -397,7 +397,7 @@ fn check_region_bounds_on_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ); err.span_label(span, "lifetimes do not match method in trait"); if let Some(sp) = tcx.hir.span_if_local(trait_m.def_id) { - let def_sp = tcx.sess.codemap().def_span(sp); + let def_sp = tcx.sess.source_map().def_span(sp); let sp = tcx.hir.get_generics_span(trait_m.def_id).unwrap_or(def_sp); err.span_label(sp, "lifetimes in impl do not match this method in trait"); } @@ -770,7 +770,7 @@ fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // as another generic argument let new_name = tcx .sess - .codemap() + .source_map() .span_to_snippet(trait_span) .ok()?; let trait_m = tcx.hir.as_local_node_id(trait_m.def_id)?; @@ -783,7 +783,7 @@ fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // and the opening paren of the argument list let new_generics_span = tcx .sess - .codemap() + .source_map() .generate_fn_name_span(impl_span)? 
.shrink_to_hi(); // in case there are generics, just replace them @@ -794,7 +794,7 @@ fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // replace with the generics from the trait let new_generics = tcx .sess - .codemap() + .source_map() .span_to_snippet(trait_m.generics.span) .ok()?; @@ -865,7 +865,7 @@ fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let bounds = bounds.first()?.span().to(bounds.last()?.span()); let bounds = tcx .sess - .codemap() + .source_map() .span_to_snippet(bounds) .ok()?; diff --git a/src/librustc_typeck/check/demand.rs b/src/librustc_typeck/check/demand.rs index 8bd53976e26..00cc36ecd42 100644 --- a/src/librustc_typeck/check/demand.rs +++ b/src/librustc_typeck/check/demand.rs @@ -251,7 +251,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { checked_ty: Ty<'tcx>, expected: Ty<'tcx>) -> Option<(Span, &'static str, String)> { - let cm = self.sess().codemap(); + let cm = self.sess().source_map(); // Use the callsite's span if this is a macro call. #41858 let sp = cm.call_span_if_macro(expr.span); if !cm.span_to_filename(sp).is_real() { @@ -405,7 +405,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let needs_paren = expr.precedence().order() < (PREC_POSTFIX as i8); - if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(expr.span) { + if let Ok(src) = self.tcx.sess.source_map().span_to_snippet(expr.span) { let msg = format!("you can cast an `{}` to `{}`", checked_ty, expected_ty); let cast_suggestion = format!("{}{}{} as {}", if needs_paren { "(" } else { "" }, diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index 0fbdecffb8b..ed6ec1bf8e6 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -132,7 +132,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { }; if let Some(note_span) = note_span { // We have a span pointing to the method. Show note with snippet. 
- err.span_note(self.tcx.sess.codemap().def_span(note_span), ¬e_str); + err.span_note(self.tcx.sess.source_map().def_span(note_span), ¬e_str); } else { err.note(¬e_str); } @@ -141,7 +141,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let item = self .associated_item(trait_did, item_name, Namespace::Value) .unwrap(); - let item_span = self.tcx.sess.codemap() + let item_span = self.tcx.sess.source_map() .def_span(self.tcx.def_span(item.def_id)); if sources.len() > 1 { span_note!(err, @@ -246,7 +246,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { }; match expr.node { hir::ExprKind::Lit(ref lit) => { // numeric literal - let snippet = tcx.sess.codemap().span_to_snippet(lit.span) + let snippet = tcx.sess.source_map().span_to_snippet(lit.span) .unwrap_or("".to_string()); err.span_suggestion(lit.span, @@ -261,9 +261,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if let &hir::QPath::Resolved(_, ref path) = &qpath { if let hir::def::Def::Local(node_id) = path.def { let span = tcx.hir.span(node_id); - let snippet = tcx.sess.codemap().span_to_snippet(span) + let snippet = tcx.sess.source_map().span_to_snippet(span) .unwrap(); - let filename = tcx.sess.codemap().span_to_filename(span); + let filename = tcx.sess.source_map().span_to_filename(span); let parent_node = self.tcx.hir.get( self.tcx.hir.get_parent_node(node_id), @@ -320,7 +320,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if let Some(def) = actual.ty_adt_def() { if let Some(full_sp) = tcx.hir.span_if_local(def.did) { - let def_sp = tcx.sess.codemap().def_span(full_sp); + let def_sp = tcx.sess.source_map().def_span(full_sp); err.span_label(def_sp, format!("{} `{}` not found {}", item_kind, item_name, @@ -341,7 +341,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let variant = &def.non_enum_variant(); if let Some(index) = self.tcx.find_field_index(item_name, variant) { let field = &variant.fields[index]; - let snippet = tcx.sess.codemap().span_to_snippet(expr.span); + let snippet = 
tcx.sess.source_map().span_to_snippet(expr.span); let expr_string = match snippet { Ok(expr_string) => expr_string, _ => "s".into(), // Default to a generic placeholder for the @@ -387,7 +387,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } if let Some(expr) = rcvr_expr { - if let Ok(expr_string) = tcx.sess.codemap().span_to_snippet(expr.span) { + if let Ok(expr_string) = tcx.sess.source_map().span_to_snippet(expr.span) { report_function!(expr.span, expr_string); } else if let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) = expr.node diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 5fa1cd168a2..b57967a7aab 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -1447,7 +1447,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_id: DefId, impl_trait_ref: ty::TraitRef<'tcx>, impl_item_refs: &[hir::ImplItemRef]) { - let impl_span = tcx.sess.codemap().def_span(impl_span); + let impl_span = tcx.sess.source_map().def_span(impl_span); // If the trait reference itself is erroneous (so the compilation is going // to fail), skip checking the items here -- the `impl_item` table in `tcx` @@ -2668,11 +2668,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if arg_count == 1 {" was"} else {"s were"}), DiagnosticId::Error(error_code.to_owned())); - if let Some(def_s) = def_span.map(|sp| tcx.sess.codemap().def_span(sp)) { + if let Some(def_s) = def_span.map(|sp| tcx.sess.source_map().def_span(sp)) { err.span_label(def_s, "defined here"); } if sugg_unit { - let sugg_span = tcx.sess.codemap().end_point(expr_sp); + let sugg_span = tcx.sess.source_map().end_point(expr_sp); // remove closing `)` from the span let sugg_span = sugg_span.shrink_to_lo(); err.span_suggestion( @@ -2937,8 +2937,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (ExpectIfCondition, &hir::ExprKind::Assign(ref lhs, ref rhs)) => { let msg = "try comparing for equality"; if let (Ok(left), Ok(right)) = 
( - self.tcx.sess.codemap().span_to_snippet(lhs.span), - self.tcx.sess.codemap().span_to_snippet(rhs.span)) + self.tcx.sess.source_map().span_to_snippet(lhs.span), + self.tcx.sess.source_map().span_to_snippet(rhs.span)) { err.span_suggestion(expr.span, msg, format!("{} == {}", left, right)); } else { @@ -4232,7 +4232,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if let hir::ExprKind::Lit(ref lit) = idx.node { if let ast::LitKind::Int(i, ast::LitIntType::Unsuffixed) = lit.node { - let snip = tcx.sess.codemap().span_to_snippet(base.span); + let snip = tcx.sess.source_map().span_to_snippet(base.span); if let Ok(snip) = snip { err.span_suggestion(expr.span, "to access tuple elements, use", @@ -4629,7 +4629,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { err.span_suggestion(sp, msg, suggestion); } else if !self.check_for_cast(err, expr, found, expected) { let methods = self.get_conversion_methods(expr.span, expected, found); - if let Ok(expr_text) = self.sess().codemap().span_to_snippet(expr.span) { + if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) { let suggestions = iter::repeat(expr_text).zip(methods.iter()) .filter_map(|(receiver, method)| { let method_call = format!(".{}()", method.ident); @@ -4673,7 +4673,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { hir::ExprKind::Loop(..) | hir::ExprKind::Match(..) | hir::ExprKind::Block(..) 
=> { - let sp = self.tcx.sess.codemap().next_point(cause_span); + let sp = self.tcx.sess.source_map().next_point(cause_span); err.span_suggestion(sp, "try adding a semicolon", ";".to_string()); diff --git a/src/librustc_typeck/check/op.rs b/src/librustc_typeck/check/op.rs index 3adcd638a62..66a71cdd3f8 100644 --- a/src/librustc_typeck/check/op.rs +++ b/src/librustc_typeck/check/op.rs @@ -253,7 +253,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { Err(()) => { // error types are considered "builtin" if !lhs_ty.references_error() { - let codemap = self.tcx.sess.codemap(); + let codemap = self.tcx.sess.source_map(); match is_assign { IsAssign::Yes => { let mut err = struct_span_err!(self.tcx.sess, expr.span, E0368, @@ -420,7 +420,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { err: &mut errors::DiagnosticBuilder, is_assign: bool, ) -> bool { - let codemap = self.tcx.sess.codemap(); + let codemap = self.tcx.sess.source_map(); let msg = "`to_owned()` can be used to create an owned `String` \ from a string reference. 
String concatenation \ appends the string on the right to the string \ diff --git a/src/librustc_typeck/check_unused.rs b/src/librustc_typeck/check_unused.rs index 1a57dfd745e..08624331433 100644 --- a/src/librustc_typeck/check_unused.rs +++ b/src/librustc_typeck/check_unused.rs @@ -71,7 +71,7 @@ impl<'a, 'tcx> CheckVisitor<'a, 'tcx> { return; } - let msg = if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) { + let msg = if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { format!("unused import: `{}`", snippet) } else { "unused import".to_string() diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index e92349040e8..1b04c5da59b 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -52,7 +52,7 @@ fn check_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeId) { fn enforce_trait_manually_implementable(tcx: TyCtxt, impl_def_id: DefId, trait_def_id: DefId) { let did = Some(trait_def_id); let li = tcx.lang_items(); - let span = tcx.sess.codemap().def_span(tcx.span_of_impl(impl_def_id).unwrap()); + let span = tcx.sess.source_map().def_span(tcx.span_of_impl(impl_def_id).unwrap()); // Disallow *all* explicit impls of `Sized` and `Unsize` for now. 
if did == li.sized_trait() { @@ -169,7 +169,7 @@ fn check_impl_overlap<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeI traits::supertrait_def_ids(tcx, data.principal().unwrap().def_id()); if supertrait_def_ids.any(|d| d == trait_def_id) { - let sp = tcx.sess.codemap().def_span(tcx.span_of_impl(impl_def_id).unwrap()); + let sp = tcx.sess.source_map().def_span(tcx.span_of_impl(impl_def_id).unwrap()); struct_span_err!(tcx.sess, sp, E0371, diff --git a/src/librustc_typeck/coherence/orphan.rs b/src/librustc_typeck/coherence/orphan.rs index 9be509b3588..4a3747017df 100644 --- a/src/librustc_typeck/coherence/orphan.rs +++ b/src/librustc_typeck/coherence/orphan.rs @@ -40,7 +40,7 @@ impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for OrphanChecker<'cx, 'tcx> { self.tcx.hir.node_to_string(item.id)); let trait_ref = self.tcx.impl_trait_ref(def_id).unwrap(); let trait_def_id = trait_ref.def_id; - let cm = self.tcx.sess.codemap(); + let cm = self.tcx.sess.source_map(); let sp = cm.def_span(item.span); match traits::orphan_check(self.tcx, def_id) { Ok(()) => {} diff --git a/src/librustc_typeck/structured_errors.rs b/src/librustc_typeck/structured_errors.rs index afcdc7575a3..ffd9da8c8b9 100644 --- a/src/librustc_typeck/structured_errors.rs +++ b/src/librustc_typeck/structured_errors.rs @@ -72,7 +72,7 @@ impl<'tcx> StructuredDiagnostic<'tcx> for VariadicError<'tcx> { self.code(), ) }; - if let Ok(snippet) = self.sess.codemap().span_to_snippet(self.span) { + if let Ok(snippet) = self.sess.source_map().span_to_snippet(self.span) { err.span_suggestion(self.span, &format!("cast the value to `{}`", self.cast_ty), format!("{} as {}", snippet, self.cast_ty)); diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 784ef3f256b..9b77ff82f90 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -239,7 +239,7 @@ impl Clean for CrateNum { fn clean(&self, cx: &DocContext) -> ExternalCrate { let root = DefId { krate: *self, index: CRATE_DEF_INDEX 
}; let krate_span = cx.tcx.def_span(root); - let krate_src = cx.sess().codemap().span_to_filename(krate_span); + let krate_src = cx.sess().source_map().span_to_filename(krate_span); // Collect all inner modules which are tagged as implementations of // primitives. @@ -596,7 +596,7 @@ impl Clean for doctree::Module { // determine if we should display the inner contents or // the outer `mod` item for the source code. let whence = { - let cm = cx.sess().codemap(); + let cm = cx.sess().source_map(); let outer = cm.lookup_char_pos(self.where_outer.lo()); let inner = cm.lookup_char_pos(self.where_inner.lo()); if outer.file.start_pos == inner.file.start_pos { @@ -3015,7 +3015,7 @@ impl Clean for syntax_pos::Span { return Span::empty(); } - let cm = cx.sess().codemap(); + let cm = cx.sess().source_map(); let filename = cm.span_to_filename(*self); let lo = cm.lookup_char_pos(self.lo()); let hi = cm.lookup_char_pos(self.hi()); @@ -3620,7 +3620,7 @@ pub trait ToSource { impl ToSource for syntax_pos::Span { fn to_src(&self, cx: &DocContext) -> String { debug!("converting span {:?} to snippet", self.clean(cx)); - let sn = match cx.sess().codemap().span_to_snippet(*self) { + let sn = match cx.sess().source_map().span_to_snippet(*self) { Ok(x) => x.to_string(), Err(_) => "".to_string() }; diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 4e78275f26b..4634054cf16 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -33,7 +33,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, tooltip: Option<(&str, &str)>) -> String { debug!("highlighting: ================\n{}\n==============", src); let sess = parse::ParseSess::new(FilePathMapping::empty()); - let fm = sess.codemap().new_source_file(FileName::Custom("stdin".to_string()), src.to_string()); + let fm = sess.source_map().new_source_file(FileName::Custom("stdin".to_string()), src.to_string()); let mut out = Vec::new(); if let Some((tooltip, 
class)) = tooltip { @@ -43,7 +43,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, } write_header(class, &mut out).unwrap(); - let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm, None), sess.codemap()); + let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm, None), sess.source_map()); if classifier.write_source(&mut out).is_err() { return format!("

{}
", src); } diff --git a/src/libsyntax/diagnostics/metadata.rs b/src/libsyntax/diagnostics/metadata.rs index 61b0579a3e7..1438299d375 100644 --- a/src/libsyntax/diagnostics/metadata.rs +++ b/src/libsyntax/diagnostics/metadata.rs @@ -45,7 +45,7 @@ pub struct ErrorLocation { impl ErrorLocation { /// Create an error location from a span. pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation { - let loc = ecx.codemap().lookup_char_pos_adj(sp.lo()); + let loc = ecx.source_map().lookup_char_pos_adj(sp.lo()); ErrorLocation { filename: loc.filename, line: loc.line diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index c17874cf6f8..154fe11dd35 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -836,7 +836,7 @@ impl<'a> ExtCtxt<'a> { pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> { parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect()) } - pub fn codemap(&self) -> &'a SourceMap { self.parse_sess.codemap() } + pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() } pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config } pub fn call_site(&self) -> Span { diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 1378a669e68..b1bed9602f3 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -764,7 +764,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn expr_fail(&self, span: Span, msg: Symbol) -> P { - let loc = self.codemap().lookup_char_pos(span.lo()); + let loc = self.source_map().lookup_char_pos(span.lo()); let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name.to_string())); let expr_line = self.expr_u32(span, loc.line as u32); let expr_col = self.expr_u32(span, loc.col.to_usize() as u32 + 1); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 54b56874d6a..b12b2c49caa 100644 --- 
a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -267,7 +267,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { let mut module = ModuleData { mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)], - directory: match self.cx.codemap().span_to_unmapped_path(krate.span) { + directory: match self.cx.source_map().span_to_unmapped_path(krate.span) { FileName::Real(path) => path, other => PathBuf::from(other.to_string()), }, @@ -1355,7 +1355,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { module.directory.push(&*item.ident.as_str()); } } else { - let path = self.cx.parse_sess.codemap().span_to_unmapped_path(inner); + let path = self.cx.parse_sess.source_map().span_to_unmapped_path(inner); let mut path = match path { FileName::Real(path) => path, other => PathBuf::from(other.to_string()), @@ -1563,7 +1563,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { // Add this input file to the code map to make it available as // dependency information - self.cx.codemap().new_source_file(filename.into(), src); + self.cx.source_map().new_source_file(filename.into(), src); let include_info = vec![ dummy_spanned(ast::NestedMetaItemKind::MetaItem( diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index fdf9c33b6f4..19a204cc989 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -36,7 +36,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) base::check_zero_tts(cx, sp, tts, "line!"); let topmost = cx.expansion_cause().unwrap_or(sp); - let loc = cx.codemap().lookup_char_pos(topmost.lo()); + let loc = cx.source_map().lookup_char_pos(topmost.lo()); base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32)) } @@ -47,7 +47,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) base::check_zero_tts(cx, sp, tts, "column!"); let topmost = 
cx.expansion_cause().unwrap_or(sp); - let loc = cx.codemap().lookup_char_pos(topmost.lo()); + let loc = cx.source_map().lookup_char_pos(topmost.lo()); base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32 + 1)) } @@ -70,7 +70,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) base::check_zero_tts(cx, sp, tts, "file!"); let topmost = cx.expansion_cause().unwrap_or(sp); - let loc = cx.codemap().lookup_char_pos(topmost.lo()); + let loc = cx.source_map().lookup_char_pos(topmost.lo()); base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string()))) } @@ -154,7 +154,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT // Add this input file to the code map to make it available as // dependency information - cx.codemap().new_source_file(file.into(), src); + cx.source_map().new_source_file(file.into(), src); base::MacEager::expr(cx.expr_str(sp, interned_src)) } @@ -184,7 +184,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke Ok(..) => { // Add this input file to the code map to make it available as // dependency information, but don't enter it's contents - cx.codemap().new_source_file(file.into(), "".to_string()); + cx.source_map().new_source_file(file.into(), "".to_string()); base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes)))) } @@ -199,7 +199,7 @@ fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: String) -> PathBuf // after macro expansion (that is, they are unhygienic). 
if !arg.is_absolute() { let callsite = sp.source_callsite(); - let mut path = match cx.codemap().span_to_unmapped_path(callsite) { + let mut path = match cx.source_map().span_to_unmapped_path(callsite) { FileName::Real(path) => path, other => panic!("cannot resolve relative path in non-file source `{}`", other), }; diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index ac542948314..74f573fceba 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -1556,7 +1556,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { if !name.as_str().is_ascii() { gate_feature_post!(&self, non_ascii_idents, - self.context.parse_sess.codemap().def_span(sp), + self.context.parse_sess.source_map().def_span(sp), "non-ascii idents are not fully supported."); } } diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 67bc3d5e435..dde0466f43c 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -371,7 +371,7 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut { let mut src = String::new(); srdr.read_to_string(&mut src).unwrap(); - let cm = SourceMap::new(sess.codemap().path_mapping().clone()); + let cm = SourceMap::new(sess.source_map().path_mapping().clone()); let source_file = cm.new_source_file(path, src); let mut rdr = lexer::StringReader::new_raw(sess, source_file, None); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 7a8c46ad343..6cfa2b4abe8 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -234,8 +234,8 @@ impl<'a> StringReader<'a> { } pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self { - let begin = sess.codemap().lookup_byte_offset(span.lo()); - let end = sess.codemap().lookup_byte_offset(span.hi()); + let begin = sess.source_map().lookup_byte_offset(span.lo()); + let end = sess.source_map().lookup_byte_offset(span.hi()); 
// Make the range zero-length if the span is invalid. if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index d1f6191cf75..3f66dae4e1b 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -86,7 +86,7 @@ impl ParseSess { } } - pub fn codemap(&self) -> &SourceMap { + pub fn source_map(&self) -> &SourceMap { &self.code_map } @@ -171,13 +171,13 @@ crate fn parse_stmt_from_source_str(name: FileName, source: String, sess: &Parse pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess, override_span: Option) -> TokenStream { - source_file_to_stream(sess, sess.codemap().new_source_file(name, source), override_span) + source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span) } // Create a new parser from a source string pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser { - let mut parser = source_file_to_parser(sess, sess.codemap().new_source_file(name, source)); + let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source)); parser.recurse_into_file_modules = false; parser } @@ -227,7 +227,7 @@ pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec) -> Parser { /// add the path to the session's codemap and return the new source_file. 
fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option) -> Lrc { - match sess.codemap().load_file(path) { + match sess.source_map().load_file(path) { Ok(source_file) => source_file, Err(e) => { let msg = format!("couldn't read {:?}: {}", path.display(), e); @@ -969,7 +969,7 @@ mod tests { let span = tts.iter().rev().next().unwrap().span(); - match sess.codemap().span_to_snippet(span) { + match sess.source_map().span_to_snippet(span) { Ok(s) => assert_eq!(&s[..], "{ body }"), Err(_) => panic!("could not get snippet"), } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 4c6034fdfce..c9053f0fec9 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -577,7 +577,7 @@ impl<'a> Parser<'a> { if let Some(directory) = directory { parser.directory = directory; } else if !parser.span.is_dummy() { - if let FileName::Real(mut path) = sess.codemap().span_to_unmapped_path(parser.span) { + if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) { path.pop(); parser.directory.path = Cow::from(path); } @@ -652,10 +652,10 @@ impl<'a> Parser<'a> { // EOF, don't want to point at the following char, but rather the last token self.prev_span } else { - self.sess.codemap().next_point(self.prev_span) + self.sess.source_map().next_point(self.prev_span) }; let label_exp = format!("expected `{}`", token_str); - let cm = self.sess.codemap(); + let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line == b.line => { // When the spans are in the same line, it means that the only content @@ -720,14 +720,14 @@ impl<'a> Parser<'a> { expect.clone() }; (format!("expected one of {}, found `{}`", expect, actual), - (self.sess.codemap().next_point(self.prev_span), + (self.sess.source_map().next_point(self.prev_span), format!("expected one of {} here", short_expect))) } else if expected.is_empty() { (format!("unexpected token: 
`{}`", actual), (self.prev_span, "unexpected token after this".to_string())) } else { (format!("expected {}, found `{}`", expect, actual), - (self.sess.codemap().next_point(self.prev_span), + (self.sess.source_map().next_point(self.prev_span), format!("expected {} here", expect))) }; let mut err = self.fatal(&msg_exp); @@ -738,7 +738,7 @@ impl<'a> Parser<'a> { label_sp }; - let cm = self.sess.codemap(); + let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line == b.line => { // When the spans are in the same line, it means that the only content between @@ -2902,7 +2902,7 @@ impl<'a> Parser<'a> { self.this_token_descr())); // span the `not` plus trailing whitespace to avoid // trailing whitespace after the `!` in our suggestion - let to_replace = self.sess.codemap() + let to_replace = self.sess.source_map() .span_until_non_whitespace(lo.to(self.span)); err.span_suggestion_short_with_applicability( to_replace, @@ -3000,7 +3000,7 @@ impl<'a> Parser<'a> { Err(mut err) => { err.span_label(self.span, "expecting a type here because of type ascription"); - let cm = self.sess.codemap(); + let cm = self.sess.source_map(); let cur_pos = cm.lookup_char_pos(self.span.lo()); let op_pos = cm.lookup_char_pos(cur_op_span.hi()); if cur_pos.line != op_pos.line { @@ -3161,7 +3161,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID })); - let expr_str = self.sess.codemap().span_to_snippet(expr.span) + let expr_str = self.sess.source_map().span_to_snippet(expr.span) .unwrap_or(pprust::expr_to_string(&expr)); err.span_suggestion_with_applicability( expr.span, @@ -3277,7 +3277,7 @@ impl<'a> Parser<'a> { // return. This won't catch blocks with an explicit `return`, but that would be caught by // the dead code lint. 
if self.eat_keyword(keywords::Else) || !cond.returns() { - let sp = self.sess.codemap().next_point(lo); + let sp = self.sess.source_map().next_point(lo); let mut err = self.diagnostic() .struct_span_err(sp, "missing condition for `if` statemement"); err.span_label(sp, "expected if condition here"); @@ -3527,7 +3527,7 @@ impl<'a> Parser<'a> { && self.token != token::CloseDelim(token::Brace); if require_comma { - let cm = self.sess.codemap(); + let cm = self.sess.source_map(); self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]) .map_err(|mut err| { match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) { @@ -3837,7 +3837,7 @@ impl<'a> Parser<'a> { err.span_label(self.span, "expected `}`"); let mut comma_sp = None; if self.token == token::Comma { // Issue #49257 - etc_sp = etc_sp.to(self.sess.codemap().span_until_non_whitespace(self.span)); + etc_sp = etc_sp.to(self.sess.source_map().span_until_non_whitespace(self.span)); err.span_label(etc_sp, "`..` must be at the end and cannot have a trailing comma"); comma_sp = Some(self.span); @@ -3955,7 +3955,7 @@ impl<'a> Parser<'a> { let seq_span = pat.span.to(self.prev_span); let mut err = self.struct_span_err(comma_span, "unexpected `,` in pattern"); - if let Ok(seq_snippet) = self.sess.codemap().span_to_snippet(seq_span) { + if let Ok(seq_snippet) = self.sess.source_map().span_to_snippet(seq_span) { err.span_suggestion_with_applicability( seq_span, "try adding parentheses", @@ -4220,7 +4220,7 @@ impl<'a> Parser<'a> { let parser_snapshot_after_type = self.clone(); mem::replace(self, parser_snapshot_before_type); - let snippet = self.sess.codemap().span_to_snippet(pat.span).unwrap(); + let snippet = self.sess.source_map().span_to_snippet(pat.span).unwrap(); err.span_label(pat.span, format!("while parsing the type for `{}`", snippet)); (Some((parser_snapshot_after_type, colon_sp, err)), None) } @@ -6039,7 +6039,7 @@ impl<'a> Parser<'a> { err.emit(); } else { if seen_comma == false { - let sp 
= self.sess.codemap().next_point(previous_span); + let sp = self.sess.source_map().next_point(previous_span); err.span_suggestion_with_applicability( sp, "missing comma here", @@ -6051,7 +6051,7 @@ impl<'a> Parser<'a> { } } _ => { - let sp = self.sess.codemap().next_point(self.prev_span); + let sp = self.sess.source_map().next_point(self.prev_span); let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found `{}`", self.this_token_to_string())); if self.token.is_ident() { @@ -6418,7 +6418,7 @@ impl<'a> Parser<'a> { DirectoryOwnership::UnownedViaMod(_) => None, }; let paths = Parser::default_submod_path( - id, relative, &self.directory.path, self.sess.codemap()); + id, relative, &self.directory.path, self.sess.source_map()); match self.directory.ownership { DirectoryOwnership::Owned { .. } => { @@ -6445,7 +6445,7 @@ impl<'a> Parser<'a> { let mut err = self.diagnostic().struct_span_err(id_sp, "cannot declare a new module at this location"); if !id_sp.is_dummy() { - let src_path = self.sess.codemap().span_to_filename(id_sp); + let src_path = self.sess.source_map().span_to_filename(id_sp); if let FileName::Real(src_path) = src_path { if let Some(stem) = src_path.file_stem() { let mut dest_path = src_path.clone(); @@ -7207,7 +7207,7 @@ impl<'a> Parser<'a> { sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable ); } else { - if let Ok(snippet) = self.sess.codemap().span_to_snippet(ident_sp) { + if let Ok(snippet) = self.sess.source_map().span_to_snippet(ident_sp) { err.span_suggestion_with_applicability( full_sp, "if you meant to call a macro, try", diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index b43d22d8855..374154e6333 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -21,7 +21,7 @@ use std::path::PathBuf; /// Map a string to tts, using a made-up filename: pub fn string_to_stream(source_str: String) -> TokenStream { let ps = 
ParseSess::new(FilePathMapping::empty()); - source_file_to_stream(&ps, ps.codemap() + source_file_to_stream(&ps, ps.source_map() .new_source_file(PathBuf::from("bogofile").into(), source_str), None) } diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 61f52194aad..d97c0b14f79 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -801,7 +801,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, } }; - let is_literal = match ecx.codemap().span_to_snippet(fmt_sp) { + let is_literal = match ecx.source_map().span_to_snippet(fmt_sp) { Ok(ref s) if s.starts_with("\"") || s.starts_with("r#") => true, _ => false, }; -- cgit 1.4.1-3-g733a5 From 062bfbf39bef9360e1553f293f4f1015c5680dec Mon Sep 17 00:00:00 2001 From: Donato Sciarra Date: Sat, 18 Aug 2018 12:14:14 +0200 Subject: mv codemap source_map --- src/librustc/hir/map/collector.rs | 4 +-- src/librustc/ich/caching_codemap_view.rs | 14 ++++---- src/librustc/ich/hcx.rs | 12 +++---- src/librustc/session/mod.rs | 24 +++++++------- src/librustc/ty/query/on_disk_cache.rs | 22 ++++++------- src/librustc_driver/lib.rs | 6 ++-- src/librustc_metadata/creader.rs | 2 +- src/librustc_metadata/cstore.rs | 8 ++--- src/librustc_metadata/decoder.rs | 38 +++++++++++----------- src/librustc_metadata/encoder.rs | 20 ++++++------ src/librustc_metadata/schema.rs | 2 +- .../region_infer/error_reporting/region_name.rs | 4 +-- src/librustc_resolve/lib.rs | 2 +- src/librustc_typeck/check/op.rs | 14 ++++---- src/librustdoc/core.rs | 16 ++++----- src/librustdoc/html/highlight.rs | 10 +++--- src/librustdoc/test.rs | 28 ++++++++-------- src/libsyntax/parse/lexer/mod.rs | 4 +-- src/libsyntax/parse/mod.rs | 4 +-- src/libsyntax/parse/parser.rs | 6 ++-- src/libsyntax/source_map.rs | 4 +-- src/libsyntax/std_inject.rs | 2 +- src/libsyntax/test.rs | 2 +- src/libsyntax_pos/lib.rs | 4 +-- src/test/incremental/span_hash_stable/main.rs | 2 +- .../proc-macro/auxiliary/issue-40001-plugin.rs | 2 +- 
src/test/ui/cfg-empty-codemap.rs | 2 +- src/test/ui/mod/mod_file_correct_spans.rs | 2 +- 28 files changed, 130 insertions(+), 130 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index bea9d80639c..a14745a1381 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -122,7 +122,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { pub(super) fn finalize_and_compute_crate_hash(mut self, crate_disambiguator: CrateDisambiguator, cstore: &dyn CrateStore, - codemap: &SourceMap, + source_map: &SourceMap, commandline_args_hash: u64) -> (Vec>, Svh) { self @@ -155,7 +155,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { // If we included the full mapping in the SVH, we could only have // reproducible builds by compiling from the same directory. So we just // hash the result of the mapping instead of the mapping itself. - let mut source_file_names: Vec<_> = codemap + let mut source_file_names: Vec<_> = source_map .files() .iter() .filter(|source_file| CrateNum::from_u32(source_file.crate_of_origin) == LOCAL_CRATE) diff --git a/src/librustc/ich/caching_codemap_view.rs b/src/librustc/ich/caching_codemap_view.rs index c219bbcb621..6c958823d3f 100644 --- a/src/librustc/ich/caching_codemap_view.rs +++ b/src/librustc/ich/caching_codemap_view.rs @@ -24,14 +24,14 @@ struct CacheEntry { #[derive(Clone)] pub struct CachingCodemapView<'cm> { - codemap: &'cm SourceMap, + source_map: &'cm SourceMap, line_cache: [CacheEntry; 3], time_stamp: usize, } impl<'cm> CachingCodemapView<'cm> { - pub fn new(codemap: &'cm SourceMap) -> CachingCodemapView<'cm> { - let files = codemap.files(); + pub fn new(source_map: &'cm SourceMap) -> CachingCodemapView<'cm> { + let files = source_map.files(); let first_file = files[0].clone(); let entry = CacheEntry { time_stamp: 0, @@ -43,7 +43,7 @@ impl<'cm> CachingCodemapView<'cm> { }; CachingCodemapView { - codemap, + source_map, line_cache: [entry.clone(), 
entry.clone(), entry.clone()], time_stamp: 0, } @@ -78,9 +78,9 @@ impl<'cm> CachingCodemapView<'cm> { // If the entry doesn't point to the correct file, fix it up if pos < cache_entry.file.start_pos || pos >= cache_entry.file.end_pos { let file_valid; - if self.codemap.files().len() > 0 { - let file_index = self.codemap.lookup_source_file_idx(pos); - let file = self.codemap.files()[file_index].clone(); + if self.source_map.files().len() > 0 { + let file_index = self.source_map.lookup_source_file_idx(pos); + let file = self.source_map.files()[file_index].clone(); if pos >= file.start_pos && pos < file.end_pos { cache_entry.file = file; diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index 799887df05d..e496dbb17f6 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -58,8 +58,8 @@ pub struct StableHashingContext<'a> { // Very often, we are hashing something that does not need the // CachingCodemapView, so we initialize it lazily. - raw_codemap: &'a SourceMap, - caching_codemap: Option>, + raw_source_map: &'a SourceMap, + caching_source_map: Option>, pub(super) alloc_id_recursion_tracker: FxHashSet, } @@ -100,8 +100,8 @@ impl<'a> StableHashingContext<'a> { body_resolver: BodyResolver(krate), definitions, cstore, - caching_codemap: None, - raw_codemap: sess.source_map(), + caching_source_map: None, + raw_source_map: sess.source_map(), hash_spans: hash_spans_initial, hash_bodies: true, node_id_hashing_mode: NodeIdHashingMode::HashDefPath, @@ -170,12 +170,12 @@ impl<'a> StableHashingContext<'a> { #[inline] pub fn source_map(&mut self) -> &mut CachingCodemapView<'a> { - match self.caching_codemap { + match self.caching_source_map { Some(ref mut cm) => { cm } ref mut none => { - *none = Some(CachingCodemapView::new(self.raw_codemap)); + *none = Some(CachingCodemapView::new(self.raw_source_map)); none.as_mut().unwrap() } } diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index b92f55f4284..78f7de0092d 100644 --- 
a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -980,7 +980,7 @@ pub fn build_session( ) -> Session { let file_path_mapping = sopts.file_path_mapping(); - build_session_with_codemap( + build_session_with_source_map( sopts, local_crate_source_file, registry, @@ -989,11 +989,11 @@ pub fn build_session( ) } -pub fn build_session_with_codemap( +pub fn build_session_with_source_map( sopts: config::Options, local_crate_source_file: Option, registry: errors::registry::Registry, - codemap: Lrc, + source_map: Lrc, emitter_dest: Option>, ) -> Session { // FIXME: This is not general enough to make the warning lint completely override @@ -1020,19 +1020,19 @@ pub fn build_session_with_codemap( (config::ErrorOutputType::HumanReadable(color_config), None) => Box::new( EmitterWriter::stderr( color_config, - Some(codemap.clone()), + Some(source_map.clone()), false, sopts.debugging_opts.teach, ).ui_testing(sopts.debugging_opts.ui_testing), ), (config::ErrorOutputType::HumanReadable(_), Some(dst)) => Box::new( - EmitterWriter::new(dst, Some(codemap.clone()), false, false) + EmitterWriter::new(dst, Some(source_map.clone()), false, false) .ui_testing(sopts.debugging_opts.ui_testing), ), (config::ErrorOutputType::Json(pretty), None) => Box::new( JsonEmitter::stderr( Some(registry), - codemap.clone(), + source_map.clone(), pretty, ).ui_testing(sopts.debugging_opts.ui_testing), ), @@ -1040,15 +1040,15 @@ pub fn build_session_with_codemap( JsonEmitter::new( dst, Some(registry), - codemap.clone(), + source_map.clone(), pretty, ).ui_testing(sopts.debugging_opts.ui_testing), ), (config::ErrorOutputType::Short(color_config), None) => Box::new( - EmitterWriter::stderr(color_config, Some(codemap.clone()), true, false), + EmitterWriter::stderr(color_config, Some(source_map.clone()), true, false), ), (config::ErrorOutputType::Short(_), Some(dst)) => { - Box::new(EmitterWriter::new(dst, Some(codemap.clone()), true, false)) + Box::new(EmitterWriter::new(dst, 
Some(source_map.clone()), true, false)) } }; @@ -1063,14 +1063,14 @@ pub fn build_session_with_codemap( }, ); - build_session_(sopts, local_crate_source_file, diagnostic_handler, codemap) + build_session_(sopts, local_crate_source_file, diagnostic_handler, source_map) } pub fn build_session_( sopts: config::Options, local_crate_source_file: Option, span_diagnostic: errors::Handler, - codemap: Lrc, + source_map: Lrc, ) -> Session { let host_triple = TargetTriple::from_triple(config::host_triple()); let host = match Target::search(&host_triple) { @@ -1083,7 +1083,7 @@ pub fn build_session_( }; let target_cfg = config::build_target_config(&sopts, &span_diagnostic); - let p_s = parse::ParseSess::with_span_handler(span_diagnostic, codemap); + let p_s = parse::ParseSess::with_span_handler(span_diagnostic, source_map); let default_sysroot = match sopts.maybe_sysroot { Some(_) => None, None => Some(filesearch::get_or_default_sysroot()), diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index f7876ee035c..7ccd8574e83 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -62,7 +62,7 @@ pub struct OnDiskCache<'sess> { prev_cnums: Vec<(u32, String, CrateDisambiguator)>, cnum_map: Once>>, - codemap: &'sess SourceMap, + source_map: &'sess SourceMap, file_index_to_stable_id: FxHashMap, // These two fields caches that are populated lazily during decoding. 
@@ -140,7 +140,7 @@ impl<'sess> OnDiskCache<'sess> { file_index_to_file: Lock::new(FxHashMap()), prev_cnums: footer.prev_cnums, cnum_map: Once::new(), - codemap: sess.source_map(), + source_map: sess.source_map(), current_diagnostics: Lock::new(FxHashMap()), query_result_index: footer.query_result_index.into_iter().collect(), prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(), @@ -149,14 +149,14 @@ impl<'sess> OnDiskCache<'sess> { } } - pub fn new_empty(codemap: &'sess SourceMap) -> OnDiskCache<'sess> { + pub fn new_empty(source_map: &'sess SourceMap) -> OnDiskCache<'sess> { OnDiskCache { serialized_data: Vec::new(), file_index_to_stable_id: FxHashMap(), file_index_to_file: Lock::new(FxHashMap()), prev_cnums: vec![], cnum_map: Once::new(), - codemap, + source_map, current_diagnostics: Lock::new(FxHashMap()), query_result_index: FxHashMap(), prev_diagnostics_index: FxHashMap(), @@ -196,7 +196,7 @@ impl<'sess> OnDiskCache<'sess> { expn_info_shorthands: FxHashMap(), interpret_allocs: FxHashMap(), interpret_allocs_inverse: Vec::new(), - codemap: CachingCodemapView::new(tcx.sess.source_map()), + source_map: CachingCodemapView::new(tcx.sess.source_map()), file_to_file_index, }; @@ -413,7 +413,7 @@ impl<'sess> OnDiskCache<'sess> { let mut decoder = CacheDecoder { tcx, opaque: opaque::Decoder::new(&self.serialized_data[..], pos.to_usize()), - codemap: self.codemap, + source_map: self.source_map, cnum_map: self.cnum_map.get(), file_index_to_file: &self.file_index_to_file, file_index_to_stable_id: &self.file_index_to_stable_id, @@ -475,7 +475,7 @@ impl<'sess> OnDiskCache<'sess> { struct CacheDecoder<'a, 'tcx: 'a, 'x> { tcx: TyCtxt<'a, 'tcx, 'tcx>, opaque: opaque::Decoder<'x>, - codemap: &'x SourceMap, + source_map: &'x SourceMap, cnum_map: &'x IndexVec>, synthetic_expansion_infos: &'x Lock>, file_index_to_file: &'x Lock>>, @@ -488,13 +488,13 @@ impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { let CacheDecoder { ref file_index_to_file, ref 
file_index_to_stable_id, - ref codemap, + ref source_map, .. } = *self; file_index_to_file.borrow_mut().entry(index).or_insert_with(|| { let stable_id = file_index_to_stable_id[&index]; - codemap.source_file_by_stable_id(stable_id) + source_map.source_file_by_stable_id(stable_id) .expect("Failed to lookup SourceFile in new context.") }).clone() } @@ -770,7 +770,7 @@ struct CacheEncoder<'enc, 'a, 'tcx, E> expn_info_shorthands: FxHashMap, interpret_allocs: FxHashMap, interpret_allocs_inverse: Vec, - codemap: CachingCodemapView<'tcx>, + source_map: CachingCodemapView<'tcx>, file_to_file_index: FxHashMap<*const SourceFile, SourceFileIndex>, } @@ -836,7 +836,7 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx return TAG_INVALID_SPAN.encode(self); } - let (file_lo, line_lo, col_lo) = match self.codemap + let (file_lo, line_lo, col_lo) = match self.source_map .byte_pos_to_line_and_col(span_data.lo) { Some(pos) => pos, None => { diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 07d9ab4e497..f88c619bf77 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -522,9 +522,9 @@ fn run_compiler_with_pool<'a>( }; let loader = file_loader.unwrap_or(box RealFileLoader); - let codemap = Lrc::new(SourceMap::with_file_loader(loader, sopts.file_path_mapping())); - let mut sess = session::build_session_with_codemap( - sopts, input_file_path.clone(), descriptions, codemap, emitter_dest, + let source_map = Lrc::new(SourceMap::with_file_loader(loader, sopts.file_path_mapping())); + let mut sess = session::build_session_with_source_map( + sopts, input_file_path.clone(), descriptions, source_map, emitter_dest, ); if let Some(err) = input_err { diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index a10bb3e25df..c242f8d476a 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -245,7 +245,7 @@ impl<'a> CrateLoader<'a> { cnum_map, cnum, dependencies: 
Lock::new(dependencies), - codemap_import_info: RwLock::new(vec![]), + source_map_import_info: RwLock::new(vec![]), alloc_decoding_state: AllocDecodingState::new(interpret_alloc_index), dep_kind: Lock::new(dep_kind), source: cstore::CrateSource { diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 5c020b70e30..aad632f8918 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -44,11 +44,11 @@ pub struct MetadataBlob(pub MetadataRef); /// Holds information about a syntax_pos::SourceFile imported from another crate. /// See `imported_source_files()` for more information. pub struct ImportedSourceFile { - /// This SourceFile's byte-offset within the codemap of its original crate + /// This SourceFile's byte-offset within the source_map of its original crate pub original_start_pos: syntax_pos::BytePos, - /// The end of this SourceFile within the codemap of its original crate + /// The end of this SourceFile within the source_map of its original crate pub original_end_pos: syntax_pos::BytePos, - /// The imported SourceFile's representation within the local codemap + /// The imported SourceFile's representation within the local source_map pub translated_source_file: Lrc, } @@ -64,7 +64,7 @@ pub struct CrateMetadata { pub cnum_map: CrateNumMap, pub cnum: CrateNum, pub dependencies: Lock>, - pub codemap_import_info: RwLock>, + pub source_map_import_info: RwLock>, /// Used for decoding interpret::AllocIds in a cached & thread-safe manner. 
pub alloc_decoding_state: AllocDecodingState, diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index ceccdea6587..76473ec7781 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -1094,49 +1094,49 @@ impl<'a, 'tcx> CrateMetadata { self.def_path_table.def_path_hash(index) } - /// Imports the codemap from an external crate into the codemap of the crate + /// Imports the source_map from an external crate into the source_map of the crate /// currently being compiled (the "local crate"). /// /// The import algorithm works analogous to how AST items are inlined from an /// external crate's metadata: - /// For every SourceFile in the external codemap an 'inline' copy is created in the - /// local codemap. The correspondence relation between external and local + /// For every SourceFile in the external source_map an 'inline' copy is created in the + /// local source_map. The correspondence relation between external and local /// SourceFiles is recorded in the `ImportedSourceFile` objects returned from this /// function. When an item from an external crate is later inlined into this /// crate, this correspondence information is used to translate the span /// information of the inlined item so that it refers the correct positions in - /// the local codemap (see `>`). + /// the local source_map (see `>`). /// /// The import algorithm in the function below will reuse SourceFiles already - /// existing in the local codemap. For example, even if the SourceFile of some + /// existing in the local source_map. For example, even if the SourceFile of some /// source file of libstd gets imported many times, there will only ever be - /// one SourceFile object for the corresponding file in the local codemap. + /// one SourceFile object for the corresponding file in the local source_map. 
/// /// Note that imported SourceFiles do not actually contain the source code of the /// file they represent, just information about length, line breaks, and /// multibyte characters. This information is enough to generate valid debuginfo /// for items inlined from other crates. pub fn imported_source_files(&'a self, - local_codemap: &source_map::SourceMap) + local_source_map: &source_map::SourceMap) -> ReadGuard<'a, Vec> { { - let source_files = self.codemap_import_info.borrow(); + let source_files = self.source_map_import_info.borrow(); if !source_files.is_empty() { return source_files; } } - // Lock the codemap_import_info to ensure this only happens once - let mut codemap_import_info = self.codemap_import_info.borrow_mut(); + // Lock the source_map_import_info to ensure this only happens once + let mut source_map_import_info = self.source_map_import_info.borrow_mut(); - if !codemap_import_info.is_empty() { - drop(codemap_import_info); - return self.codemap_import_info.borrow(); + if !source_map_import_info.is_empty() { + drop(source_map_import_info); + return self.source_map_import_info.borrow(); } - let external_codemap = self.root.codemap.decode(self); + let external_source_map = self.root.source_map.decode(self); - let imported_source_files = external_codemap.map(|source_file_to_import| { + let imported_source_files = external_source_map.map(|source_file_to_import| { // We can't reuse an existing SourceFile, so allocate a new one // containing the information we need. 
let syntax_pos::SourceFile { name, @@ -1167,7 +1167,7 @@ impl<'a, 'tcx> CrateMetadata { *swc = *swc - start_pos; } - let local_version = local_codemap.new_imported_source_file(name, + let local_version = local_source_map.new_imported_source_file(name, name_was_remapped, self.cnum.as_u32(), src_hash, @@ -1189,10 +1189,10 @@ impl<'a, 'tcx> CrateMetadata { } }).collect(); - *codemap_import_info = imported_source_files; - drop(codemap_import_info); + *source_map_import_info = imported_source_files; + drop(source_map_import_info); // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref. - self.codemap_import_info.borrow() + self.source_map_import_info.borrow() } } diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 8f3bed6bdbe..4b5c9d68fd7 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -158,9 +158,9 @@ impl<'a, 'tcx> SpecializedEncoder for EncodeContext<'a, 'tcx> { debug_assert!(span.lo <= span.hi); if !self.source_file_cache.contains(span.lo) { - let codemap = self.tcx.sess.source_map(); - let source_file_index = codemap.lookup_source_file_idx(span.lo); - self.source_file_cache = codemap.files()[source_file_index].clone(); + let source_map = self.tcx.sess.source_map(); + let source_file_index = source_map.lookup_source_file_idx(span.lo); + self.source_file_cache = source_map.files()[source_file_index].clone(); } if !self.source_file_cache.contains(span.hi) { @@ -338,8 +338,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } fn encode_source_map(&mut self) -> LazySeq { - let codemap = self.tcx.sess.source_map(); - let all_source_files = codemap.files(); + let source_map = self.tcx.sess.source_map(); + let all_source_files = source_map.files(); let (working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir.clone(); @@ -418,10 +418,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { IsolatedEncoder::encode_foreign_modules, ()); - // Encode codemap + // Encode source_map i = 
self.position(); - let codemap = self.encode_source_map(); - let codemap_bytes = self.position() - i; + let source_map = self.encode_source_map(); + let source_map_bytes = self.position() - i; // Encode DefPathTable i = self.position(); @@ -523,7 +523,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { lang_items_missing, native_libraries, foreign_modules, - codemap, + source_map, def_path_table, impls, exported_symbols, @@ -546,7 +546,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { println!(" lib feature bytes: {}", lib_feature_bytes); println!(" lang item bytes: {}", lang_item_bytes); println!(" native bytes: {}", native_lib_bytes); - println!(" codemap bytes: {}", codemap_bytes); + println!(" source_map bytes: {}", source_map_bytes); println!(" impl bytes: {}", impl_bytes); println!(" exp. symbols bytes: {}", exported_symbols_bytes); println!(" def-path table bytes: {}", def_path_table_bytes); diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs index 520273487a9..8e454ddc0ad 100644 --- a/src/librustc_metadata/schema.rs +++ b/src/librustc_metadata/schema.rs @@ -204,7 +204,7 @@ pub struct CrateRoot { pub lang_items_missing: LazySeq, pub native_libraries: LazySeq, pub foreign_modules: LazySeq, - pub codemap: LazySeq, + pub source_map: LazySeq, pub def_path_table: Lazy, pub impls: LazySeq, pub exported_symbols: EncodedExportedSymbols, diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs index 9724d9deead..532c36f427b 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs @@ -383,8 +383,8 @@ impl<'tcx> RegionInferenceContext<'tcx> { let region_name = self.synthesize_region_name(counter); // Just grab the first character, the `&`. 
- let codemap = tcx.sess.source_map(); - let ampersand_span = codemap.start_point(hir_ty.span); + let source_map = tcx.sess.source_map(); + let ampersand_span = source_map.start_point(hir_ty.span); diag.span_label( ampersand_span, diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 6dd0c32bb3e..282589c4e68 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -414,7 +414,7 @@ fn resolve_struct_error<'sess, 'a>(resolver: &'sess Resolver, /// /// Attention: The method used is very fragile since it essentially duplicates the work of the /// parser. If you need to use this function or something similar, please consider updating the -/// codemap functions and this function to something more robust. +/// source_map functions and this function to something more robust. fn reduce_impl_span_to_impl_keyword(cm: &SourceMap, impl_span: Span) -> Span { let impl_span = cm.span_until_char(impl_span, '<'); let impl_span = cm.span_until_whitespace(impl_span); diff --git a/src/librustc_typeck/check/op.rs b/src/librustc_typeck/check/op.rs index 66a71cdd3f8..de211d2209c 100644 --- a/src/librustc_typeck/check/op.rs +++ b/src/librustc_typeck/check/op.rs @@ -253,7 +253,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { Err(()) => { // error types are considered "builtin" if !lhs_ty.references_error() { - let codemap = self.tcx.sess.source_map(); + let source_map = self.tcx.sess.source_map(); match is_assign { IsAssign::Yes => { let mut err = struct_span_err!(self.tcx.sess, expr.span, E0368, @@ -275,7 +275,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { Op::Binary(op, is_assign)) .is_ok() } { - if let Ok(lstring) = codemap.span_to_snippet(lhs_expr.span) { + if let Ok(lstring) = source_map.span_to_snippet(lhs_expr.span) { while let TyRef(_, rty_inner, _) = rty.sty { rty = rty_inner; } @@ -343,7 +343,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { Op::Binary(op, is_assign)) .is_ok() } { - if let Ok(lstring) = 
codemap.span_to_snippet(lhs_expr.span) { + if let Ok(lstring) = source_map.span_to_snippet(lhs_expr.span) { while let TyRef(_, rty_inner, _) = rty.sty { rty = rty_inner; } @@ -420,7 +420,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { err: &mut errors::DiagnosticBuilder, is_assign: bool, ) -> bool { - let codemap = self.tcx.sess.source_map(); + let source_map = self.tcx.sess.source_map(); let msg = "`to_owned()` can be used to create an owned `String` \ from a string reference. String concatenation \ appends the string on the right to the string \ @@ -434,7 +434,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if !is_assign { err.span_label(expr.span, "`+` can't be used to concatenate two `&str` strings"); - match codemap.span_to_snippet(lhs_expr.span) { + match source_map.span_to_snippet(lhs_expr.span) { Ok(lstring) => err.span_suggestion(lhs_expr.span, msg, format!("{}.to_owned()", lstring)), @@ -448,8 +448,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { err.span_label(expr.span, "`+` can't be used to concatenate a `&str` with a `String`"); match ( - codemap.span_to_snippet(lhs_expr.span), - codemap.span_to_snippet(rhs_expr.span), + source_map.span_to_snippet(lhs_expr.span), + source_map.span_to_snippet(rhs_expr.span), is_assign, ) { (Ok(l), Ok(r), false) => { diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index bd37a87b1e1..a312913a69c 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -260,7 +260,7 @@ impl DocAccessLevels for AccessLevels { /// /// If the given `error_format` is `ErrorOutputType::Json` and no `SourceMap` is given, a new one /// will be created for the handler. 
-pub fn new_handler(error_format: ErrorOutputType, codemap: Option>) +pub fn new_handler(error_format: ErrorOutputType, source_map: Option>) -> errors::Handler { // rustdoc doesn't override (or allow to override) anything from this that is relevant here, so @@ -270,18 +270,18 @@ pub fn new_handler(error_format: ErrorOutputType, codemap: Option Box::new( EmitterWriter::stderr( color_config, - codemap.map(|cm| cm as _), + source_map.map(|cm| cm as _), false, sessopts.debugging_opts.teach, ).ui_testing(sessopts.debugging_opts.ui_testing) ), ErrorOutputType::Json(pretty) => { - let codemap = codemap.unwrap_or_else( + let source_map = source_map.unwrap_or_else( || Lrc::new(source_map::SourceMap::new(sessopts.file_path_mapping()))); Box::new( JsonEmitter::stderr( None, - codemap, + source_map, pretty, ).ui_testing(sessopts.debugging_opts.ui_testing) ) @@ -289,7 +289,7 @@ pub fn new_handler(error_format: ErrorOutputType, codemap: Option Box::new( EmitterWriter::stderr( color_config, - codemap.map(|cm| cm as _), + source_map.map(|cm| cm as _), true, false) ), @@ -387,11 +387,11 @@ pub fn run_core(search_paths: SearchPaths, ..Options::default() }; driver::spawn_thread_pool(sessopts, move |sessopts| { - let codemap = Lrc::new(source_map::SourceMap::new(sessopts.file_path_mapping())); - let diagnostic_handler = new_handler(error_format, Some(codemap.clone())); + let source_map = Lrc::new(source_map::SourceMap::new(sessopts.file_path_mapping())); + let diagnostic_handler = new_handler(error_format, Some(source_map.clone())); let mut sess = session::build_session_( - sessopts, cpath, diagnostic_handler, codemap, + sessopts, cpath, diagnostic_handler, source_map, ); lint::builtin::HardwiredLints.get_lints() diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 4634054cf16..2a9ad6c7f33 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -60,7 +60,7 @@ pub fn render_with_highlighting(src: &str, class: 
Option<&str>, /// each span of text in sequence. struct Classifier<'a> { lexer: lexer::StringReader<'a>, - codemap: &'a SourceMap, + source_map: &'a SourceMap, // State of the classifier. in_attribute: bool, @@ -145,10 +145,10 @@ impl Writer for U { } impl<'a> Classifier<'a> { - fn new(lexer: lexer::StringReader<'a>, codemap: &'a SourceMap) -> Classifier<'a> { + fn new(lexer: lexer::StringReader<'a>, source_map: &'a SourceMap) -> Classifier<'a> { Classifier { lexer, - codemap, + source_map, in_attribute: false, in_macro: false, in_macro_nonterminal: false, @@ -338,9 +338,9 @@ impl<'a> Classifier<'a> { out.string(Escape(&self.snip(tas.sp)), klass) } - // Helper function to get a snippet from the codemap. + // Helper function to get a snippet from the source_map. fn snip(&self, sp: Span) -> String { - self.codemap.span_to_snippet(sp).unwrap() + self.source_map.span_to_snippet(sp).unwrap() } } diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 9854b919f5c..3b07a2ccdde 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -86,14 +86,14 @@ pub fn run(input_path: &Path, ..config::Options::default() }; driver::spawn_thread_pool(sessopts, |sessopts| { - let codemap = Lrc::new(SourceMap::new(sessopts.file_path_mapping())); + let source_map = Lrc::new(SourceMap::new(sessopts.file_path_mapping())); let handler = errors::Handler::with_tty_emitter(ColorConfig::Auto, true, false, - Some(codemap.clone())); + Some(source_map.clone())); let mut sess = session::build_session_( - sessopts, Some(input_path.to_owned()), handler, codemap.clone(), + sessopts, Some(input_path.to_owned()), handler, source_map.clone(), ); let codegen_backend = rustc_driver::get_codegen_backend(&sess); let cstore = CStore::new(codegen_backend.metadata_loader()); @@ -133,7 +133,7 @@ pub fn run(input_path: &Path, false, opts, maybe_sysroot, - Some(codemap), + Some(source_map), None, linker, edition @@ -262,11 +262,11 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, 
line: usize, let _bomb = Bomb(data.clone(), old.unwrap_or(box io::stdout())); let (libdir, outdir, compile_result) = driver::spawn_thread_pool(sessopts, |sessopts| { - let codemap = Lrc::new(SourceMap::new_doctest( + let source_map = Lrc::new(SourceMap::new_doctest( sessopts.file_path_mapping(), filename.clone(), line as isize - line_offset as isize )); let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()), - Some(codemap.clone()), + Some(source_map.clone()), false, false); @@ -274,7 +274,7 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize, let diagnostic_handler = errors::Handler::with_emitter(true, false, box emitter); let mut sess = session::build_session_( - sessopts, None, diagnostic_handler, codemap, + sessopts, None, diagnostic_handler, source_map, ); let codegen_backend = rustc_driver::get_codegen_backend(&sess); let cstore = CStore::new(codegen_backend.metadata_loader()); @@ -500,7 +500,7 @@ pub struct Collector { opts: TestOptions, maybe_sysroot: Option, position: Span, - codemap: Option>, + source_map: Option>, filename: Option, linker: Option, edition: Edition, @@ -509,7 +509,7 @@ pub struct Collector { impl Collector { pub fn new(cratename: String, cfgs: Vec, libs: SearchPaths, cg: CodegenOptions, externs: Externs, use_headers: bool, opts: TestOptions, - maybe_sysroot: Option, codemap: Option>, + maybe_sysroot: Option, source_map: Option>, filename: Option, linker: Option, edition: Edition) -> Collector { Collector { tests: Vec::new(), @@ -523,7 +523,7 @@ impl Collector { opts, maybe_sysroot, position: DUMMY_SP, - codemap, + source_map, filename, linker, edition, @@ -589,9 +589,9 @@ impl Collector { } pub fn get_line(&self) -> usize { - if let Some(ref codemap) = self.codemap { + if let Some(ref source_map) = self.source_map { let line = self.position.lo().to_usize(); - let line = codemap.lookup_char_pos(BytePos(line as u32)).line; + let line = source_map.lookup_char_pos(BytePos(line as u32)).line; if line 
> 0 { line - 1 } else { line } } else { 0 @@ -603,8 +603,8 @@ impl Collector { } fn get_filename(&self) -> FileName { - if let Some(ref codemap) = self.codemap { - let filename = codemap.span_to_filename(self.position); + if let Some(ref source_map) = self.source_map { + let filename = source_map.span_to_filename(self.position); if let FileName::Real(ref filename) = filename { if let Ok(cur_dir) = env::current_dir() { if let Ok(path) = filename.strip_prefix(&cur_dir) { diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 6cfa2b4abe8..434548ffd9d 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -43,9 +43,9 @@ impl Default for TokenAndSpan { pub struct StringReader<'a> { pub sess: &'a ParseSess, - /// The absolute offset within the codemap of the next character to read + /// The absolute offset within the source_map of the next character to read pub next_pos: BytePos, - /// The absolute offset within the codemap of the current character + /// The absolute offset within the source_map of the current character pub pos: BytePos, /// The current character (which has been read from self.pos) pub ch: Option, diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 3f66dae4e1b..e3a2c83e4c2 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -189,7 +189,7 @@ pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> } /// Given a session, a crate config, a path, and a span, add -/// the file at the given path to the codemap, and return a parser. +/// the file at the given path to the source_map, and return a parser. /// On an error, use the given span as the source of the problem. 
crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, @@ -224,7 +224,7 @@ pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec) -> Parser { // base abstractions /// Given a session and a path and an optional span (for error reporting), -/// add the path to the session's codemap and return the new source_file. +/// add the path to the session's source_map and return the new source_file. fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option) -> Lrc { match sess.source_map().load_file(path) { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index c9053f0fec9..b1e2e69863d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -6322,7 +6322,7 @@ impl<'a> Parser<'a> { id: ast::Ident, relative: Option, dir_path: &Path, - codemap: &SourceMap) -> ModulePath + source_map: &SourceMap) -> ModulePath { // If we're in a foo.rs file instead of a mod.rs file, // we need to look for submodules in @@ -6342,8 +6342,8 @@ impl<'a> Parser<'a> { relative_prefix, mod_name, path::MAIN_SEPARATOR); let default_path = dir_path.join(&default_path_str); let secondary_path = dir_path.join(&secondary_path_str); - let default_exists = codemap.file_exists(&default_path); - let secondary_exists = codemap.file_exists(&secondary_path); + let default_exists = source_map.file_exists(&default_path); + let secondary_exists = source_map.file_exists(&secondary_path); let result = match (default_exists, secondary_exists) { (true, false) => Ok(ModulePathSuccess { diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs index 34cd026f7a0..457a44b9cbb 100644 --- a/src/libsyntax/source_map.rs +++ b/src/libsyntax/source_map.rs @@ -206,7 +206,7 @@ impl SourceMap { match self.files.borrow().file_maps.last() { None => 0, // Add one so there is some space between files. This lets us distinguish - // positions in the codemap, even in the presence of zero-length files. 
+ // positions in the source_map, even in the presence of zero-length files. Some(last) => last.end_pos.to_usize() + 1, } } @@ -895,7 +895,7 @@ impl SourceMap { /// /// Attention: The method used is very fragile since it essentially duplicates the work of the /// parser. If you need to use this function or something similar, please consider updating the - /// codemap functions and this function to something more robust. + /// source_map functions and this function to something more robust. pub fn generate_local_type_param_snippet(&self, span: Span) -> Option<(Span, String)> { // Try to extend the span to the previous "fn" keyword to retrieve the function // signature diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 0db24c3b482..1210f331b28 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -21,7 +21,7 @@ use ptr::P; use tokenstream::TokenStream; /// Craft a span that will be ignored by the stability lint's -/// call to codemap's `is_internal` check. +/// call to source_map's `is_internal` check. /// The expanded code uses the unstable `#[prelude_import]` attribute. fn ignored_span(sp: Span) -> Span { let mark = Mark::fresh(Mark::root()); diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 393989711de..988f50b4f0c 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -324,7 +324,7 @@ fn generate_test_harness(sess: &ParseSess, } /// Craft a span that will be ignored by the stability lint's -/// call to codemap's `is_internal` check. +/// call to source_map's `is_internal` check. /// The expanded code calls some unstable functions in the test crate. fn ignored_span(cx: &TestCtxt, sp: Span) -> Span { sp.with_ctxt(cx.ctxt) diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index f9c91dc8a97..bd70344b018 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -162,11 +162,11 @@ impl FileName { } /// Spans represent a region of code, used for error reporting. 
Positions in spans -/// are *absolute* positions from the beginning of the codemap, not positions +/// are *absolute* positions from the beginning of the source_map, not positions /// relative to SourceFiles. Methods on the SourceMap can be used to relate spans back /// to the original source. /// You must be careful if the span crosses more than one file - you will not be -/// able to use many of the functions on spans in codemap and you cannot assume +/// able to use many of the functions on spans in source_map and you cannot assume /// that the length of the span = hi - lo; there may be space in the BytePos /// range between files. /// diff --git a/src/test/incremental/span_hash_stable/main.rs b/src/test/incremental/span_hash_stable/main.rs index 1512c5dc537..646a388c877 100644 --- a/src/test/incremental/span_hash_stable/main.rs +++ b/src/test/incremental/span_hash_stable/main.rs @@ -9,7 +9,7 @@ // except according to those terms. // This test makes sure that it doesn't make a difference in which order we are -// adding source files to the codemap. The order affects the BytePos values of +// adding source files to the source_map. The order affects the BytePos values of // the spans and this test makes sure that we handle them correctly by hashing // file:line:column instead of raw byte offset. 
diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-40001-plugin.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-40001-plugin.rs index 5214d7db5cc..7edb3e0f8a0 100644 --- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-40001-plugin.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-40001-plugin.rs @@ -54,7 +54,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingWhitelistedAttrPass { _: intravisit::FnKind<'tcx>, _: &'tcx hir::FnDecl, _: &'tcx hir::Body, - span: codemap::Span, + span: source_map::Span, id: ast::NodeId) { let item = match cx.tcx.hir.get(id) { diff --git a/src/test/ui/cfg-empty-codemap.rs b/src/test/ui/cfg-empty-codemap.rs index 8868a5a9549..f06d22d985f 100644 --- a/src/test/ui/cfg-empty-codemap.rs +++ b/src/test/ui/cfg-empty-codemap.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// Tests that empty codemaps don't ICE (#23301) +// Tests that empty source_maps don't ICE (#23301) // compile-flags: --cfg "" diff --git a/src/test/ui/mod/mod_file_correct_spans.rs b/src/test/ui/mod/mod_file_correct_spans.rs index 52837479869..1efd9ba8e55 100644 --- a/src/test/ui/mod/mod_file_correct_spans.rs +++ b/src/test/ui/mod/mod_file_correct_spans.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// Testing that the codemap is maintained correctly when parsing mods from external files +// Testing that the source_map is maintained correctly when parsing mods from external files mod mod_file_aux; -- cgit 1.4.1-3-g733a5 From 6138c82803a77a44e0a56cd8999299fa7f214afe Mon Sep 17 00:00:00 2001 From: Donato Sciarra Date: Sat, 18 Aug 2018 12:14:31 +0200 Subject: fix tidy errors --- src/libproc_macro/lib.rs | 2 +- src/librustc_errors/lib.rs | 9 ++++++++- src/librustc_lint/builtin.rs | 3 ++- src/librustc_metadata/decoder.rs | 6 ++++-- src/librustc_typeck/check/compare_method.rs | 3 ++- src/librustc_typeck/check/method/suggest.rs | 3 ++- src/librustdoc/html/highlight.rs | 6 ++++-- src/libsyntax/parse/lexer/mod.rs | 12 ++++++------ src/libsyntax/parse/mod.rs | 5 +++-- src/libsyntax/source_map.rs | 6 ++++-- 10 files changed, 36 insertions(+), 19 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 31aad29d083..0a32963a861 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -432,7 +432,7 @@ impl SourceFile { /// /// ### Note /// If the code span associated with this `SourceFile` was generated by an external macro, this - /// may not be an actual path on the filesystem. Use [`is_real`] to check. + /// may not be an actual path on the filesystem. Use [`is_real`] to check. /// /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on /// the command line, the path as given may not actually be valid. 
diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index ae88a365cbe..3582c2359c8 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -55,7 +55,14 @@ pub mod registry; mod styled_buffer; mod lock; -use syntax_pos::{BytePos, Loc, FileLinesResult, SourceFile, FileName, MultiSpan, Span, NO_EXPANSION}; +use syntax_pos::{BytePos, + Loc, + FileLinesResult, + SourceFile, + FileName, + MultiSpan, + Span, + NO_EXPANSION}; #[derive(Copy, Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] pub enum Applicability { diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index c346d3e76cd..0936f28a8fb 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -195,7 +195,8 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns { let mut err = cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, &format!("the `{}:` in this pattern is redundant", ident)); - let subspan = cx.tcx.sess.source_map().span_through_char(fieldpat.span, ':'); + let subspan = cx.tcx.sess.source_map().span_through_char(fieldpat.span, + ':'); err.span_suggestion_short_with_applicability( subspan, "remove this", diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 76473ec7781..f4dd8861e2a 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -349,8 +349,10 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { debug_assert!(hi >= source_file.original_start_pos && hi <= source_file.original_end_pos); - let lo = (lo + source_file.translated_source_file.start_pos) - source_file.original_start_pos; - let hi = (hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos; + let lo = (lo + source_file.translated_source_file.start_pos) + - source_file.original_start_pos; + let hi = (hi + source_file.translated_source_file.start_pos) + - source_file.original_start_pos; Ok(Span::new(lo, hi, 
NO_EXPANSION)) } diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs index 044621dabb4..9aa2ba363ed 100644 --- a/src/librustc_typeck/check/compare_method.rs +++ b/src/librustc_typeck/check/compare_method.rs @@ -319,7 +319,8 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_m.ident); if let TypeError::Mutability = terr { if let Some(trait_err_span) = trait_err_span { - if let Ok(trait_err_str) = tcx.sess.source_map().span_to_snippet(trait_err_span) { + if let Ok(trait_err_str) = tcx.sess.source_map(). + span_to_snippet(trait_err_span) { diag.span_suggestion( impl_err_span, "consider change the type to match the mutability in trait", diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index ed6ec1bf8e6..31ec62ba70a 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -132,7 +132,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { }; if let Some(note_span) = note_span { // We have a span pointing to the method. Show note with snippet. 
- err.span_note(self.tcx.sess.source_map().def_span(note_span), ¬e_str); + err.span_note(self.tcx.sess.source_map().def_span(note_span), + ¬e_str); } else { err.note(¬e_str); } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 2a9ad6c7f33..5df4862290e 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -33,7 +33,8 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, tooltip: Option<(&str, &str)>) -> String { debug!("highlighting: ================\n{}\n==============", src); let sess = parse::ParseSess::new(FilePathMapping::empty()); - let fm = sess.source_map().new_source_file(FileName::Custom("stdin".to_string()), src.to_string()); + let fm = sess.source_map().new_source_file(FileName::Custom("stdin".to_string()), + src.to_string()); let mut out = Vec::new(); if let Some((tooltip, class)) = tooltip { @@ -43,7 +44,8 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, } write_header(class, &mut out).unwrap(); - let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm, None), sess.source_map()); + let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm, None), + sess.source_map()); if classifier.write_source(&mut out).is_err() { return format!("
{}
", src); } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 434548ffd9d..448ff9676c9 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -180,9 +180,9 @@ impl<'a> StringReader<'a> { } /// For comments.rs, which hackily pokes into next_pos and ch - fn new_raw(sess: &'a ParseSess, source_file: Lrc, override_span: Option) - -> Self - { + fn new_raw(sess: &'a ParseSess, + source_file: Lrc, + override_span: Option) -> Self { let mut sr = StringReader::new_raw_internal(sess, source_file, override_span); sr.bump(); @@ -221,9 +221,9 @@ impl<'a> StringReader<'a> { } } - pub fn new(sess: &'a ParseSess, source_file: Lrc, override_span: Option) - -> Self - { + pub fn new(sess: &'a ParseSess, + source_file: Lrc, + override_span: Option) -> Self { let mut sr = StringReader::new_raw(sess, source_file, override_span); if sr.advance_token().is_err() { sr.emit_fatal_errors(); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index e3a2c83e4c2..adf01197c6d 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -240,8 +240,9 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option) } /// Given a source_file, produce a sequence of token-trees -pub fn source_file_to_stream(sess: &ParseSess, source_file: Lrc, override_span: Option) - -> TokenStream { +pub fn source_file_to_stream(sess: &ParseSess, + source_file: Lrc, + override_span: Option) -> TokenStream { let mut srdr = lexer::StringReader::new(sess, source_file, override_span); srdr.real_token(); panictry!(srdr.parse_all_token_trees()) diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs index 457a44b9cbb..c65931a8577 100644 --- a/src/libsyntax/source_map.rs +++ b/src/libsyntax/source_map.rs @@ -241,7 +241,8 @@ impl SourceMap { let mut files = self.files.borrow_mut(); files.file_maps.push(source_file.clone()); - files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), 
source_file.clone()); + files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), + source_file.clone()); source_file } @@ -297,7 +298,8 @@ impl SourceMap { let mut files = self.files.borrow_mut(); files.file_maps.push(source_file.clone()); - files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone()); + files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), + source_file.clone()); source_file } -- cgit 1.4.1-3-g733a5