| field | value | date |
|---|---|---|
| author | bors <bors@rust-lang.org> | 2017-12-14 12:50:00 +0000 |
| committer | bors <bors@rust-lang.org> | 2017-12-14 12:50:00 +0000 |
| commit | 8624ea51172c8a86d5c7c47d740be65a3a9efbc6 (patch) | |
| tree | 9cf47887652e5148f68c031c57fd7f42feff782e /src/libsyntax | |
| parent | 75a02a919c5109c55c652dd6671553df6452be04 (diff) | |
| parent | d732da813bac73d2c81caddd06df3df3d9609e3d (diff) | |
| download | rust-8624ea51172c8a86d5c7c47d740be65a3a9efbc6.tar.gz rust-8624ea51172c8a86d5c7c47d740be65a3a9efbc6.zip | |
Auto merge of #46335 - oli-obk:cleanups, r=jseyfried
Use PathBuf instead of String where applicable

r? @jseyfried
Diffstat (limited to 'src/libsyntax')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/codemap.rs | 74 |
| -rw-r--r-- | src/libsyntax/diagnostics/metadata.rs | 4 |
| -rw-r--r-- | src/libsyntax/ext/build.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 14 |
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 11 |
| -rw-r--r-- | src/libsyntax/ext/source_util.rs | 28 |
| -rw-r--r-- | src/libsyntax/json.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/lexer/comments.rs | 4 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 3 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 24 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 30 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 7 |
| -rw-r--r-- | src/libsyntax/print/pprust.rs | 6 |
| -rw-r--r-- | src/libsyntax/test_snippet.rs | 3 |
| -rw-r--r-- | src/libsyntax/util/parser_testing.rs | 6 |
15 files changed, 121 insertions, 97 deletions
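The whole series pivots on the `FileName` type from `syntax_pos`, which sits outside this diffstat. As a reading aid, here is a minimal sketch of what such an enum could look like, using only the variants and conversions that actually appear in the hunks below (`Real`, `MacroExpansion`, `QuoteExpansion`, `Custom`, `From<PathBuf>`, `Display`). The real definition in `syntax_pos` has more variants and helpers (for example the `is_macros()` call used in `codemap.rs`), so treat this as an illustration, not the actual type.

```rust
use std::fmt;
use std::path::PathBuf;

/// Hypothetical sketch of the `FileName` enum used throughout the diff;
/// the real definition lives in `syntax_pos` and is not part of this diffstat.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum FileName {
    /// A genuine on-disk file that can be re-read (e.g. for external sources).
    Real(PathBuf),
    /// Virtual sources that previously used magic strings like "<macro expansion>".
    MacroExpansion,
    QuoteExpansion,
    Custom(String),
}

impl fmt::Display for FileName {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            FileName::Real(ref path) => write!(f, "{}", path.display()),
            FileName::MacroExpansion => write!(f, "<macro expansion>"),
            FileName::QuoteExpansion => write!(f, "<quote expansion>"),
            FileName::Custom(ref s) => write!(f, "<{}>", s),
        }
    }
}

/// Enables the pervasive `PathBuf::from("...").into()` calls seen in the tests below.
impl From<PathBuf> for FileName {
    fn from(path: PathBuf) -> FileName {
        FileName::Real(path)
    }
}

fn main() {
    let name: FileName = PathBuf::from("src/lib.rs").into();
    // String-only consumers (interned symbols, JSON output, error metadata)
    // convert explicitly instead of storing a `String` everywhere.
    assert_eq!(name.to_string(), "src/lib.rs");
    assert_eq!(FileName::MacroExpansion.to_string(), "<macro expansion>");
}
```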
```diff
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 3906ed431ce..2c91d60ce9d 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -164,7 +164,7 @@ impl CodeMap {
     pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> {
         let src = self.file_loader.read_file(path)?;
-        Ok(self.new_filemap(path.to_str().unwrap().to_string(), src))
+        Ok(self.new_filemap(path.to_owned().into(), src))
     }
 
     pub fn files(&self) -> Ref<Vec<Rc<FileMap>>> {
@@ -196,9 +196,15 @@ impl CodeMap {
         // Note that filename may not be a valid path, eg it may be `<anon>` etc,
         // but this is okay because the directory determined by `path.pop()` will
         // be empty, so the working directory will be used.
-        let unmapped_path = PathBuf::from(filename.clone());
-
-        let (filename, was_remapped) = self.path_mapping.map_prefix(filename);
+        let unmapped_path = filename.clone();
+
+        let (filename, was_remapped) = match filename {
+            FileName::Real(filename) => {
+                let (filename, was_remapped) = self.path_mapping.map_prefix(filename);
+                (FileName::Real(filename), was_remapped)
+            },
+            other => (other, false),
+        };
         let filemap = Rc::new(FileMap::new(
             filename,
             was_remapped,
@@ -217,8 +223,8 @@ impl CodeMap {
     }
 
     /// Creates a new filemap and sets its line information.
-    pub fn new_filemap_and_lines(&self, filename: &str, src: &str) -> Rc<FileMap> {
-        let fm = self.new_filemap(filename.to_string(), src.to_owned());
+    pub fn new_filemap_and_lines(&self, filename: &Path, src: &str) -> Rc<FileMap> {
+        let fm = self.new_filemap(filename.to_owned().into(), src.to_owned());
         let mut byte_pos: u32 = fm.start_pos.0;
         for line in src.lines() {
             // register the start of this line
@@ -373,7 +379,7 @@ impl CodeMap {
     pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
         let loc = self.lookup_char_pos(pos);
         LocWithOpt {
-            filename: loc.file.name.to_string(),
+            filename: loc.file.name.clone(),
             line: loc.line,
             col: loc.col,
             file: Some(loc.file)
@@ -433,7 +439,7 @@ impl CodeMap {
         self.lookup_char_pos(sp.lo()).file.name.clone()
     }
 
-    pub fn span_to_unmapped_path(&self, sp: Span) -> PathBuf {
+    pub fn span_to_unmapped_path(&self, sp: Span) -> FileName {
         self.lookup_char_pos(sp.lo()).file.unmapped_path.clone()
             .expect("CodeMap::span_to_unmapped_path called for imported FileMap?")
     }
@@ -561,9 +567,9 @@ impl CodeMap {
         self.span_until_char(sp, '{')
     }
 
-    pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
+    pub fn get_filemap(&self, filename: &FileName) -> Option<Rc<FileMap>> {
         for fm in self.files.borrow().iter() {
-            if filename == fm.name {
+            if *filename == fm.name {
                 return Some(fm.clone());
             }
         }
@@ -650,7 +656,7 @@ impl CodeMapper for CodeMap {
         self.merge_spans(sp_lhs, sp_rhs)
     }
     fn call_span_if_macro(&self, sp: Span) -> Span {
-        if self.span_to_filename(sp.clone()).contains("macros>") {
+        if self.span_to_filename(sp.clone()).is_macros() {
             let v = sp.macro_backtrace();
             if let Some(use_site) = v.last() {
                 return use_site.call_site;
             }
@@ -660,14 +666,17 @@ impl CodeMapper for CodeMap {
     }
     fn ensure_filemap_source_present(&self, file_map: Rc<FileMap>) -> bool {
         file_map.add_external_src(
-            || self.file_loader.read_file(Path::new(&file_map.name)).ok()
+            || match file_map.name {
+                FileName::Real(ref name) => self.file_loader.read_file(name).ok(),
+                _ => None,
+            }
         )
     }
 }
 
 #[derive(Clone)]
 pub struct FilePathMapping {
-    mapping: Vec<(String, String)>,
+    mapping: Vec<(PathBuf, PathBuf)>,
 }
 
 impl FilePathMapping {
@@ -677,7 +686,7 @@ impl FilePathMapping {
         }
     }
 
-    pub fn new(mapping: Vec<(String, String)>) -> FilePathMapping {
+    pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping {
         FilePathMapping {
             mapping,
         }
@@ -686,14 +695,13 @@ impl FilePathMapping {
     /// Applies any path prefix substitution as defined by the mapping.
     /// The return value is the remapped path and a boolean indicating whether
    /// the path was affected by the mapping.
-    pub fn map_prefix(&self, path: String) -> (String, bool) {
+    pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) {
         // NOTE: We are iterating over the mapping entries from last to first
         //       because entries specified later on the command line should
         //       take precedence.
         for &(ref from, ref to) in self.mapping.iter().rev() {
-            if path.starts_with(from) {
-                let mapped = path.replacen(from, to, 1);
-                return (mapped, true);
+            if let Ok(rest) = path.strip_prefix(from) {
+                return (to.join(rest), true);
             }
         }
```
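`FilePathMapping::map_prefix` above now matches whole path components instead of raw string prefixes. A self-contained sketch of the same idea (a free function, not the actual method), illustrating the behavioural difference against `starts_with`/`replacen`:

```rust
use std::path::{Path, PathBuf};

/// Hypothetical standalone version of the prefix remapping done above:
/// returns the remapped path and whether any mapping entry applied.
fn map_prefix(mapping: &[(PathBuf, PathBuf)], path: &Path) -> (PathBuf, bool) {
    // Later entries take precedence, mirroring the NOTE in the hunk above.
    for &(ref from, ref to) in mapping.iter().rev() {
        // `strip_prefix` matches whole path components, so "/home/user" does
        // not accidentally match "/home/username" the way `str::starts_with` would.
        if let Ok(rest) = path.strip_prefix(from) {
            return (to.join(rest), true);
        }
    }
    (path.to_path_buf(), false)
}

fn main() {
    let mapping = vec![(PathBuf::from("/home/user"), PathBuf::from("/remap"))];

    let (mapped, hit) = map_prefix(&mapping, Path::new("/home/user/src/lib.rs"));
    assert_eq!(mapped, PathBuf::from("/remap/src/lib.rs"));
    assert!(hit);

    // A string-based `starts_with`/`replacen` would have rewritten this one too.
    let (unmapped, hit) = map_prefix(&mapping, Path::new("/home/username/src/lib.rs"));
    assert_eq!(unmapped, PathBuf::from("/home/username/src/lib.rs"));
    assert!(!hit);
}
```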
```diff
@@ -714,7 +722,7 @@ mod tests {
     #[test]
     fn t1 () {
         let cm = CodeMap::new(FilePathMapping::empty());
-        let fm = cm.new_filemap("blork.rs".to_string(),
+        let fm = cm.new_filemap(PathBuf::from("blork.rs").into(),
                                 "first line.\nsecond line".to_string());
         fm.next_line(BytePos(0));
         // Test we can get lines with partial line info.
@@ -730,7 +738,7 @@
     #[should_panic]
     fn t2 () {
         let cm = CodeMap::new(FilePathMapping::empty());
-        let fm = cm.new_filemap("blork.rs".to_string(),
+        let fm = cm.new_filemap(PathBuf::from("blork.rs").into(),
                                 "first line.\nsecond line".to_string());
         // TESTING *REALLY* BROKEN BEHAVIOR:
         fm.next_line(BytePos(0));
@@ -740,11 +748,11 @@ fn init_code_map() -> CodeMap {
         let cm = CodeMap::new(FilePathMapping::empty());
-        let fm1 = cm.new_filemap("blork.rs".to_string(),
+        let fm1 = cm.new_filemap(PathBuf::from("blork.rs").into(),
                                  "first line.\nsecond line".to_string());
-        let fm2 = cm.new_filemap("empty.rs".to_string(),
+        let fm2 = cm.new_filemap(PathBuf::from("empty.rs").into(),
                                  "".to_string());
-        let fm3 = cm.new_filemap("blork2.rs".to_string(),
+        let fm3 = cm.new_filemap(PathBuf::from("blork2.rs").into(),
                                  "first line.\nsecond line".to_string());
 
         fm1.next_line(BytePos(0));
@@ -762,15 +770,15 @@ mod tests {
         let cm = init_code_map();
 
         let fmabp1 = cm.lookup_byte_offset(BytePos(23));
-        assert_eq!(fmabp1.fm.name, "blork.rs");
+        assert_eq!(fmabp1.fm.name, PathBuf::from("blork.rs").into());
         assert_eq!(fmabp1.pos, BytePos(23));
 
         let fmabp1 = cm.lookup_byte_offset(BytePos(24));
-        assert_eq!(fmabp1.fm.name, "empty.rs");
+        assert_eq!(fmabp1.fm.name, PathBuf::from("empty.rs").into());
         assert_eq!(fmabp1.pos, BytePos(0));
 
         let fmabp2 = cm.lookup_byte_offset(BytePos(25));
-        assert_eq!(fmabp2.fm.name, "blork2.rs");
+        assert_eq!(fmabp2.fm.name, PathBuf::from("blork2.rs").into());
         assert_eq!(fmabp2.pos, BytePos(0));
     }
 
@@ -792,12 +800,12 @@ mod tests {
         let cm = init_code_map();
 
         let loc1 = cm.lookup_char_pos(BytePos(22));
-        assert_eq!(loc1.file.name, "blork.rs");
+        assert_eq!(loc1.file.name, PathBuf::from("blork.rs").into());
         assert_eq!(loc1.line, 2);
         assert_eq!(loc1.col, CharPos(10));
 
         let loc2 = cm.lookup_char_pos(BytePos(25));
-        assert_eq!(loc2.file.name, "blork2.rs");
+        assert_eq!(loc2.file.name, PathBuf::from("blork2.rs").into());
         assert_eq!(loc2.line, 1);
         assert_eq!(loc2.col, CharPos(0));
     }
@@ -806,9 +814,9 @@ mod tests {
         let cm = CodeMap::new(FilePathMapping::empty());
         // € is a three byte utf8 char.
         let fm1 =
-            cm.new_filemap("blork.rs".to_string(),
+            cm.new_filemap(PathBuf::from("blork.rs").into(),
                            "fir€st €€€€ line.\nsecond line".to_string());
-        let fm2 = cm.new_filemap("blork2.rs".to_string(),
+        let fm2 = cm.new_filemap(PathBuf::from("blork2.rs").into(),
                                  "first line€€.\n€ second line".to_string());
 
         fm1.next_line(BytePos(0));
@@ -853,7 +861,7 @@ mod tests {
         let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
         let file_lines = cm.span_to_lines(span).unwrap();
 
-        assert_eq!(file_lines.file.name, "blork.rs");
+        assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into());
         assert_eq!(file_lines.lines.len(), 1);
         assert_eq!(file_lines.lines[0].line_index, 1);
     }
@@ -876,7 +884,7 @@ mod tests {
         let cm = CodeMap::new(FilePathMapping::empty());
         let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
         let selection = "     \n    ~~\n~~~\n~~~~~     \n   \n";
-        cm.new_filemap_and_lines("blork.rs", inputtext);
+        cm.new_filemap_and_lines(Path::new("blork.rs"), inputtext);
         let span = span_from_selection(inputtext, selection);
 
         // check that we are extracting the text we thought we were extracting
@@ -919,7 +927,7 @@ mod tests {
         let inputtext  = "bbbb BB\ncc CCC\n";
         let selection1 = "     ~~\n      \n";
         let selection2 = "       \n   ~~~\n";
-        cm.new_filemap_and_lines("blork.rs", inputtext);
+        cm.new_filemap_and_lines(Path::new("blork.rs"), inputtext);
         let span1 = span_from_selection(inputtext, selection1);
         let span2 = span_from_selection(inputtext, selection2);
```
```diff
diff --git a/src/libsyntax/diagnostics/metadata.rs b/src/libsyntax/diagnostics/metadata.rs
index daa7112235f..5f06475919f 100644
--- a/src/libsyntax/diagnostics/metadata.rs
+++ b/src/libsyntax/diagnostics/metadata.rs
@@ -20,7 +20,7 @@ use std::io::Write;
 use std::error::Error;
 use rustc_serialize::json::as_json;
 
-use syntax_pos::Span;
+use syntax_pos::{Span, FileName};
 use ext::base::ExtCtxt;
 use diagnostics::plugin::{ErrorMap, ErrorInfo};
 
@@ -40,7 +40,7 @@ pub type ErrorMetadataMap = BTreeMap<String, ErrorMetadata>;
 /// JSON encodable error location type with filename and line number.
 #[derive(PartialEq, RustcDecodable, RustcEncodable)]
 pub struct ErrorLocation {
-    pub filename: String,
+    pub filename: FileName,
     pub line: usize
 }
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 82e7747b014..9a96432f11d 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -759,7 +759,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> {
         let loc = self.codemap().lookup_char_pos(span.lo());
-        let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name));
+        let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name.to_string()));
         let expr_line = self.expr_u32(span, loc.line as u32);
         let expr_col = self.expr_u32(span, loc.col.to_usize() as u32 + 1);
         let expr_loc_tuple = self.expr_tuple(span, vec![expr_file, expr_line, expr_col]);
```
expansions { ($($kind:ident: $ty:ty [$($vec:ident, $ty_elt:ty)*], $kind_name:expr, .$make:ident, @@ -220,7 +221,10 @@ impl<'a, 'b> MacroExpander<'a, 'b> { pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { let mut module = ModuleData { mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)], - directory: self.cx.codemap().span_to_unmapped_path(krate.span), + directory: match self.cx.codemap().span_to_unmapped_path(krate.span) { + FileName::Real(path) => path, + other => PathBuf::from(other.to_string()), + }, }; module.directory.pop(); self.cx.root_path = module.directory.clone(); @@ -978,7 +982,11 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { module.directory.push(&*item.ident.name.as_str()); } } else { - let mut path = self.cx.parse_sess.codemap().span_to_unmapped_path(inner); + let path = self.cx.parse_sess.codemap().span_to_unmapped_path(inner); + let mut path = match path { + FileName::Real(path) => path, + other => PathBuf::from(other.to_string()), + }; let directory_ownership = match path.file_name().unwrap().to_str() { Some("mod.rs") => DirectoryOwnership::Owned, _ => DirectoryOwnership::UnownedViaMod(false), diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index bd8c9a0ed40..426dde4f2a7 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -18,7 +18,6 @@ use parse::token; use ptr::P; use tokenstream::{TokenStream, TokenTree}; - /// Quasiquoting works via token trees. /// /// This is registered as a set of expression syntax extension called quote! @@ -38,7 +37,7 @@ pub mod rt { use tokenstream::{self, TokenTree, TokenStream}; pub use parse::new_parser_from_tts; - pub use syntax_pos::{BytePos, Span, DUMMY_SP}; + pub use syntax_pos::{BytePos, Span, DUMMY_SP, FileName}; pub use codemap::{dummy_spanned}; pub trait ToTokens { @@ -343,27 +342,27 @@ pub mod rt { impl<'a> ExtParseUtils for ExtCtxt<'a> { fn parse_item(&self, s: String) -> P<ast::Item> { panictry!(parse::parse_item_from_source_str( - "<quote expansion>".to_string(), + FileName::QuoteExpansion, s, self.parse_sess())).expect("parse error") } fn parse_stmt(&self, s: String) -> ast::Stmt { panictry!(parse::parse_stmt_from_source_str( - "<quote expansion>".to_string(), + FileName::QuoteExpansion, s, self.parse_sess())).expect("parse error") } fn parse_expr(&self, s: String) -> P<ast::Expr> { panictry!(parse::parse_expr_from_source_str( - "<quote expansion>".to_string(), + FileName::QuoteExpansion, s, self.parse_sess())) } fn parse_tts(&self, s: String) -> Vec<TokenTree> { - let source_name = "<quote expansion>".to_owned(); + let source_name = FileName::QuoteExpansion; parse::parse_stream_from_source_str(source_name, s, self.parse_sess(), None) .into_trees().collect() } diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 86657e675b2..2a80686aa0f 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -9,7 +9,7 @@ // except according to those terms. 
```diff
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 86657e675b2..2a80686aa0f 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use ast;
-use syntax_pos::{self, Pos, Span};
+use syntax_pos::{self, Pos, Span, FileName};
 use ext::base::*;
 use ext::base;
 use ext::build::AstBuilder;
@@ -23,7 +23,7 @@ use util::small_vector::SmallVector;
 
 use std::fs::File;
 use std::io::prelude::*;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 use std::rc::Rc;
 
 // These macros all relate to the file system; they either return
@@ -71,7 +71,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
     let topmost = cx.expansion_cause().unwrap_or(sp);
     let loc = cx.codemap().lookup_char_pos(topmost.lo());
 
-    base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name)))
+    base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
 }
 
 pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
@@ -99,7 +99,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::T
         None => return DummyResult::expr(sp),
     };
     // The file will be added to the code map by the parser
-    let path = res_rel_file(cx, sp, Path::new(&file));
+    let path = res_rel_file(cx, sp, file);
     let directory_ownership = DirectoryOwnership::Owned;
     let p = parse::new_sub_parser_from_file(cx.parse_sess(), &path, directory_ownership, None, sp);
 
@@ -135,7 +135,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
         Some(f) => f,
         None => return DummyResult::expr(sp)
     };
-    let file = res_rel_file(cx, sp, Path::new(&file));
+    let file = res_rel_file(cx, sp, file);
     let mut bytes = Vec::new();
     match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) {
         Ok(..) => {}
@@ -151,8 +151,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
         Ok(src) => {
             // Add this input file to the code map to make it available as
             // dependency information
-            let filename = format!("{}", file.display());
-            cx.codemap().new_filemap_and_lines(&filename, &src);
+            cx.codemap().new_filemap_and_lines(&file, &src);
 
             base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src)))
         }
@@ -171,7 +170,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
         Some(f) => f,
         None => return DummyResult::expr(sp)
     };
-    let file = res_rel_file(cx, sp, Path::new(&file));
+    let file = res_rel_file(cx, sp, file);
     let mut bytes = Vec::new();
     match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) {
         Err(e) => {
@@ -182,8 +181,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
         Ok(..) => {
             // Add this input file to the code map to make it available as
             // dependency information, but don't enter it's contents
-            let filename = format!("{}", file.display());
-            cx.codemap().new_filemap_and_lines(&filename, "");
+            cx.codemap().new_filemap_and_lines(&file, "");
 
             base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Rc::new(bytes))))
         }
@@ -192,16 +190,20 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
 
 // resolve a file-system path to an absolute file-system path (if it
 // isn't already)
-fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: &Path) -> PathBuf {
+fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: String) -> PathBuf {
+    let arg = PathBuf::from(arg);
     // Relative paths are resolved relative to the file in which they are found
     // after macro expansion (that is, they are unhygienic).
     if !arg.is_absolute() {
         let callsite = sp.source_callsite();
-        let mut path = cx.codemap().span_to_unmapped_path(callsite);
+        let mut path = match cx.codemap().span_to_unmapped_path(callsite) {
+            FileName::Real(path) => path,
+            other => panic!("cannot resolve relative path in non-file source `{}`", other),
+        };
         path.pop();
         path.push(arg);
         path
     } else {
-        arg.to_path_buf()
+        arg
     }
 }
```
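`res_rel_file` above resolves the macro argument against the directory of the file that invoked `include!`/`include_str!`/`include_bytes!`, and now panics for non-file sources. A standalone sketch of that resolution logic (hypothetical `resolve_relative` helper, not the function in the diff):

```rust
use std::path::{Path, PathBuf};

/// Hypothetical standalone version of the relative-path resolution used by the
/// include macros above: resolve `arg` against the directory of the including file.
fn resolve_relative(including_file: &Path, arg: &str) -> PathBuf {
    let arg = PathBuf::from(arg);
    if arg.is_absolute() {
        return arg;
    }
    let mut base = including_file.to_path_buf();
    base.pop();     // drop the file name, keep its directory
    base.push(arg); // append the macro argument
    base
}

fn main() {
    let p = resolve_relative(Path::new("/crate/src/lib.rs"), "data/blob.bin");
    assert_eq!(p, PathBuf::from("/crate/src/data/blob.bin"));

    let abs = resolve_relative(Path::new("/crate/src/lib.rs"), "/etc/hosts");
    assert_eq!(abs, PathBuf::from("/etc/hosts"));
}
```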
```diff
diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs
index 80ac0cb4faf..54c726d8462 100644
--- a/src/libsyntax/json.rs
+++ b/src/libsyntax/json.rs
@@ -282,7 +282,7 @@ impl DiagnosticSpan {
             })
         });
         DiagnosticSpan {
-            file_name: start.file.name.clone(),
+            file_name: start.file.name.to_string(),
             byte_start: span.lo().0 - start.file.start_pos.0,
             byte_end: span.hi().0 - start.file.start_pos.0,
             line_start: start.line,
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index fb558d1a58f..23449ee69ab 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -12,7 +12,7 @@ pub use self::CommentStyle::*;
 
 use ast;
 use codemap::CodeMap;
-use syntax_pos::{BytePos, CharPos, Pos};
+use syntax_pos::{BytePos, CharPos, Pos, FileName};
 use parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
 use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
 use print::pprust;
@@ -343,7 +343,7 @@ pub struct Literal {
 
 // it appears this function is called only from pprust... that's
 // probably not a good thing.
-pub fn gather_comments_and_literals(sess: &ParseSess, path: String, srdr: &mut Read)
+pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut Read)
                                     -> (Vec<Comment>, Vec<Literal>) {
     let mut src = Vec::new();
     srdr.read_to_end(&mut src).unwrap();
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index d9c33fa50bd..798dfc6d209 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1714,6 +1714,7 @@ mod tests {
     use std::cell::RefCell;
     use std::collections::HashSet;
     use std::io;
+    use std::path::PathBuf;
     use std::rc::Rc;
 
     fn mk_sess(cm: Rc<CodeMap>) -> ParseSess {
@@ -1735,7 +1736,7 @@ mod tests {
                        sess: &'a ParseSess,
                        teststr: String)
                        -> StringReader<'a> {
-        let fm = cm.new_filemap("zebra.rs".to_string(), teststr);
+        let fm = cm.new_filemap(PathBuf::from("zebra.rs").into(), teststr);
         StringReader::new(sess, fm)
     }
```
```diff
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index c679efd41ea..4d435665d3c 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -12,7 +12,7 @@
 use ast::{self, CrateConfig};
 use codemap::{CodeMap, FilePathMapping};
-use syntax_pos::{self, Span, FileMap, NO_EXPANSION};
+use syntax_pos::{self, Span, FileMap, NO_EXPANSION, FileName};
 use errors::{Handler, ColorConfig, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
@@ -107,17 +107,17 @@ pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess)
     parser.parse_inner_attributes()
 }
 
-pub fn parse_crate_from_source_str(name: String, source: String, sess: &ParseSess)
+pub fn parse_crate_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                        -> PResult<ast::Crate> {
     new_parser_from_source_str(sess, name, source).parse_crate_mod()
 }
 
-pub fn parse_crate_attrs_from_source_str(name: String, source: String, sess: &ParseSess)
+pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                              -> PResult<Vec<ast::Attribute>> {
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
-pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess)
+pub fn parse_expr_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                       -> PResult<P<ast::Expr>> {
     new_parser_from_source_str(sess, name, source).parse_expr()
 }
@@ -126,29 +126,29 @@ pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess
 ///
 /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
 /// when a syntax error occurred.
-pub fn parse_item_from_source_str(name: String, source: String, sess: &ParseSess)
+pub fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                       -> PResult<Option<P<ast::Item>>> {
     new_parser_from_source_str(sess, name, source).parse_item()
 }
 
-pub fn parse_meta_from_source_str(name: String, source: String, sess: &ParseSess)
+pub fn parse_meta_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                       -> PResult<ast::MetaItem> {
     new_parser_from_source_str(sess, name, source).parse_meta_item()
 }
 
-pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess)
+pub fn parse_stmt_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                       -> PResult<Option<ast::Stmt>> {
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess,
+pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess,
                                     override_span: Option<Span>)
                                     -> TokenStream {
     filemap_to_stream(sess, sess.codemap().new_filemap(name, source), override_span)
 }
 
 // Create a new parser from a source string
-pub fn new_parser_from_source_str(sess: &ParseSess, name: String, source: String)
+pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                       -> Parser {
     let mut parser = filemap_to_parser(sess, sess.codemap().new_filemap(name, source));
     parser.recurse_into_file_modules = false;
@@ -1018,7 +1018,7 @@ mod tests {
     #[test] fn crlf_doc_comments() {
         let sess = ParseSess::new(FilePathMapping::empty());
 
-        let name = "<source>".to_string();
+        let name = FileName::Custom("source".to_string());
         let source = "/// doc comment\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name.clone(), source, &sess)
             .unwrap().unwrap();
@@ -1042,7 +1042,7 @@ mod tests {
     #[test] fn ttdelim_span() {
         let sess = ParseSess::new(FilePathMapping::empty());
-        let expr = parse::parse_expr_from_source_str("foo".to_string(),
+        let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(),
                                                      "foo!( fn main() { body } )".to_string(),
                                                      &sess).unwrap();
 
         let tts: Vec<_> = match expr.node {
@@ -1065,7 +1065,7 @@ mod tests {
     fn out_of_line_mod() {
         let sess = ParseSess::new(FilePathMapping::empty());
         let item = parse_item_from_source_str(
-            "foo".to_owned(),
+            PathBuf::from("foo").into(),
             "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
             &sess,
         ).unwrap().unwrap();
```
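All of the `parse_*_from_source_str` entry points above now take a `FileName` as their first argument. A sketch of what a caller inside the compiler tree might look like after this change (requires the in-tree `syntax` crate on a nightly with `rustc_private`; shown only to illustrate the new parameter, mirroring the `ttdelim_span` test above, and not runnable outside the rustc build):

```rust
#![feature(rustc_private)]
extern crate syntax;

use std::path::PathBuf;
use syntax::codemap::FilePathMapping;
use syntax::parse::{self, ParseSess};

fn main() {
    let sess = ParseSess::new(FilePathMapping::empty());
    // Callers now pass a `FileName` (here via `From<PathBuf>`) instead of a `String`.
    let expr = parse::parse_expr_from_source_str(
        PathBuf::from("demo.rs").into(),
        "1 + 2".to_string(),
        &sess,
    );
    assert!(expr.is_ok());
}
```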
```diff
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 726db733482..b3ef70fd18e 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -41,7 +41,7 @@ use ast::{BinOpKind, UnOp};
 use ast::{RangeEnd, RangeSyntax};
 use {ast, attr};
 use codemap::{self, CodeMap, Spanned, respan};
-use syntax_pos::{self, Span, BytePos};
+use syntax_pos::{self, Span, BytePos, FileName, DUMMY_SP};
 use errors::{self, DiagnosticBuilder};
 use parse::{self, classify, token};
 use parse::common::SeqSep;
@@ -527,9 +527,11 @@ impl<'a> Parser<'a> {
         if let Some(directory) = directory {
             parser.directory = directory;
-        } else if parser.span != syntax_pos::DUMMY_SP {
-            parser.directory.path = sess.codemap().span_to_unmapped_path(parser.span);
-            parser.directory.path.pop();
+        } else if !parser.span.source_equal(&DUMMY_SP) {
+            if let FileName::Real(path) = sess.codemap().span_to_unmapped_path(parser.span) {
+                parser.directory.path = path;
+                parser.directory.path.pop();
+            }
         }
 
         parser.process_potential_macro_variable();
@@ -5764,15 +5766,17 @@ impl<'a> Parser<'a> {
             let mut err = self.diagnostic().struct_span_err(id_sp,
                 "cannot declare a new module at this location");
             if id_sp != syntax_pos::DUMMY_SP {
-                let src_path = PathBuf::from(self.sess.codemap().span_to_filename(id_sp));
-                if let Some(stem) = src_path.file_stem() {
-                    let mut dest_path = src_path.clone();
-                    dest_path.set_file_name(stem);
-                    dest_path.push("mod.rs");
-                    err.span_note(id_sp,
-                                  &format!("maybe move this module `{}` to its own \
-                                            directory via `{}`", src_path.to_string_lossy(),
-                                           dest_path.to_string_lossy()));
+                let src_path = self.sess.codemap().span_to_filename(id_sp);
+                if let FileName::Real(src_path) = src_path {
+                    if let Some(stem) = src_path.file_stem() {
+                        let mut dest_path = src_path.clone();
+                        dest_path.set_file_name(stem);
+                        dest_path.push("mod.rs");
+                        err.span_note(id_sp,
+                                      &format!("maybe move this module `{}` to its own \
+                                                directory via `{}`", src_path.display(),
+                                               dest_path.display()));
+                    }
                 }
             }
             if paths.path_exists {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 26f39f60880..05368c52d2c 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -21,7 +21,7 @@ use ptr::P;
 use serialize::{Decodable, Decoder, Encodable, Encoder};
 use symbol::keywords;
 use syntax::parse::parse_stream_from_source_str;
-use syntax_pos::{self, Span};
+use syntax_pos::{self, Span, FileName};
 use tokenstream::{TokenStream, TokenTree};
 use tokenstream;
 
@@ -495,9 +495,8 @@ impl Token {
         tokens.unwrap_or_else(|| {
             nt.1.force(|| {
                 // FIXME(jseyfried): Avoid this pretty-print + reparse hack
-                let name = "<macro expansion>".to_owned();
                 let source = pprust::token_to_string(self);
-                parse_stream_from_source_str(name, source, sess, Some(span))
+                parse_stream_from_source_str(FileName::MacroExpansion, source, sess, Some(span))
             })
         })
     }
@@ -629,7 +628,7 @@ fn prepend_attrs(sess: &ParseSess,
         assert_eq!(attr.style, ast::AttrStyle::Outer,
                    "inner attributes should prevent cached tokens from existing");
         // FIXME: Avoid this pretty-print + reparse hack as bove
-        let name = "<macro expansion>".to_owned();
+        let name = FileName::MacroExpansion;
         let source = pprust::attr_to_string(attr);
         let stream = parse_stream_from_source_str(name, source, sess, Some(span));
         builder.push(stream);
```
```diff
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 17f37d0f2c0..e4b7dc26d32 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -27,7 +27,7 @@ use print::pp::Breaks::{Consistent, Inconsistent};
 use ptr::P;
 use std_inject;
 use symbol::{Symbol, keywords};
-use syntax_pos::DUMMY_SP;
+use syntax_pos::{DUMMY_SP, FileName};
 use tokenstream::{self, TokenStream, TokenTree};
 
 use std::ascii;
@@ -87,7 +87,7 @@ pub const DEFAULT_COLUMNS: usize = 78;
 pub fn print_crate<'a>(cm: &'a CodeMap,
                        sess: &ParseSess,
                        krate: &ast::Crate,
-                       filename: String,
+                       filename: FileName,
                        input: &mut Read,
                        out: Box<Write+'a>,
                        ann: &'a PpAnn,
@@ -120,7 +120,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
 impl<'a> State<'a> {
     pub fn new_from_input(cm: &'a CodeMap,
                           sess: &ParseSess,
-                          filename: String,
+                          filename: FileName,
                           input: &mut Read,
                           out: Box<Write+'a>,
                           ann: &'a PpAnn,
diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs
index a29250ea5f1..5072f2e2793 100644
--- a/src/libsyntax/test_snippet.rs
+++ b/src/libsyntax/test_snippet.rs
@@ -16,6 +16,7 @@ use std::io::prelude::*;
 use std::rc::Rc;
 use std::str;
 use std::sync::{Arc, Mutex};
+use std::path::Path;
 use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan};
 
 /// Identify a position in the text by the Nth occurrence of a string.
@@ -48,7 +49,7 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
     let output = Arc::new(Mutex::new(Vec::new()));
 
     let code_map = Rc::new(CodeMap::new(FilePathMapping::empty()));
-    code_map.new_filemap_and_lines("test.rs", &file_text);
+    code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text);
 
     let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
     let mut msp = MultiSpan::from_span(primary_span);
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index d993ba14a4a..42cd7c8faa5 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -16,16 +16,18 @@ use parse::parser::Parser;
 use ptr::P;
 use tokenstream::TokenStream;
 use std::iter::Peekable;
+use std::path::PathBuf;
 
 /// Map a string to tts, using a made-up filename:
 pub fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new(FilePathMapping::empty());
-    filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), source_str), None)
+    filemap_to_stream(&ps, ps.codemap()
+                             .new_filemap(PathBuf::from("bogofile").into(), source_str), None)
 }
 
 /// Map string to parser (via tts)
 pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a> {
-    new_parser_from_source_str(ps, "bogofile".to_string(), source_str)
+    new_parser_from_source_str(ps, PathBuf::from("bogofile").into(), source_str)
 }
 
 fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> T where
```
