| author | Mazdak Farrokhzad <twingoow@gmail.com> | 2019-10-10 10:26:10 +0200 |
|---|---|---|
| committer | Mazdak Farrokhzad <twingoow@gmail.com> | 2019-11-10 02:46:17 +0100 |
| commit | be023ebe850261c6bb202a02a686827d821c3697 | |
| tree | 4595cee969fbdb83ffc536ab940e15be2503e454 | /src/libsyntax/parse |
| parent | 5011ec7fedffe34d943654ffb4308875fc5ec8f3 | |
| download | rust-be023ebe850261c6bb202a02a686827d821c3697.tar.gz, rust-be023ebe850261c6bb202a02a686827d821c3697.zip | |
move config.rs to libsyntax_expand
Diffstat (limited to 'src/libsyntax/parse')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 6 |
| -rw-r--r-- | src/libsyntax/parse/lexer/tests.rs | 270 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 3 |
| -rw-r--r-- | src/libsyntax/parse/parser/attr.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/parser/module.rs | 19 |
| -rw-r--r-- | src/libsyntax/parse/tests.rs | 339 |
6 files changed, 11 insertions, 628 deletions
```diff
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index b1b7b08c78a..f2d5ff3440e 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -13,9 +13,6 @@ use std::convert::TryInto;
 use rustc_data_structures::sync::Lrc;
 use log::debug;
 
-#[cfg(test)]
-mod tests;
-
 mod tokentrees;
 mod unicode_chars;
 mod unescape_error_reporting;
@@ -35,7 +32,8 @@ pub struct StringReader<'a> {
     /// Initial position, read-only.
     start_pos: BytePos,
     /// The absolute offset within the source_map of the current character.
-    pos: BytePos,
+    // FIXME(#64197): `pub` is needed by tests for now.
+    pub pos: BytePos,
     /// Stop reading src at this index.
     end_src_index: usize,
     /// Source text to tokenize.
diff --git a/src/libsyntax/parse/lexer/tests.rs b/src/libsyntax/parse/lexer/tests.rs
deleted file mode 100644
index baa6fb59537..00000000000
--- a/src/libsyntax/parse/lexer/tests.rs
+++ /dev/null
@@ -1,270 +0,0 @@
-use super::*;
-
-use crate::symbol::Symbol;
-use crate::source_map::{SourceMap, FilePathMapping};
-use crate::token;
-use crate::util::comments::is_doc_comment;
-use crate::with_default_globals;
-
-use errors::{Handler, emitter::EmitterWriter};
-use std::io;
-use std::path::PathBuf;
-use syntax_pos::{BytePos, Span};
-
-fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
-    let emitter = EmitterWriter::new(
-        Box::new(io::sink()),
-        Some(sm.clone()),
-        false,
-        false,
-        false,
-        None,
-        false,
-    );
-    ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
-}
-
-// Creates a string reader for the given string.
-fn setup<'a>(sm: &SourceMap,
-             sess: &'a ParseSess,
-             teststr: String)
-             -> StringReader<'a> {
-    let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
-    StringReader::new(sess, sf, None)
-}
-
-#[test]
-fn t1() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        let mut string_reader = setup(
-            &sm,
-            &sh,
-            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
-        );
-        assert_eq!(string_reader.next_token(), token::Comment);
-        assert_eq!(string_reader.next_token(), token::Whitespace);
-        let tok1 = string_reader.next_token();
-        let tok2 = Token::new(
-            mk_ident("fn"),
-            Span::with_root_ctxt(BytePos(21), BytePos(23)),
-        );
-        assert_eq!(tok1.kind, tok2.kind);
-        assert_eq!(tok1.span, tok2.span);
-        assert_eq!(string_reader.next_token(), token::Whitespace);
-        // Read another token.
-        let tok3 = string_reader.next_token();
-        assert_eq!(string_reader.pos.clone(), BytePos(28));
-        let tok4 = Token::new(
-            mk_ident("main"),
-            Span::with_root_ctxt(BytePos(24), BytePos(28)),
-        );
-        assert_eq!(tok3.kind, tok4.kind);
-        assert_eq!(tok3.span, tok4.span);
-
-        assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
-        assert_eq!(string_reader.pos.clone(), BytePos(29))
-    })
-}
-
-// Checks that the given reader produces the desired stream
-// of tokens (stop checking after exhausting `expected`).
-fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
-    for expected_tok in &expected {
-        assert_eq!(&string_reader.next_token(), expected_tok);
-    }
-}
-
-// Makes the identifier by looking up the string in the interner.
-fn mk_ident(id: &str) -> TokenKind {
-    token::Ident(Symbol::intern(id), false)
-}
-
-fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
-    TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
-}
-
-#[test]
-fn doublecolon_parsing() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        check_tokenization(
-            setup(&sm, &sh, "a b".to_string()),
-            vec![mk_ident("a"), token::Whitespace, mk_ident("b")],
-        );
-    })
-}
-
-#[test]
-fn doublecolon_parsing_2() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        check_tokenization(
-            setup(&sm, &sh, "a::b".to_string()),
-            vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")],
-        );
-    })
-}
-
-#[test]
-fn doublecolon_parsing_3() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        check_tokenization(
-            setup(&sm, &sh, "a ::b".to_string()),
-            vec![mk_ident("a"), token::Whitespace, token::Colon, token::Colon, mk_ident("b")],
-        );
-    })
-}
-
-#[test]
-fn doublecolon_parsing_4() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        check_tokenization(
-            setup(&sm, &sh, "a:: b".to_string()),
-            vec![mk_ident("a"), token::Colon, token::Colon, token::Whitespace, mk_ident("b")],
-        );
-    })
-}
-
-#[test]
-fn character_a() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        assert_eq!(
-            setup(&sm, &sh, "'a'".to_string()).next_token(),
-            mk_lit(token::Char, "a", None),
-        );
-    })
-}
-
-#[test]
-fn character_space() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        assert_eq!(
-            setup(&sm, &sh, "' '".to_string()).next_token(),
-            mk_lit(token::Char, " ", None),
-        );
-    })
-}
-
-#[test]
-fn character_escaped() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        assert_eq!(
-            setup(&sm, &sh, "'\\n'".to_string()).next_token(),
-            mk_lit(token::Char, "\\n", None),
-        );
-    })
-}
-
-#[test]
-fn lifetime_name() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        assert_eq!(
-            setup(&sm, &sh, "'abc".to_string()).next_token(),
-            token::Lifetime(Symbol::intern("'abc")),
-        );
-    })
-}
-
-#[test]
-fn raw_string() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        assert_eq!(
-            setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
-            mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None),
-        );
-    })
-}
-
-#[test]
-fn literal_suffixes() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        macro_rules! test {
-            ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                assert_eq!(
-                    setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
-                    mk_lit(token::$tok_type, $tok_contents, Some("suffix")),
-                );
-                // with a whitespace separator
-                assert_eq!(
-                    setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
-                    mk_lit(token::$tok_type, $tok_contents, None),
-                );
-            }}
-        }
-
-        test!("'a'", Char, "a");
-        test!("b'a'", Byte, "a");
-        test!("\"a\"", Str, "a");
-        test!("b\"a\"", ByteStr, "a");
-        test!("1234", Integer, "1234");
-        test!("0b101", Integer, "0b101");
-        test!("0xABC", Integer, "0xABC");
-        test!("1.0", Float, "1.0");
-        test!("1.0e10", Float, "1.0e10");
-
-        assert_eq!(
-            setup(&sm, &sh, "2us".to_string()).next_token(),
-            mk_lit(token::Integer, "2", Some("us")),
-        );
-        assert_eq!(
-            setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
-            mk_lit(token::StrRaw(3), "raw", Some("suffix")),
-        );
-        assert_eq!(
-            setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
-            mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")),
-        );
-    })
-}
-
-#[test]
-fn line_doc_comments() {
-    assert!(is_doc_comment("///"));
-    assert!(is_doc_comment("/// blah"));
-    assert!(!is_doc_comment("////"));
-}
-
-#[test]
-fn nested_block_comments() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
-        assert_eq!(lexer.next_token(), token::Comment);
-        assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
-    })
-}
-
-#[test]
-fn crlf_comments() {
-    with_default_globals(|| {
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(sm.clone());
-        let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
-        let comment = lexer.next_token();
-        assert_eq!(comment.kind, token::Comment);
-        assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
-        assert_eq!(lexer.next_token(), token::Whitespace);
-        assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
-    })
-}
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 9155cbe5dd8..af3f8ecb45a 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -17,8 +17,7 @@ use std::str;
 
 use log::info;
 
-#[cfg(test)]
-mod tests;
+pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
 
 #[macro_use]
 pub mod parser;
diff --git a/src/libsyntax/parse/parser/attr.rs b/src/libsyntax/parse/parser/attr.rs
index 31f0a02a483..0f9e573af82 100644
--- a/src/libsyntax/parse/parser/attr.rs
+++ b/src/libsyntax/parse/parser/attr.rs
@@ -268,7 +268,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited.
-    crate fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> {
+    pub fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> {
         self.expect(&token::OpenDelim(token::Paren))?;
         let cfg_predicate = self.parse_meta_item()?;
 
diff --git a/src/libsyntax/parse/parser/module.rs b/src/libsyntax/parse/parser/module.rs
index 3e5974c2eee..ad72b3a1dea 100644
--- a/src/libsyntax/parse/parser/module.rs
+++ b/src/libsyntax/parse/parser/module.rs
@@ -7,8 +7,8 @@ use crate::ast::{self, Ident, Attribute, ItemKind, Mod, Crate};
 use crate::parse::{new_sub_parser_from_file, DirectoryOwnership};
 use crate::token::{self, TokenKind};
 use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName};
-use crate::symbol::sym;
+use syntax_pos::symbol::sym;
 
 use errors::PResult;
 
 use std::path::{self, Path, PathBuf};
@@ -39,17 +39,12 @@ impl<'a> Parser<'a> {
 
     /// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
     pub(super) fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
-        let (in_cfg, outer_attrs) = {
-            // FIXME(Centril): This results in a cycle between config and parsing.
-            // Consider using dynamic dispatch via `self.sess` to disentangle the knot.
-            let mut strip_unconfigured = crate::config::StripUnconfigured {
-                sess: self.sess,
-                features: None, // Don't perform gated feature checking.
-            };
-            let mut outer_attrs = outer_attrs.to_owned();
-            strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
-            (!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
-        };
+        // HACK(Centril): See documentation on `ParseSess::process_cfg_mod`.
+        let (in_cfg, outer_attrs) = (self.sess.process_cfg_mod)(
+            self.sess,
+            self.cfg_mods,
+            outer_attrs,
+        );
         let id_span = self.token.span;
         let id = self.parse_ident()?;
diff --git a/src/libsyntax/parse/tests.rs b/src/libsyntax/parse/tests.rs
deleted file mode 100644
index 27ca2b6472f..00000000000
--- a/src/libsyntax/parse/tests.rs
+++ /dev/null
@@ -1,339 +0,0 @@
-use super::*;
-
-use crate::ast::{self, Name, PatKind};
-use crate::attr::first_attr_value_str_by_name;
-use crate::sess::ParseSess;
-use crate::parse::{PResult, new_parser_from_source_str};
-use crate::token::Token;
-use crate::print::pprust::item_to_string;
-use crate::ptr::P;
-use crate::source_map::FilePathMapping;
-use crate::symbol::{kw, sym};
-use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
-use crate::tokenstream::{DelimSpan, TokenTree, TokenStream};
-use crate::with_default_globals;
-use syntax_pos::{Span, BytePos, Pos};
-
-use std::path::PathBuf;
-
-/// Parses an item.
-///
-/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
-/// when a syntax error occurred.
-fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                              -> PResult<'_, Option<P<ast::Item>>> {
-    new_parser_from_source_str(sess, name, source).parse_item()
-}
-
-// Produces a `syntax_pos::span`.
-fn sp(a: u32, b: u32) -> Span {
-    Span::with_root_ctxt(BytePos(a), BytePos(b))
-}
-
-/// Parses a string, return an expression.
-fn string_to_expr(source_str : String) -> P<ast::Expr> {
-    let ps = ParseSess::new(FilePathMapping::empty());
-    with_error_checking_parse(source_str, &ps, |p| {
-        p.parse_expr()
-    })
-}
-
-/// Parses a string, returns an item.
-fn string_to_item(source_str : String) -> Option<P<ast::Item>> {
-    let ps = ParseSess::new(FilePathMapping::empty());
-    with_error_checking_parse(source_str, &ps, |p| {
-        p.parse_item()
-    })
-}
-
-#[should_panic]
-#[test] fn bad_path_expr_1() {
-    with_default_globals(|| {
-        string_to_expr("::abc::def::return".to_string());
-    })
-}
-
-// Checks the token-tree-ization of macros.
-#[test]
-fn string_to_tts_macro () {
-    with_default_globals(|| {
-        let tts: Vec<_> =
-            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
-        let tts: &[TokenTree] = &tts[..];
-
-        match tts {
-            [
-                TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }),
-                TokenTree::Token(Token { kind: token::Not, .. }),
-                TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }),
-                TokenTree::Delimited(_, macro_delim, macro_tts)
-            ]
-            if name_macro_rules == &sym::macro_rules && name_zip.as_str() == "zip" => {
-                let tts = &macro_tts.trees().collect::<Vec<_>>();
-                match &tts[..] {
-                    [
-                        TokenTree::Delimited(_, first_delim, first_tts),
-                        TokenTree::Token(Token { kind: token::FatArrow, .. }),
-                        TokenTree::Delimited(_, second_delim, second_tts),
-                    ]
-                    if macro_delim == &token::Paren => {
-                        let tts = &first_tts.trees().collect::<Vec<_>>();
-                        match &tts[..] {
-                            [
-                                TokenTree::Token(Token { kind: token::Dollar, .. }),
-                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-                            ]
-                            if first_delim == &token::Paren && name.as_str() == "a" => {},
-                            _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
-                        }
-                        let tts = &second_tts.trees().collect::<Vec<_>>();
-                        match &tts[..] {
-                            [
-                                TokenTree::Token(Token { kind: token::Dollar, .. }),
-                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-                            ]
-                            if second_delim == &token::Paren && name.as_str() == "a" => {},
-                            _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
-                        }
-                    },
-                    _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
-                }
-            },
-            _ => panic!("value: {:?}",tts),
-        }
-    })
-}
-
-#[test]
-fn string_to_tts_1() {
-    with_default_globals(|| {
-        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
-
-        let expected = TokenStream::new(vec![
-            TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
-            TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
-            TokenTree::Delimited(
-                DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
-                token::DelimToken::Paren,
-                TokenStream::new(vec![
-                    TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
-                    TokenTree::token(token::Colon, sp(8, 9)).into(),
-                    TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
-                ]).into(),
-            ).into(),
-            TokenTree::Delimited(
-                DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
-                token::DelimToken::Brace,
-                TokenStream::new(vec![
-                    TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
-                    TokenTree::token(token::Semi, sp(18, 19)).into(),
-                ]).into(),
-            ).into()
-        ]);
-
-        assert_eq!(tts, expected);
-    })
-}
-
-#[test] fn parse_use() {
-    with_default_globals(|| {
-        let use_s = "use foo::bar::baz;";
-        let vitem = string_to_item(use_s.to_string()).unwrap();
-        let vitem_s = item_to_string(&vitem);
-        assert_eq!(&vitem_s[..], use_s);
-
-        let use_s = "use foo::bar as baz;";
-        let vitem = string_to_item(use_s.to_string()).unwrap();
-        let vitem_s = item_to_string(&vitem);
-        assert_eq!(&vitem_s[..], use_s);
-    })
-}
-
-#[test] fn parse_extern_crate() {
-    with_default_globals(|| {
-        let ex_s = "extern crate foo;";
-        let vitem = string_to_item(ex_s.to_string()).unwrap();
-        let vitem_s = item_to_string(&vitem);
-        assert_eq!(&vitem_s[..], ex_s);
-
-        let ex_s = "extern crate foo as bar;";
-        let vitem = string_to_item(ex_s.to_string()).unwrap();
-        let vitem_s = item_to_string(&vitem);
-        assert_eq!(&vitem_s[..], ex_s);
-    })
-}
-
-fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
-    let item = string_to_item(src.to_string()).unwrap();
-
-    struct PatIdentVisitor {
-        spans: Vec<Span>
-    }
-    impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor {
-        fn visit_pat(&mut self, p: &'a ast::Pat) {
-            match p.kind {
-                PatKind::Ident(_ , ref ident, _) => {
-                    self.spans.push(ident.span.clone());
-                }
-                _ => {
-                    crate::visit::walk_pat(self, p);
-                }
-            }
-        }
-    }
-    let mut v = PatIdentVisitor { spans: Vec::new() };
-    crate::visit::walk_item(&mut v, &item);
-    return v.spans;
-}
-
-#[test] fn span_of_self_arg_pat_idents_are_correct() {
-    with_default_globals(|| {
-
-        let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
-                    "impl z { fn a (&mut self, &myarg: i32) {} }",
-                    "impl z { fn a (&'a self, &myarg: i32) {} }",
-                    "impl z { fn a (self, &myarg: i32) {} }",
-                    "impl z { fn a (self: Foo, &myarg: i32) {} }",
-                    ];
-
-        for &src in &srcs {
-            let spans = get_spans_of_pat_idents(src);
-            let (lo, hi) = (spans[0].lo(), spans[0].hi());
-            assert!("self" == &src[lo.to_usize()..hi.to_usize()],
-                    "\"{}\" != \"self\". src=\"{}\"",
-                    &src[lo.to_usize()..hi.to_usize()], src)
-        }
-    })
-}
-
-#[test] fn parse_exprs () {
-    with_default_globals(|| {
-        // just make sure that they parse....
-        string_to_expr("3 + 4".to_string());
-        string_to_expr("a::z.froob(b,&(987+3))".to_string());
-    })
-}
-
-#[test] fn attrs_fix_bug () {
-    with_default_globals(|| {
-        string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-                   -> Result<Box<Writer>, String> {
-#[cfg(windows)]
-fn wb() -> c_int {
-    (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
-}
-
-#[cfg(unix)]
-fn wb() -> c_int { O_WRONLY as c_int }
-
-let mut fflags: c_int = wb();
-}".to_string());
-    })
-}
-
-#[test] fn crlf_doc_comments() {
-    with_default_globals(|| {
-        let sess = ParseSess::new(FilePathMapping::empty());
-
-        let name_1 = FileName::Custom("crlf_source_1".to_string());
-        let source = "/// doc comment\r\nfn foo() {}".to_string();
-        let item = parse_item_from_source_str(name_1, source, &sess)
-            .unwrap().unwrap();
-        let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
-        assert_eq!(doc.as_str(), "/// doc comment");
-
-        let name_2 = FileName::Custom("crlf_source_2".to_string());
-        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
-        let item = parse_item_from_source_str(name_2, source, &sess)
-            .unwrap().unwrap();
-        let docs = item.attrs.iter().filter(|a| a.has_name(sym::doc))
-            .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
-        let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
-        assert_eq!(&docs[..], b);
-
-        let name_3 = FileName::Custom("clrf_source_3".to_string());
-        let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
-        let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
-        let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
-        assert_eq!(doc.as_str(), "/** doc comment\n * with CRLF */");
-    });
-}
-
-#[test]
-fn ttdelim_span() {
-    fn parse_expr_from_source_str(
-        name: FileName, source: String, sess: &ParseSess
-    ) -> PResult<'_, P<ast::Expr>> {
-        new_parser_from_source_str(sess, name, source).parse_expr()
-    }
-
-    with_default_globals(|| {
-        let sess = ParseSess::new(FilePathMapping::empty());
-        let expr = parse_expr_from_source_str(PathBuf::from("foo").into(),
-            "foo!( fn main() { body } )".to_string(), &sess).unwrap();
-
-        let tts: Vec<_> = match expr.kind {
-            ast::ExprKind::Mac(ref mac) => mac.stream().trees().collect(),
-            _ => panic!("not a macro"),
-        };
-
-        let span = tts.iter().rev().next().unwrap().span();
-
-        match sess.source_map().span_to_snippet(span) {
-            Ok(s) => assert_eq!(&s[..], "{ body }"),
-            Err(_) => panic!("could not get snippet"),
-        }
-    });
-}
-
-// This tests that when parsing a string (rather than a file) we don't try
-// and read in a file for a module declaration and just parse a stub.
-// See `recurse_into_file_modules` in the parser.
-#[test]
-fn out_of_line_mod() {
-    with_default_globals(|| {
-        let sess = ParseSess::new(FilePathMapping::empty());
-        let item = parse_item_from_source_str(
-            PathBuf::from("foo").into(),
-            "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
-            &sess,
-        ).unwrap().unwrap();
-
-        if let ast::ItemKind::Mod(ref m) = item.kind {
-            assert!(m.items.len() == 2);
-        } else {
-            panic!();
-        }
-    });
-}
-
-#[test]
-fn eqmodws() {
-    assert_eq!(matches_codepattern("",""),true);
-    assert_eq!(matches_codepattern("","a"),false);
-    assert_eq!(matches_codepattern("a",""),false);
-    assert_eq!(matches_codepattern("a","a"),true);
-    assert_eq!(matches_codepattern("a b","a   \n\t\r  b"),true);
-    assert_eq!(matches_codepattern("a b ","a   \n\t\r  b"),true);
-    assert_eq!(matches_codepattern("a b","a   \n\t\r  b "),false);
-    assert_eq!(matches_codepattern("a   b","a b"),true);
-    assert_eq!(matches_codepattern("ab","a b"),false);
-    assert_eq!(matches_codepattern("a   b","ab"),true);
-    assert_eq!(matches_codepattern(" a   b","ab"),true);
-}
-
-#[test]
-fn pattern_whitespace() {
-    assert_eq!(matches_codepattern("","\x0C"), false);
-    assert_eq!(matches_codepattern("a b ","a   \u{0085}\n\t\r  b"),true);
-    assert_eq!(matches_codepattern("a b","a   \u{0085}\n\t\r  b "),false);
-}
-
-#[test]
-fn non_pattern_whitespace() {
-    // These have the property 'White_Space' but not 'Pattern_White_Space'
-    assert_eq!(matches_codepattern("a b","a\u{2002}b"), false);
-    assert_eq!(matches_codepattern("a   b","a\u{2002}b"), false);
-    assert_eq!(matches_codepattern("\u{205F}a   b","ab"), false);
-    assert_eq!(matches_codepattern("a  \u{3000}b","ab"), false);
-}
```
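The change worth dwelling on is in `parse_item_mod`: instead of constructing `config::StripUnconfigured` directly (which tied parsing to the `config` module this commit moves out of the crate), the parser now calls a function stored on the session, presumably installed by `libsyntax_expand`, which now owns `config.rs`. Below is a minimal, self-contained sketch of that inversion-of-control pattern; `Session`, `Attribute`, and `default_process_cfg_mod` are hypothetical stand-ins, not the actual rustc definitions — only the call shape mirrors the diff.

```rust
// Sketch: breaking a module cycle by routing a call through a function
// pointer stored on the session. Hypothetical types throughout.

type Attribute = String;

// The downstream crate installs the real implementation; the parser only
// knows the signature, not the config machinery behind it.
type ProcessCfgMod = fn(&Session, bool, &[Attribute]) -> (bool, Vec<Attribute>);

struct Session {
    process_cfg_mod: ProcessCfgMod,
}

// Default used when no expansion machinery is wired up: keep the module
// and its attributes unchanged.
fn default_process_cfg_mod(
    _sess: &Session,
    _cfg_mods: bool,
    attrs: &[Attribute],
) -> (bool, Vec<Attribute>) {
    (true, attrs.to_vec())
}

struct Parser<'a> {
    sess: &'a Session,
    cfg_mods: bool,
}

impl<'a> Parser<'a> {
    fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> bool {
        // Same call shape as the diff: dispatch through the session, so the
        // parser has no compile-time dependency on the config module.
        let (in_cfg, _outer_attrs) =
            (self.sess.process_cfg_mod)(self.sess, self.cfg_mods, outer_attrs);
        // ...the real parser goes on to parse `mod m { ... }` or `mod m;`...
        in_cfg
    }
}

fn main() {
    let sess = Session { process_cfg_mod: default_process_cfg_mod };
    let mut p = Parser { sess: &sess, cfg_mods: true };
    assert!(p.parse_item_mod(&["cfg(test)".to_string()]));
}
```

The dependency direction is the point of the indirection: the parser crate exposes the hook, the crate that owns the config logic fills it in, and the old FIXME about the cycle between config and parsing can be retired.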