author     Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2019-08-02 00:26:40 +0300
committer  Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2019-08-02 01:59:01 +0300
commit     310b9fc76002066feb89dcfbf8e88b34fe5f4ad3 (patch)
tree       e501e5e1aea23c56ae22a86b18215da00e07b3c4 /src/libsyntax/parse
parent     a332e224a3bc2925fea584337d2d30e1186672be (diff)
libsyntax: Unconfigure tests during normal build
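
This commit applies one pattern across the directory: each inline #[cfg(test)] mod tests { ... } block is replaced by an out-of-line #[cfg(test)] mod tests; declaration, with the body moved to a sibling tests.rs file. In the inline form the test code sits in the main source file and is processed on every build, only to be discarded by cfg-stripping; in the out-of-line form a normal (non-test) build no longer carries the test sources in the file at all. The net growth in the diffstat below (651 added vs. 602 removed) comes from the new module declarations, the imports each extracted file now needs, and some helpers and tests relocated from outside this directory (the diffstat is limited to src/libsyntax/parse). A minimal two-file sketch of the pattern, with generic names rather than the libsyntax code:

    // src/lib.rs
    //
    // Inline form (before), shown commented out here for contrast:
    //
    //     #[cfg(test)]
    //     mod tests {
    //         #[test]
    //         fn smoke() { assert_eq!(2 + 2, 4); }
    //     }
    //
    // Out-of-line form (after): when the `test` cfg is off, the body in
    // tests.rs is simply not part of the crate.
    #[cfg(test)]
    mod tests;

    // src/tests.rs
    #[test]
    fn smoke() {
        assert_eq!(2 + 2, 4);
    }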
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs         54
-rw-r--r--  src/libsyntax/parse/lexer/comments/tests.rs   47
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs             262
-rw-r--r--  src/libsyntax/parse/lexer/tests.rs           255
-rw-r--r--  src/libsyntax/parse/mod.rs                   296
-rw-r--r--  src/libsyntax/parse/tests.rs                 339
6 files changed, 651 insertions, 602 deletions
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index d8f22072d7d..5121a9ef7b5 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -9,6 +9,9 @@ use syntax_pos::{BytePos, CharPos, Pos, FileName};
 
 use std::usize;
 
+#[cfg(test)]
+mod tests;
+
 #[derive(Clone, Copy, PartialEq, Debug)]
 pub enum CommentStyle {
     /// No code on either side of each line of the comment
@@ -249,54 +252,3 @@ pub fn gather_comments(sess: &ParseSess, path: FileName, src: String) -> Vec<Com
 
     comments
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_block_doc_comment_1() {
-        let comment = "/**\n * Test \n **  Test\n *   Test\n*/";
-        let stripped = strip_doc_comment_decoration(comment);
-        assert_eq!(stripped, " Test \n*  Test\n   Test");
-    }
-
-    #[test]
-    fn test_block_doc_comment_2() {
-        let comment = "/**\n * Test\n *  Test\n*/";
-        let stripped = strip_doc_comment_decoration(comment);
-        assert_eq!(stripped, " Test\n  Test");
-    }
-
-    #[test]
-    fn test_block_doc_comment_3() {
-        let comment = "/**\n let a: *i32;\n *a = 5;\n*/";
-        let stripped = strip_doc_comment_decoration(comment);
-        assert_eq!(stripped, " let a: *i32;\n *a = 5;");
-    }
-
-    #[test]
-    fn test_block_doc_comment_4() {
-        let comment = "/*******************\n test\n *********************/";
-        let stripped = strip_doc_comment_decoration(comment);
-        assert_eq!(stripped, " test");
-    }
-
-    #[test]
-    fn test_line_doc_comment() {
-        let stripped = strip_doc_comment_decoration("/// test");
-        assert_eq!(stripped, " test");
-        let stripped = strip_doc_comment_decoration("///! test");
-        assert_eq!(stripped, " test");
-        let stripped = strip_doc_comment_decoration("// test");
-        assert_eq!(stripped, " test");
-        let stripped = strip_doc_comment_decoration("// test");
-        assert_eq!(stripped, " test");
-        let stripped = strip_doc_comment_decoration("///test");
-        assert_eq!(stripped, "test");
-        let stripped = strip_doc_comment_decoration("///!test");
-        assert_eq!(stripped, "test");
-        let stripped = strip_doc_comment_decoration("//test");
-        assert_eq!(stripped, "test");
-    }
-}
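
The moved tests (next file) pin down the decoration-stripping behaviour: only the comment marker is removed, and everything after it, spaces included, is kept. A simplified stand-in for the line-comment cases only, not the libsyntax implementation (which also handles /** ... */ block comments):

    // Strips the comment marker but keeps any following text verbatim,
    // matching the expectations in test_line_doc_comment.
    fn strip_line_doc(comment: &str) -> &str {
        // Longer prefixes first, so "///!" is not caught by "///".
        for prefix in ["///!", "//!", "///", "//"] {
            if let Some(rest) = comment.strip_prefix(prefix) {
                return rest;
            }
        }
        comment
    }

    fn main() {
        assert_eq!(strip_line_doc("/// test"), " test");
        assert_eq!(strip_line_doc("///!test"), "test");
        assert_eq!(strip_line_doc("//test"), "test");
    }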
diff --git a/src/libsyntax/parse/lexer/comments/tests.rs b/src/libsyntax/parse/lexer/comments/tests.rs
new file mode 100644
index 00000000000..f9cd69fb50d
--- /dev/null
+++ b/src/libsyntax/parse/lexer/comments/tests.rs
@@ -0,0 +1,47 @@
+use super::*;
+
+#[test]
+fn test_block_doc_comment_1() {
+    let comment = "/**\n * Test \n **  Test\n *   Test\n*/";
+    let stripped = strip_doc_comment_decoration(comment);
+    assert_eq!(stripped, " Test \n*  Test\n   Test");
+}
+
+#[test]
+fn test_block_doc_comment_2() {
+    let comment = "/**\n * Test\n *  Test\n*/";
+    let stripped = strip_doc_comment_decoration(comment);
+    assert_eq!(stripped, " Test\n  Test");
+}
+
+#[test]
+fn test_block_doc_comment_3() {
+    let comment = "/**\n let a: *i32;\n *a = 5;\n*/";
+    let stripped = strip_doc_comment_decoration(comment);
+    assert_eq!(stripped, " let a: *i32;\n *a = 5;");
+}
+
+#[test]
+fn test_block_doc_comment_4() {
+    let comment = "/*******************\n test\n *********************/";
+    let stripped = strip_doc_comment_decoration(comment);
+    assert_eq!(stripped, " test");
+}
+
+#[test]
+fn test_line_doc_comment() {
+    let stripped = strip_doc_comment_decoration("/// test");
+    assert_eq!(stripped, " test");
+    let stripped = strip_doc_comment_decoration("///! test");
+    assert_eq!(stripped, " test");
+    let stripped = strip_doc_comment_decoration("// test");
+    assert_eq!(stripped, " test");
+    let stripped = strip_doc_comment_decoration("// test");
+    assert_eq!(stripped, " test");
+    let stripped = strip_doc_comment_decoration("///test");
+    assert_eq!(stripped, "test");
+    let stripped = strip_doc_comment_decoration("///!test");
+    assert_eq!(stripped, "test");
+    let stripped = strip_doc_comment_decoration("//test");
+    assert_eq!(stripped, "test");
+}
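
Note the use super::*; at the top of the extracted file: a child module, whether inline or out of line, may refer to its parent's private items, so the relocated tests keep access to crate-private functions such as strip_doc_comment_decoration. A hypothetical minimal pair demonstrating that rule (build with cargo test):

    // src/lib.rs (hypothetical)
    #[allow(dead_code)] // only called from the test below
    fn private_helper() -> u32 {
        41 // deliberately not pub
    }

    #[cfg(test)]
    mod tests {
        use super::*; // brings private_helper into scope

        #[test]
        fn child_sees_parent_private_items() {
            assert_eq!(private_helper() + 1, 42);
        }
    }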
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 263eb1ac7a4..950b1b2ff53 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -15,6 +15,9 @@ use std::convert::TryInto;
 use rustc_data_structures::sync::Lrc;
 use log::debug;
 
+#[cfg(test)]
+mod tests;
+
 pub mod comments;
 mod tokentrees;
 mod unicode_chars;
@@ -777,262 +780,3 @@ fn is_block_doc_comment(s: &str) -> bool {
     debug!("is {:?} a doc comment? {}", s, res);
     res
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    use crate::ast::CrateConfig;
-    use crate::symbol::Symbol;
-    use crate::source_map::{SourceMap, FilePathMapping};
-    use crate::feature_gate::UnstableFeatures;
-    use crate::parse::token;
-    use crate::diagnostics::plugin::ErrorMap;
-    use crate::with_default_globals;
-    use std::io;
-    use std::path::PathBuf;
-    use syntax_pos::{BytePos, Span, NO_EXPANSION, edition::Edition};
-    use rustc_data_structures::fx::{FxHashSet, FxHashMap};
-    use rustc_data_structures::sync::{Lock, Once};
-
-    fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
-        let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
-                                                          Some(sm.clone()),
-                                                          false,
-                                                          false,
-                                                          false);
-        ParseSess {
-            span_diagnostic: errors::Handler::with_emitter(true, None, Box::new(emitter)),
-            unstable_features: UnstableFeatures::from_environment(),
-            config: CrateConfig::default(),
-            included_mod_stack: Lock::new(Vec::new()),
-            source_map: sm,
-            missing_fragment_specifiers: Lock::new(FxHashSet::default()),
-            raw_identifier_spans: Lock::new(Vec::new()),
-            registered_diagnostics: Lock::new(ErrorMap::new()),
-            buffered_lints: Lock::new(vec![]),
-            edition: Edition::from_session(),
-            ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
-            param_attr_spans: Lock::new(Vec::new()),
-            let_chains_spans: Lock::new(Vec::new()),
-            async_closure_spans: Lock::new(Vec::new()),
-            injected_crate_name: Once::new(),
-        }
-    }
-
-    // open a string reader for the given string
-    fn setup<'a>(sm: &SourceMap,
-                 sess: &'a ParseSess,
-                 teststr: String)
-                 -> StringReader<'a> {
-        let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
-        StringReader::new(sess, sf, None)
-    }
-
-    #[test]
-    fn t1() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            let mut string_reader = setup(&sm,
-                                        &sh,
-                                        "/* my source file */ fn main() { println!(\"zebra\"); }\n"
-                                            .to_string());
-            assert_eq!(string_reader.next_token(), token::Comment);
-            assert_eq!(string_reader.next_token(), token::Whitespace);
-            let tok1 = string_reader.next_token();
-            let tok2 = Token::new(
-                mk_ident("fn"),
-                Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
-            );
-            assert_eq!(tok1.kind, tok2.kind);
-            assert_eq!(tok1.span, tok2.span);
-            assert_eq!(string_reader.next_token(), token::Whitespace);
-            // read another token:
-            let tok3 = string_reader.next_token();
-            assert_eq!(string_reader.pos.clone(), BytePos(28));
-            let tok4 = Token::new(
-                mk_ident("main"),
-                Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
-            );
-            assert_eq!(tok3.kind, tok4.kind);
-            assert_eq!(tok3.span, tok4.span);
-
-            assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
-            assert_eq!(string_reader.pos.clone(), BytePos(29))
-        })
-    }
-
-    // check that the given reader produces the desired stream
-    // of tokens (stop checking after exhausting the expected vec)
-    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
-        for expected_tok in &expected {
-            assert_eq!(&string_reader.next_token(), expected_tok);
-        }
-    }
-
-    // make the identifier by looking up the string in the interner
-    fn mk_ident(id: &str) -> TokenKind {
-        token::Ident(Symbol::intern(id), false)
-    }
-
-    fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
-        TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
-    }
-
-    #[test]
-    fn doublecolonparsing() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            check_tokenization(setup(&sm, &sh, "a b".to_string()),
-                            vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
-        })
-    }
-
-    #[test]
-    fn dcparsing_2() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            check_tokenization(setup(&sm, &sh, "a::b".to_string()),
-                            vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
-        })
-    }
-
-    #[test]
-    fn dcparsing_3() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            check_tokenization(setup(&sm, &sh, "a ::b".to_string()),
-                            vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
-        })
-    }
-
-    #[test]
-    fn dcparsing_4() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            check_tokenization(setup(&sm, &sh, "a:: b".to_string()),
-                            vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
-        })
-    }
-
-    #[test]
-    fn character_a() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
-                       mk_lit(token::Char, "a", None));
-        })
-    }
-
-    #[test]
-    fn character_space() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
-                       mk_lit(token::Char, " ", None));
-        })
-    }
-
-    #[test]
-    fn character_escaped() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
-                       mk_lit(token::Char, "\\n", None));
-        })
-    }
-
-    #[test]
-    fn lifetime_name() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
-                       token::Lifetime(Symbol::intern("'abc")));
-        })
-    }
-
-    #[test]
-    fn raw_string() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
-                       mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
-        })
-    }
-
-    #[test]
-    fn literal_suffixes() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            macro_rules! test {
-                ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
-                               mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
-                    // with a whitespace separator:
-                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
-                               mk_lit(token::$tok_type, $tok_contents, None));
-                }}
-            }
-
-            test!("'a'", Char, "a");
-            test!("b'a'", Byte, "a");
-            test!("\"a\"", Str, "a");
-            test!("b\"a\"", ByteStr, "a");
-            test!("1234", Integer, "1234");
-            test!("0b101", Integer, "0b101");
-            test!("0xABC", Integer, "0xABC");
-            test!("1.0", Float, "1.0");
-            test!("1.0e10", Float, "1.0e10");
-
-            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
-                       mk_lit(token::Integer, "2", Some("us")));
-            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
-                       mk_lit(token::StrRaw(3), "raw", Some("suffix")));
-            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
-                       mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
-        })
-    }
-
-    #[test]
-    fn line_doc_comments() {
-        assert!(is_doc_comment("///"));
-        assert!(is_doc_comment("/// blah"));
-        assert!(!is_doc_comment("////"));
-    }
-
-    #[test]
-    fn nested_block_comments() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
-            assert_eq!(lexer.next_token(), token::Comment);
-            assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
-        })
-    }
-
-    #[test]
-    fn crlf_comments() {
-        with_default_globals(|| {
-            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(sm.clone());
-            let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
-            let comment = lexer.next_token();
-            assert_eq!(comment.kind, token::Comment);
-            assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
-            assert_eq!(lexer.next_token(), token::Whitespace);
-            assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
-        })
-    }
-}
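
All of the lexer tests above run their bodies inside with_default_globals(|| ...), which installs the thread-local globals (notably the symbol interner that Symbol::intern needs) for the duration of the closure. A generic sketch of that scoped thread-local pattern, with illustrative names rather than the libsyntax API:

    use std::cell::RefCell;

    thread_local! {
        // Stand-in for the interner state; libsyntax's globals are richer.
        static GLOBALS: RefCell<Option<String>> = RefCell::new(None);
    }

    fn with_globals<R>(f: impl FnOnce() -> R) -> R {
        GLOBALS.with(|g| *g.borrow_mut() = Some("default".to_owned()));
        let result = f();
        GLOBALS.with(|g| *g.borrow_mut() = None); // tear down on exit
        result
    }

    fn main() {
        with_globals(|| {
            GLOBALS.with(|g| assert!(g.borrow().is_some()));
        });
    }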
diff --git a/src/libsyntax/parse/lexer/tests.rs b/src/libsyntax/parse/lexer/tests.rs
new file mode 100644
index 00000000000..fc47e4f0b18
--- /dev/null
+++ b/src/libsyntax/parse/lexer/tests.rs
@@ -0,0 +1,255 @@
+use super::*;
+
+use crate::ast::CrateConfig;
+use crate::symbol::Symbol;
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::feature_gate::UnstableFeatures;
+use crate::parse::token;
+use crate::diagnostics::plugin::ErrorMap;
+use crate::with_default_globals;
+use std::io;
+use std::path::PathBuf;
+use syntax_pos::{BytePos, Span, NO_EXPANSION, edition::Edition};
+use rustc_data_structures::fx::{FxHashSet, FxHashMap};
+use rustc_data_structures::sync::{Lock, Once};
+
+fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
+    let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
+                                                        Some(sm.clone()),
+                                                        false,
+                                                        false,
+                                                        false);
+    ParseSess {
+        span_diagnostic: errors::Handler::with_emitter(true, None, Box::new(emitter)),
+        unstable_features: UnstableFeatures::from_environment(),
+        config: CrateConfig::default(),
+        included_mod_stack: Lock::new(Vec::new()),
+        source_map: sm,
+        missing_fragment_specifiers: Lock::new(FxHashSet::default()),
+        raw_identifier_spans: Lock::new(Vec::new()),
+        registered_diagnostics: Lock::new(ErrorMap::new()),
+        buffered_lints: Lock::new(vec![]),
+        edition: Edition::from_session(),
+        ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
+        param_attr_spans: Lock::new(Vec::new()),
+        let_chains_spans: Lock::new(Vec::new()),
+        async_closure_spans: Lock::new(Vec::new()),
+        injected_crate_name: Once::new(),
+    }
+}
+
+// open a string reader for the given string
+fn setup<'a>(sm: &SourceMap,
+                sess: &'a ParseSess,
+                teststr: String)
+                -> StringReader<'a> {
+    let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
+    StringReader::new(sess, sf, None)
+}
+
+#[test]
+fn t1() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        let mut string_reader = setup(&sm,
+                                    &sh,
+                                    "/* my source file */ fn main() { println!(\"zebra\"); }\n"
+                                        .to_string());
+        assert_eq!(string_reader.next_token(), token::Comment);
+        assert_eq!(string_reader.next_token(), token::Whitespace);
+        let tok1 = string_reader.next_token();
+        let tok2 = Token::new(
+            mk_ident("fn"),
+            Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
+        );
+        assert_eq!(tok1.kind, tok2.kind);
+        assert_eq!(tok1.span, tok2.span);
+        assert_eq!(string_reader.next_token(), token::Whitespace);
+        // read another token:
+        let tok3 = string_reader.next_token();
+        assert_eq!(string_reader.pos.clone(), BytePos(28));
+        let tok4 = Token::new(
+            mk_ident("main"),
+            Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
+        );
+        assert_eq!(tok3.kind, tok4.kind);
+        assert_eq!(tok3.span, tok4.span);
+
+        assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
+        assert_eq!(string_reader.pos.clone(), BytePos(29))
+    })
+}
+
+// check that the given reader produces the desired stream
+// of tokens (stop checking after exhausting the expected vec)
+fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
+    for expected_tok in &expected {
+        assert_eq!(&string_reader.next_token(), expected_tok);
+    }
+}
+
+// make the identifier by looking up the string in the interner
+fn mk_ident(id: &str) -> TokenKind {
+    token::Ident(Symbol::intern(id), false)
+}
+
+fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
+    TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
+}
+
+#[test]
+fn doublecolonparsing() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        check_tokenization(setup(&sm, &sh, "a b".to_string()),
+                        vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
+    })
+}
+
+#[test]
+fn dcparsing_2() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        check_tokenization(setup(&sm, &sh, "a::b".to_string()),
+                        vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
+    })
+}
+
+#[test]
+fn dcparsing_3() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        check_tokenization(setup(&sm, &sh, "a ::b".to_string()),
+                        vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
+    })
+}
+
+#[test]
+fn dcparsing_4() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        check_tokenization(setup(&sm, &sh, "a:: b".to_string()),
+                        vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
+    })
+}
+
+#[test]
+fn character_a() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
+                    mk_lit(token::Char, "a", None));
+    })
+}
+
+#[test]
+fn character_space() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
+                    mk_lit(token::Char, " ", None));
+    })
+}
+
+#[test]
+fn character_escaped() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
+                    mk_lit(token::Char, "\\n", None));
+    })
+}
+
+#[test]
+fn lifetime_name() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
+                    token::Lifetime(Symbol::intern("'abc")));
+    })
+}
+
+#[test]
+fn raw_string() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
+                    mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
+    })
+}
+
+#[test]
+fn literal_suffixes() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        macro_rules! test {
+            ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
+                assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
+                            mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
+                // with a whitespace separator:
+                assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
+                            mk_lit(token::$tok_type, $tok_contents, None));
+            }}
+        }
+
+        test!("'a'", Char, "a");
+        test!("b'a'", Byte, "a");
+        test!("\"a\"", Str, "a");
+        test!("b\"a\"", ByteStr, "a");
+        test!("1234", Integer, "1234");
+        test!("0b101", Integer, "0b101");
+        test!("0xABC", Integer, "0xABC");
+        test!("1.0", Float, "1.0");
+        test!("1.0e10", Float, "1.0e10");
+
+        assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
+                    mk_lit(token::Integer, "2", Some("us")));
+        assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
+                    mk_lit(token::StrRaw(3), "raw", Some("suffix")));
+        assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
+                    mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
+    })
+}
+
+#[test]
+fn line_doc_comments() {
+    assert!(is_doc_comment("///"));
+    assert!(is_doc_comment("/// blah"));
+    assert!(!is_doc_comment("////"));
+}
+
+#[test]
+fn nested_block_comments() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
+        assert_eq!(lexer.next_token(), token::Comment);
+        assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
+    })
+}
+
+#[test]
+fn crlf_comments() {
+    with_default_globals(|| {
+        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sh = mk_sess(sm.clone());
+        let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
+        let comment = lexer.next_token();
+        assert_eq!(comment.kind, token::Comment);
+        assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
+        assert_eq!(lexer.next_token(), token::Whitespace);
+        assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
+    })
+}
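
One detail of mk_sess worth noting: the EmitterWriter writes to io::sink(), so any diagnostics produced while lexing the test inputs are formatted and then discarded instead of cluttering test output. The standard-library half of that trick in isolation:

    use std::io::{self, Write};

    fn main() -> io::Result<()> {
        // io::sink() returns a writer that ignores all bytes written to it.
        let mut out: Box<dyn Write> = Box::new(io::sink());
        writeln!(out, "this error text goes nowhere")?;
        Ok(())
    }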
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 1aac8bbb7aa..b7deee688ca 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -22,7 +22,8 @@ use std::borrow::Cow;
 use std::path::{Path, PathBuf};
 use std::str;
 
-pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
+#[cfg(test)]
+mod tests;
 
 #[macro_use]
 pub mod parser;
@@ -35,6 +36,8 @@ crate mod diagnostics;
 crate mod literal;
 crate mod unescape_error_reporting;
 
+pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
+
 /// Info about a parsing session.
 pub struct ParseSess {
     pub span_diagnostic: Handler,
@@ -389,294 +392,3 @@ impl SeqSep {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::ast::{self, Name, PatKind};
-    use crate::attr::first_attr_value_str_by_name;
-    use crate::ptr::P;
-    use crate::parse::token::Token;
-    use crate::print::pprust::item_to_string;
-    use crate::symbol::{kw, sym};
-    use crate::tokenstream::{DelimSpan, TokenTree};
-    use crate::util::parser_testing::string_to_stream;
-    use crate::util::parser_testing::{string_to_expr, string_to_item};
-    use crate::with_default_globals;
-    use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
-
-    /// Parses an item.
-    ///
-    /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
-    /// when a syntax error occurred.
-    fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                        -> PResult<'_, Option<P<ast::Item>>> {
-        new_parser_from_source_str(sess, name, source).parse_item()
-    }
-
-    // produce a syntax_pos::span
-    fn sp(a: u32, b: u32) -> Span {
-        Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
-    }
-
-    #[should_panic]
-    #[test] fn bad_path_expr_1() {
-        with_default_globals(|| {
-            string_to_expr("::abc::def::return".to_string());
-        })
-    }
-
-    // check the token-tree-ization of macros
-    #[test]
-    fn string_to_tts_macro () {
-        with_default_globals(|| {
-            let tts: Vec<_> =
-                string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
-            let tts: &[TokenTree] = &tts[..];
-
-            match tts {
-                [
-                    TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }),
-                    TokenTree::Token(Token { kind: token::Not, .. }),
-                    TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }),
-                    TokenTree::Delimited(_, macro_delim,  macro_tts)
-                ]
-                if name_macro_rules == &sym::macro_rules && name_zip.as_str() == "zip" => {
-                    let tts = &macro_tts.trees().collect::<Vec<_>>();
-                    match &tts[..] {
-                        [
-                            TokenTree::Delimited(_, first_delim, first_tts),
-                            TokenTree::Token(Token { kind: token::FatArrow, .. }),
-                            TokenTree::Delimited(_, second_delim, second_tts),
-                        ]
-                        if macro_delim == &token::Paren => {
-                            let tts = &first_tts.trees().collect::<Vec<_>>();
-                            match &tts[..] {
-                                [
-                                    TokenTree::Token(Token { kind: token::Dollar, .. }),
-                                    TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-                                ]
-                                if first_delim == &token::Paren && name.as_str() == "a" => {},
-                                _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
-                            }
-                            let tts = &second_tts.trees().collect::<Vec<_>>();
-                            match &tts[..] {
-                                [
-                                    TokenTree::Token(Token { kind: token::Dollar, .. }),
-                                    TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-                                ]
-                                if second_delim == &token::Paren && name.as_str() == "a" => {},
-                                _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
-                            }
-                        },
-                        _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
-                    }
-                },
-                _ => panic!("value: {:?}",tts),
-            }
-        })
-    }
-
-    #[test]
-    fn string_to_tts_1() {
-        with_default_globals(|| {
-            let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
-
-            let expected = TokenStream::new(vec![
-                TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
-                TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
-                TokenTree::Delimited(
-                    DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
-                    token::DelimToken::Paren,
-                    TokenStream::new(vec![
-                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
-                        TokenTree::token(token::Colon, sp(8, 9)).into(),
-                        TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
-                    ]).into(),
-                ).into(),
-                TokenTree::Delimited(
-                    DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
-                    token::DelimToken::Brace,
-                    TokenStream::new(vec![
-                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
-                        TokenTree::token(token::Semi, sp(18, 19)).into(),
-                    ]).into(),
-                ).into()
-            ]);
-
-            assert_eq!(tts, expected);
-        })
-    }
-
-    #[test] fn parse_use() {
-        with_default_globals(|| {
-            let use_s = "use foo::bar::baz;";
-            let vitem = string_to_item(use_s.to_string()).unwrap();
-            let vitem_s = item_to_string(&vitem);
-            assert_eq!(&vitem_s[..], use_s);
-
-            let use_s = "use foo::bar as baz;";
-            let vitem = string_to_item(use_s.to_string()).unwrap();
-            let vitem_s = item_to_string(&vitem);
-            assert_eq!(&vitem_s[..], use_s);
-        })
-    }
-
-    #[test] fn parse_extern_crate() {
-        with_default_globals(|| {
-            let ex_s = "extern crate foo;";
-            let vitem = string_to_item(ex_s.to_string()).unwrap();
-            let vitem_s = item_to_string(&vitem);
-            assert_eq!(&vitem_s[..], ex_s);
-
-            let ex_s = "extern crate foo as bar;";
-            let vitem = string_to_item(ex_s.to_string()).unwrap();
-            let vitem_s = item_to_string(&vitem);
-            assert_eq!(&vitem_s[..], ex_s);
-        })
-    }
-
-    fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
-        let item = string_to_item(src.to_string()).unwrap();
-
-        struct PatIdentVisitor {
-            spans: Vec<Span>
-        }
-        impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor {
-            fn visit_pat(&mut self, p: &'a ast::Pat) {
-                match p.node {
-                    PatKind::Ident(_ , ref spannedident, _) => {
-                        self.spans.push(spannedident.span.clone());
-                    }
-                    _ => {
-                        crate::visit::walk_pat(self, p);
-                    }
-                }
-            }
-        }
-        let mut v = PatIdentVisitor { spans: Vec::new() };
-        crate::visit::walk_item(&mut v, &item);
-        return v.spans;
-    }
-
-    #[test] fn span_of_self_arg_pat_idents_are_correct() {
-        with_default_globals(|| {
-
-            let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
-                        "impl z { fn a (&mut self, &myarg: i32) {} }",
-                        "impl z { fn a (&'a self, &myarg: i32) {} }",
-                        "impl z { fn a (self, &myarg: i32) {} }",
-                        "impl z { fn a (self: Foo, &myarg: i32) {} }",
-                        ];
-
-            for &src in &srcs {
-                let spans = get_spans_of_pat_idents(src);
-                let (lo, hi) = (spans[0].lo(), spans[0].hi());
-                assert!("self" == &src[lo.to_usize()..hi.to_usize()],
-                        "\"{}\" != \"self\". src=\"{}\"",
-                        &src[lo.to_usize()..hi.to_usize()], src)
-            }
-        })
-    }
-
-    #[test] fn parse_exprs () {
-        with_default_globals(|| {
-            // just make sure that they parse....
-            string_to_expr("3 + 4".to_string());
-            string_to_expr("a::z.froob(b,&(987+3))".to_string());
-        })
-    }
-
-    #[test] fn attrs_fix_bug () {
-        with_default_globals(|| {
-            string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-                   -> Result<Box<Writer>, String> {
-    #[cfg(windows)]
-    fn wb() -> c_int {
-      (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
-    }
-
-    #[cfg(unix)]
-    fn wb() -> c_int { O_WRONLY as c_int }
-
-    let mut fflags: c_int = wb();
-}".to_string());
-        })
-    }
-
-    #[test] fn crlf_doc_comments() {
-        with_default_globals(|| {
-            let sess = ParseSess::new(FilePathMapping::empty());
-
-            let name_1 = FileName::Custom("crlf_source_1".to_string());
-            let source = "/// doc comment\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name_1, source, &sess)
-                .unwrap().unwrap();
-            let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
-            assert_eq!(doc.as_str(), "/// doc comment");
-
-            let name_2 = FileName::Custom("crlf_source_2".to_string());
-            let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name_2, source, &sess)
-                .unwrap().unwrap();
-            let docs = item.attrs.iter().filter(|a| a.path == sym::doc)
-                        .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
-            let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
-            assert_eq!(&docs[..], b);
-
-            let name_3 = FileName::Custom("clrf_source_3".to_string());
-            let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
-            let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
-            assert_eq!(doc.as_str(), "/** doc comment\n *  with CRLF */");
-        });
-    }
-
-    #[test]
-    fn ttdelim_span() {
-        fn parse_expr_from_source_str(
-            name: FileName, source: String, sess: &ParseSess
-        ) -> PResult<'_, P<ast::Expr>> {
-            new_parser_from_source_str(sess, name, source).parse_expr()
-        }
-
-        with_default_globals(|| {
-            let sess = ParseSess::new(FilePathMapping::empty());
-            let expr = parse_expr_from_source_str(PathBuf::from("foo").into(),
-                "foo!( fn main() { body } )".to_string(), &sess).unwrap();
-
-            let tts: Vec<_> = match expr.node {
-                ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
-                _ => panic!("not a macro"),
-            };
-
-            let span = tts.iter().rev().next().unwrap().span();
-
-            match sess.source_map().span_to_snippet(span) {
-                Ok(s) => assert_eq!(&s[..], "{ body }"),
-                Err(_) => panic!("could not get snippet"),
-            }
-        });
-    }
-
-    // This tests that when parsing a string (rather than a file) we don't try
-    // and read in a file for a module declaration and just parse a stub.
-    // See `recurse_into_file_modules` in the parser.
-    #[test]
-    fn out_of_line_mod() {
-        with_default_globals(|| {
-            let sess = ParseSess::new(FilePathMapping::empty());
-            let item = parse_item_from_source_str(
-                PathBuf::from("foo").into(),
-                "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
-                &sess,
-            ).unwrap().unwrap();
-
-            if let ast::ItemKind::Mod(ref m) = item.node {
-                assert!(m.items.len() == 2);
-            } else {
-                panic!();
-            }
-        });
-    }
-}
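
Besides registering mod tests;, this file's hunks also move the PResult alias below the module declarations rather than deleting it. The alias ties each error to a borrow with lifetime 'a (in rustc, a borrow of the diagnostics handler). A self-contained sketch of the same shape, where DiagnosticBuilder is a stand-in struct borrowing the input rather than a handler:

    // Stand-in error type; the real DiagnosticBuilder borrows the
    // diagnostics Handler instead of the source text.
    #[derive(Debug)]
    struct DiagnosticBuilder<'a> {
        msg: &'a str,
    }

    type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;

    fn parse_digits(src: &str) -> PResult<'_, u32> {
        src.trim()
            .parse()
            .map_err(|_| DiagnosticBuilder { msg: src })
    }

    fn main() {
        assert_eq!(parse_digits(" 7 ").unwrap(), 7);
        assert_eq!(parse_digits("x").unwrap_err().msg, "x");
    }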
diff --git a/src/libsyntax/parse/tests.rs b/src/libsyntax/parse/tests.rs
new file mode 100644
index 00000000000..e619fd17fb5
--- /dev/null
+++ b/src/libsyntax/parse/tests.rs
@@ -0,0 +1,339 @@
+use super::*;
+
+use crate::ast::{self, Name, PatKind};
+use crate::attr::first_attr_value_str_by_name;
+use crate::parse::{ParseSess, PResult};
+use crate::parse::new_parser_from_source_str;
+use crate::parse::token::Token;
+use crate::print::pprust::item_to_string;
+use crate::ptr::P;
+use crate::source_map::FilePathMapping;
+use crate::symbol::{kw, sym};
+use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
+use crate::tokenstream::{DelimSpan, TokenTree, TokenStream};
+use crate::with_default_globals;
+use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
+
+use std::path::PathBuf;
+
+/// Parses an item.
+///
+/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
+/// when a syntax error occurred.
+fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
+                                    -> PResult<'_, Option<P<ast::Item>>> {
+    new_parser_from_source_str(sess, name, source).parse_item()
+}
+
+// produce a syntax_pos::span
+fn sp(a: u32, b: u32) -> Span {
+    Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
+}
+
+/// Parse a string, return an expr
+fn string_to_expr(source_str : String) -> P<ast::Expr> {
+    let ps = ParseSess::new(FilePathMapping::empty());
+    with_error_checking_parse(source_str, &ps, |p| {
+        p.parse_expr()
+    })
+}
+
+/// Parse a string, return an item
+fn string_to_item(source_str : String) -> Option<P<ast::Item>> {
+    let ps = ParseSess::new(FilePathMapping::empty());
+    with_error_checking_parse(source_str, &ps, |p| {
+        p.parse_item()
+    })
+}
+
+#[should_panic]
+#[test] fn bad_path_expr_1() {
+    with_default_globals(|| {
+        string_to_expr("::abc::def::return".to_string());
+    })
+}
+
+// check the token-tree-ization of macros
+#[test]
+fn string_to_tts_macro () {
+    with_default_globals(|| {
+        let tts: Vec<_> =
+            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
+        let tts: &[TokenTree] = &tts[..];
+
+        match tts {
+            [
+                TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }),
+                TokenTree::Token(Token { kind: token::Not, .. }),
+                TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }),
+                TokenTree::Delimited(_, macro_delim,  macro_tts)
+            ]
+            if name_macro_rules == &sym::macro_rules && name_zip.as_str() == "zip" => {
+                let tts = &macro_tts.trees().collect::<Vec<_>>();
+                match &tts[..] {
+                    [
+                        TokenTree::Delimited(_, first_delim, first_tts),
+                        TokenTree::Token(Token { kind: token::FatArrow, .. }),
+                        TokenTree::Delimited(_, second_delim, second_tts),
+                    ]
+                    if macro_delim == &token::Paren => {
+                        let tts = &first_tts.trees().collect::<Vec<_>>();
+                        match &tts[..] {
+                            [
+                                TokenTree::Token(Token { kind: token::Dollar, .. }),
+                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
+                            ]
+                            if first_delim == &token::Paren && name.as_str() == "a" => {},
+                            _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
+                        }
+                        let tts = &second_tts.trees().collect::<Vec<_>>();
+                        match &tts[..] {
+                            [
+                                TokenTree::Token(Token { kind: token::Dollar, .. }),
+                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
+                            ]
+                            if second_delim == &token::Paren && name.as_str() == "a" => {},
+                            _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
+                        }
+                    },
+                    _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
+                }
+            },
+            _ => panic!("value: {:?}",tts),
+        }
+    })
+}
+
+#[test]
+fn string_to_tts_1() {
+    with_default_globals(|| {
+        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
+
+        let expected = TokenStream::new(vec![
+            TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
+            TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
+            TokenTree::Delimited(
+                DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
+                token::DelimToken::Paren,
+                TokenStream::new(vec![
+                    TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
+                    TokenTree::token(token::Colon, sp(8, 9)).into(),
+                    TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
+                ]).into(),
+            ).into(),
+            TokenTree::Delimited(
+                DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
+                token::DelimToken::Brace,
+                TokenStream::new(vec![
+                    TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
+                    TokenTree::token(token::Semi, sp(18, 19)).into(),
+                ]).into(),
+            ).into()
+        ]);
+
+        assert_eq!(tts, expected);
+    })
+}
+
+#[test] fn parse_use() {
+    with_default_globals(|| {
+        let use_s = "use foo::bar::baz;";
+        let vitem = string_to_item(use_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], use_s);
+
+        let use_s = "use foo::bar as baz;";
+        let vitem = string_to_item(use_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], use_s);
+    })
+}
+
+#[test] fn parse_extern_crate() {
+    with_default_globals(|| {
+        let ex_s = "extern crate foo;";
+        let vitem = string_to_item(ex_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], ex_s);
+
+        let ex_s = "extern crate foo as bar;";
+        let vitem = string_to_item(ex_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], ex_s);
+    })
+}
+
+fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
+    let item = string_to_item(src.to_string()).unwrap();
+
+    struct PatIdentVisitor {
+        spans: Vec<Span>
+    }
+    impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor {
+        fn visit_pat(&mut self, p: &'a ast::Pat) {
+            match p.node {
+                PatKind::Ident(_ , ref spannedident, _) => {
+                    self.spans.push(spannedident.span.clone());
+                }
+                _ => {
+                    crate::visit::walk_pat(self, p);
+                }
+            }
+        }
+    }
+    let mut v = PatIdentVisitor { spans: Vec::new() };
+    crate::visit::walk_item(&mut v, &item);
+    return v.spans;
+}
+
+#[test] fn span_of_self_arg_pat_idents_are_correct() {
+    with_default_globals(|| {
+
+        let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
+                    "impl z { fn a (&mut self, &myarg: i32) {} }",
+                    "impl z { fn a (&'a self, &myarg: i32) {} }",
+                    "impl z { fn a (self, &myarg: i32) {} }",
+                    "impl z { fn a (self: Foo, &myarg: i32) {} }",
+                    ];
+
+        for &src in &srcs {
+            let spans = get_spans_of_pat_idents(src);
+            let (lo, hi) = (spans[0].lo(), spans[0].hi());
+            assert!("self" == &src[lo.to_usize()..hi.to_usize()],
+                    "\"{}\" != \"self\". src=\"{}\"",
+                    &src[lo.to_usize()..hi.to_usize()], src)
+        }
+    })
+}
+
+#[test] fn parse_exprs () {
+    with_default_globals(|| {
+        // just make sure that they parse....
+        string_to_expr("3 + 4".to_string());
+        string_to_expr("a::z.froob(b,&(987+3))".to_string());
+    })
+}
+
+#[test] fn attrs_fix_bug () {
+    with_default_globals(|| {
+        string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+                -> Result<Box<Writer>, String> {
+#[cfg(windows)]
+fn wb() -> c_int {
+    (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
+}
+
+#[cfg(unix)]
+fn wb() -> c_int { O_WRONLY as c_int }
+
+let mut fflags: c_int = wb();
+}".to_string());
+    })
+}
+
+#[test] fn crlf_doc_comments() {
+    with_default_globals(|| {
+        let sess = ParseSess::new(FilePathMapping::empty());
+
+        let name_1 = FileName::Custom("crlf_source_1".to_string());
+        let source = "/// doc comment\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_1, source, &sess)
+            .unwrap().unwrap();
+        let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
+        assert_eq!(doc.as_str(), "/// doc comment");
+
+        let name_2 = FileName::Custom("crlf_source_2".to_string());
+        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_2, source, &sess)
+            .unwrap().unwrap();
+        let docs = item.attrs.iter().filter(|a| a.path == sym::doc)
+                    .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
+        let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
+        assert_eq!(&docs[..], b);
+
+        let name_3 = FileName::Custom("clrf_source_3".to_string());
+        let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
+        let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
+        assert_eq!(doc.as_str(), "/** doc comment\n *  with CRLF */");
+    });
+}
+
+#[test]
+fn ttdelim_span() {
+    fn parse_expr_from_source_str(
+        name: FileName, source: String, sess: &ParseSess
+    ) -> PResult<'_, P<ast::Expr>> {
+        new_parser_from_source_str(sess, name, source).parse_expr()
+    }
+
+    with_default_globals(|| {
+        let sess = ParseSess::new(FilePathMapping::empty());
+        let expr = parse_expr_from_source_str(PathBuf::from("foo").into(),
+            "foo!( fn main() { body } )".to_string(), &sess).unwrap();
+
+        let tts: Vec<_> = match expr.node {
+            ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
+            _ => panic!("not a macro"),
+        };
+
+        let span = tts.iter().rev().next().unwrap().span();
+
+        match sess.source_map().span_to_snippet(span) {
+            Ok(s) => assert_eq!(&s[..], "{ body }"),
+            Err(_) => panic!("could not get snippet"),
+        }
+    });
+}
+
+// This tests that when parsing a string (rather than a file) we don't try
+// and read in a file for a module declaration and just parse a stub.
+// See `recurse_into_file_modules` in the parser.
+#[test]
+fn out_of_line_mod() {
+    with_default_globals(|| {
+        let sess = ParseSess::new(FilePathMapping::empty());
+        let item = parse_item_from_source_str(
+            PathBuf::from("foo").into(),
+            "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
+            &sess,
+        ).unwrap().unwrap();
+
+        if let ast::ItemKind::Mod(ref m) = item.node {
+            assert!(m.items.len() == 2);
+        } else {
+            panic!();
+        }
+    });
+}
+
+#[test]
+fn eqmodws() {
+    assert_eq!(matches_codepattern("",""),true);
+    assert_eq!(matches_codepattern("","a"),false);
+    assert_eq!(matches_codepattern("a",""),false);
+    assert_eq!(matches_codepattern("a","a"),true);
+    assert_eq!(matches_codepattern("a b","a   \n\t\r  b"),true);
+    assert_eq!(matches_codepattern("a b ","a   \n\t\r  b"),true);
+    assert_eq!(matches_codepattern("a b","a   \n\t\r  b "),false);
+    assert_eq!(matches_codepattern("a   b","a b"),true);
+    assert_eq!(matches_codepattern("ab","a b"),false);
+    assert_eq!(matches_codepattern("a   b","ab"),true);
+    assert_eq!(matches_codepattern(" a   b","ab"),true);
+}
+
+#[test]
+fn pattern_whitespace() {
+    assert_eq!(matches_codepattern("","\x0C"), false);
+    assert_eq!(matches_codepattern("a b ","a   \u{0085}\n\t\r  b"),true);
+    assert_eq!(matches_codepattern("a b","a   \u{0085}\n\t\r  b "),false);
+}
+
+#[test]
+fn non_pattern_whitespace() {
+    // These have the property 'White_Space' but not 'Pattern_White_Space'
+    assert_eq!(matches_codepattern("a b","a\u{2002}b"), false);
+    assert_eq!(matches_codepattern("a   b","a\u{2002}b"), false);
+    assert_eq!(matches_codepattern("\u{205F}a   b","ab"), false);
+    assert_eq!(matches_codepattern("a  \u{3000}b","ab"), false);
+}
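
The final two tests rely on the distinction between Unicode's White_Space and Pattern_White_Space properties: U+2002 (en space), U+205F, and U+3000 carry White_Space but not Pattern_White_Space, so matches_codepattern does not treat them as insignificant separator whitespace. For reference, the Pattern_White_Space set is small and fixed (Unicode UAX #31); a predicate spelling it out, offered as a reference sketch rather than the libsyntax implementation:

    // The fixed Pattern_White_Space set from Unicode UAX #31.
    fn is_pattern_whitespace(c: char) -> bool {
        matches!(
            c,
            '\u{0009}'..='\u{000D}' // tab, LF, vertical tab, form feed, CR
                | '\u{0020}' // space
                | '\u{0085}' // next line (NEL)
                | '\u{200E}' // left-to-right mark
                | '\u{200F}' // right-to-left mark
                | '\u{2028}' // line separator
                | '\u{2029}' // paragraph separator
        )
    }

    fn main() {
        assert!(is_pattern_whitespace('\u{0085}'));
        assert!(!is_pattern_whitespace('\u{2002}')); // White_Space only
        assert!(!is_pattern_whitespace('\u{3000}')); // White_Space only
    }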