author     Nick Cameron <ncameron@mozilla.com>    2015-12-15 16:51:13 +1300
committer  Nick Cameron <ncameron@mozilla.com>    2015-12-17 10:00:16 +1300
commit     ff0c74f7d47f5261ebda7cb3b9a637e0cfc69104 (patch)
tree       c4bcf1b5acdae17289fb024987c4f45b00e8fea7 /src/libsyntax
parent     e2371518c4b03e1770948d9d2429cfeb46e25a20 (diff)
test errors
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/errors/emitter.rs   | 61
-rw-r--r--  src/libsyntax/errors/mod.rs       | 58
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs  | 78
3 files changed, 111 insertions, 86 deletions
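
The commit does two things: the highlight-alignment test formerly in errors/mod.rs moves next to the emitter it exercises in errors/emitter.rs, and EmitterWriter::new now takes an Rc<CodeMap> at construction time, so callers no longer thread a &CodeMap through each call. A minimal sketch of the resulting construction pattern, modelled on the mk_sh helper in the lexer hunk below (the paths are libsyntax-internal, 2015-era, and the meaning of the two booleans passed to with_emitter is an assumption; the diff does not name them):

    use std::io;
    use std::rc::Rc;
    use codemap::CodeMap;                 // libsyntax-internal path
    use errors::Handler;
    use errors::emitter::EmitterWriter;

    fn mk_handler(cm: Rc<CodeMap>) -> Handler {
        // The emitter now owns (an Rc to) the CodeMap...
        let emitter = EmitterWriter::new(Box::new(io::sink()), None, cm);
        // ...and the Handler is built from the emitter alone; the booleans are
        // assumed to be can_emit_warnings / treat_err_as_bug.
        Handler::with_emitter(true, false, Box::new(emitter))
    }

    // Call sites keep their own Rc<CodeMap> and clone it into the handler:
    //     let cm = Rc::new(CodeMap::new());
    //     let handler = mk_handler(cm.clone());
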
diff --git a/src/libsyntax/errors/emitter.rs b/src/libsyntax/errors/emitter.rs
index e65eab58d9a..7fef85a833e 100644
--- a/src/libsyntax/errors/emitter.rs
+++ b/src/libsyntax/errors/emitter.rs
@@ -45,7 +45,7 @@ impl ColorConfig {
             ColorConfig::Always => true,
             ColorConfig::Never  => false,
             ColorConfig::Auto   => stderr_isatty(),
-        }        
+        }
     }
 }
 
@@ -619,3 +619,62 @@ impl Write for Destination {
     }
 }
 
+
+#[cfg(test)]
+mod test {
+    use errors::Level;
+    use super::EmitterWriter;
+    use codemap::{mk_sp, CodeMap};
+    use std::sync::{Arc, Mutex};
+    use std::io::{self, Write};
+    use std::str::from_utf8;
+    use std::rc::Rc;
+
+    // Diagnostic doesn't align properly in span where line number increases by one digit
+    #[test]
+    fn test_hilight_suggestion_issue_11715() {
+        struct Sink(Arc<Mutex<Vec<u8>>>);
+        impl Write for Sink {
+            fn write(&mut self, data: &[u8]) -> io::Result<usize> {
+                Write::write(&mut *self.0.lock().unwrap(), data)
+            }
+            fn flush(&mut self) -> io::Result<()> { Ok(()) }
+        }
+        let data = Arc::new(Mutex::new(Vec::new()));
+        let cm = Rc::new(CodeMap::new());
+        let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None, cm.clone());
+        let content = "abcdefg
+        koksi
+        line3
+        line4
+        cinq
+        line6
+        line7
+        line8
+        line9
+        line10
+        e-lä-vän
+        tolv
+        dreizehn
+        ";
+        let file = cm.new_filemap_and_lines("dummy.txt", content);
+        let start = file.lines.borrow()[7];
+        let end = file.lines.borrow()[11];
+        let sp = mk_sp(start, end);
+        let lvl = Level::Error;
+        println!("span_to_lines");
+        let lines = cm.span_to_lines(sp);
+        println!("highlight_lines");
+        ew.highlight_lines(sp, lvl, lines).unwrap();
+        println!("done");
+        let vec = data.lock().unwrap().clone();
+        let vec: &[u8] = &vec;
+        let str = from_utf8(vec).unwrap();
+        println!("{}", str);
+        assert_eq!(str, "dummy.txt: 8         line8\n\
+                         dummy.txt: 9         line9\n\
+                         dummy.txt:10         line10\n\
+                         dummy.txt:11         e-lä-vän\n\
+                         dummy.txt:12         tolv\n");
+    }
+}
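
(The test added above is the one removed from errors/mod.rs in the next hunk. Apart from the move, the only substantive differences are how the emitter is constructed and how highlight_lines is called; the relevant lines, extracted from the two hunks:)

    // before (errors/mod.rs): CodeMap passed by reference at the call site
    let cm = CodeMap::new();
    let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None);
    ew.highlight_lines(&cm, sp, lvl, lines).unwrap();

    // after (errors/emitter.rs): the emitter is built with an Rc<CodeMap>
    let cm = Rc::new(CodeMap::new());
    let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None, cm.clone());
    ew.highlight_lines(sp, lvl, lines).unwrap();
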
diff --git a/src/libsyntax/errors/mod.rs b/src/libsyntax/errors/mod.rs
index 920fd2fdb00..f2e61090ba2 100644
--- a/src/libsyntax/errors/mod.rs
+++ b/src/libsyntax/errors/mod.rs
@@ -336,61 +336,3 @@ pub fn expect<T, M>(diag: &Handler, opt: Option<T>, msg: M) -> T where
         None => diag.bug(&msg()),
     }
 }
-
-#[cfg(test)]
-mod test {
-    use super::Level;
-    use emitter::EmitterWriter;
-    use codemap::{mk_sp, CodeMap};
-    use std::sync::{Arc, Mutex};
-    use std::io::{self, Write};
-    use std::str::from_utf8;
-
-    // Diagnostic doesn't align properly in span where line number increases by one digit
-    #[test]
-    fn test_hilight_suggestion_issue_11715() {
-        struct Sink(Arc<Mutex<Vec<u8>>>);
-        impl Write for Sink {
-            fn write(&mut self, data: &[u8]) -> io::Result<usize> {
-                Write::write(&mut *self.0.lock().unwrap(), data)
-            }
-            fn flush(&mut self) -> io::Result<()> { Ok(()) }
-        }
-        let data = Arc::new(Mutex::new(Vec::new()));
-        let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None);
-        let cm = CodeMap::new();
-        let content = "abcdefg
-        koksi
-        line3
-        line4
-        cinq
-        line6
-        line7
-        line8
-        line9
-        line10
-        e-lä-vän
-        tolv
-        dreizehn
-        ";
-        let file = cm.new_filemap_and_lines("dummy.txt", content);
-        let start = file.lines.borrow()[7];
-        let end = file.lines.borrow()[11];
-        let sp = mk_sp(start, end);
-        let lvl = Level::Error;
-        println!("span_to_lines");
-        let lines = cm.span_to_lines(sp);
-        println!("highlight_lines");
-        ew.highlight_lines(&cm, sp, lvl, lines).unwrap();
-        println!("done");
-        let vec = data.lock().unwrap().clone();
-        let vec: &[u8] = &vec;
-        let str = from_utf8(vec).unwrap();
-        println!("{}", str);
-        assert_eq!(str, "dummy.txt: 8         line8\n\
-                         dummy.txt: 9         line9\n\
-                         dummy.txt:10         line10\n\
-                         dummy.txt:11         e-lä-vän\n\
-                         dummy.txt:12         tolv\n");
-    }
-}
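
(The lexer hunks below all make the same mechanical change: each test builds its own Rc<CodeMap>, hands a clone to mk_sh to construct the errors::Handler, and passes both into setup, since the new Handler no longer owns a CodeMap. The repeated shape, using the mk_sh and setup helpers as redefined in the first hunk of parse/lexer/mod.rs; the source text here just mirrors the character_a test:)

    let cm = Rc::new(CodeMap::new());
    let sh = mk_sh(cm.clone());
    let mut reader = setup(&cm, &sh, "'a'".to_string());
    assert_eq!(reader.next_token().tok,
               token::Literal(token::Char(token::intern("a")), None));
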
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 570e0882a85..4619410ada7 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1422,28 +1422,30 @@ mod tests {
     use super::*;
 
     use codemap::{BytePos, CodeMap, Span, NO_EXPANSION};
-    use diagnostic;
+    use errors;
     use parse::token;
     use parse::token::{str_to_ident};
     use std::io;
+    use std::rc::Rc;
 
-    fn mk_sh() -> diagnostic::Handler {
+    fn mk_sh(cm: Rc<CodeMap>) -> errors::Handler {
         // FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
-        let emitter = diagnostic::EmitterWriter::new(Box::new(io::sink()), None);
-        let handler = diagnostic::Handler::with_emitter(true, Box::new(emitter));
-        diagnostic::Handler::new(handler, CodeMap::new())
+        let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), None, cm);
+        errors::Handler::with_emitter(true, false, Box::new(emitter))
     }
 
     // open a string reader for the given string
-    fn setup<'a>(span_handler: &'a diagnostic::Handler,
+    fn setup<'a>(cm: &CodeMap,
+                 span_handler: &'a errors::Handler,
                  teststr: String) -> StringReader<'a> {
-        let fm = span_handler.cm.new_filemap("zebra.rs".to_string(), teststr);
+        let fm = cm.new_filemap("zebra.rs".to_string(), teststr);
         StringReader::new(span_handler, fm)
     }
 
     #[test] fn t1 () {
-        let span_handler = mk_sh();
-        let mut string_reader = setup(&span_handler,
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        let mut string_reader = setup(&cm, &sh,
             "/* my source file */ \
              fn main() { println!(\"zebra\"); }\n".to_string());
         let id = str_to_ident("fn");
@@ -1481,21 +1483,27 @@ mod tests {
     }
 
     #[test] fn doublecolonparsing () {
-        check_tokenization(setup(&mk_sh(), "a b".to_string()),
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        check_tokenization(setup(&cm, &sh, "a b".to_string()),
                            vec![mk_ident("a", token::Plain),
                                 token::Whitespace,
                                 mk_ident("b", token::Plain)]);
     }
 
     #[test] fn dcparsing_2 () {
-        check_tokenization(setup(&mk_sh(), "a::b".to_string()),
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        check_tokenization(setup(&cm, &sh, "a::b".to_string()),
                            vec![mk_ident("a",token::ModName),
                                 token::ModSep,
                                 mk_ident("b", token::Plain)]);
     }
 
     #[test] fn dcparsing_3 () {
-        check_tokenization(setup(&mk_sh(), "a ::b".to_string()),
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
                            vec![mk_ident("a", token::Plain),
                                 token::Whitespace,
                                 token::ModSep,
@@ -1503,7 +1511,9 @@ mod tests {
     }
 
     #[test] fn dcparsing_4 () {
-        check_tokenization(setup(&mk_sh(), "a:: b".to_string()),
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
                            vec![mk_ident("a",token::ModName),
                                 token::ModSep,
                                 token::Whitespace,
@@ -1511,40 +1521,52 @@ mod tests {
     }
 
     #[test] fn character_a() {
-        assert_eq!(setup(&mk_sh(), "'a'".to_string()).next_token().tok,
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
                    token::Literal(token::Char(token::intern("a")), None));
     }
 
     #[test] fn character_space() {
-        assert_eq!(setup(&mk_sh(), "' '".to_string()).next_token().tok,
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
                    token::Literal(token::Char(token::intern(" ")), None));
     }
 
     #[test] fn character_escaped() {
-        assert_eq!(setup(&mk_sh(), "'\\n'".to_string()).next_token().tok,
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
                    token::Literal(token::Char(token::intern("\\n")), None));
     }
 
     #[test] fn lifetime_name() {
-        assert_eq!(setup(&mk_sh(), "'abc".to_string()).next_token().tok,
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
                    token::Lifetime(token::str_to_ident("'abc")));
     }
 
     #[test] fn raw_string() {
-        assert_eq!(setup(&mk_sh(),
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        assert_eq!(setup(&cm, &sh,
                          "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token()
                                                                  .tok,
                    token::Literal(token::StrRaw(token::intern("\"#a\\b\x00c\""), 3), None));
     }
 
     #[test] fn literal_suffixes() {
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
         macro_rules! test {
             ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                assert_eq!(setup(&mk_sh(), format!("{}suffix", $input)).next_token().tok,
+                assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
                            token::Literal(token::$tok_type(token::intern($tok_contents)),
                                           Some(token::intern("suffix"))));
                 // with a whitespace separator:
-                assert_eq!(setup(&mk_sh(), format!("{} suffix", $input)).next_token().tok,
+                assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
                            token::Literal(token::$tok_type(token::intern($tok_contents)),
                                           None));
             }}
@@ -1560,13 +1582,13 @@ mod tests {
         test!("1.0", Float, "1.0");
         test!("1.0e10", Float, "1.0e10");
 
-        assert_eq!(setup(&mk_sh(), "2us".to_string()).next_token().tok,
+        assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
                    token::Literal(token::Integer(token::intern("2")),
                                   Some(token::intern("us"))));
-        assert_eq!(setup(&mk_sh(), "r###\"raw\"###suffix".to_string()).next_token().tok,
+        assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
                    token::Literal(token::StrRaw(token::intern("raw"), 3),
                                   Some(token::intern("suffix"))));
-        assert_eq!(setup(&mk_sh(), "br###\"raw\"###suffix".to_string()).next_token().tok,
+        assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
                    token::Literal(token::ByteStrRaw(token::intern("raw"), 3),
                                   Some(token::intern("suffix"))));
     }
@@ -1578,8 +1600,9 @@ mod tests {
     }
 
     #[test] fn nested_block_comments() {
-        let sh = mk_sh();
-        let mut lexer = setup(&sh, "/* /* */ */'a'".to_string());
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
         match lexer.next_token().tok {
             token::Comment => { },
             _ => panic!("expected a comment!")
@@ -1588,8 +1611,9 @@ mod tests {
     }
 
     #[test] fn crlf_comments() {
-        let sh = mk_sh();
-        let mut lexer = setup(&sh, "// test\r\n/// test\r\n".to_string());
+        let cm = Rc::new(CodeMap::new());
+        let sh = mk_sh(cm.clone());
+        let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
         let comment = lexer.next_token();
         assert_eq!(comment.tok, token::Comment);
         assert_eq!(comment.sp, ::codemap::mk_sp(BytePos(0), BytePos(7)));