about summary refs log tree commit diff
path: root/src/libsyntax/parse
diff options
context:
space:
mode:
author Patrick Walton <pcwalton@mimiga.net> 2014-01-30 18:46:19 -0800
committer Huon Wilson <dbau.pp+github@gmail.com> 2014-02-02 01:44:48 +1100
commit b890237e798030ce337933b14f777a1c3810d1ea (patch)
tree 1032b8f875b000cb68829eb2861962b506b5efb7 /src/libsyntax/parse
parent 52eeed2f73652014cde448b5dc969c7299a1fbcb (diff)
download rust-b890237e798030ce337933b14f777a1c3810d1ea.tar.gz
rust-b890237e798030ce337933b14f777a1c3810d1ea.zip
libsyntax: Fix tests.
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r-- src/libsyntax/parse/lexer.rs 26
-rw-r--r-- src/libsyntax/parse/mod.rs 24
2 files changed, 25 insertions, 25 deletions
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 469d3d64f24..8c55990289a 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -972,9 +972,9 @@ mod test {
     }
 
     // open a string reader for the given string
-    fn setup(teststr: @str) -> Env {
+    fn setup(teststr: ~str) -> Env {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap(@"zebra.rs", teststr);
+        let fm = cm.new_filemap(~"zebra.rs", teststr);
         let span_handler =
             diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
         Env {
@@ -984,7 +984,7 @@ mod test {
 
     #[test] fn t1 () {
         let Env {string_reader} =
-            setup(@"/* my source file */ \
+            setup(~"/* my source file */ \
                     fn main() { println!(\"zebra\"); }\n");
         let id = str_to_ident("fn");
         let tok1 = string_reader.next_token();
@@ -1020,14 +1020,14 @@ mod test {
     }
 
     #[test] fn doublecolonparsing () {
-        let env = setup (@"a b");
+        let env = setup (~"a b");
         check_tokenization (env,
                            ~[mk_ident("a",false),
                              mk_ident("b",false)]);
     }
 
     #[test] fn dcparsing_2 () {
-        let env = setup (@"a::b");
+        let env = setup (~"a::b");
         check_tokenization (env,
                            ~[mk_ident("a",true),
                              token::MOD_SEP,
@@ -1035,7 +1035,7 @@ mod test {
     }
 
     #[test] fn dcparsing_3 () {
-        let env = setup (@"a ::b");
+        let env = setup (~"a ::b");
         check_tokenization (env,
                            ~[mk_ident("a",false),
                              token::MOD_SEP,
@@ -1043,7 +1043,7 @@ mod test {
     }
 
     #[test] fn dcparsing_4 () {
-        let env = setup (@"a:: b");
+        let env = setup (~"a:: b");
         check_tokenization (env,
                            ~[mk_ident("a",true),
                              token::MOD_SEP,
@@ -1051,28 +1051,28 @@ mod test {
     }
 
     #[test] fn character_a() {
-        let env = setup(@"'a'");
+        let env = setup(~"'a'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok,token::LIT_CHAR('a' as u32));
     }
 
     #[test] fn character_space() {
-        let env = setup(@"' '");
+        let env = setup(~"' '");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok, token::LIT_CHAR(' ' as u32));
     }
 
     #[test] fn character_escaped() {
-        let env = setup(@"'\\n'");
+        let env = setup(~"'\\n'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok, token::LIT_CHAR('\n' as u32));
     }
 
     #[test] fn lifetime_name() {
-        let env = setup(@"'abc");
+        let env = setup(~"'abc");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         let id = token::str_to_ident("abc");
@@ -1080,7 +1080,7 @@ mod test {
     }
 
     #[test] fn raw_string() {
-        let env = setup(@"r###\"\"#a\\b\x00c\"\"###");
+        let env = setup(~"r###\"\"#a\\b\x00c\"\"###");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         let id = token::str_to_ident("\"#a\\b\x00c\"");
@@ -1094,7 +1094,7 @@ mod test {
     }
 
     #[test] fn nested_block_comments() {
-        let env = setup(@"/* /* */ */'a'");
+        let env = setup(~"/* /* */ */'a'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok,token::LIT_CHAR('a' as u32));
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 76ccc53b846..cec9f7c2d9f 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -314,7 +314,7 @@ mod test {
     }
 
     #[test] fn path_exprs_1() {
-        assert_eq!(string_to_expr(@"a"),
+        assert_eq!(string_to_expr(~"a"),
                    @ast::Expr{
                     id: ast::DUMMY_NODE_ID,
                     node: ast::ExprPath(ast::Path {
@@ -333,7 +333,7 @@ mod test {
     }
 
     #[test] fn path_exprs_2 () {
-        assert_eq!(string_to_expr(@"::a::b"),
+        assert_eq!(string_to_expr(~"::a::b"),
                    @ast::Expr {
                     id: ast::DUMMY_NODE_ID,
                     node: ast::ExprPath(ast::Path {
@@ -358,12 +358,12 @@ mod test {
 
     #[should_fail]
     #[test] fn bad_path_expr_1() {
-        string_to_expr(@"::abc::def::return");
+        string_to_expr(~"::abc::def::return");
     }
 
     // check the token-tree-ization of macros
     #[test] fn string_to_tts_macro () {
-        let tts = string_to_tts(@"macro_rules! zip (($a)=>($a))");
+        let tts = string_to_tts(~"macro_rules! zip (($a)=>($a))");
         match tts {
             [ast::TTTok(_,_),
              ast::TTTok(_,token::NOT),
@@ -407,7 +407,7 @@ mod test {
     }
 
     #[test] fn string_to_tts_1 () {
-        let tts = string_to_tts(@"fn a (b : int) { b; }");
+        let tts = string_to_tts(~"fn a (b : int) { b; }");
         assert_eq!(to_json_str(&tts),
         ~"[\
     {\
@@ -536,7 +536,7 @@ mod test {
     }
 
     #[test] fn ret_expr() {
-        assert_eq!(string_to_expr(@"return d"),
+        assert_eq!(string_to_expr(~"return d"),
                    @ast::Expr{
                     id: ast::DUMMY_NODE_ID,
                     node:ast::ExprRet(Some(@ast::Expr{
@@ -559,7 +559,7 @@ mod test {
     }
 
     #[test] fn parse_stmt_1 () {
-        assert_eq!(string_to_stmt(@"b;"),
+        assert_eq!(string_to_stmt(~"b;"),
                    @Spanned{
                        node: ast::StmtExpr(@ast::Expr {
                            id: ast::DUMMY_NODE_ID,
@@ -585,7 +585,7 @@ mod test {
     }
 
     #[test] fn parse_ident_pat () {
-        let mut parser = string_to_parser(@"b");
+        let mut parser = string_to_parser(~"b");
         assert_eq!(parser.parse_pat(),
                    @ast::Pat{id: ast::DUMMY_NODE_ID,
                              node: ast::PatIdent(
@@ -609,7 +609,7 @@ mod test {
     // check the contents of the tt manually:
     #[test] fn parse_fundecl () {
         // this test depends on the intern order of "fn" and "int"
-        assert_eq!(string_to_item(@"fn a (b : int) { b; }"),
+        assert_eq!(string_to_item(~"fn a (b : int) { b; }"),
                   Some(
                       @ast::Item{ident:str_to_ident("a"),
                             attrs:~[],
@@ -701,12 +701,12 @@ mod test {
 
     #[test] fn parse_exprs () {
         // just make sure that they parse....
-        string_to_expr(@"3 + 4");
-        string_to_expr(@"a::z.froob(b,@(987+3))");
+        string_to_expr(~"3 + 4");
+        string_to_expr(~"a::z.froob(b,@(987+3))");
     }
 
     #[test] fn attrs_fix_bug () {
-        string_to_item(@"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+        string_to_item(~"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
                    -> Result<@Writer, ~str> {
     #[cfg(windows)]
     fn wb() -> c_int {