From cbdf4ec03e92ed36c162bb8c645993e48a1caa02 Mon Sep 17 00:00:00 2001
From: John Kåre Alsaker
Date: Wed, 7 Mar 2018 02:44:10 +0100
Subject: Remove syntax and syntax_pos thread locals

---
 src/libsyntax/Cargo.toml         |   1 +
 src/libsyntax/attr.rs            |  35 ++-
 src/libsyntax/fold.rs            |  43 +--
 src/libsyntax/lib.rs             |  30 ++
 src/libsyntax/parse/lexer/mod.rs | 259 +++++++++--------
 src/libsyntax/parse/mod.rs       | 609 +++++++++++++++++++++------------------
 src/libsyntax/print/pprust.rs    |  51 ++--
 src/libsyntax/test_snippet.rs    |  53 ++--
 src/libsyntax/tokenstream.rs     |  83 +++---
 9 files changed, 643 insertions(+), 521 deletions(-)

diff --git a/src/libsyntax/Cargo.toml b/src/libsyntax/Cargo.toml
index 07631e0dcfc..8c24f36615b 100644
--- a/src/libsyntax/Cargo.toml
+++ b/src/libsyntax/Cargo.toml
@@ -12,6 +12,7 @@ crate-type = ["dylib"]
 bitflags = "1.0"
 serialize = { path = "../libserialize" }
 log = "0.4"
+scoped-tls = "0.1"
 syntax_pos = { path = "../libsyntax_pos" }
 rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }
 rustc_errors = { path = "../librustc_errors" }
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 4818248129e..f2cdcda98da 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -30,15 +30,10 @@ use ptr::P;
 use symbol::Symbol;
 use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::ThinVec;
+use GLOBALS;

-use std::cell::RefCell;
 use std::iter;

-thread_local! {
-    static USED_ATTRS: RefCell<Vec<u64>> = RefCell::new(Vec::new());
-    static KNOWN_ATTRS: RefCell<Vec<u64>> = RefCell::new(Vec::new());
-}
-
 enum AttrError {
     MultipleItem(Name),
     UnknownMetaItem(Name),
@@ -65,22 +60,24 @@ fn handle_errors(diag: &Handler, span: Span, error: AttrError) {
 pub fn mark_used(attr: &Attribute) {
     debug!("Marking {:?} as used.", attr);
     let AttrId(id) = attr.id;
-    USED_ATTRS.with(|slot| {
+    GLOBALS.with(|globals| {
+        let mut slot = globals.used_attrs.lock();
         let idx = (id / 64) as usize;
         let shift = id % 64;
-        if slot.borrow().len() <= idx {
-            slot.borrow_mut().resize(idx + 1, 0);
+        if slot.len() <= idx {
+            slot.resize(idx + 1, 0);
         }
-        slot.borrow_mut()[idx] |= 1 << shift;
+        slot[idx] |= 1 << shift;
     });
 }

 pub fn is_used(attr: &Attribute) -> bool {
     let AttrId(id) = attr.id;
-    USED_ATTRS.with(|slot| {
+    GLOBALS.with(|globals| {
+        let slot = globals.used_attrs.lock();
         let idx = (id / 64) as usize;
         let shift = id % 64;
-        slot.borrow().get(idx).map(|bits| bits & (1 << shift) != 0)
+        slot.get(idx).map(|bits| bits & (1 << shift) != 0)
             .unwrap_or(false)
     })
 }
@@ -88,22 +85,24 @@ pub fn is_used(attr: &Attribute) -> bool {
 pub fn mark_known(attr: &Attribute) {
     debug!("Marking {:?} as known.", attr);
     let AttrId(id) = attr.id;
-    KNOWN_ATTRS.with(|slot| {
+    GLOBALS.with(|globals| {
+        let mut slot = globals.known_attrs.lock();
         let idx = (id / 64) as usize;
         let shift = id % 64;
-        if slot.borrow().len() <= idx {
-            slot.borrow_mut().resize(idx + 1, 0);
+        if slot.len() <= idx {
+            slot.resize(idx + 1, 0);
         }
-        slot.borrow_mut()[idx] |= 1 << shift;
+        slot[idx] |= 1 << shift;
     });
 }

 pub fn is_known(attr: &Attribute) -> bool {
     let AttrId(id) = attr.id;
-    KNOWN_ATTRS.with(|slot| {
+    GLOBALS.with(|globals| {
+        let slot = globals.known_attrs.lock();
         let idx = (id / 64) as usize;
         let shift = id % 64;
-        slot.borrow().get(idx).map(|bits| bits & (1 << shift) != 0)
+        slot.get(idx).map(|bits| bits & (1 << shift) != 0)
             .unwrap_or(false)
     })
 }
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index bc1854d3cd8..2cf99e15d1f 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -1386,6
+1386,7 @@ mod tests { use util::parser_testing::{string_to_crate, matches_codepattern}; use print::pprust; use fold; + use with_globals; use super::*; // this version doesn't care about getting comments or docstrings in. @@ -1423,28 +1424,32 @@ mod tests { // make sure idents get transformed everywhere #[test] fn ident_transformation () { - let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate( - "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string()); - let folded_crate = zz_fold.fold_crate(ast); - assert_pred!( - matches_codepattern, - "matches_codepattern", - pprust::to_string(|s| fake_print_crate(s, &folded_crate)), - "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); + with_globals(|| { + let mut zz_fold = ToZzIdentFolder; + let ast = string_to_crate( + "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string()); + let folded_crate = zz_fold.fold_crate(ast); + assert_pred!( + matches_codepattern, + "matches_codepattern", + pprust::to_string(|s| fake_print_crate(s, &folded_crate)), + "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); + }) } // even inside macro defs.... #[test] fn ident_transformation_in_defs () { - let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate( - "macro_rules! a {(b $c:expr $(d $e:token)f+ => \ - (g $(d $d $e)+))} ".to_string()); - let folded_crate = zz_fold.fold_crate(ast); - assert_pred!( - matches_codepattern, - "matches_codepattern", - pprust::to_string(|s| fake_print_crate(s, &folded_crate)), - "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string()); + with_globals(|| { + let mut zz_fold = ToZzIdentFolder; + let ast = string_to_crate( + "macro_rules! a {(b $c:expr $(d $e:token)f+ => \ + (g $(d $d $e)+))} ".to_string()); + let folded_crate = zz_fold.fold_crate(ast); + assert_pred!( + matches_codepattern, + "matches_codepattern", + pprust::to_string(|s| fake_print_crate(s, &folded_crate)), + "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string()); + }) } } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 50e94e5cba7..5f58b3bc3a0 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -39,9 +39,12 @@ extern crate std_unicode; pub extern crate rustc_errors as errors; extern crate syntax_pos; extern crate rustc_data_structures; +#[macro_use] extern crate scoped_tls; extern crate serialize as rustc_serialize; // used by deriving +use rustc_data_structures::sync::Lock; + // A variant of 'try!' that panics on an Err. This is used as a crutch on the // way towards a non-panic!-prone parser. It should be used for fatal parsing // errors; eventually we plan to convert all code using panictry to just use @@ -72,6 +75,33 @@ macro_rules! 
unwrap_or {
     }
 }

+struct Globals {
+    used_attrs: Lock<Vec<u64>>,
+    known_attrs: Lock<Vec<u64>>,
+    syntax_pos_globals: syntax_pos::Globals,
+}
+
+impl Globals {
+    fn new() -> Globals {
+        Globals {
+            used_attrs: Lock::new(Vec::new()),
+            known_attrs: Lock::new(Vec::new()),
+            syntax_pos_globals: syntax_pos::Globals::new(),
+        }
+    }
+}
+
+pub fn with_globals<F, R>(f: F) -> R
+    where F: FnOnce() -> R
+{
+    let globals = Globals::new();
+    GLOBALS.set(&globals, || {
+        syntax_pos::GLOBALS.set(&globals.syntax_pos_globals, f)
+    })
+}
+
+scoped_thread_local!(static GLOBALS: Globals);
+
 #[macro_use]
 pub mod diagnostics {
     #[macro_use]
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index cdf38453d7e..d0075c89656 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1766,6 +1766,7 @@ mod tests {
     use std::path::PathBuf;
     use diagnostics::plugin::ErrorMap;
     use rustc_data_structures::sync::Lock;
+    use with_globals;

     fn mk_sess(cm: Lrc<CodeMap>) -> ParseSess {
         let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), Some(cm.clone()),
@@ -1794,33 +1795,35 @@ mod tests {

     #[test]
     fn t1() {
-        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(cm.clone());
-        let mut string_reader = setup(&cm,
-                                      &sh,
-                                      "/* my source file */ fn main() { println!(\"zebra\"); }\n"
-                                          .to_string());
-        let id = Ident::from_str("fn");
-        assert_eq!(string_reader.next_token().tok, token::Comment);
-        assert_eq!(string_reader.next_token().tok, token::Whitespace);
-        let tok1 = string_reader.next_token();
-        let tok2 = TokenAndSpan {
-            tok: token::Ident(id),
-            sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
-        };
-        assert_eq!(tok1, tok2);
-        assert_eq!(string_reader.next_token().tok, token::Whitespace);
-        // the 'main' id is already read:
-        assert_eq!(string_reader.pos.clone(), BytePos(28));
-        // read another token:
-        let tok3 = string_reader.next_token();
-        let tok4 = TokenAndSpan {
-            tok: token::Ident(Ident::from_str("main")),
-            sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
-        };
-        assert_eq!(tok3, tok4);
-        // the lparen is already read:
-        assert_eq!(string_reader.pos.clone(), BytePos(29))
+        with_globals(|| {
+            let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(cm.clone());
+            let mut string_reader = setup(&cm,
+                                          &sh,
+                                          "/* my source file */ fn main() { println!(\"zebra\"); }\n"
+                                              .to_string());
+            let id = Ident::from_str("fn");
+            assert_eq!(string_reader.next_token().tok, token::Comment);
+            assert_eq!(string_reader.next_token().tok, token::Whitespace);
+            let tok1 = string_reader.next_token();
+            let tok2 = TokenAndSpan {
+                tok: token::Ident(id),
+                sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
+            };
+            assert_eq!(tok1, tok2);
+            assert_eq!(string_reader.next_token().tok, token::Whitespace);
+            // the 'main' id is already read:
+            assert_eq!(string_reader.pos.clone(), BytePos(28));
+            // read another token:
+            let tok3 = string_reader.next_token();
+            let tok4 = TokenAndSpan {
+                tok: token::Ident(Ident::from_str("main")),
+                sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
+            };
+            assert_eq!(tok3, tok4);
+            // the lparen is already read:
+            assert_eq!(string_reader.pos.clone(), BytePos(29))
+        })
     }

     // check that the given reader produces the desired stream
@@ -1838,113 +1841,133 @@ mod tests {

     #[test]
     fn doublecolonparsing() {
-        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
-        let sh = mk_sess(cm.clone());
-        check_tokenization(setup(&cm, &sh, "a b".to_string()),
-                           vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
+
with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a b".to_string()), + vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); + }) } #[test] fn dcparsing_2() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a::b".to_string()), - vec![mk_ident("a"), token::ModSep, mk_ident("b")]); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a::b".to_string()), + vec![mk_ident("a"), token::ModSep, mk_ident("b")]); + }) } #[test] fn dcparsing_3() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a ::b".to_string()), - vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a ::b".to_string()), + vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); + }) } #[test] fn dcparsing_4() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a:: b".to_string()), - vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a:: b".to_string()), + vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); + }) } #[test] fn character_a() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, - token::Literal(token::Char(Symbol::intern("a")), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, + token::Literal(token::Char(Symbol::intern("a")), None)); + }) } #[test] fn character_space() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, - token::Literal(token::Char(Symbol::intern(" ")), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, + token::Literal(token::Char(Symbol::intern(" ")), None)); + }) } #[test] fn character_escaped() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, - token::Literal(token::Char(Symbol::intern("\\n")), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, + token::Literal(token::Char(Symbol::intern("\\n")), None)); + }) } #[test] fn lifetime_name() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, - token::Lifetime(Ident::from_str("'abc"))); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + 
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, + token::Lifetime(Ident::from_str("'abc"))); + }) } #[test] fn raw_string() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) - .next_token() - .tok, - token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) + .next_token() + .tok, + token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); + }) } #[test] fn literal_suffixes() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - macro_rules! test { - ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ - assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(Symbol::intern($tok_contents)), - Some(Symbol::intern("suffix")))); - // with a whitespace separator: - assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(Symbol::intern($tok_contents)), - None)); - }} - } - - test!("'a'", Char, "a"); - test!("b'a'", Byte, "a"); - test!("\"a\"", Str_, "a"); - test!("b\"a\"", ByteStr, "a"); - test!("1234", Integer, "1234"); - test!("0b101", Integer, "0b101"); - test!("0xABC", Integer, "0xABC"); - test!("1.0", Float, "1.0"); - test!("1.0e10", Float, "1.0e10"); - - assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, - token::Literal(token::Integer(Symbol::intern("2")), - Some(Symbol::intern("us")))); - assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::StrRaw(Symbol::intern("raw"), 3), - Some(Symbol::intern("suffix")))); - assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), - Some(Symbol::intern("suffix")))); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + macro_rules! 
test { + ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ + assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), + Some(Symbol::intern("suffix")))); + // with a whitespace separator: + assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), + None)); + }} + } + + test!("'a'", Char, "a"); + test!("b'a'", Byte, "a"); + test!("\"a\"", Str_, "a"); + test!("b\"a\"", ByteStr, "a"); + test!("1234", Integer, "1234"); + test!("0b101", Integer, "0b101"); + test!("0xABC", Integer, "0xABC"); + test!("1.0", Float, "1.0"); + test!("1.0e10", Float, "1.0e10"); + + assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, + token::Literal(token::Integer(Symbol::intern("2")), + Some(Symbol::intern("us")))); + assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, + token::Literal(token::StrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); + assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, + token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); + }) } #[test] @@ -1956,27 +1979,31 @@ mod tests { #[test] fn nested_block_comments() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); - match lexer.next_token().tok { - token::Comment => {} - _ => panic!("expected a comment!"), - } - assert_eq!(lexer.next_token().tok, - token::Literal(token::Char(Symbol::intern("a")), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); + match lexer.next_token().tok { + token::Comment => {} + _ => panic!("expected a comment!"), + } + assert_eq!(lexer.next_token().tok, + token::Literal(token::Char(Symbol::intern("a")), None)); + }) } #[test] fn crlf_comments() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); - let comment = lexer.next_token(); - assert_eq!(comment.tok, token::Comment); - assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); - assert_eq!(lexer.next_token().tok, token::Whitespace); - assert_eq!(lexer.next_token().tok, - token::DocComment(Symbol::intern("/// test"))); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); + let comment = lexer.next_token(); + assert_eq!(comment.tok, token::Comment); + assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); + assert_eq!(lexer.next_token().tok, token::Whitespace); + assert_eq!(lexer.next_token().tok, + token::DocComment(Symbol::intern("/// test"))); + }) } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 3fb0c209f70..ff097c362fe 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -680,6 +680,7 @@ mod tests { use util::parser_testing::{string_to_stream, string_to_parser}; use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt}; use util::ThinVec; + use with_globals; // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { @@ -691,156 +692,170 @@ mod tests { } #[test] fn 
path_exprs_1() {
-        assert!(string_to_expr("a".to_string()) ==
-                P(ast::Expr{
-                    id: ast::DUMMY_NODE_ID,
-                    node: ast::ExprKind::Path(None, ast::Path {
+        with_globals(|| {
+            assert!(string_to_expr("a".to_string()) ==
+                    P(ast::Expr{
+                        id: ast::DUMMY_NODE_ID,
+                        node: ast::ExprKind::Path(None, ast::Path {
+                            span: sp(0, 1),
+                            segments: vec![str2seg("a", 0, 1)],
+                        }),
                         span: sp(0, 1),
-                    segments: vec![str2seg("a", 0, 1)],
-                }),
-                span: sp(0, 1),
-                attrs: ThinVec::new(),
-            }))
+                        attrs: ThinVec::new(),
+                    }))
+        })
     }

     #[test] fn path_exprs_2 () {
-        assert!(string_to_expr("::a::b".to_string()) ==
-                P(ast::Expr {
-                    id: ast::DUMMY_NODE_ID,
-                    node: ast::ExprKind::Path(None, ast::Path {
+        with_globals(|| {
+            assert!(string_to_expr("::a::b".to_string()) ==
+                    P(ast::Expr {
+                        id: ast::DUMMY_NODE_ID,
+                        node: ast::ExprKind::Path(None, ast::Path {
+                            span: sp(0, 6),
+                            segments: vec![ast::PathSegment::crate_root(sp(0, 2)),
+                                           str2seg("a", 2, 3),
+                                           str2seg("b", 5, 6)]
+                        }),
                         span: sp(0, 6),
-                    segments: vec![ast::PathSegment::crate_root(sp(0, 2)),
-                                   str2seg("a", 2, 3),
-                                   str2seg("b", 5, 6)]
-                }),
-                span: sp(0, 6),
-                attrs: ThinVec::new(),
-            }))
+                        attrs: ThinVec::new(),
+                    }))
+        })
     }

     #[should_panic]
     #[test] fn bad_path_expr_1() {
-        string_to_expr("::abc::def::return".to_string());
+        with_globals(|| {
+            string_to_expr("::abc::def::return".to_string());
+        })
     }

     // check the token-tree-ization of macros
     #[test]
     fn string_to_tts_macro () {
-        let tts: Vec<_> =
-            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
-        let tts: &[TokenTree] = &tts[..];
-
-        match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
-            (
-                4,
-                Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
-                Some(&TokenTree::Token(_, token::Not)),
-                Some(&TokenTree::Token(_, token::Ident(name_zip))),
-                Some(&TokenTree::Delimited(_, ref macro_delimed)),
-            )
-            if name_macro_rules.name == "macro_rules"
-            && name_zip.name == "zip" => {
-                let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
-                match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
-                    (
-                        3,
-                        Some(&TokenTree::Delimited(_, ref first_delimed)),
-                        Some(&TokenTree::Token(_, token::FatArrow)),
-                        Some(&TokenTree::Delimited(_, ref second_delimed)),
-                    )
-                    if macro_delimed.delim == token::Paren => {
-                        let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
-                        match (tts.len(), tts.get(0), tts.get(1)) {
-                            (
-                                2,
-                                Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident))),
-                            )
-                            if first_delimed.delim == token::Paren && ident.name == "a" => {},
-                            _ => panic!("value 3: {:?}", *first_delimed),
-                        }
-                        let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
-                        match (tts.len(), tts.get(0), tts.get(1)) {
-                            (
-                                2,
-                                Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident))),
-                            )
-                            if second_delimed.delim == token::Paren
-                            && ident.name == "a" => {},
-                            _ => panic!("value 4: {:?}", *second_delimed),
-                        }
-                    },
-                    _ => panic!("value 2: {:?}", *macro_delimed),
-                }
-            },
-            _ => panic!("value: {:?}",tts),
-        }
+        with_globals(|| {
+            let tts: Vec<_> =
+                string_to_stream("macro_rules!
zip (($a)=>($a))".to_string()).trees().collect();
+            let tts: &[TokenTree] = &tts[..];
+
+            match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
+                (
+                    4,
+                    Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
+                    Some(&TokenTree::Token(_, token::Not)),
+                    Some(&TokenTree::Token(_, token::Ident(name_zip))),
+                    Some(&TokenTree::Delimited(_, ref macro_delimed)),
+                )
+                if name_macro_rules.name == "macro_rules"
+                && name_zip.name == "zip" => {
+                    let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
+                    match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
+                        (
+                            3,
+                            Some(&TokenTree::Delimited(_, ref first_delimed)),
+                            Some(&TokenTree::Token(_, token::FatArrow)),
+                            Some(&TokenTree::Delimited(_, ref second_delimed)),
+                        )
+                        if macro_delimed.delim == token::Paren => {
+                            let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
+                            match (tts.len(), tts.get(0), tts.get(1)) {
+                                (
+                                    2,
+                                    Some(&TokenTree::Token(_, token::Dollar)),
+                                    Some(&TokenTree::Token(_, token::Ident(ident))),
+                                )
+                                if first_delimed.delim == token::Paren && ident.name == "a" => {},
+                                _ => panic!("value 3: {:?}", *first_delimed),
+                            }
+                            let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
+                            match (tts.len(), tts.get(0), tts.get(1)) {
+                                (
+                                    2,
+                                    Some(&TokenTree::Token(_, token::Dollar)),
+                                    Some(&TokenTree::Token(_, token::Ident(ident))),
+                                )
+                                if second_delimed.delim == token::Paren
+                                && ident.name == "a" => {},
+                                _ => panic!("value 4: {:?}", *second_delimed),
+                            }
+                        },
+                        _ => panic!("value 2: {:?}", *macro_delimed),
+                    }
+                },
+                _ => panic!("value: {:?}",tts),
+            }
+        })
     }

     #[test]
     fn string_to_tts_1() {
-        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
-
-        let expected = TokenStream::concat(vec![
-            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
-            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
-            TokenTree::Delimited(
-                sp(5, 14),
-                tokenstream::Delimited {
-                    delim: token::DelimToken::Paren,
-                    tts: TokenStream::concat(vec![
-                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
-                        TokenTree::Token(sp(8, 9), token::Colon).into(),
-                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))).into(),
-                    ]).into(),
-                }).into(),
-            TokenTree::Delimited(
-                sp(15, 21),
-                tokenstream::Delimited {
-                    delim: token::DelimToken::Brace,
-                    tts: TokenStream::concat(vec![
-                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
-                        TokenTree::Token(sp(18, 19), token::Semi).into(),
-                    ]).into(),
-                }).into()
-        ]);
-
-        assert_eq!(tts, expected);
+        with_globals(|| {
+            let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
+
+            let expected = TokenStream::concat(vec![
+                TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
+                TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
+                TokenTree::Delimited(
+                    sp(5, 14),
+                    tokenstream::Delimited {
+                        delim: token::DelimToken::Paren,
+                        tts: TokenStream::concat(vec![
+                            TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
+                            TokenTree::Token(sp(8, 9), token::Colon).into(),
+                            TokenTree::Token(sp(10, 13),
+                                             token::Ident(Ident::from_str("i32"))).into(),
+                        ]).into(),
+                    }).into(),
+                TokenTree::Delimited(
+                    sp(15, 21),
+                    tokenstream::Delimited {
+                        delim: token::DelimToken::Brace,
+                        tts: TokenStream::concat(vec![
+                            TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
+                            TokenTree::Token(sp(18, 19), token::Semi).into(),
+                        ]).into(),
+                    }).into()
+            ]);
+
+            assert_eq!(tts, expected);
+        })
     }

     #[test] fn ret_expr() {
-
assert!(string_to_expr("return d".to_string()) == - P(ast::Expr{ - id: ast::DUMMY_NODE_ID, - node:ast::ExprKind::Ret(Some(P(ast::Expr{ + with_globals(|| { + assert!(string_to_expr("return d".to_string()) == + P(ast::Expr{ id: ast::DUMMY_NODE_ID, - node:ast::ExprKind::Path(None, ast::Path{ - span: sp(7, 8), - segments: vec![str2seg("d", 7, 8)], - }), - span:sp(7,8), + node:ast::ExprKind::Ret(Some(P(ast::Expr{ + id: ast::DUMMY_NODE_ID, + node:ast::ExprKind::Path(None, ast::Path{ + span: sp(7, 8), + segments: vec![str2seg("d", 7, 8)], + }), + span:sp(7,8), + attrs: ThinVec::new(), + }))), + span:sp(0,8), attrs: ThinVec::new(), - }))), - span:sp(0,8), - attrs: ThinVec::new(), - })) + })) + }) } #[test] fn parse_stmt_1 () { - assert!(string_to_stmt("b;".to_string()) == - Some(ast::Stmt { - node: ast::StmtKind::Expr(P(ast::Expr { - id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, ast::Path { - span:sp(0,1), - segments: vec![str2seg("b", 0, 1)], - }), - span: sp(0,1), - attrs: ThinVec::new()})), - id: ast::DUMMY_NODE_ID, - span: sp(0,1)})) - + with_globals(|| { + assert!(string_to_stmt("b;".to_string()) == + Some(ast::Stmt { + node: ast::StmtKind::Expr(P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, ast::Path { + span:sp(0,1), + segments: vec![str2seg("b", 0, 1)], + }), + span: sp(0,1), + attrs: ThinVec::new()})), + id: ast::DUMMY_NODE_ID, + span: sp(0,1)})) + }) } fn parser_done(p: Parser){ @@ -848,120 +863,128 @@ mod tests { } #[test] fn parse_ident_pat () { - let sess = ParseSess::new(FilePathMapping::empty()); - let mut parser = string_to_parser(&sess, "b".to_string()); - assert!(panictry!(parser.parse_pat()) - == P(ast::Pat{ - id: ast::DUMMY_NODE_ID, - node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable), - Spanned{ span:sp(0, 1), - node: Ident::from_str("b") - }, - None), - span: sp(0,1)})); - parser_done(parser); + with_globals(|| { + let sess = ParseSess::new(FilePathMapping::empty()); + let mut parser = string_to_parser(&sess, "b".to_string()); + assert!(panictry!(parser.parse_pat()) + == P(ast::Pat{ + id: ast::DUMMY_NODE_ID, + node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable), + Spanned{ span:sp(0, 1), + node: Ident::from_str("b") + }, + None), + span: sp(0,1)})); + parser_done(parser); + }) } // check the contents of the tt manually: #[test] fn parse_fundecl () { - // this test depends on the intern order of "fn" and "i32" - let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| { - m.map(|mut m| { - m.tokens = None; - m - }) - }); - assert_eq!(item, - Some( - P(ast::Item{ident:Ident::from_str("a"), - attrs:Vec::new(), - id: ast::DUMMY_NODE_ID, - tokens: None, - node: ast::ItemKind::Fn(P(ast::FnDecl { - inputs: vec![ast::Arg{ - ty: P(ast::Ty{id: ast::DUMMY_NODE_ID, - node: ast::TyKind::Path(None, ast::Path{ - span:sp(10,13), - segments: vec![str2seg("i32", 10, 13)], + with_globals(|| { + // this test depends on the intern order of "fn" and "i32" + let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| { + m.map(|mut m| { + m.tokens = None; + m + }) + }); + assert_eq!(item, + Some( + P(ast::Item{ident:Ident::from_str("a"), + attrs:Vec::new(), + id: ast::DUMMY_NODE_ID, + tokens: None, + node: ast::ItemKind::Fn(P(ast::FnDecl { + inputs: vec![ast::Arg{ + ty: P(ast::Ty{id: ast::DUMMY_NODE_ID, + node: ast::TyKind::Path(None, ast::Path{ + span:sp(10,13), + segments: vec![str2seg("i32", 10, 13)], + }), + span:sp(10,13) }), - span:sp(10,13) - }), - pat: P(ast::Pat { - id: 
ast::DUMMY_NODE_ID, - node: PatKind::Ident( - ast::BindingMode::ByValue( - ast::Mutability::Immutable), - Spanned{ - span: sp(6,7), - node: Ident::from_str("b")}, - None - ), - span: sp(6,7) - }), - id: ast::DUMMY_NODE_ID - }], - output: ast::FunctionRetTy::Default(sp(15, 15)), - variadic: false - }), - ast::Unsafety::Normal, - Spanned { - span: sp(0,2), - node: ast::Constness::NotConst, - }, - Abi::Rust, - ast::Generics{ - params: Vec::new(), - where_clause: ast::WhereClause { + pat: P(ast::Pat { id: ast::DUMMY_NODE_ID, - predicates: Vec::new(), + node: PatKind::Ident( + ast::BindingMode::ByValue( + ast::Mutability::Immutable), + Spanned{ + span: sp(6,7), + node: Ident::from_str("b")}, + None + ), + span: sp(6,7) + }), + id: ast::DUMMY_NODE_ID + }], + output: ast::FunctionRetTy::Default(sp(15, 15)), + variadic: false + }), + ast::Unsafety::Normal, + Spanned { + span: sp(0,2), + node: ast::Constness::NotConst, + }, + Abi::Rust, + ast::Generics{ + params: Vec::new(), + where_clause: ast::WhereClause { + id: ast::DUMMY_NODE_ID, + predicates: Vec::new(), + span: syntax_pos::DUMMY_SP, + }, span: syntax_pos::DUMMY_SP, }, - span: syntax_pos::DUMMY_SP, - }, - P(ast::Block { - stmts: vec![ast::Stmt { - node: ast::StmtKind::Semi(P(ast::Expr{ + P(ast::Block { + stmts: vec![ast::Stmt { + node: ast::StmtKind::Semi(P(ast::Expr{ + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, + ast::Path{ + span:sp(17,18), + segments: vec![str2seg("b", 17, 18)], + }), + span: sp(17,18), + attrs: ThinVec::new()})), id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, - ast::Path{ - span:sp(17,18), - segments: vec![str2seg("b", 17, 18)], - }), - span: sp(17,18), - attrs: ThinVec::new()})), + span: sp(17,19)}], id: ast::DUMMY_NODE_ID, - span: sp(17,19)}], - id: ast::DUMMY_NODE_ID, - rules: ast::BlockCheckMode::Default, // no idea - span: sp(15,21), - recovered: false, - })), - vis: respan(sp(0, 0), ast::VisibilityKind::Inherited), - span: sp(0,21)}))); + rules: ast::BlockCheckMode::Default, // no idea + span: sp(15,21), + recovered: false, + })), + vis: respan(sp(0, 0), ast::VisibilityKind::Inherited), + span: sp(0,21)}))); + }) } #[test] fn parse_use() { - let use_s = "use foo::bar::baz;"; - let vitem = string_to_item(use_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], use_s); - - let use_s = "use foo::bar as baz;"; - let vitem = string_to_item(use_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], use_s); + with_globals(|| { + let use_s = "use foo::bar::baz;"; + let vitem = string_to_item(use_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], use_s); + + let use_s = "use foo::bar as baz;"; + let vitem = string_to_item(use_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], use_s); + }) } #[test] fn parse_extern_crate() { - let ex_s = "extern crate foo;"; - let vitem = string_to_item(ex_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], ex_s); - - let ex_s = "extern crate foo as bar;"; - let vitem = string_to_item(ex_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], ex_s); + with_globals(|| { + let ex_s = "extern crate foo;"; + let vitem = string_to_item(ex_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], ex_s); + + let ex_s = "extern crate foo as bar;"; + let vitem = string_to_item(ex_s.to_string()).unwrap(); + let 
vitem_s = item_to_string(&vitem);
+            assert_eq!(&vitem_s[..], ex_s);
+        })
     }

     fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
@@ -988,31 +1011,36 @@ mod tests {
     }

     #[test] fn span_of_self_arg_pat_idents_are_correct() {
-
-        let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
-                    "impl z { fn a (&mut self, &myarg: i32) {} }",
-                    "impl z { fn a (&'a self, &myarg: i32) {} }",
-                    "impl z { fn a (self, &myarg: i32) {} }",
-                    "impl z { fn a (self: Foo, &myarg: i32) {} }",
-                    ];
-
-        for &src in &srcs {
-            let spans = get_spans_of_pat_idents(src);
-            let (lo, hi) = (spans[0].lo(), spans[0].hi());
-            assert!("self" == &src[lo.to_usize()..hi.to_usize()],
-                    "\"{}\" != \"self\". src=\"{}\"",
-                    &src[lo.to_usize()..hi.to_usize()], src)
-        }
+        with_globals(|| {
+
+            let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
+                        "impl z { fn a (&mut self, &myarg: i32) {} }",
+                        "impl z { fn a (&'a self, &myarg: i32) {} }",
+                        "impl z { fn a (self, &myarg: i32) {} }",
+                        "impl z { fn a (self: Foo, &myarg: i32) {} }",
+                        ];
+
+            for &src in &srcs {
+                let spans = get_spans_of_pat_idents(src);
+                let (lo, hi) = (spans[0].lo(), spans[0].hi());
+                assert!("self" == &src[lo.to_usize()..hi.to_usize()],
+                        "\"{}\" != \"self\". src=\"{}\"",
+                        &src[lo.to_usize()..hi.to_usize()], src)
+            }
+        })
    }

     #[test] fn parse_exprs () {
-        // just make sure that they parse....
-        string_to_expr("3 + 4".to_string());
-        string_to_expr("a::z.froob(b,&(987+3))".to_string());
+        with_globals(|| {
+            // just make sure that they parse....
+            string_to_expr("3 + 4".to_string());
+            string_to_expr("a::z.froob(b,&(987+3))".to_string());
+        })
     }

     #[test] fn attrs_fix_bug () {
-        string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+        with_globals(|| {
+            string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
                    -> Result<Box<Writer>, String> {
     #[cfg(windows)]
     fn wb() -> c_int {
@@ -1024,49 +1052,54 @@ mod tests {
     let mut fflags: c_int = wb();
 }".to_string());
+        })
     }

     #[test] fn crlf_doc_comments() {
-        let sess = ParseSess::new(FilePathMapping::empty());
-
-        let name = FileName::Custom("source".to_string());
-        let source = "/// doc comment\r\nfn foo() {}".to_string();
-        let item = parse_item_from_source_str(name.clone(), source, &sess)
-            .unwrap().unwrap();
-        let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(doc, "/// doc comment");
-
-        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
-        let item = parse_item_from_source_str(name.clone(), source, &sess)
-            .unwrap().unwrap();
-        let docs = item.attrs.iter().filter(|a| a.path == "doc")
-            .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
-        let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
-        assert_eq!(&docs[..], b);
-
-        let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
-        let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
-        let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(doc, "/** doc comment\n * with CRLF */");
+        with_globals(|| {
+            let sess = ParseSess::new(FilePathMapping::empty());
+
+            let name = FileName::Custom("source".to_string());
+            let source = "/// doc comment\r\nfn foo() {}".to_string();
+            let item = parse_item_from_source_str(name.clone(), source, &sess)
+                .unwrap().unwrap();
+            let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
+            assert_eq!(doc, "/// doc comment");
+
+            let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
+            let item = parse_item_from_source_str(name.clone(), source, &sess)
+
.unwrap().unwrap();
+            let docs = item.attrs.iter().filter(|a| a.path == "doc")
+                .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
+            let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
+            assert_eq!(&docs[..], b);
+
+            let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
+            let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
+            let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
+            assert_eq!(doc, "/** doc comment\n * with CRLF */");
+        });
     }

     #[test]
     fn ttdelim_span() {
-        let sess = ParseSess::new(FilePathMapping::empty());
-        let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(),
-            "foo!( fn main() { body } )".to_string(), &sess).unwrap();
-
-        let tts: Vec<_> = match expr.node {
-            ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
-            _ => panic!("not a macro"),
-        };
+        with_globals(|| {
+            let sess = ParseSess::new(FilePathMapping::empty());
+            let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(),
+                "foo!( fn main() { body } )".to_string(), &sess).unwrap();
+
+            let tts: Vec<_> = match expr.node {
+                ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
+                _ => panic!("not a macro"),
+            };

-        let span = tts.iter().rev().next().unwrap().span();
+            let span = tts.iter().rev().next().unwrap().span();

-        match sess.codemap().span_to_snippet(span) {
-            Ok(s) => assert_eq!(&s[..], "{ body }"),
-            Err(_) => panic!("could not get snippet"),
-        }
+            match sess.codemap().span_to_snippet(span) {
+                Ok(s) => assert_eq!(&s[..], "{ body }"),
+                Err(_) => panic!("could not get snippet"),
+            }
+        });
     }

     // This tests that when parsing a string (rather than a file) we don't try
     // to read in a file for a module declaration and just parse a stub.
     // See `recurse_into_file_modules` in the parser.
#[test] fn out_of_line_mod() { - let sess = ParseSess::new(FilePathMapping::empty()); - let item = parse_item_from_source_str( - PathBuf::from("foo").into(), - "mod foo { struct S; mod this_does_not_exist; }".to_owned(), - &sess, - ).unwrap().unwrap(); - - if let ast::ItemKind::Mod(ref m) = item.node { - assert!(m.items.len() == 2); - } else { - panic!(); - } + with_globals(|| { + let sess = ParseSess::new(FilePathMapping::empty()); + let item = parse_item_from_source_str( + PathBuf::from("foo").into(), + "mod foo { struct S; mod this_does_not_exist; }".to_owned(), + &sess, + ).unwrap().unwrap(); + + if let ast::ItemKind::Mod(ref m) = item.node { + assert!(m.items.len() == 2); + } else { + panic!(); + } + }); } } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 77afafbb4e0..1cf2b7a44bc 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -3178,36 +3178,41 @@ mod tests { use ast; use codemap; use syntax_pos; + use with_globals; #[test] fn test_fun_to_string() { - let abba_ident = ast::Ident::from_str("abba"); + with_globals(|| { + let abba_ident = ast::Ident::from_str("abba"); - let decl = ast::FnDecl { - inputs: Vec::new(), - output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP), - variadic: false - }; - let generics = ast::Generics::default(); - assert_eq!(fun_to_string(&decl, ast::Unsafety::Normal, - ast::Constness::NotConst, - abba_ident, &generics), - "fn abba()"); + let decl = ast::FnDecl { + inputs: Vec::new(), + output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP), + variadic: false + }; + let generics = ast::Generics::default(); + assert_eq!(fun_to_string(&decl, ast::Unsafety::Normal, + ast::Constness::NotConst, + abba_ident, &generics), + "fn abba()"); + }) } #[test] fn test_variant_to_string() { - let ident = ast::Ident::from_str("principal_skinner"); - - let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ { - name: ident, - attrs: Vec::new(), - // making this up as I go.... ? - data: ast::VariantData::Unit(ast::DUMMY_NODE_ID), - disr_expr: None, - }); - - let varstr = variant_to_string(&var); - assert_eq!(varstr, "principal_skinner"); + with_globals(|| { + let ident = ast::Ident::from_str("principal_skinner"); + + let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ { + name: ident, + attrs: Vec::new(), + // making this up as I go.... ? + data: ast::VariantData::Unit(ast::DUMMY_NODE_ID), + disr_expr: None, + }); + + let varstr = variant_to_string(&var); + assert_eq!(varstr, "principal_skinner"); + }) } } diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs index 772334e3ef1..81dcc1998ed 100644 --- a/src/libsyntax/test_snippet.rs +++ b/src/libsyntax/test_snippet.rs @@ -18,6 +18,7 @@ use std::str; use std::sync::{Arc, Mutex}; use std::path::Path; use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan}; +use with_globals; /// Identify a position in the text by the Nth occurrence of a string. 
struct Position {
@@ -46,37 +47,39 @@ impl<T: Write> Write for Shared<T> {
 }

 fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
-    let output = Arc::new(Mutex::new(Vec::new()));
+    with_globals(|| {
+        let output = Arc::new(Mutex::new(Vec::new()));

-    let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty()));
-    code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text);
+        let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty()));
+        code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text);

-    let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
-    let mut msp = MultiSpan::from_span(primary_span);
-    for span_label in span_labels {
-        let span = make_span(&file_text, &span_label.start, &span_label.end);
-        msp.push_span_label(span, span_label.label.to_string());
-        println!("span: {:?} label: {:?}", span, span_label.label);
-        println!("text: {:?}", code_map.span_to_snippet(span));
-    }
+        let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
+        let mut msp = MultiSpan::from_span(primary_span);
+        for span_label in span_labels {
+            let span = make_span(&file_text, &span_label.start, &span_label.end);
+            msp.push_span_label(span, span_label.label.to_string());
+            println!("span: {:?} label: {:?}", span, span_label.label);
+            println!("text: {:?}", code_map.span_to_snippet(span));
+        }

-    let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }),
-                                     Some(code_map.clone()),
-                                     false,
-                                     false);
-    let handler = Handler::with_emitter(true, false, Box::new(emitter));
-    handler.span_err(msp, "foo");
+        let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }),
+                                         Some(code_map.clone()),
+                                         false,
+                                         false);
+        let handler = Handler::with_emitter(true, false, Box::new(emitter));
+        handler.span_err(msp, "foo");

-    assert!(expected_output.chars().next() == Some('\n'),
-            "expected output should begin with newline");
-    let expected_output = &expected_output[1..];
+        assert!(expected_output.chars().next() == Some('\n'),
+                "expected output should begin with newline");
+        let expected_output = &expected_output[1..];

-    let bytes = output.lock().unwrap();
-    let actual_output = str::from_utf8(&bytes).unwrap();
-    println!("expected output:\n------\n{}------", expected_output);
-    println!("actual output:\n------\n{}------", actual_output);
+        let bytes = output.lock().unwrap();
+        let actual_output = str::from_utf8(&bytes).unwrap();
+        println!("expected output:\n------\n{}------", expected_output);
+        println!("actual output:\n------\n{}------", actual_output);

-    assert!(expected_output == actual_output)
+        assert!(expected_output == actual_output)
+    })
 }

 fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index ad04b6ab2b5..1219e909e12 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -599,6 +599,7 @@ impl Hash for ThinTokenStream {
 mod tests {
     use super::*;
     use syntax::ast::Ident;
+    use with_globals;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
     use parse::token::Token;
     use util::parser_testing::string_to_stream;
@@ -613,67 +614,83 @@ mod tests {

     #[test]
     fn test_concat() {
-        let test_res = string_to_ts("foo::bar::baz");
-        let test_fst = string_to_ts("foo::bar");
-        let test_snd = string_to_ts("::baz");
-        let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
-        assert_eq!(test_res.trees().count(), 5);
-        assert_eq!(eq_res.trees().count(), 5);
-
assert_eq!(test_res.eq_unspanned(&eq_res), true);
+        with_globals(|| {
+            let test_res = string_to_ts("foo::bar::baz");
+            let test_fst = string_to_ts("foo::bar");
+            let test_snd = string_to_ts("::baz");
+            let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
+            assert_eq!(test_res.trees().count(), 5);
+            assert_eq!(eq_res.trees().count(), 5);
+            assert_eq!(test_res.eq_unspanned(&eq_res), true);
+        })
     }

     #[test]
     fn test_to_from_bijection() {
-        let test_start = string_to_ts("foo::bar(baz)");
-        let test_end = test_start.trees().collect();
-        assert_eq!(test_start, test_end)
+        with_globals(|| {
+            let test_start = string_to_ts("foo::bar(baz)");
+            let test_end = test_start.trees().collect();
+            assert_eq!(test_start, test_end)
+        })
     }

     #[test]
     fn test_eq_0() {
-        let test_res = string_to_ts("foo");
-        let test_eqs = string_to_ts("foo");
-        assert_eq!(test_res, test_eqs)
+        with_globals(|| {
+            let test_res = string_to_ts("foo");
+            let test_eqs = string_to_ts("foo");
+            assert_eq!(test_res, test_eqs)
+        })
     }

     #[test]
     fn test_eq_1() {
-        let test_res = string_to_ts("::bar::baz");
-        let test_eqs = string_to_ts("::bar::baz");
-        assert_eq!(test_res, test_eqs)
+        with_globals(|| {
+            let test_res = string_to_ts("::bar::baz");
+            let test_eqs = string_to_ts("::bar::baz");
+            assert_eq!(test_res, test_eqs)
+        })
     }

     #[test]
     fn test_eq_3() {
-        let test_res = string_to_ts("");
-        let test_eqs = string_to_ts("");
-        assert_eq!(test_res, test_eqs)
+        with_globals(|| {
+            let test_res = string_to_ts("");
+            let test_eqs = string_to_ts("");
+            assert_eq!(test_res, test_eqs)
+        })
     }

     #[test]
     fn test_diseq_0() {
-        let test_res = string_to_ts("::bar::baz");
-        let test_eqs = string_to_ts("bar::baz");
-        assert_eq!(test_res == test_eqs, false)
+        with_globals(|| {
+            let test_res = string_to_ts("::bar::baz");
+            let test_eqs = string_to_ts("bar::baz");
+            assert_eq!(test_res == test_eqs, false)
+        })
     }

     #[test]
     fn test_diseq_1() {
-        let test_res = string_to_ts("(bar,baz)");
-        let test_eqs = string_to_ts("bar,baz");
-        assert_eq!(test_res == test_eqs, false)
+        with_globals(|| {
+            let test_res = string_to_ts("(bar,baz)");
+            let test_eqs = string_to_ts("bar,baz");
+            assert_eq!(test_res == test_eqs, false)
+        })
     }

     #[test]
     fn test_is_empty() {
-        let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
-        let test1: TokenStream =
-            TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into();
-        let test2 = string_to_ts("foo(bar::baz)");
-
-        assert_eq!(test0.is_empty(), true);
-        assert_eq!(test1.is_empty(), false);
-        assert_eq!(test2.is_empty(), false);
+        with_globals(|| {
+            let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
+            let test1: TokenStream =
+                TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into();
+            let test2 = string_to_ts("foo(bar::baz)");
+
+            assert_eq!(test0.is_empty(), true);
+            assert_eq!(test1.is_empty(), false);
+            assert_eq!(test2.is_empty(), false);
+        })
     }

     #[test]
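
Note appended after the patch (not part of the commit): the change above moves
per-thread thread_local! statics into a single Globals struct carried in a
scoped-tls slot, which is why every test in the diff gains a
with_globals(|| { ... }) wrapper -- attribute and span state now only exists
inside such a scope. Below is a minimal, self-contained sketch of that pattern
under stated assumptions: a standalone 2015-edition crate depending on
scoped-tls 0.1, with std::sync::Mutex standing in for
rustc_data_structures::sync::Lock and a bare u64 id standing in for AttrId.

#[macro_use]
extern crate scoped_tls;

use std::sync::Mutex;

// Stand-in for the patch's `Globals`: one bitset instead of the
// used_attrs/known_attrs pair.
struct Globals {
    // One bit per id, packed 64 to a word, as in attr.rs above.
    used_attrs: Mutex<Vec<u64>>,
}

// The slot is only readable while a `GLOBALS.set(..)` frame is on the stack.
scoped_thread_local!(static GLOBALS: Globals);

pub fn with_globals<F, R>(f: F) -> R
    where F: FnOnce() -> R
{
    let globals = Globals { used_attrs: Mutex::new(Vec::new()) };
    GLOBALS.set(&globals, f)
}

pub fn mark_used(id: u64) {
    // Panics if called outside `with_globals`, which is the point:
    // every user of the global state must be inside an explicit session.
    GLOBALS.with(|globals| {
        let mut slot = globals.used_attrs.lock().unwrap();
        let (idx, shift) = ((id / 64) as usize, id % 64);
        if slot.len() <= idx {
            slot.resize(idx + 1, 0); // grow the bitset on demand
        }
        slot[idx] |= 1 << shift;
    })
}

pub fn is_used(id: u64) -> bool {
    GLOBALS.with(|globals| {
        let slot = globals.used_attrs.lock().unwrap();
        let (idx, shift) = ((id / 64) as usize, id % 64);
        slot.get(idx).map(|bits| bits & (1 << shift) != 0).unwrap_or(false)
    })
}

fn main() {
    with_globals(|| {
        assert!(!is_used(130));
        mark_used(130);
        assert!(is_used(130));
    });
    // A second session starts from fresh state; reading outside any
    // session panics instead of silently using a stale thread local.
    with_globals(|| assert!(!is_used(130)));
}

Unlike thread_local!, nothing here outlives the closure passed to
with_globals, and because the state sits behind a lock rather than a RefCell,
threads entered under the same scope could in principle share one Globals
instance.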