From b74e97cf42e647d87d67a03c134a0494b6aaa811 Mon Sep 17 00:00:00 2001
From: John Kåre Alsaker <john.kare.alsaker@gmail.com>
Date: Tue, 27 Feb 2018 17:11:14 +0100
Subject: Replace Rc with Lrc for shared data

---
 src/libsyntax/parse/lexer/mod.rs | 42 +++++++++++++++++++---------------------
 src/libsyntax/parse/mod.rs       | 20 +++++++++----------
 src/libsyntax/parse/token.rs     |  6 +++---
 3 files changed, 33 insertions(+), 35 deletions(-)

(limited to 'src/libsyntax/parse')

diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 11ab84a5729..b5368b3ecab 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -20,7 +20,7 @@ use std_unicode::property::Pattern_White_Space;
 use std::borrow::Cow;
 use std::char;
 use std::mem::replace;
-use std::rc::Rc;
+use rustc_data_structures::sync::Lrc;
 
 pub mod comments;
 mod tokentrees;
@@ -48,7 +48,7 @@ pub struct StringReader<'a> {
     pub col: CharPos,
     /// The current character (which has been read from self.pos)
     pub ch: Option<char>,
-    pub filemap: Rc<syntax_pos::FileMap>,
+    pub filemap: Lrc<syntax_pos::FileMap>,
     /// If Some, stop reading the source at this position (inclusive).
     pub terminator: Option<BytePos>,
     /// Whether to record new-lines and multibyte chars in filemap.
@@ -61,7 +61,7 @@ pub struct StringReader<'a> {
     pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
-    source_text: Rc<String>,
+    source_text: Lrc<String>,
     /// Stack of open delimiters and their spans. Used for error message.
     token: token::Token,
     span: Span,
@@ -152,13 +152,13 @@ impl<'a> StringReader<'a> {
 
 impl<'a> StringReader<'a> {
     /// For comments.rs, which hackily pokes into next_pos and ch
-    pub fn new_raw(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+    pub fn new_raw(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw_internal(sess, filemap);
         sr.bump();
         sr
     }
 
-    fn new_raw_internal(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+    fn new_raw_internal(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>) -> Self {
         if filemap.src.is_none() {
             sess.span_diagnostic.bug(&format!("Cannot lex filemap without source: {}",
                                               filemap.name));
@@ -187,7 +187,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn new(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+    pub fn new(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw(sess, filemap);
         if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
@@ -1747,9 +1747,7 @@ mod tests {
     use std::collections::HashSet;
     use std::io;
     use std::path::PathBuf;
-    use std::rc::Rc;
-
-    fn mk_sess(cm: Rc<CodeMap>) -> ParseSess {
+    fn mk_sess(cm: Lrc<CodeMap>) -> ParseSess {
         let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
                                                           Some(cm.clone()),
                                                           false,
@@ -1776,7 +1774,7 @@
 
     #[test]
     fn t1() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         let mut string_reader = setup(&cm,
                                       &sh,
@@ -1820,7 +1818,7 @@
 
     #[test]
     fn doublecolonparsing() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         check_tokenization(setup(&cm, &sh, "a b".to_string()),
                            vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
@@ -1828,7 +1826,7 @@
 
     #[test]
     fn dcparsing_2() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         check_tokenization(setup(&cm, &sh, "a::b".to_string()),
vec![mk_ident("a"), token::ModSep, mk_ident("b")]); @@ -1836,7 +1834,7 @@ mod tests { #[test] fn dcparsing_3() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a ::b".to_string()), vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); @@ -1844,7 +1842,7 @@ mod tests { #[test] fn dcparsing_4() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a:: b".to_string()), vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); @@ -1852,7 +1850,7 @@ mod tests { #[test] fn character_a() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, token::Literal(token::Char(Symbol::intern("a")), None)); @@ -1860,7 +1858,7 @@ mod tests { #[test] fn character_space() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, token::Literal(token::Char(Symbol::intern(" ")), None)); @@ -1868,7 +1866,7 @@ mod tests { #[test] fn character_escaped() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, token::Literal(token::Char(Symbol::intern("\\n")), None)); @@ -1876,7 +1874,7 @@ mod tests { #[test] fn lifetime_name() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, token::Lifetime(Ident::from_str("'abc"))); @@ -1884,7 +1882,7 @@ mod tests { #[test] fn raw_string() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) .next_token() @@ -1894,7 +1892,7 @@ mod tests { #[test] fn literal_suffixes() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); macro_rules! test { ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ @@ -1938,7 +1936,7 @@ mod tests { #[test] fn nested_block_comments() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); match lexer.next_token().tok { @@ -1951,7 +1949,7 @@ mod tests { #[test] fn crlf_comments() { - let cm = Rc::new(CodeMap::new(FilePathMapping::empty())); + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 06eb64e157c..1d9af682fec 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -10,6 +10,7 @@ //! 
 //! The main parser interface
 
+use rustc_data_structures::sync::Lrc;
 use ast::{self, CrateConfig};
 use codemap::{CodeMap, FilePathMapping};
 use syntax_pos::{self, Span, FileMap, NO_EXPANSION, FileName};
@@ -25,7 +26,6 @@ use std::cell::RefCell;
 use std::collections::HashSet;
 use std::iter;
 use std::path::{Path, PathBuf};
-use std::rc::Rc;
 use std::str;
 
 pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
@@ -52,12 +52,12 @@ pub struct ParseSess {
     pub non_modrs_mods: RefCell<Vec<(ast::Ident, Span)>>,
     /// Used to determine and report recursive mod inclusions
     included_mod_stack: RefCell<Vec<PathBuf>>,
-    code_map: Rc<CodeMap>,
+    code_map: Lrc<CodeMap>,
 }
 
 impl ParseSess {
     pub fn new(file_path_mapping: FilePathMapping) -> Self {
-        let cm = Rc::new(CodeMap::new(file_path_mapping));
+        let cm = Lrc::new(CodeMap::new(file_path_mapping));
         let handler = Handler::with_tty_emitter(ColorConfig::Auto,
                                                 true,
                                                 false,
@@ -65,7 +65,7 @@
         ParseSess::with_span_handler(handler, cm)
     }
 
-    pub fn with_span_handler(handler: Handler, code_map: Rc<CodeMap>) -> ParseSess {
+    pub fn with_span_handler(handler: Handler, code_map: Lrc<CodeMap>) -> ParseSess {
         ParseSess {
             span_diagnostic: handler,
             unstable_features: UnstableFeatures::from_environment(),
@@ -183,7 +183,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 }
 
 /// Given a filemap and config, return a parser
-pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
+pub fn filemap_to_parser(sess: & ParseSess, filemap: Lrc<FileMap>) -> Parser {
     let end_pos = filemap.end_pos;
     let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None));
 
@@ -206,7 +206,7 @@ pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
 /// Given a session and a path and an optional span (for error reporting),
 /// add the path to the session's codemap and return the new filemap.
 fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
-                   -> Rc<FileMap> {
+                   -> Lrc<FileMap> {
     match sess.codemap().load_file(path) {
         Ok(filemap) => filemap,
         Err(e) => {
@@ -220,7 +220,7 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>, override_span: Option<Span>)
+pub fn filemap_to_stream(sess: &ParseSess, filemap: Lrc<FileMap>, override_span: Option<Span>)
                          -> TokenStream {
     let mut srdr = lexer::StringReader::new(sess, filemap);
     srdr.override_span = override_span;
@@ -422,7 +422,7 @@ pub fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Hand
             (true, Some(LitKind::ByteStr(byte_str_lit(&i.as_str()))))
         }
         token::ByteStrRaw(i, _) => {
-            (true, Some(LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))))
+            (true, Some(LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))))
         }
     }
 }
@@ -496,7 +496,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
     }
 }
 
-pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
+pub fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> {
     let mut res = Vec::with_capacity(lit.len());
 
     // FIXME #8372: This could be a for-loop if it didn't borrow the iterator
@@ -553,7 +553,7 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
         }
     }
 
-    Rc::new(res)
+    Lrc::new(res)
 }
 
 pub fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 7fbe781e9a1..097a2eb89fd 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -27,7 +27,7 @@ use tokenstream;
 
 use std::cell::Cell;
 use std::{cmp, fmt};
-use std::rc::Rc;
+use rustc_data_structures::sync::Lrc;
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum BinOpToken {
@@ -180,7 +180,7 @@ pub enum Token {
 
     // The `LazyTokenStream` is a pure function of the `Nonterminal`,
     // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
-    Interpolated(Rc<(Nonterminal, LazyTokenStream)>),
+    Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
     // Can be expanded into several tokens.
     /// Doc comment
     DocComment(ast::Name),
@@ -200,7 +200,7 @@ pub enum Token {
 
 impl Token {
     pub fn interpolated(nt: Nonterminal) -> Token {
-        Token::Interpolated(Rc::new((nt, LazyTokenStream::new())))
+        Token::Interpolated(Lrc::new((nt, LazyTokenStream::new())))
     }
 
     /// Returns `true` if the token starts with '>'.
-- 
cgit 1.4.1-3-g733a5
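
For readers unfamiliar with `Lrc`: it is re-exported from `rustc_data_structures::sync`
and lets the compiler pick its shared-pointer type per build configuration, resolving to
`std::rc::Rc` in the default single-threaded build and to `std::sync::Arc` when the
experimental parallel mode is enabled. So this patch changes no behavior in default
builds; it only makes these types ready for cross-thread sharing. Below is a minimal
sketch of the alias, simplified from the module's cfg_if! dispatch; the `parallel_queries`
cfg name is an assumption based on the compiler of this era, not quoted from this patch.

    // Simplified sketch of rustc_data_structures::sync::Lrc.
    // In the real module this sits inside a cfg_if! block; the
    // `parallel_queries` cfg name is an assumption from this era of rustc.

    // Single-threaded build: plain, non-atomic reference counting.
    #[cfg(not(parallel_queries))]
    pub use std::rc::Rc as Lrc;

    // Parallel build: atomic reference counting, safe to share across threads.
    #[cfg(parallel_queries)]
    pub use std::sync::Arc as Lrc;

Because both `Rc<T>` and `Arc<T>` offer the same `new`/`clone`/deref surface used here,
call sites like `Lrc::new(CodeMap::new(...))` compile unchanged under either configuration.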