author    Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2017-07-31 23:04:34 +0300
committer Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2017-08-30 01:38:54 +0300
commit    3da868dcb67e85ccbd6d64cdcc29829b1399de15
tree      379d1e6e3d7a0b3bca1a5d6f73fd4443b841b0a6 /src/libsyntax/parse/lexer
parent    630e02f25be1e65b316857c5bd8022da0b96db40
Make fields of `Span` private
Diffstat (limited to 'src/libsyntax/parse/lexer')
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs        2
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs            28
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs      7
-rw-r--r--  src/libsyntax/parse/lexer/unicode_chars.rs   2
4 files changed, 15 insertions, 24 deletions
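
For readers skimming the hunks below: the commit swaps direct field access on `Span` (`sp.lo`, `sp.hi`, struct literals, and struct-update syntax such as `Span { hi: ..., ..pre_span }`) for a constructor plus accessor and builder methods. The following is a minimal sketch of the shape the updated call sites assume; the stand-in `BytePos`/`SyntaxContext` definitions and the method bodies are illustrative only, not the actual `syntax_pos` source.

    // Minimal stand-ins so the sketch compiles on its own;
    // the real types live in the `syntax_pos` crate.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    pub struct BytePos(pub u32);
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    pub struct SyntaxContext(u32);
    pub const NO_EXPANSION: SyntaxContext = SyntaxContext(0);

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    pub struct Span {
        lo: BytePos,         // now private: read via `span.lo()`
        hi: BytePos,         // now private: read via `span.hi()`
        ctxt: SyntaxContext, // now private: read via `span.ctxt()`
    }

    impl Span {
        // Replaces struct literals such as `Span { lo, hi, ctxt: NO_EXPANSION }`.
        pub fn new(lo: BytePos, hi: BytePos, ctxt: SyntaxContext) -> Span {
            Span { lo, hi, ctxt }
        }
        pub fn lo(self) -> BytePos { self.lo }
        pub fn hi(self) -> BytePos { self.hi }
        pub fn ctxt(self) -> SyntaxContext { self.ctxt }
        // Builders replace struct-update syntax and direct field mutation.
        pub fn with_lo(self, lo: BytePos) -> Span { Span::new(lo, self.hi, self.ctxt) }
        pub fn with_hi(self, hi: BytePos) -> Span { Span::new(self.lo, hi, self.ctxt) }
    }

    fn main() {
        // Mirrors the updated lexer test: build a span with the constructor...
        let sp = Span::new(BytePos(21), BytePos(23), NO_EXPANSION);
        assert_eq!((sp.lo(), sp.hi()), (BytePos(21), BytePos(23)));
        // ...and collapse it with a builder instead of `span.hi = span.lo`.
        let collapsed = sp.with_hi(sp.lo());
        assert_eq!(collapsed.lo(), collapsed.hi());
    }
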
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index f65fffebe33..fb558d1a58f 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -386,7 +386,7 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: String, srdr: &mut R
                 debug!("tok lit: {}", s);
                 literals.push(Literal {
                     lit: s.to_string(),
-                    pos: sp.lo,
+                    pos: sp.lo(),
                 });
             })
         } else {
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 527d2e41396..f26a0460905 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -71,7 +71,7 @@ pub struct StringReader<'a> {
 
 impl<'a> StringReader<'a> {
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
-        unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION})
+        unwrap_or!(self.override_span, Span::new(lo, hi, NO_EXPANSION))
     }
 
     fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
@@ -190,20 +190,20 @@ impl<'a> StringReader<'a> {
     }
 
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
-        let begin = sess.codemap().lookup_byte_offset(span.lo);
-        let end = sess.codemap().lookup_byte_offset(span.hi);
+        let begin = sess.codemap().lookup_byte_offset(span.lo());
+        let end = sess.codemap().lookup_byte_offset(span.hi());
 
         // Make the range zero-length if the span is invalid.
-        if span.lo > span.hi || begin.fm.start_pos != end.fm.start_pos {
-            span.hi = span.lo;
+        if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos {
+            span = span.with_hi(span.lo());
         }
 
         let mut sr = StringReader::new_raw_internal(sess, begin.fm);
 
         // Seek the lexer to the right byte range.
         sr.save_new_lines_and_multibyte = false;
-        sr.next_pos = span.lo;
-        sr.terminator = Some(span.hi);
+        sr.next_pos = span.lo();
+        sr.terminator = Some(span.hi());
 
         sr.bump();
 
@@ -1745,11 +1745,7 @@ mod tests {
         let tok1 = string_reader.next_token();
         let tok2 = TokenAndSpan {
             tok: token::Ident(id),
-            sp: Span {
-                lo: BytePos(21),
-                hi: BytePos(23),
-                ctxt: NO_EXPANSION,
-            },
+            sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
         };
         assert_eq!(tok1, tok2);
         assert_eq!(string_reader.next_token().tok, token::Whitespace);
@@ -1759,11 +1755,7 @@ mod tests {
         let tok3 = string_reader.next_token();
         let tok4 = TokenAndSpan {
             tok: token::Ident(Ident::from_str("main")),
-            sp: Span {
-                lo: BytePos(24),
-                hi: BytePos(28),
-                ctxt: NO_EXPANSION,
-            },
+            sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
         };
         assert_eq!(tok3, tok4);
         // the lparen is already read:
@@ -1921,7 +1913,7 @@ mod tests {
         let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
         let comment = lexer.next_token();
         assert_eq!(comment.tok, token::Comment);
-        assert_eq!((comment.sp.lo, comment.sp.hi), (BytePos(0), BytePos(7)));
+        assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));
         assert_eq!(lexer.next_token().tok, token::Whitespace);
         assert_eq!(lexer.next_token().tok,
                    token::DocComment(Symbol::intern("/// test")));
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index ad389ab510a..a2c81e24754 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -11,7 +11,6 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use syntax_pos::Span;
 use tokenstream::{Delimited, TokenStream, TokenTree};
 
 impl<'a> StringReader<'a> {
@@ -20,7 +19,7 @@ impl<'a> StringReader<'a> {
         let mut tts = Vec::new();
         while self.token != token::Eof {
             let tree = self.parse_token_tree()?;
-            let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token);
+            let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
             tts.push(if is_joint { tree.joint() } else { tree.into() });
         }
         Ok(TokenStream::concat(tts))
@@ -40,7 +39,7 @@ impl<'a> StringReader<'a> {
                     return TokenStream::concat(tts);
                 }
             };
-            let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token);
+            let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
             tts.push(if is_joint { tree.joint() } else { tree.into() });
         }
     }
@@ -69,7 +68,7 @@ impl<'a> StringReader<'a> {
                 let tts = self.parse_token_trees_until_close_delim();
 
                 // Expand to cover the entire delimited token tree
-                let span = Span { hi: self.span.hi, ..pre_span };
+                let span = pre_span.with_hi(self.span.hi());
 
                 match self.token {
                     // Correct delimiter.
diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs
index c36fdef2d4c..39b5482a066 100644
--- a/src/libsyntax/parse/lexer/unicode_chars.rs
+++ b/src/libsyntax/parse/lexer/unicode_chars.rs
@@ -340,7 +340,7 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>,
     .iter()
     .find(|&&(c, _, _)| c == ch)
     .map(|&(_, u_name, ascii_char)| {
-        let span = Span { lo: reader.pos, hi: reader.next_pos, ctxt: NO_EXPANSION };
+        let span = Span::new(reader.pos, reader.next_pos, NO_EXPANSION);
         match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) {
             Some(&(ascii_char, ascii_name)) => {
                 let msg =