diff options
| author | bors <bors@rust-lang.org> | 2014-10-29 10:22:01 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2014-10-29 10:22:01 +0000 |
| commit | 3bc545373df4c81ba223a8bece14cbc27eb85a4d (patch) | |
| tree | 6f2bc6000e1b8b10a1a74aedc57fa9d1f0fc565b /src/libsyntax/print | |
| parent | 124508dea1caf213886e5e1a02d425cac8dd0b54 (diff) | |
| parent | 665ad9c175f746b78c7eae81432b543d2e16c3c9 (diff) | |
| download | rust-3bc545373df4c81ba223a8bece14cbc27eb85a4d.tar.gz rust-3bc545373df4c81ba223a8bece14cbc27eb85a4d.zip | |
auto merge of #18365 : bjz/rust/token, r=alexcrichton
[breaking-change]
(for syntax-extensions)
- Token variant identifiers have been converted to PascalCase for consistency with Rust coding standards
- Some free-functions in `syntax::token` have been converted to methods on `syntax::token::Token`:
- `can_begin_expr` -> `Token::can_begin_expr`
- `close_delimiter_for` -> `Token::get_close_delimiter`
- `is_lit` -> `Token::is_lit`
- `is_ident` -> `Token::is_ident`
- `is_path` -> `Token::is_path`
- `is_plain_ident` -> `Token::is_plain_ident`
- `is_lifetime` -> `Token::is_lifetime`
- `is_mutability` -> `Token::is_mutability`
- `to_binop` -> `Token::to_binop`
- `is_keyword` -> `Token::is_keyword`
- `is_any_keyword` -> `Token::is_any_keyword`
- `is_strict_keyword` -> `Token::is_strict_keyword`
- `is_reserved_keyword` -> `Token::is_reserved_keyword`
- `mtwt_token_eq` -> `Token::mtwt_eq`
- `token::Ident` now takes an enum instead of a boolean for clarity
- `token::{to_string, binop_to_string}` were moved to `pprust::{token_to_string, binop_to_string}`
Diffstat (limited to 'src/libsyntax/print')
| -rw-r--r-- | src/libsyntax/print/pprust.rs | 109 |
1 files changed, 102 insertions, 7 deletions
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 69e6d78d16a..a52987f5bd1 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -21,6 +21,7 @@ use attr::{AttrMetaMethods, AttributeMethods}; use codemap::{CodeMap, BytePos}; use codemap; use diagnostic; +use parse::token::{BinOpToken, Token}; use parse::token; use parse::lexer::comments; use parse; @@ -181,6 +182,101 @@ pub fn to_string(f: |&mut State| -> IoResult<()>) -> String { } } +pub fn binop_to_string(op: BinOpToken) -> &'static str { + match op { + token::Plus => "+", + token::Minus => "-", + token::Star => "*", + token::Slash => "/", + token::Percent => "%", + token::Caret => "^", + token::And => "&", + token::Or => "|", + token::Shl => "<<", + token::Shr => ">>", + } +} + +pub fn token_to_string(tok: &Token) -> String { + match *tok { + token::Eq => "=".into_string(), + token::Lt => "<".into_string(), + token::Le => "<=".into_string(), + token::EqEq => "==".into_string(), + token::Ne => "!=".into_string(), + token::Ge => ">=".into_string(), + token::Gt => ">".into_string(), + token::Not => "!".into_string(), + token::Tilde => "~".into_string(), + token::OrOr => "||".into_string(), + token::AndAnd => "&&".into_string(), + token::BinOp(op) => binop_to_string(op).into_string(), + token::BinOpEq(op) => format!("{}=", binop_to_string(op)), + + /* Structural symbols */ + token::At => "@".into_string(), + token::Dot => ".".into_string(), + token::DotDot => "..".into_string(), + token::DotDotDot => "...".into_string(), + token::Comma => ",".into_string(), + token::Semi => ";".into_string(), + token::Colon => ":".into_string(), + token::ModSep => "::".into_string(), + token::RArrow => "->".into_string(), + token::LArrow => "<-".into_string(), + token::FatArrow => "=>".into_string(), + token::LParen => "(".into_string(), + token::RParen => ")".into_string(), + token::LBracket => "[".into_string(), + token::RBracket => "]".into_string(), + token::LBrace => 
"{".into_string(), + token::RBrace => "}".into_string(), + token::Pound => "#".into_string(), + token::Dollar => "$".into_string(), + token::Question => "?".into_string(), + + /* Literals */ + token::LitByte(b) => format!("b'{}'", b.as_str()), + token::LitChar(c) => format!("'{}'", c.as_str()), + token::LitFloat(c) => c.as_str().into_string(), + token::LitInteger(c) => c.as_str().into_string(), + token::LitStr(s) => format!("\"{}\"", s.as_str()), + token::LitStrRaw(s, n) => format!("r{delim}\"{string}\"{delim}", + delim="#".repeat(n), + string=s.as_str()), + token::LitBinary(v) => format!("b\"{}\"", v.as_str()), + token::LitBinaryRaw(s, n) => format!("br{delim}\"{string}\"{delim}", + delim="#".repeat(n), + string=s.as_str()), + + /* Name components */ + token::Ident(s, _) => token::get_ident(s).get().into_string(), + token::Lifetime(s) => format!("{}", token::get_ident(s)), + token::Underscore => "_".into_string(), + + /* Other */ + token::DocComment(s) => s.as_str().into_string(), + token::Eof => "<eof>".into_string(), + token::Whitespace => " ".into_string(), + token::Comment => "/* */".into_string(), + token::Shebang(s) => format!("/* shebang: {}*/", s.as_str()), + + token::Interpolated(ref nt) => match *nt { + token::NtExpr(ref e) => expr_to_string(&**e), + token::NtMeta(ref e) => meta_item_to_string(&**e), + token::NtTy(ref e) => ty_to_string(&**e), + token::NtPath(ref e) => path_to_string(&**e), + token::NtItem(..) => "an interpolated item".into_string(), + token::NtBlock(..) => "an interpolated block".into_string(), + token::NtStmt(..) => "an interpolated statement".into_string(), + token::NtPat(..) => "an interpolated pattern".into_string(), + token::NtIdent(..) => "an interpolated identifier".into_string(), + token::NtTT(..) => "an interpolated tt".into_string(), + token::NtMatchers(..) 
=> "an interpolated matcher sequence".into_string(), + } + } +} + // FIXME (Issue #16472): the thing_to_string_impls macro should go away // after we revise the syntax::ext::quote::ToToken impls to go directly // to token-trees instead of thing -> string -> token-trees. @@ -1026,16 +1122,16 @@ impl<'a> State<'a> { match *tt { ast::TtDelimited(_, ref delimed) => { let (ref open, ref tts, ref close) = **delimed; - try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice())); + try!(word(&mut self.s, token_to_string(&open.token).as_slice())); try!(space(&mut self.s)); try!(self.print_tts(tts.as_slice())); try!(space(&mut self.s)); - word(&mut self.s, parse::token::to_string(&close.token).as_slice()) + word(&mut self.s, token_to_string(&close.token).as_slice()) }, ast::TtToken(_, ref tk) => { - try!(word(&mut self.s, parse::token::to_string(tk).as_slice())); + try!(word(&mut self.s, token_to_string(tk).as_slice())); match *tk { - parse::token::DOC_COMMENT(..) => { + parse::token::DocComment(..) => { hardbreak(&mut self.s) } _ => Ok(()) @@ -1049,10 +1145,9 @@ impl<'a> State<'a> { try!(word(&mut self.s, ")")); match *separator { Some(ref tk) => { - try!(word(&mut self.s, - parse::token::to_string(tk).as_slice())); + try!(word(&mut self.s, token_to_string(tk).as_slice())); } - None => () + None => {}, } match kleene_op { ast::ZeroOrMore => word(&mut self.s, "*"), |
