| author | Eduard Burtescu <edy.burt@gmail.com> | 2014-02-14 07:07:09 +0200 |
|---|---|---|
| committer | Eduard Burtescu <edy.burt@gmail.com> | 2014-02-14 08:43:29 +0200 |
| commit | a02b10a0621adfe36eb3cc2e46f45fc7ccdb7ea2 | |
| tree | 86fe8ac57360a232b07c4303547194646129561a | /src/libsyntax/ext/tt |
| parent | 22c34f3c4cddea33b916eb92f8d7286b02b865a7 | |
Refactored ast_map and friends, mainly to have Paths without storing them.
Diffstat (limited to 'src/libsyntax/ext/tt')
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 29 |
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 10 |
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 21 |
3 files changed, 27 insertions, 33 deletions
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 92bc204e2c1..c2d005da74e 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -18,7 +18,7 @@ use parse::lexer::*; //resolve bug?
 use parse::ParseSess;
 use parse::attr::ParserAttr;
 use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
-use parse::token::{Token, EOF, to_str, Nonterminal, get_ident_interner};
+use parse::token::{Token, EOF, Nonterminal};
 use parse::token;
 
 use std::hashmap::HashMap;
@@ -180,14 +180,15 @@ pub fn nameize(p_s: @ParseSess, ms: &[Matcher], res: &[@NamedMatch])
         };
       }
       codemap::Spanned {
-        node: MatchNonterminal(ref bind_name, _, idx), span: sp
+        node: MatchNonterminal(bind_name, _, idx),
+        span
       } => {
-        if ret_val.contains_key(bind_name) {
-          let string = token::get_ident(bind_name.name);
+        if ret_val.contains_key(&bind_name) {
+          let string = token::get_ident(bind_name);
           p_s.span_diagnostic
-            .span_fatal(sp, "duplicated bind name: " + string.get())
+            .span_fatal(span, "duplicated bind name: " + string.get())
         }
-        ret_val.insert(*bind_name, res[idx]);
+        ret_val.insert(bind_name, res[idx]);
       }
     }
   }
@@ -364,12 +365,10 @@ pub fn parse<R: Reader>(sess: @ParseSess,
                 || bb_eis.len() > 1u {
                 let nts = bb_eis.map(|ei| {
                     match ei.elts[ei.idx].node {
-                      MatchNonterminal(ref bind,ref name,_) => {
-                        let bind_string = token::get_ident(bind.name);
-                        let name_string = token::get_ident(name.name);
+                      MatchNonterminal(bind, name, _) => {
                         format!("{} ('{}')",
-                                name_string.get(),
-                                bind_string.get())
+                                token::get_ident(name),
+                                token::get_ident(bind))
                       }
                       _ => fail!()
                     } }).connect(" or ");
@@ -379,7 +378,7 @@ pub fn parse<R: Reader>(sess: @ParseSess,
                              nts, next_eis.len()));
             } else if bb_eis.len() == 0u && next_eis.len() == 0u {
                 return Failure(sp, format!("no rules expected the token `{}`",
-                            to_str(get_ident_interner(), &tok)));
+                            token::to_str(&tok)));
             } else if next_eis.len() > 0u {
                 /* Now process the next token */
                 while next_eis.len() > 0u {
@@ -391,8 +390,8 @@ pub fn parse<R: Reader>(sess: @ParseSess,
 
                 let mut ei = bb_eis.pop().unwrap();
                 match ei.elts[ei.idx].node {
-                  MatchNonterminal(_, ref name, idx) => {
-                    let name_string = token::get_ident(name.name);
+                  MatchNonterminal(_, name, idx) => {
+                    let name_string = token::get_ident(name);
                     ei.matches[idx].push(@MatchedNonterminal(
                         parse_nt(&mut rust_parser, name_string.get())));
                     ei.idx += 1u;
@@ -426,7 +425,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
       "ident" => match p.token {
         token::IDENT(sn,b) => { p.bump(); token::NtIdent(~sn,b) }
         _ => {
-            let token_str = token::to_str(get_ident_interner(), &p.token);
+            let token_str = token::to_str(&p.token);
             p.fatal(~"expected ident, found " + token_str)
         }
       },
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index e196bdccfe3..45fe24ebf68 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -21,7 +21,7 @@ use ext::tt::macro_parser::{parse, parse_or_else};
 use parse::lexer::new_tt_reader;
 use parse::parser::Parser;
 use parse::attr::ParserAttr;
-use parse::token::{get_ident_interner, special_idents, gensym_ident};
+use parse::token::{special_idents, gensym_ident};
 use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF};
 use parse::token;
 use print;
@@ -113,11 +113,9 @@ fn generic_extension(cx: &ExtCtxt,
                      rhses: &[@NamedMatch])
                      -> MacResult {
     if cx.trace_macros() {
-        let interned_name = token::get_ident(name.name);
         println!("{}! \{ {} \}",
-                 interned_name.get(),
-                 print::pprust::tt_to_str(&TTDelim(@arg.to_owned()),
-                                          get_ident_interner()));
+                 token::get_ident(name),
+                 print::pprust::tt_to_str(&TTDelim(@arg.to_owned())));
     }
 
     // Which arm's failure should we report? (the one furthest along)
@@ -231,7 +229,7 @@ pub fn add_new_extension(cx: &mut ExtCtxt,
     };
 
     return MRDef(MacroDef {
-        name: token::get_ident(name.name).get().to_str(),
+        name: token::get_ident(name).get().to_str(),
         ext: NormalTT(exp, Some(sp))
     });
 }
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index fccbc57f12c..fb2aae9b8c1 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -19,7 +19,6 @@ use parse::lexer::TokenAndSpan;
 
 use std::cell::{Cell, RefCell};
 use std::hashmap::HashMap;
-use std::option;
 
 ///an unzipping of `TokenTree`s
 struct TtFrame {
@@ -57,7 +56,7 @@ pub fn new_tt_reader(sp_diag: @SpanHandler,
             idx: Cell::new(0u),
             dotdotdoted: false,
             sep: None,
-            up: option::None
+            up: None
         }),
         interpolations: match interp { /* just a convienience */
             None => RefCell::new(HashMap::new()),
@@ -122,10 +121,9 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> @NamedMatch {
     match matched_opt {
         Some(s) => lookup_cur_matched_by_matched(r, s),
         None => {
-            let name_string = token::get_ident(name.name);
             r.sp_diag.span_fatal(r.cur_span.get(),
                                  format!("unknown macro variable `{}`",
-                                          name_string.get()));
+                                          token::get_ident(name)));
         }
     }
 }
@@ -141,16 +139,16 @@ fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
     match lhs {
         LisUnconstrained => rhs.clone(),
         LisContradiction(_) => lhs.clone(),
-        LisConstraint(l_len, ref l_id) => match rhs {
+        LisConstraint(l_len, l_id) => match rhs {
             LisUnconstrained => lhs.clone(),
             LisContradiction(_) => rhs.clone(),
             LisConstraint(r_len, _) if l_len == r_len => lhs.clone(),
-            LisConstraint(r_len, ref r_id) => {
-                let l_n = token::get_ident(l_id.name);
-                let r_n = token::get_ident(r_id.name);
+            LisConstraint(r_len, r_id) => {
+                let l_n = token::get_ident(l_id);
+                let r_n = token::get_ident(r_id);
                 LisContradiction(format!("inconsistent lockstep iteration: \
                                           '{}' has {} items, but '{}' has {}",
-                                         l_n.get(), l_len, r_n.get(), r_len))
+                                         l_n, l_len, r_n, r_len))
             }
         }
     }
@@ -240,7 +238,7 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
             idx: Cell::new(0u),
             dotdotdoted: false,
             sep: None,
-            up: option::Some(r.stack.get())
+            up: Some(r.stack.get())
         });
         // if this could be 0-length, we'd need to potentially recur here
     }
@@ -314,11 +312,10 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
                 return ret_val;
             }
             MatchedSeq(..) => {
-                let string = token::get_ident(ident.name);
                 r.sp_diag.span_fatal(
                     r.cur_span.get(), /* blame the macro writer */
                     format!("variable '{}' is still repeating at this depth",
-                            string.get()));
+                            token::get_ident(ident)));
             }
         }
     }
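The recurring change across all three files is a simplification of the ident/string helpers: `token::get_ident` now takes the `Ident` itself rather than `ident.name`, `token::to_str` no longer needs `get_ident_interner()` threaded through explicitly, and the returned handle can be formatted directly instead of going through an intermediate `.get()` call. The sketch below is only an illustrative model of that calling-convention change, written in present-day Rust; `Ident`, `InternedString`, `intern`, and the thread-local `INTERNER` are simplified stand-ins, not the actual libsyntax definitions.

```rust
use std::cell::RefCell;
use std::fmt;

/// Stand-in for libsyntax's `Ident`: just an index into an interner table.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct Ident {
    name: u32,
}

/// Stand-in for the interned-string handle returned by `get_ident`.
struct InternedString(String);

impl fmt::Display for InternedString {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.0)
    }
}

thread_local! {
    // Simplified stand-in for the global ident interner.
    static INTERNER: RefCell<Vec<String>> = RefCell::new(Vec::new());
}

/// Intern a string and hand back the corresponding `Ident`.
fn intern(s: &str) -> Ident {
    INTERNER.with(|i| {
        let mut table = i.borrow_mut();
        table.push(s.to_string());
        Ident { name: (table.len() - 1) as u32 }
    })
}

/// After the refactor: the helper takes the `Ident` itself (callers no longer
/// pass `ident.name`), and the result formats directly without a `.get()` call.
fn get_ident(ident: Ident) -> InternedString {
    INTERNER.with(|i| InternedString(i.borrow()[ident.name as usize].clone()))
}

fn main() {
    let bind_name = intern("expr");
    // Before (roughly): token::get_ident(bind_name.name).get()
    // After:            token::get_ident(bind_name)
    println!("duplicated bind name: {}", get_ident(bind_name));
}
```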
