| field | value | date |
|---|---|---|
| author | bors <bors@rust-lang.org> | 2015-02-07 02:04:47 +0000 |
| committer | bors <bors@rust-lang.org> | 2015-02-07 02:04:47 +0000 |
| commit | 7ebf9bc5c22155d622537ded42b4ebf94238b296 (patch) | |
| tree | b3f937f2f554e961236d3a2048778441ab062c5e /src/libsyntax | |
| parent | d3732a12e896ab98aa27eaffab99a78bbaf837e4 (diff) | |
| parent | a2e01c62d5b6259d55b6688c8b059ac28e5dd03e (diff) | |
Auto merge of #21505 - GuillaumeGomez:interned_string, r=alexcrichton
This change makes the code more homogeneous.
Diffstat (limited to 'src/libsyntax')
25 files changed, 103 insertions, 105 deletions
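
The rewrite relies on `InternedString` already implementing `Deref<Target = str>`: the `parse/token.rs` hunk below deletes the now-redundant `get()` accessor, and every call site switches from `s.get()` to a reborrow, deref, or explicit slice. A minimal sketch of why the accessor is unnecessary, using a simplified stand-in type rather than the real libsyntax `InternedString`:

```rust
use std::ops::Deref;

// Simplified stand-in for libsyntax's `InternedString`: only the `Deref`
// impl matters for this change; the real type wraps interned string data.
pub struct InternedString {
    string: String,
}

impl Deref for InternedString {
    type Target = str;

    fn deref(&self) -> &str {
        &self.string
    }
}

fn main() {
    let s = InternedString { string: "doc".to_string() };

    // Everything `s.get()` used to expose is already reachable through deref:
    let as_str: &str = &s;            // deref coercion at a `&str` coercion site
    assert_eq!(as_str, "doc");

    // `str` methods resolve through autoderef, so no accessor is needed:
    assert!(s.starts_with("d"));
    assert_eq!(s.to_string(), "doc");
}
```
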
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 34eeedeaa76..7e1bf7a2230 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -112,13 +112,13 @@ impl fmt::Display for Ident { impl fmt::Debug for Name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let Name(nm) = *self; - write!(f, "{:?}({})", token::get_name(*self).get(), nm) + write!(f, "{:?}({})", token::get_name(*self), nm) } } impl fmt::Display for Name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(token::get_name(*self).get(), f) + fmt::Display::fmt(&token::get_name(*self), f) } } @@ -174,7 +174,7 @@ impl Name { pub fn as_str<'a>(&'a self) -> &'a str { unsafe { // FIXME #12938: can't use copy_lifetime since &str isn't a &T - ::std::mem::transmute::<&str,&str>(token::get_name(*self).get()) + ::std::mem::transmute::<&str,&str>(&token::get_name(*self)) } } @@ -193,7 +193,7 @@ pub type Mrk = u32; impl Encodable for Ident { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(token::get_ident(*self).get()) + s.emit_str(&token::get_ident(*self)) } } diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index c62f76564a7..b8d4c90f745 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -27,7 +27,7 @@ use std::u32; pub fn path_name_i(idents: &[Ident]) -> String { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") idents.iter().map(|i| { - token::get_ident(*i).get().to_string() + token::get_ident(*i).to_string() }).collect::<Vec<String>>().connect("::") } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 15ea68a19ce..a3afe5780d0 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -44,7 +44,7 @@ pub fn is_used(attr: &Attribute) -> bool { pub trait AttrMetaMethods { fn check_name(&self, name: &str) -> bool { - name == self.name().get() + name == &self.name()[] } /// Retrieve the name of the meta item, e.g. 
`foo` in `#[foo]`, @@ -62,7 +62,7 @@ pub trait AttrMetaMethods { impl AttrMetaMethods for Attribute { fn check_name(&self, name: &str) -> bool { - let matches = name == self.name().get(); + let matches = name == &self.name()[]; if matches { mark_used(self); } @@ -142,7 +142,7 @@ impl AttributeMethods for Attribute { let meta = mk_name_value_item_str( InternedString::new("doc"), token::intern_and_get_ident(&strip_doc_comment_decoration( - comment.get())[])); + &comment)[])); if self.node.style == ast::AttrOuter { f(&mk_attr_outer(self.node.id, meta)) } else { @@ -209,7 +209,7 @@ pub fn mk_attr_outer(id: AttrId, item: P<MetaItem>) -> Attribute { pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos, hi: BytePos) -> Attribute { - let style = doc_comment_style(text.get()); + let style = doc_comment_style(&text); let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr)); let attr = Attribute_ { id: id, @@ -326,11 +326,11 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool { /// Tests if a cfg-pattern matches the cfg set pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P<MetaItem>], cfg: &ast::MetaItem) -> bool { match cfg.node { - ast::MetaList(ref pred, ref mis) if pred.get() == "any" => + ast::MetaList(ref pred, ref mis) if &pred[] == "any" => mis.iter().any(|mi| cfg_matches(diagnostic, cfgs, &**mi)), - ast::MetaList(ref pred, ref mis) if pred.get() == "all" => + ast::MetaList(ref pred, ref mis) if &pred[] == "all" => mis.iter().all(|mi| cfg_matches(diagnostic, cfgs, &**mi)), - ast::MetaList(ref pred, ref mis) if pred.get() == "not" => { + ast::MetaList(ref pred, ref mis) if &pred[] == "not" => { if mis.len() != 1 { diagnostic.span_err(cfg.span, "expected 1 cfg-pattern"); return false; @@ -382,7 +382,7 @@ fn find_stability_generic<'a, 'outer: for attr in attrs { let tag = attr.name(); - let tag = tag.get(); + let tag = &tag[]; if tag != "deprecated" && tag != "unstable" && tag != "stable" { continue // not a stability level } @@ -394,8 +394,8 @@ fn find_stability_generic<'a, let mut feature = None; let mut since = None; let mut reason = None; - for meta in metas { - if meta.name().get() == "feature" { + for meta in metas.iter() { + if meta.name() == "feature" { match meta.value_str() { Some(v) => feature = Some(v), None => { @@ -404,7 +404,7 @@ fn find_stability_generic<'a, } } } - if meta.name().get() == "since" { + if &meta.name()[] == "since" { match meta.value_str() { Some(v) => since = Some(v), None => { @@ -413,7 +413,7 @@ fn find_stability_generic<'a, } } } - if meta.name().get() == "reason" { + if &meta.name()[] == "reason" { match meta.value_str() { Some(v) => reason = Some(v), None => { @@ -521,11 +521,11 @@ pub fn find_repr_attrs(diagnostic: &SpanHandler, attr: &Attribute) -> Vec<ReprAt for item in items { match item.node { ast::MetaWord(ref word) => { - let hint = match word.get() { + let hint = match &word[] { // Can't use "extern" because it's not a lexical identifier. 
"C" => Some(ReprExtern), "packed" => Some(ReprPacked), - _ => match int_type_of_word(word.get()) { + _ => match int_type_of_word(&word) { Some(ity) => Some(ReprInt(item.span, ity)), None => { // Not a word we recognize diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index bd5247bbad6..833a6d52acb 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -10,6 +10,7 @@ use std::cell::RefCell; use std::collections::BTreeMap; + use ast; use ast::{Ident, Name, TokenTree}; use codemap::Span; @@ -57,7 +58,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, match diagnostics.insert(code.name, span) { Some(previous_span) => { ecx.span_warn(span, &format!( - "diagnostic code {} already used", token::get_ident(code).get() + "diagnostic code {} already used", &token::get_ident(code) )[]); ecx.span_note(previous_span, "previous invocation"); }, @@ -68,7 +69,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, with_registered_diagnostics(|diagnostics| { if !diagnostics.contains_key(&code.name) { ecx.span_err(span, &format!( - "used diagnostic code {} not registered", token::get_ident(code).get() + "used diagnostic code {} not registered", &token::get_ident(code) )[]); } }); @@ -93,12 +94,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, with_registered_diagnostics(|diagnostics| { if diagnostics.insert(code.name, description).is_some() { ecx.span_err(span, &format!( - "diagnostic code {} already registered", token::get_ident(*code).get() + "diagnostic code {} already registered", &token::get_ident(*code) )[]); } }); let sym = Ident::new(token::gensym(&( - "__register_diagnostic_".to_string() + token::get_ident(*code).get() + "__register_diagnostic_".to_string() + &token::get_ident(*code) )[])); MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter()) } diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index 4258eb32fdf..1ceda2e08dd 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -102,7 +102,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // It's the opposite of '=&' which means that the memory // cannot be shared with any other operand (usually when // a register is clobbered early.) 
- let output = match constraint.get().slice_shift_char() { + let output = match constraint.slice_shift_char() { Some(('=', _)) => None, Some(('+', operand)) => { Some(token::intern_and_get_ident(&format!( @@ -129,9 +129,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let (constraint, _str_style) = p.parse_str(); - if constraint.get().starts_with("=") { + if constraint.starts_with("=") { cx.span_err(p.last_span, "input operand constraint contains '='"); - } else if constraint.get().starts_with("+") { + } else if constraint.starts_with("+") { cx.span_err(p.last_span, "input operand constraint contains '+'"); } @@ -213,7 +213,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) MacExpr::new(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprInlineAsm(ast::InlineAsm { - asm: token::intern_and_get_ident(asm.get()), + asm: token::intern_and_get_ident(&asm), asm_str_style: asm_str_style.unwrap(), outputs: outputs, inputs: inputs, diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index a184cc5c2b2..b5f6893a8c2 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -790,7 +790,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt, cx.span_err(sp, &format!("{} takes 1 argument", name)[]); } expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| { - s.get().to_string() + s.to_string() }) } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 53c35ef34cd..55faf692e98 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -21,7 +21,6 @@ use parse::token::InternedString; use parse::token; use ptr::P; - // Transitional reexports so qquote can find the paths it is looking for mod syntax { pub use ext; @@ -576,7 +575,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> { let field_name = token::get_ident(ident); let field_span = Span { - lo: sp.lo - Pos::from_usize(field_name.get().len()), + lo: sp.lo - Pos::from_usize(field_name.len()), hi: sp.hi, expn_id: sp.expn_id, }; diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index 4e10cc9aacc..80d128959ea 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -32,7 +32,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, ast::LitStr(ref s, _) | ast::LitFloat(ref s, _) | ast::LitFloatUnsuffixed(ref s) => { - accumulator.push_str(s.get()); + accumulator.push_str(&s); } ast::LitChar(c) => { accumulator.push(c); diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 1af3ba1d326..364cacd735c 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -31,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } else { match *e { ast::TtToken(_, token::Ident(ident, _)) => { - res_str.push_str(token::get_ident(ident).get()) + res_str.push_str(&token::get_ident(ident)) }, _ => { cx.span_err(sp, "concat_idents! 
requires ident args."); diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs index bce48747b60..1c82ca5d2ad 100644 --- a/src/libsyntax/ext/deriving/bounds.rs +++ b/src/libsyntax/ext/deriving/bounds.rs @@ -24,7 +24,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt, { let name = match mitem.node { MetaWord(ref tname) => { - match tname.get() { + match &tname[] { "Copy" => "Copy", "Send" | "Sync" => { return cx.span_err(span, diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index 7d72a7ec358..28573ef757b 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -363,7 +363,7 @@ impl<'a> TraitDef<'a> { // generated implementations are linted let mut attrs = newitem.attrs.clone(); attrs.extend(item.attrs.iter().filter(|a| { - match a.name().get() { + match &a.name()[] { "allow" | "warn" | "deny" | "forbid" => true, _ => false, } diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index d3d7fee3a18..318b748ad7f 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -74,7 +74,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt, |i| push(i))) } - match tname.get() { + match &tname[] { "Clone" => expand!(clone::expand_deriving_clone), "Hash" => expand!(hash::expand_deriving_hash), diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index ec5941f58f3..821fdeaa86a 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -72,7 +72,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, } }; - let mut format_string = String::from_str(token::get_ident(name).get()); + let mut format_string = String::from_str(&token::get_ident(name)); // the internal fields we're actually formatting let mut exprs = Vec::new(); @@ -107,7 +107,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let name = token::get_ident(field.name.unwrap()); format_string.push_str(" "); - format_string.push_str(name.get()); + format_string.push_str(&name); format_string.push_str(": {:?}"); exprs.push(field.self_.clone()); diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index 9aa454ae8d5..417506cf3aa 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -101,12 +101,12 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } - let e = match env::var_string(var.get()) { - Err(..) 
=> { - cx.span_err(sp, msg.get()); + let e = match env::var_string(&var[]) { + Err(_) => { + cx.span_err(sp, &msg); cx.expr_usize(sp, 0) } - Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s[])) + Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s)) }; MacExpr::new(e) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index eaee67f9a61..33712dae900 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -375,7 +375,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, &format!("macro undefined: '{}!'", - extnamestr.get())[]); + &extnamestr)[]); // let compilation continue None @@ -385,7 +385,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, fld.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - name: extnamestr.get().to_string(), + name: extnamestr.to_string(), format: MacroBang, span: exp_span, }, @@ -411,7 +411,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, &format!("non-expression macro in expression position: {}", - &extnamestr.get()[] + &extnamestr[] )[]); return None; } @@ -422,7 +422,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, &format!("'{}' is not a tt-style macro", - extnamestr.get())[]); + &extnamestr)[]); None } } @@ -506,14 +506,14 @@ fn expand_item_modifiers(mut it: P<ast::Item>, fld: &mut MacroExpander) for attr in &modifiers { let mname = attr.name(); - match fld.cx.syntax_env.find(&intern(mname.get())) { + match fld.cx.syntax_env.find(&intern(&mname)) { Some(rc) => match *rc { Modifier(ref mac) => { attr::mark_used(attr); fld.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - name: mname.get().to_string(), + name: mname.to_string(), format: MacroAttribute, span: None, } @@ -613,7 +613,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr.get().to_string(), + name: extnamestr.to_string(), format: MacroBang, span: span } @@ -626,13 +626,13 @@ pub fn expand_item_mac(it: P<ast::Item>, if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, &format!("macro {}! expects an ident argument", - extnamestr.get())[]); + &extnamestr)[]); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr.get().to_string(), + name: extnamestr.to_string(), format: MacroBang, span: span } @@ -651,7 +651,7 @@ pub fn expand_item_mac(it: P<ast::Item>, fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr.get().to_string(), + name: extnamestr.to_string(), format: MacroBang, span: None, } @@ -677,7 +677,7 @@ pub fn expand_item_mac(it: P<ast::Item>, _ => { fld.cx.span_err(it.span, &format!("{}! 
is not legal in item position", - extnamestr.get())[]); + &extnamestr)[]); return SmallVector::zero(); } } @@ -696,7 +696,7 @@ pub fn expand_item_mac(it: P<ast::Item>, None => { fld.cx.span_err(path_span, &format!("non-item macro in item position: {}", - extnamestr.get())[]); + &extnamestr)[]); return SmallVector::zero(); } }; @@ -950,7 +950,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> { fld.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - name: extnamestr.get().to_string(), + name: extnamestr.to_string(), format: MacroBang, span: tt_span } @@ -968,7 +968,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> { pth.span, &format!( "non-pattern macro in pattern position: {}", - extnamestr.get() + &extnamestr )[] ); return DummyResult::raw_pat(span); @@ -981,7 +981,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> { _ => { fld.cx.span_err(span, &format!("{}! is not legal in pattern position", - extnamestr.get())[]); + &extnamestr)[]); return DummyResult::raw_pat(span); } } @@ -1065,7 +1065,7 @@ fn expand_annotatable(a: Annotatable, for attr in a.attrs() { let mname = attr.name(); - match fld.cx.syntax_env.find(&intern(mname.get())) { + match fld.cx.syntax_env.find(&intern(&mname)) { Some(rc) => match *rc { Decorator(ref dec) => { let it = match a { @@ -1079,7 +1079,7 @@ fn expand_annotatable(a: Annotatable, fld.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - name: mname.get().to_string(), + name: mname.to_string(), format: MacroAttribute, span: None } @@ -1180,7 +1180,7 @@ fn modifiers(attrs: &Vec<ast::Attribute>, fld: &MacroExpander) -> (Vec<ast::Attribute>, Vec<ast::Attribute>) { attrs.iter().cloned().partition(|attr| { - match fld.cx.syntax_env.find(&intern(attr.name().get())) { + match fld.cx.syntax_env.find(&intern(&attr.name())) { Some(rc) => match *rc { Modifier(_) => true, _ => false @@ -1195,7 +1195,7 @@ fn multi_modifiers(attrs: &[ast::Attribute], fld: &MacroExpander) -> (Vec<ast::Attribute>, Vec<ast::Attribute>) { attrs.iter().cloned().partition(|attr| { - match fld.cx.syntax_env.find(&intern(attr.name().get())) { + match fld.cx.syntax_env.find(&intern(&attr.name())) { Some(rc) => match *rc { MultiModifier(_) => true, _ => false @@ -1220,14 +1220,14 @@ fn expand_item_multi_modifier(mut it: Annotatable, for attr in &modifiers { let mname = attr.name(); - match fld.cx.syntax_env.find(&intern(mname.get())) { + match fld.cx.syntax_env.find(&intern(&mname)) { Some(rc) => match *rc { MultiModifier(ref mac) => { attr::mark_used(attr); fld.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - name: mname.get().to_string(), + name: mname.to_string(), format: MacroAttribute, span: None, } @@ -1862,7 +1862,7 @@ mod test { .collect(); println!("varref #{}: {:?}, resolves to {}",idx, varref_idents, varref_name); let string = token::get_ident(final_varref_ident); - println!("varref's first segment's string: \"{}\"", string.get()); + println!("varref's first segment's string: \"{}\"", &string[]); println!("binding #{}: {}, resolves to {}", binding_idx, bindings[binding_idx], binding_name); mtwt::with_sctable(|x| mtwt::display_sctable(x)); @@ -1915,7 +1915,7 @@ foo_module!(); let cxbinds: Vec<&ast::Ident> = bindings.iter().filter(|b| { let ident = token::get_ident(**b); - let string = ident.get(); + let string = &ident[]; "xx" == string }).collect(); let cxbinds: &[&ast::Ident] = &cxbinds[]; @@ -1929,7 +1929,7 @@ foo_module!(); // the xx binding should bind all of the xx 
varrefs: for (idx,v) in varrefs.iter().filter(|p| { p.segments.len() == 1 - && "xx" == token::get_ident(p.segments[0].identifier).get() + && "xx" == &token::get_ident(p.segments[0].identifier)[] }).enumerate() { if mtwt::resolve(v.segments[0].identifier) != resolved_binding { println!("uh oh, xx binding didn't match xx varref:"); diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 56da24de8bb..96055e3635a 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -118,7 +118,8 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } }; let interned_name = token::get_ident(ident); - let name = interned_name.get(); + let name = &interned_name[]; + p.expect(&token::Eq); let e = p.parse_expr(); match names.get(name) { @@ -672,7 +673,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, None => return DummyResult::raw_expr(sp) }; - let mut parser = parse::Parser::new(fmt.get()); + let mut parser = parse::Parser::new(&fmt); + loop { match parser.next() { Some(piece) => { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 7376b235238..67990895d07 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -161,7 +161,7 @@ pub mod rt { impl ToSource for ast::Ident { fn to_source(&self) -> String { - token::get_ident(*self).get().to_string() + token::get_ident(*self).to_string() } } diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index fbc591834d0..be02ba5ddc2 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -73,7 +73,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) base::check_zero_tts(cx, sp, tts, "module_path!"); let string = cx.mod_path() .iter() - .map(|x| token::get_ident(*x).get().to_string()) + .map(|x| token::get_ident(*x).to_string()) .collect::<Vec<String>>() .connect("::"); base::MacExpr::new(cx.expr_str( diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 823efdd3eed..d752e34c112 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -229,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) p_s.span_diagnostic .span_fatal(sp, &format!("duplicated bind name: {}", - string.get())[]) + &string)[]) } } } @@ -487,8 +487,8 @@ pub fn parse(sess: &ParseSess, let name_string = token::get_ident(name); let match_cur = ei.match_cur; (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( - parse_nt(&mut rust_parser, span, name_string.get())))); - ei.idx += 1; + parse_nt(&mut rust_parser, span, &name_string)))); + ei.idx += 1us; ei.match_cur += 1; } _ => panic!() diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index a93ddbb2379..12efd959918 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -21,6 +21,7 @@ //! For the purpose of future feature-tracking, once code for detection of feature //! gate usage is added, *do not remove it again* even once the feature //! becomes stable. 
+ use self::Status::*; use abi::RustIntrinsic; @@ -255,7 +256,7 @@ impl<'a> PostExpansionVisitor<'a> { impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { fn visit_name(&mut self, sp: Span, name: ast::Name) { - if !token::get_name(name).get().is_ascii() { + if !token::get_name(name).is_ascii() { self.gate_feature("non_ascii_idents", sp, "non-ascii idents are not fully supported."); } @@ -382,7 +383,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs, "link_name") { - Some(val) => val.get().starts_with("llvm."), + Some(val) => val.starts_with("llvm."), _ => false }; if links_to_llvm { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 694da9b8b28..5f4cf9af5ee 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -1201,19 +1201,19 @@ mod test { let source = "/// doc comment\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(doc.get(), "/// doc comment"); + assert_eq!(&doc[], "/// doc comment"); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap(); - let docs = item.attrs.iter().filter(|a| a.name().get() == "doc") - .map(|a| a.value_str().unwrap().get().to_string()).collect::<Vec<_>>(); + let docs = item.attrs.iter().filter(|a| &a.name()[] == "doc") + .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; assert_eq!(&docs[], b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(doc.get(), "/** doc comment\n * with CRLF */"); + assert_eq!(&doc[], "/** doc comment\n * with CRLF */"); } #[test] diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index cae23c5a2cc..3107f47de78 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -5133,7 +5133,7 @@ impl<'a> Parser<'a> { outer_attrs, "path") { Some(d) => (dir_path.join(d), true), None => { - let mod_name = mod_string.get().to_string(); + let mod_name = mod_string.to_string(); let default_path_str = format!("{}.rs", mod_name); let secondary_path_str = format!("{}/mod.rs", mod_name); let default_path = dir_path.join(&default_path_str[]); @@ -5145,7 +5145,7 @@ impl<'a> Parser<'a> { self.span_err(id_sp, "cannot declare a new module at this location"); let this_module = match self.mod_path_stack.last() { - Some(name) => name.get().to_string(), + Some(name) => name.to_string(), None => self.root_module_name.as_ref().unwrap().clone(), }; self.span_note(id_sp, @@ -5191,7 +5191,7 @@ impl<'a> Parser<'a> { }; self.eval_src_mod_from_path(file_path, owns_directory, - mod_string.get().to_string(), id_sp) + mod_string.to_string(), id_sp) } fn eval_src_mod_from_path(&mut self, diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 129c1d20bc0..45f4f044ea4 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -625,11 +625,6 @@ impl InternedString { string: string, } } - - #[inline] - pub fn get<'a>(&'a self) -> &'a str { - &self.string[] - } } impl Deref for InternedString { @@ -644,7 +639,7 @@ impl BytesContainer 
for InternedString { // of `BytesContainer`, which is itself a workaround for the lack of // DST. unsafe { - let this = self.get(); + let this = &self[]; mem::transmute::<&[u8],&[u8]>(this.container_as_bytes()) } } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 542cc41c950..c177cd1fafa 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -258,7 +258,7 @@ pub fn token_to_string(tok: &Token) -> String { } /* Name components */ - token::Ident(s, _) => token::get_ident(s).get().to_string(), + token::Ident(s, _) => token::get_ident(s).to_string(), token::Lifetime(s) => format!("{}", token::get_ident(s)), token::Underscore => "_".to_string(), @@ -798,7 +798,7 @@ impl<'a> State<'a> { try!(self.head(&visibility_qualified(item.vis, "extern crate")[])); if let Some((ref p, style)) = *optional_path { - try!(self.print_string(p.get(), style)); + try!(self.print_string(p, style)); try!(space(&mut self.s)); try!(word(&mut self.s, "as")); try!(space(&mut self.s)); @@ -1313,7 +1313,7 @@ impl<'a> State<'a> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(attr.span.lo)); if attr.node.is_sugared_doc { - word(&mut self.s, attr.value_str().unwrap().get()) + word(&mut self.s, &attr.value_str().unwrap()) } else { match attr.node.style { ast::AttrInner => try!(word(&mut self.s, "#![")), @@ -1847,17 +1847,17 @@ impl<'a> State<'a> { ast::ExprInlineAsm(ref a) => { try!(word(&mut self.s, "asm!")); try!(self.popen()); - try!(self.print_string(a.asm.get(), a.asm_str_style)); + try!(self.print_string(&a.asm, a.asm_str_style)); try!(self.word_space(":")); try!(self.commasep(Inconsistent, &a.outputs[], |s, &(ref co, ref o, is_rw)| { - match co.get().slice_shift_char() { + match co.slice_shift_char() { Some(('=', operand)) if is_rw => { try!(s.print_string(&format!("+{}", operand)[], ast::CookedStr)) } - _ => try!(s.print_string(co.get(), ast::CookedStr)) + _ => try!(s.print_string(&co, ast::CookedStr)) } try!(s.popen()); try!(s.print_expr(&**o)); @@ -1869,7 +1869,7 @@ impl<'a> State<'a> { try!(self.commasep(Inconsistent, &a.inputs[], |s, &(ref co, ref o)| { - try!(s.print_string(co.get(), ast::CookedStr)); + try!(s.print_string(&co, ast::CookedStr)); try!(s.popen()); try!(s.print_expr(&**o)); try!(s.pclose()); @@ -1880,7 +1880,7 @@ impl<'a> State<'a> { try!(self.commasep(Inconsistent, &a.clobbers[], |s, co| { - try!(s.print_string(co.get(), ast::CookedStr)); + try!(s.print_string(&co, ast::CookedStr)); Ok(()) })); @@ -1954,7 +1954,7 @@ impl<'a> State<'a> { let encoded = ident.encode_with_hygiene(); try!(word(&mut self.s, &encoded[])) } else { - try!(word(&mut self.s, token::get_ident(ident).get())) + try!(word(&mut self.s, &token::get_ident(ident))) } self.ann.post(self, NodeIdent(&ident)) } @@ -1964,7 +1964,7 @@ impl<'a> State<'a> { } pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> { - try!(word(&mut self.s, token::get_name(name).get())); + try!(word(&mut self.s, &token::get_name(name))); self.ann.post(self, NodeName(&name)) } @@ -2532,15 +2532,15 @@ impl<'a> State<'a> { try!(self.ibox(indent_unit)); match item.node { ast::MetaWord(ref name) => { - try!(word(&mut self.s, name.get())); + try!(word(&mut self.s, &name)); } ast::MetaNameValue(ref name, ref value) => { - try!(self.word_space(name.get())); + try!(self.word_space(&name[])); try!(self.word_space("=")); try!(self.print_literal(value)); } ast::MetaList(ref name, ref items) => { - try!(word(&mut self.s, name.get())); + try!(word(&mut self.s, &name)); try!(self.popen()); 
try!(self.commasep(Consistent, &items[], @@ -2731,7 +2731,7 @@ impl<'a> State<'a> { _ => () } match lit.node { - ast::LitStr(ref st, style) => self.print_string(st.get(), style), + ast::LitStr(ref st, style) => self.print_string(&st, style), ast::LitByte(byte) => { let mut res = String::from_str("b'"); ascii::escape_default(byte, |c| res.push(c as char)); @@ -2772,10 +2772,10 @@ impl<'a> State<'a> { word(&mut self.s, &format!( "{}{}", - f.get(), + &f, &ast_util::float_ty_to_string(t)[])[]) } - ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()), + ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, &f[]), ast::LitBool(val) => { if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") } } diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index d9d56889512..1a8cb2b376a 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -512,7 +512,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) { }); let reexport = cx.reexport_test_harness_main.as_ref().map(|s| { // building `use <ident> = __test::main` - let reexport_ident = token::str_to_ident(s.get()); + let reexport_ident = token::str_to_ident(&s); let use_path = nospan(ast::ViewPathSimple(reexport_ident, @@ -575,7 +575,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> { fn is_test_crate(krate: &ast::Crate) -> bool { match attr::find_crate_name(&krate.attrs[]) { - Some(ref s) if "test" == &s.get()[] => true, + Some(ref s) if "test" == &s[] => true, _ => false } } |
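
For reference, the call-site idioms the diff substitutes for `.get()`, sketched with the same kind of stand-in type as above and hypothetical helper names (the diff itself uses the pre-1.0 full-range indexing `&name[]`, which later Rust spells `&name[..]`):

```rust
use std::ops::Deref;

// Same minimal stand-in as in the earlier sketch.
struct InternedString {
    string: String,
}

impl Deref for InternedString {
    type Target = str;
    fn deref(&self) -> &str {
        &self.string
    }
}

// Hypothetical helper taking `&str`, standing in for calls such as
// `push_str` or `token::intern_and_get_ident` in the hunks above.
fn push_to(buf: &mut String, s: &str) {
    buf.push_str(s);
}

fn main() {
    let name = InternedString { string: "feature".to_string() };
    let mut buf = String::new();

    // Before: push_to(&mut buf, name.get());
    push_to(&mut buf, &name);                // after: plain reborrow, coerced via Deref
    assert_eq!(buf, "feature");

    // Before: name.get() == "feature"
    assert!(&name[..] == "feature");         // after: explicit slice (written `&name[]` in the diff)

    // Before: name.get().to_string()
    assert_eq!(name.to_string(), "feature"); // after: autoderef reaches `str::to_string`
}
```
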
