| field | value | date / path |
|---|---|---|
| author | bors <bors@rust-lang.org> | 2015-02-19 18:36:59 +0000 |
| committer | bors <bors@rust-lang.org> | 2015-02-19 18:36:59 +0000 |
| commit | 522d09dfecbeca1595f25ac58c6d0178bbd21d7d | |
| tree | cc0252dd3413e5f890d0ebcfdaa096e5b002be0b | /src/libsyntax |
| parent | 0b664bb8436f2cfda7f13a6f302ab486f332816f | |
| parent | 49771bafa5fca16486bfd06741dac3de2c587adf | |
Auto merge of #22541 - Manishearth:rollup, r=Gankro (1.0.0-alpha.2)
Continued from #22520
Diffstat (limited to 'src/libsyntax')
41 files changed, 495 insertions(+), 404 deletions(-)
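Before the per-file hunks, here is a minimal, self-contained sketch of the mechanical migrations that account for most of the churn below: the deprecated empty-index slice `&expr[]` giving way to the full-range slice `&expr[..]`, `.map(|x| x.clone())` / `.map(|x| *x)` giving way to `.cloned()`, and `FromIterator` implementations taking an `IntoIterator` bound (as in `owned_slice.rs`). The snippet is not taken from the patch; `Wrapper` is a made-up stand-in for `OwnedSlice`, written against current Rust.

```rust
use std::iter::FromIterator;

/// Toy stand-in for `syntax::owned_slice::OwnedSlice` (illustrative only).
struct Wrapper<T>(Vec<T>);

impl<T> FromIterator<T> for Wrapper<T> {
    // The patch widens this bound from `Iterator` to `IntoIterator`, so
    // callers can collect from any iterable, not just a ready-made iterator.
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Wrapper<T> {
        Wrapper(iter.into_iter().collect())
    }
}

fn main() {
    let attrs = vec!["inline".to_string(), "doc".to_string()];

    // `&attrs[]` (deprecated empty-index slicing) becomes an explicit
    // full-range slice, `&attrs[..]`.
    let as_slice: &[String] = &attrs[..];
    assert_eq!(as_slice.len(), 2);

    // `.map(|x| x.clone())` / `.map(|x| *x)` over an iterator of references
    // becomes `.cloned()`.
    let copied: Vec<String> = attrs.iter().cloned().collect();
    assert_eq!(copied, attrs);

    // The widened `FromIterator` bound at work: collect directly from a Vec.
    let wrapped: Wrapper<String> = copied.into_iter().collect();
    assert_eq!(wrapped.0, attrs);
}
```

Other changes visible in the hunks (the `BitvSet` → `BitSet` rename in `attr.rs`, the reworked `Hash` derive bounds, the new `ObsoleteSyntax::EmptyIndex` warning in the parser) follow the same spirit: tracking renamed or tightened standard-library and parser APIs ahead of 1.0.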
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index d6778be553e..140e21b5d04 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -198,7 +198,7 @@ impl Encodable for Ident { impl Decodable for Ident { fn decode<D: Decoder>(d: &mut D) -> Result<Ident, D::Error> { - Ok(str_to_ident(&try!(d.read_str())[])) + Ok(str_to_ident(&try!(d.read_str())[..])) } } diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs index 5535e5911e0..ba08f61b557 100644 --- a/src/libsyntax/ast_map/mod.rs +++ b/src/libsyntax/ast_map/mod.rs @@ -86,7 +86,7 @@ pub fn path_to_string<PI: Iterator<Item=PathElem>>(path: PI) -> String { if !s.is_empty() { s.push_str("::"); } - s.push_str(&e[]); + s.push_str(&e[..]); s }) } @@ -251,7 +251,7 @@ impl<'ast> Map<'ast> { } fn find_entry(&self, id: NodeId) -> Option<MapEntry<'ast>> { - self.map.borrow().get(id as usize).map(|e| *e) + self.map.borrow().get(id as usize).cloned() } pub fn krate(&self) -> &'ast Crate { @@ -463,20 +463,20 @@ impl<'ast> Map<'ast> { F: FnOnce(Option<&[Attribute]>) -> T, { let attrs = match self.get(id) { - NodeItem(i) => Some(&i.attrs[]), - NodeForeignItem(fi) => Some(&fi.attrs[]), + NodeItem(i) => Some(&i.attrs[..]), + NodeForeignItem(fi) => Some(&fi.attrs[..]), NodeTraitItem(ref tm) => match **tm { - RequiredMethod(ref type_m) => Some(&type_m.attrs[]), - ProvidedMethod(ref m) => Some(&m.attrs[]), - TypeTraitItem(ref typ) => Some(&typ.attrs[]), + RequiredMethod(ref type_m) => Some(&type_m.attrs[..]), + ProvidedMethod(ref m) => Some(&m.attrs[..]), + TypeTraitItem(ref typ) => Some(&typ.attrs[..]), }, NodeImplItem(ref ii) => { match **ii { - MethodImplItem(ref m) => Some(&m.attrs[]), - TypeImplItem(ref t) => Some(&t.attrs[]), + MethodImplItem(ref m) => Some(&m.attrs[..]), + TypeImplItem(ref t) => Some(&t.attrs[..]), } } - NodeVariant(ref v) => Some(&v.node.attrs[]), + NodeVariant(ref v) => Some(&v.node.attrs[..]), // unit/tuple structs take the attributes straight from // the struct definition. // FIXME(eddyb) make this work again (requires access to the map). @@ -577,7 +577,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> { None => return false, Some((node_id, name)) => (node_id, name), }; - if &part[] != mod_name.as_str() { + if &part[..] != mod_name.as_str() { return false; } cursor = self.map.get_parent(mod_id); @@ -615,7 +615,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> { // We are looking at some node `n` with a given name and parent // id; do their names match what I am seeking? fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool { - name.as_str() == &self.item_name[] && + name.as_str() == &self.item_name[..] && self.suffix_matches(parent_of_n) } } @@ -1026,7 +1026,7 @@ impl<'a> NodePrinter for pprust::State<'a> { fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String { let id_str = format!(" (id={})", id); - let id_str = if include_id { &id_str[] } else { "" }; + let id_str = if include_id { &id_str[..] 
} else { "" }; match map.find(id) { Some(NodeItem(item)) => { diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 117507ad8b7..f660296fcd7 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -257,11 +257,11 @@ pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> Ident { match *trait_ref { Some(ref trait_ref) => { pretty.push('.'); - pretty.push_str(&pprust::path_to_string(&trait_ref.path)[]); + pretty.push_str(&pprust::path_to_string(&trait_ref.path)); } None => {} } - token::gensym_ident(&pretty[]) + token::gensym_ident(&pretty[..]) } pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod { @@ -673,7 +673,7 @@ pub fn pat_is_ident(pat: P<ast::Pat>) -> bool { pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool { (a.span == b.span) && (a.global == b.global) - && (segments_name_eq(&a.segments[], &b.segments[])) + && (segments_name_eq(&a.segments[..], &b.segments[..])) } // are two arrays of segments equal when compared unhygienically? diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index a3afe5780d0..62e676891a0 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -26,11 +26,11 @@ use parse::token; use ptr::P; use std::cell::{RefCell, Cell}; -use std::collections::BitvSet; +use std::collections::BitSet; use std::collections::HashSet; use std::fmt; -thread_local! { static USED_ATTRS: RefCell<BitvSet> = RefCell::new(BitvSet::new()) } +thread_local! { static USED_ATTRS: RefCell<BitSet> = RefCell::new(BitSet::new()) } pub fn mark_used(attr: &Attribute) { let AttrId(id) = attr.node.id; @@ -44,7 +44,7 @@ pub fn is_used(attr: &Attribute) -> bool { pub trait AttrMetaMethods { fn check_name(&self, name: &str) -> bool { - name == &self.name()[] + name == &self.name()[..] } /// Retrieve the name of the meta item, e.g. `foo` in `#[foo]`, @@ -62,7 +62,7 @@ pub trait AttrMetaMethods { impl AttrMetaMethods for Attribute { fn check_name(&self, name: &str) -> bool { - let matches = name == &self.name()[]; + let matches = name == &self.name()[..]; if matches { mark_used(self); } @@ -101,7 +101,7 @@ impl AttrMetaMethods for MetaItem { fn meta_item_list<'a>(&'a self) -> Option<&'a [P<MetaItem>]> { match self.node { - MetaList(_, ref l) => Some(&l[]), + MetaList(_, ref l) => Some(&l[..]), _ => None } } @@ -142,7 +142,7 @@ impl AttributeMethods for Attribute { let meta = mk_name_value_item_str( InternedString::new("doc"), token::intern_and_get_ident(&strip_doc_comment_decoration( - &comment)[])); + &comment))); if self.node.style == ast::AttrOuter { f(&mk_attr_outer(self.node.id, meta)) } else { @@ -302,9 +302,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr { } MetaList(ref n, ref items) if *n == "inline" => { mark_used(attr); - if contains_name(&items[], "always") { + if contains_name(&items[..], "always") { InlineAlways - } else if contains_name(&items[], "never") { + } else if contains_name(&items[..], "never") { InlineNever } else { InlineHint @@ -326,11 +326,11 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool { /// Tests if a cfg-pattern matches the cfg set pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P<MetaItem>], cfg: &ast::MetaItem) -> bool { match cfg.node { - ast::MetaList(ref pred, ref mis) if &pred[] == "any" => + ast::MetaList(ref pred, ref mis) if &pred[..] == "any" => mis.iter().any(|mi| cfg_matches(diagnostic, cfgs, &**mi)), - ast::MetaList(ref pred, ref mis) if &pred[] == "all" => + ast::MetaList(ref pred, ref mis) if &pred[..] 
== "all" => mis.iter().all(|mi| cfg_matches(diagnostic, cfgs, &**mi)), - ast::MetaList(ref pred, ref mis) if &pred[] == "not" => { + ast::MetaList(ref pred, ref mis) if &pred[..] == "not" => { if mis.len() != 1 { diagnostic.span_err(cfg.span, "expected 1 cfg-pattern"); return false; @@ -382,7 +382,7 @@ fn find_stability_generic<'a, 'outer: for attr in attrs { let tag = attr.name(); - let tag = &tag[]; + let tag = &tag[..]; if tag != "deprecated" && tag != "unstable" && tag != "stable" { continue // not a stability level } @@ -404,7 +404,7 @@ fn find_stability_generic<'a, } } } - if &meta.name()[] == "since" { + if &meta.name()[..] == "since" { match meta.value_str() { Some(v) => since = Some(v), None => { @@ -413,7 +413,7 @@ fn find_stability_generic<'a, } } } - if &meta.name()[] == "reason" { + if &meta.name()[..] == "reason" { match meta.value_str() { Some(v) => reason = Some(v), None => { @@ -501,7 +501,7 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P<MetaItem>]) { if !set.insert(name.clone()) { diagnostic.span_fatal(meta.span, - &format!("duplicate meta item `{}`", name)[]); + &format!("duplicate meta item `{}`", name)); } } } @@ -521,7 +521,7 @@ pub fn find_repr_attrs(diagnostic: &SpanHandler, attr: &Attribute) -> Vec<ReprAt for item in items { match item.node { ast::MetaWord(ref word) => { - let hint = match &word[] { + let hint = match &word[..] { // Can't use "extern" because it's not a lexical identifier. "C" => Some(ReprExtern), "packed" => Some(ReprPacked), diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 3231342cb50..099f6462942 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -360,7 +360,7 @@ impl CodeMap { let mut src = if src.starts_with("\u{feff}") { String::from_str(&src[3..]) } else { - String::from_str(&src[]) + String::from_str(&src[..]) }; // Append '\n' in case it's not already there. diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 7ca0591be50..dfe3477bddc 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -311,7 +311,7 @@ impl<'a> fold::Folder for CfgAttrFolder<'a> { } }; - if attr::cfg_matches(self.diag, &self.config[], &cfg) { + if attr::cfg_matches(self.diag, &self.config[..], &cfg) { Some(respan(mi.span, ast::Attribute_ { id: attr::mk_attr_id(), style: attr.node.style, diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 83a4d938bb5..27219774cf1 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -129,7 +129,7 @@ impl SpanHandler { panic!(ExplicitBug); } pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! { - self.span_bug(sp, &format!("unimplemented {}", msg)[]); + self.span_bug(sp, &format!("unimplemented {}", msg)); } pub fn handler<'a>(&'a self) -> &'a Handler { &self.handler @@ -173,7 +173,7 @@ impl Handler { self.err_count.get()); } } - self.fatal(&s[]); + self.fatal(&s[..]); } pub fn warn(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Warning); @@ -189,7 +189,7 @@ impl Handler { panic!(ExplicitBug); } pub fn unimpl(&self, msg: &str) -> ! 
{ - self.bug(&format!("unimplemented {}", msg)[]); + self.bug(&format!("unimplemented {}", msg)); } pub fn emit(&self, cmsp: Option<(&codemap::CodeMap, Span)>, @@ -311,16 +311,16 @@ fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level, } try!(print_maybe_styled(dst, - &format!("{}: ", lvl.to_string())[], + &format!("{}: ", lvl.to_string()), term::attr::ForegroundColor(lvl.color()))); try!(print_maybe_styled(dst, - &format!("{}", msg)[], + &format!("{}", msg), term::attr::Bold)); match code { Some(code) => { let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA); - try!(print_maybe_styled(dst, &format!(" [{}]", code.clone())[], style)); + try!(print_maybe_styled(dst, &format!(" [{}]", code.clone()), style)); } None => () } @@ -419,12 +419,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, // the span) let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id}; let ses = cm.span_to_string(span_end); - try!(print_diagnostic(dst, &ses[], lvl, msg, code)); + try!(print_diagnostic(dst, &ses[..], lvl, msg, code)); if rsp.is_full_span() { try!(custom_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp))); } } else { - try!(print_diagnostic(dst, &ss[], lvl, msg, code)); + try!(print_diagnostic(dst, &ss[..], lvl, msg, code)); if rsp.is_full_span() { try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp))); } @@ -436,9 +436,9 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, Some(code) => match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) { Some(_) => { - try!(print_diagnostic(dst, &ss[], Help, + try!(print_diagnostic(dst, &ss[..], Help, &format!("pass `--explain {}` to see a detailed \ - explanation", code)[], None)); + explanation", code), None)); } None => () }, @@ -455,7 +455,7 @@ fn highlight_lines(err: &mut EmitterWriter, let fm = &*lines.file; let mut elided = false; - let mut display_lines = &lines.lines[]; + let mut display_lines = &lines.lines[..]; if display_lines.len() > MAX_LINES { display_lines = &display_lines[0..MAX_LINES]; elided = true; @@ -542,7 +542,7 @@ fn highlight_lines(err: &mut EmitterWriter, } try!(print_maybe_styled(err, - &format!("{}\n", s)[], + &format!("{}\n", s), term::attr::ForegroundColor(lvl.color()))); } } @@ -563,7 +563,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, -> old_io::IoResult<()> { let fm = &*lines.file; - let lines = &lines.lines[]; + let lines = &lines.lines[..]; if lines.len() > MAX_LINES { if let Some(line) = fm.get_line(lines[0]) { try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, @@ -610,7 +610,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, s.push('^'); s.push('\n'); print_maybe_styled(w, - &s[], + &s[..], term::attr::ForegroundColor(lvl.color())) } @@ -618,22 +618,25 @@ fn print_macro_backtrace(w: &mut EmitterWriter, cm: &codemap::CodeMap, sp: Span) -> old_io::IoResult<()> { - let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| match expn_info { - Some(ei) => { - let ss = ei.callee.span.map_or(String::new(), |span| cm.span_to_string(span)); - let (pre, post) = match ei.callee.format { - codemap::MacroAttribute => ("#[", "]"), - codemap::MacroBang => ("", "!") - }; - try!(print_diagnostic(w, &ss[], Note, - &format!("in expansion of {}{}{}", pre, - ei.callee.name, - post)[], None)); - let ss = cm.span_to_string(ei.call_site); - try!(print_diagnostic(w, &ss[], Note, "expansion site", None)); - Ok(Some(ei.call_site)) - } - None => Ok(None) + let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| -> 
old_io::IoResult<_> { + match expn_info { + Some(ei) => { + let ss = ei.callee.span.map_or(String::new(), + |span| cm.span_to_string(span)); + let (pre, post) = match ei.callee.format { + codemap::MacroAttribute => ("#[", "]"), + codemap::MacroBang => ("", "!") + }; + try!(print_diagnostic(w, &ss, Note, + &format!("in expansion of {}{}{}", pre, + ei.callee.name, + post), None)); + let ss = cm.span_to_string(ei.call_site); + try!(print_diagnostic(w, &ss, Note, "expansion site", None)); + Ok(Some(ei.call_site)) + } + None => Ok(None) + } })); cs.map_or(Ok(()), |call_site| print_macro_backtrace(w, cm, call_site)) } @@ -643,6 +646,6 @@ pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where { match opt { Some(t) => t, - None => diag.handler().bug(&msg()[]), + None => diag.handler().bug(&msg()), } } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 833a6d52acb..b3afc3fc4dd 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -59,7 +59,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, Some(previous_span) => { ecx.span_warn(span, &format!( "diagnostic code {} already used", &token::get_ident(code) - )[]); + )); ecx.span_note(previous_span, "previous invocation"); }, None => () @@ -70,7 +70,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, if !diagnostics.contains_key(&code.name) { ecx.span_err(span, &format!( "used diagnostic code {} not registered", &token::get_ident(code) - )[]); + )); } }); MacExpr::new(quote_expr!(ecx, ())) @@ -95,12 +95,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, if diagnostics.insert(code.name, description).is_some() { ecx.span_err(span, &format!( "diagnostic code {} already registered", &token::get_ident(*code) - )[]); + )); } }); let sym = Ident::new(token::gensym(&( "__register_diagnostic_".to_string() + &token::get_ident(*code) - )[])); + ))); MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter()) } diff --git a/src/libsyntax/diagnostics/registry.rs b/src/libsyntax/diagnostics/registry.rs index 62d48189c43..a6cfd1a5a9a 100644 --- a/src/libsyntax/diagnostics/registry.rs +++ b/src/libsyntax/diagnostics/registry.rs @@ -17,10 +17,10 @@ pub struct Registry { impl Registry { pub fn new(descriptions: &[(&'static str, &'static str)]) -> Registry { - Registry { descriptions: descriptions.iter().map(|&tuple| tuple).collect() } + Registry { descriptions: descriptions.iter().cloned().collect() } } pub fn find_description(&self, code: &str) -> Option<&'static str> { - self.descriptions.get(code).map(|desc| *desc) + self.descriptions.get(code).cloned() } } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 8800ffd1e9b..d4ccabbd63b 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -640,7 +640,7 @@ impl<'a> ExtCtxt<'a> { pub fn mod_path(&self) -> Vec<ast::Ident> { let mut v = Vec::new(); v.push(token::str_to_ident(&self.ecfg.crate_name[])); - v.extend(self.mod_path.iter().map(|a| *a)); + v.extend(self.mod_path.iter().cloned()); return v; } pub fn bt_push(&mut self, ei: ExpnInfo) { diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index 80d128959ea..38098e50dee 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -62,5 +62,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(&accumulator[]))) + token::intern_and_get_ident(&accumulator[..]))) } diff --git 
a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 63a8bd9ddf1..9410a51e7a5 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -49,7 +49,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } } - let res = str_to_ident(&res_str[]); + let res = str_to_ident(&res_str[..]); let e = P(ast::Expr { id: ast::DUMMY_NODE_ID, diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs index 879718a6399..93098484ae0 100644 --- a/src/libsyntax/ext/deriving/bounds.rs +++ b/src/libsyntax/ext/deriving/bounds.rs @@ -24,7 +24,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt, { let name = match mitem.node { MetaWord(ref tname) => { - match &tname[] { + match &tname[..] { "Copy" => "Copy", "Send" | "Sync" => { return cx.span_err(span, diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index f878cb5ca8b..b912ed34ae0 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -367,7 +367,7 @@ impl<'a> TraitDef<'a> { "allow" | "warn" | "deny" | "forbid" => true, _ => false, } - }).map(|a| a.clone())); + }).cloned()); push(P(ast::Item { attrs: attrs, ..(*newitem).clone() @@ -410,7 +410,7 @@ impl<'a> TraitDef<'a> { let mut ty_params = ty_params.into_vec(); // Copy the lifetimes - lifetimes.extend(generics.lifetimes.iter().map(|l| (*l).clone())); + lifetimes.extend(generics.lifetimes.iter().cloned()); // Create the type parameters. ty_params.extend(generics.ty_params.iter().map(|ty_param| { @@ -445,14 +445,14 @@ impl<'a> TraitDef<'a> { span: self.span, bound_lifetimes: wb.bound_lifetimes.clone(), bounded_ty: wb.bounded_ty.clone(), - bounds: OwnedSlice::from_vec(wb.bounds.iter().map(|b| b.clone()).collect()) + bounds: OwnedSlice::from_vec(wb.bounds.iter().cloned().collect()) }) } ast::WherePredicate::RegionPredicate(ref rb) => { ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate { span: self.span, lifetime: rb.lifetime, - bounds: rb.bounds.iter().map(|b| b.clone()).collect() + bounds: rb.bounds.iter().cloned().collect() }) } ast::WherePredicate::EqPredicate(ref we) => { @@ -500,7 +500,7 @@ impl<'a> TraitDef<'a> { let opt_trait_ref = Some(trait_ref); let ident = ast_util::impl_pretty_name(&opt_trait_ref, &*self_type); let mut a = vec![attr]; - a.extend(self.attributes.iter().map(|a| a.clone())); + a.extend(self.attributes.iter().cloned()); cx.item( self.span, ident, @@ -536,15 +536,15 @@ impl<'a> TraitDef<'a> { self, struct_def, type_ident, - &self_args[], - &nonself_args[]) + &self_args[..], + &nonself_args[..]) } else { method_def.expand_struct_method_body(cx, self, struct_def, type_ident, - &self_args[], - &nonself_args[]) + &self_args[..], + &nonself_args[..]) }; method_def.create_method(cx, @@ -576,15 +576,15 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - &self_args[], - &nonself_args[]) + &self_args[..], + &nonself_args[..]) } else { method_def.expand_enum_method_body(cx, self, enum_def, type_ident, self_args, - &nonself_args[]) + &nonself_args[..]) }; method_def.create_method(cx, @@ -934,22 +934,22 @@ impl<'a> MethodDef<'a> { .collect::<Vec<String>>(); let self_arg_idents = self_arg_names.iter() - .map(|name|cx.ident_of(&name[])) + .map(|name|cx.ident_of(&name[..])) .collect::<Vec<ast::Ident>>(); // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each self_arg to a usize // corresponding to its variant index. 
let vi_idents: Vec<ast::Ident> = self_arg_names.iter() - .map(|name| { let vi_suffix = format!("{}_vi", &name[]); - cx.ident_of(&vi_suffix[]) }) + .map(|name| { let vi_suffix = format!("{}_vi", &name[..]); + cx.ident_of(&vi_suffix[..]) }) .collect::<Vec<ast::Ident>>(); // Builds, via callback to call_substructure_method, the // delegated expression that handles the catch-all case, // using `__variants_tuple` to drive logic if necessary. let catch_all_substructure = EnumNonMatchingCollapsed( - self_arg_idents, &variants[], &vi_idents[]); + self_arg_idents, &variants[..], &vi_idents[..]); // These arms are of the form: // (Variant1, Variant1, ...) => Body1 @@ -976,7 +976,7 @@ impl<'a> MethodDef<'a> { idents }; for self_arg_name in self_arg_names.tail() { - let (p, idents) = mk_self_pat(cx, &self_arg_name[]); + let (p, idents) = mk_self_pat(cx, &self_arg_name[..]); subpats.push(p); self_pats_idents.push(idents); } @@ -1032,7 +1032,7 @@ impl<'a> MethodDef<'a> { &**variant, field_tuples); let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, &self_args[], nonself_args, + cx, trait_, type_ident, &self_args[..], nonself_args, &substructure); cx.arm(sp, vec![single_pat], arm_expr) @@ -1085,7 +1085,7 @@ impl<'a> MethodDef<'a> { } let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, &self_args[], nonself_args, + cx, trait_, type_ident, &self_args[..], nonself_args, &catch_all_substructure); // Builds the expression: @@ -1391,7 +1391,7 @@ pub fn cs_fold<F>(use_foldl: bool, } }, EnumNonMatchingCollapsed(ref all_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (&all_args[], tuple), + enum_nonmatch_f(cx, trait_span, (&all_args[..], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `derive`") @@ -1431,7 +1431,7 @@ pub fn cs_same_method<F>(f: F, f(cx, trait_span, called) }, EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (&all_self_args[], tuple), + enum_nonmatch_f(cx, trait_span, (&all_self_args[..], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) 
=> { cx.span_bug(trait_span, "static function in `derive`") diff --git a/src/libsyntax/ext/deriving/hash.rs b/src/libsyntax/ext/deriving/hash.rs index 5aa9f9a0c3e..2149c7a7f77 100644 --- a/src/libsyntax/ext/deriving/hash.rs +++ b/src/libsyntax/ext/deriving/hash.rs @@ -14,7 +14,6 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; -use parse::token::InternedString; use ptr::P; pub fn expand_deriving_hash<F>(cx: &mut ExtCtxt, @@ -26,30 +25,26 @@ pub fn expand_deriving_hash<F>(cx: &mut ExtCtxt, { let path = Path::new_(pathvec_std!(cx, core::hash::Hash), None, - vec!(box Literal(Path::new_local("__S"))), true); - let generics = LifetimeBounds { - lifetimes: Vec::new(), - bounds: vec!(("__S", - vec!(path_std!(cx, core::hash::Writer), - path_std!(cx, core::hash::Hasher)))), - }; - let args = Path::new_local("__S"); - let inline = cx.meta_word(span, InternedString::new("inline")); - let attrs = vec!(cx.attribute(span, inline)); + vec!(), true); + let arg = Path::new_local("__H"); let hash_trait_def = TraitDef { span: span, attributes: Vec::new(), path: path, additional_bounds: Vec::new(), - generics: generics, + generics: LifetimeBounds::empty(), methods: vec!( MethodDef { name: "hash", - generics: LifetimeBounds::empty(), + generics: LifetimeBounds { + lifetimes: Vec::new(), + bounds: vec![("__H", + vec![path_std!(cx, core::hash::Hasher)])], + }, explicit_self: borrowed_explicit_self(), - args: vec!(Ptr(box Literal(args), Borrowed(None, MutMutable))), + args: vec!(Ptr(box Literal(arg), Borrowed(None, MutMutable))), ret_ty: nil_ty(), - attributes: attrs, + attributes: vec![], combine_substructure: combine_substructure(box |a, b, c| { hash_substructure(a, b, c) }) diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index 0ed9e85e576..f8bc331bfcf 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -102,7 +102,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt, |i| push(i))) } - match &tname[] { + match &tname[..] { "Clone" => expand!(clone::expand_deriving_clone), "Hash" => expand!(hash::expand_deriving_hash), diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index 3f5947672e0..281f23f9e61 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -128,7 +128,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let formatter = substr.nonself_args[0].clone(); let meth = cx.ident_of("write_fmt"); - let s = token::intern_and_get_ident(&format_string[]); + let s = token::intern_and_get_ident(&format_string[..]); let format_string = cx.expr_str(span, s); // phew, not our responsibility any more! diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index 5d56707c87a..9c04d1e9282 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT Some(v) => v }; - let e = match env::var(&var[]) { + let e = match env::var(&var[..]) { Err(..) 
=> { cx.expr_path(cx.path_all(sp, true, @@ -56,7 +56,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT cx.ident_of("Some")), vec!(cx.expr_str(sp, token::intern_and_get_ident( - &s[])))) + &s[..])))) } }; MacExpr::new(e) @@ -101,7 +101,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } - let e = match env::var(&var[]) { + let e = match env::var(&var[..]) { Err(_) => { cx.span_err(sp, &msg); cx.expr_usize(sp, 0) diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 6b7cecee815..d4dda7390a5 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -405,7 +405,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, }, }); let fm = fresh_mark(); - let marked_before = mark_tts(&tts[], fm); + let marked_before = mark_tts(&tts[..], fm); // The span that we pass to the expanders we want to // be the root of the call stack. That's the most @@ -416,7 +416,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, let opt_parsed = { let expanded = expandfun.expand(fld.cx, mac_span, - &marked_before[]); + &marked_before[..]); parse_thunk(expanded) }; let parsed = match opt_parsed { @@ -425,7 +425,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, &format!("non-expression macro in expression position: {}", - &extnamestr[] + &extnamestr[..] )[]); return None; } @@ -633,8 +633,8 @@ pub fn expand_item_mac(it: P<ast::Item>, } }); // mark before expansion: - let marked_before = mark_tts(&tts[], fm); - expander.expand(fld.cx, it.span, &marked_before[]) + let marked_before = mark_tts(&tts[..], fm); + expander.expand(fld.cx, it.span, &marked_before[..]) } IdentTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { @@ -652,7 +652,7 @@ pub fn expand_item_mac(it: P<ast::Item>, } }); // mark before expansion: - let marked_tts = mark_tts(&tts[], fm); + let marked_tts = mark_tts(&tts[..], fm); expander.expand(fld.cx, it.span, it.ident, marked_tts) } MacroRulesTT => { @@ -971,11 +971,11 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> { }); let fm = fresh_mark(); - let marked_before = mark_tts(&tts[], fm); + let marked_before = mark_tts(&tts[..], fm); let mac_span = fld.cx.original_span(); let expanded = match expander.expand(fld.cx, mac_span, - &marked_before[]).make_pat() { + &marked_before[..]).make_pat() { Some(e) => e, None => { fld.cx.span_err( @@ -1128,7 +1128,7 @@ fn expand_annotatable(a: Annotatable, if valid_ident { fld.cx.mod_push(it.ident); } - let macro_use = contains_macro_use(fld, &new_attrs[]); + let macro_use = contains_macro_use(fld, &new_attrs[..]); let result = with_exts_frame!(fld.cx.syntax_env, macro_use, noop_fold_item(it, fld)); @@ -1508,7 +1508,7 @@ impl Folder for Marker { node: match node { MacInvocTT(path, tts, ctxt) => { MacInvocTT(self.fold_path(path), - self.fold_tts(&tts[]), + self.fold_tts(&tts[..]), mtwt::apply_mark(self.mark, ctxt)) } }, @@ -1914,7 +1914,7 @@ mod test { .collect(); println!("varref #{}: {:?}, resolves to {}",idx, varref_idents, varref_name); let string = token::get_ident(final_varref_ident); - println!("varref's first segment's string: \"{}\"", &string[]); + println!("varref's first segment's string: \"{}\"", &string[..]); println!("binding #{}: {}, resolves to {}", binding_idx, bindings[binding_idx], binding_name); mtwt::with_sctable(|x| mtwt::display_sctable(x)); @@ -1967,10 +1967,10 @@ foo_module!(); let cxbinds: 
Vec<&ast::Ident> = bindings.iter().filter(|b| { let ident = token::get_ident(**b); - let string = &ident[]; + let string = &ident[..]; "xx" == string }).collect(); - let cxbinds: &[&ast::Ident] = &cxbinds[]; + let cxbinds: &[&ast::Ident] = &cxbinds[..]; let cxbind = match cxbinds { [b] => b, _ => panic!("expected just one binding for ext_cx") diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 170a455a913..e17329d7d33 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -118,7 +118,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } }; let interned_name = token::get_ident(ident); - let name = &interned_name[]; + let name = &interned_name[..]; p.expect(&token::Eq); let e = p.parse_expr(); @@ -218,7 +218,7 @@ impl<'a, 'b> Context<'a, 'b> { let msg = format!("invalid reference to argument `{}` ({})", arg, self.describe_num_args()); - self.ecx.span_err(self.fmtsp, &msg[]); + self.ecx.span_err(self.fmtsp, &msg[..]); return; } { @@ -238,7 +238,7 @@ impl<'a, 'b> Context<'a, 'b> { Some(e) => e.span, None => { let msg = format!("there is no argument named `{}`", name); - self.ecx.span_err(self.fmtsp, &msg[]); + self.ecx.span_err(self.fmtsp, &msg[..]); return; } }; @@ -587,7 +587,7 @@ impl<'a, 'b> Context<'a, 'b> { -> P<ast::Expr> { let trait_ = match *ty { Known(ref tyname) => { - match &tyname[] { + match &tyname[..] { "" => "Display", "?" => "Debug", "e" => "LowerExp", diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 67990895d07..2c7bf713aad 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -668,7 +668,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> { for i in 0..tt.len() { seq.push(tt.get_tt(i)); } - mk_tts(cx, &seq[]) + mk_tts(cx, &seq[..]) } ast::TtToken(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); @@ -757,7 +757,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); let mut vector = vec!(stmt_let_sp, stmt_let_tt); - vector.extend(mk_tts(cx, &tts[]).into_iter()); + vector.extend(mk_tts(cx, &tts[..]).into_iter()); let block = cx.expr_block( cx.block_all(sp, vector, diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 7a3a3562bdf..c8d48750c75 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box<base::MacResult+'static> { let s = pprust::tts_to_string(tts); base::MacExpr::new(cx.expr_str(sp, - token::intern_and_get_ident(&s[]))) + token::intern_and_get_ident(&s[..]))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) @@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) .connect("::"); base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(&string[]))) + token::intern_and_get_ident(&string[..]))) } /// include! 
: parse the given file as an expr @@ -117,7 +117,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree None => self.p.span_fatal( self.p.span, &format!("expected item, found `{}`", - self.p.this_token_to_string())[] + self.p.this_token_to_string()) ) } } @@ -141,7 +141,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), - e)[]); + e)); return DummyResult::expr(sp); } Ok(bytes) => bytes, @@ -151,7 +151,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // Add this input file to the code map to make it available as // dependency information let filename = format!("{}", file.display()); - let interned = token::intern_and_get_ident(&src[]); + let interned = token::intern_and_get_ident(&src[..]); cx.codemap().new_filemap(filename, src); base::MacExpr::new(cx.expr_str(sp, interned)) @@ -159,7 +159,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Err(_) => { cx.span_err(sp, &format!("{} wasn't a utf-8 file", - file.display())[]); + file.display())); return DummyResult::expr(sp); } } @@ -175,11 +175,11 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) match File::open(&file).read_to_end() { Err(e) => { cx.span_err(sp, - &format!("couldn't read {}: {}", file.display(), e)[]); + &format!("couldn't read {}: {}", file.display(), e)); return DummyResult::expr(sp); } Ok(bytes) => { - let bytes = bytes.iter().map(|x| *x).collect(); + let bytes = bytes.iter().cloned().collect(); base::MacExpr::new(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes)))) } } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index d649e497ef7..664f7b3e088 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -165,7 +165,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize { pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos) -> Box<MatcherPos> { - let match_idx_hi = count_names(&ms[]); + let match_idx_hi = count_names(&ms[..]); let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect(); box MatcherPos { stack: vec![], @@ -229,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) p_s.span_diagnostic .span_fatal(sp, &format!("duplicated bind name: {}", - &string)[]) + &string)) } } } @@ -254,13 +254,13 @@ pub fn parse_or_else(sess: &ParseSess, rdr: TtReader, ms: Vec<TokenTree> ) -> HashMap<Ident, Rc<NamedMatch>> { - match parse(sess, cfg, rdr, &ms[]) { + match parse(sess, cfg, rdr, &ms[..]) { Success(m) => m, Failure(sp, str) => { - sess.span_diagnostic.span_fatal(sp, &str[]) + sess.span_diagnostic.span_fatal(sp, &str[..]) } Error(sp, str) => { - sess.span_diagnostic.span_fatal(sp, &str[]) + sess.span_diagnostic.span_fatal(sp, &str[..]) } } } @@ -283,7 +283,7 @@ pub fn parse(sess: &ParseSess, -> ParseResult { let mut cur_eis = Vec::new(); cur_eis.push(initial_matcher_pos(Rc::new(ms.iter() - .map(|x| (*x).clone()) + .cloned() .collect()), None, rdr.peek().sp.lo)); @@ -447,7 +447,7 @@ pub fn parse(sess: &ParseSess, for dv in &mut (&mut eof_eis[0]).matches { v.push(dv.pop().unwrap()); } - return Success(nameize(sess, ms, &v[])); + return Success(nameize(sess, ms, &v[..])); } else if eof_eis.len() > 1 { return Error(sp, "ambiguity: multiple successful parses".to_string()); } else { @@ -533,7 +533,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> 
Nonterminal { _ => { let token_str = pprust::token_to_string(&p.token); p.fatal(&format!("expected ident, found {}", - &token_str[])[]) + &token_str[..])) } }, "path" => { @@ -542,7 +542,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal { "meta" => token::NtMeta(p.parse_meta_item()), _ => { p.span_fatal_help(sp, - &format!("invalid fragment specifier `{}`", name)[], + &format!("invalid fragment specifier `{}`", name), "valid fragment specifiers are `ident`, `block`, \ `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \ and `item`") diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index f322cf8bad0..fa6d934a457 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -50,7 +50,7 @@ impl<'a> ParserAnyMacro<'a> { following", token_str); let span = parser.span; - parser.span_err(span, &msg[]); + parser.span_err(span, &msg[..]); } } } @@ -123,8 +123,8 @@ impl TTMacroExpander for MacroRulesMacroExpander { self.name, self.imported_from, arg, - &self.lhses[], - &self.rhses[]) + &self.lhses, + &self.rhses) } } @@ -151,7 +151,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, match **lhs { MatchedNonterminal(NtTT(ref lhs_tt)) => { let lhs_tt = match **lhs_tt { - TtDelimited(_, ref delim) => &delim.tts[], + TtDelimited(_, ref delim) => &delim.tts[..], _ => cx.span_fatal(sp, "malformed macro lhs") }; // `None` is because we're not interpolating @@ -159,7 +159,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, None, None, arg.iter() - .map(|x| (*x).clone()) + .cloned() .collect(), true); match parse(cx.parse_sess(), cx.cfg(), arg_rdr, lhs_tt) { @@ -192,13 +192,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, best_fail_spot = sp; best_fail_msg = (*msg).clone(); }, - Error(sp, ref msg) => cx.span_fatal(sp, &msg[]) + Error(sp, ref msg) => cx.span_fatal(sp, &msg[..]) } } _ => cx.bug("non-matcher found in parsed lhses") } } - cx.span_fatal(best_fail_spot, &best_fail_msg[]); + cx.span_fatal(best_fail_spot, &best_fail_msg[..]); } // Note that macro-by-example's input is also matched against a token tree: diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 83234e3b7a5..0d92bd761b4 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -255,7 +255,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } LisContradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - r.sp_diag.span_fatal(sp.clone(), &msg[]); + r.sp_diag.span_fatal(sp.clone(), &msg[..]); } LisConstraint(len, _) => { if len == 0 { @@ -309,7 +309,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { r.sp_diag.span_fatal( r.cur_span, /* blame the macro writer */ &format!("variable '{:?}' is still repeating at this depth", - token::get_ident(ident))[]); + token::get_ident(ident))); } } } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 0110823ae98..071158fcebb 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -356,7 +356,7 @@ pub fn emit_feature_err(diag: &SpanHandler, feature: &str, span: Span, explain: diag.span_err(span, explain); diag.span_help(span, &format!("add #![feature({})] to the \ crate attributes to enable", - feature)[]); + feature)); } pub fn emit_feature_warn(diag: &SpanHandler, feature: &str, span: Span, explain: &str) { @@ -364,7 +364,7 @@ pub fn emit_feature_warn(diag: &SpanHandler, feature: &str, span: Span, explain: if diag.handler.can_emit_warnings { diag.span_help(span, 
&format!("add #![feature({})] to the \ crate attributes to silence this warning", - feature)[]); + feature)); } } @@ -438,7 +438,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { fn visit_item(&mut self, i: &ast::Item) { match i.node { ast::ItemExternCrate(_) => { - if attr::contains_name(&i.attrs[], "macro_reexport") { + if attr::contains_name(&i.attrs[..], "macro_reexport") { self.gate_feature("macro_reexport", i.span, "macros reexports are experimental \ and possibly buggy"); @@ -446,7 +446,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } ast::ItemForeignMod(ref foreign_module) => { - if attr::contains_name(&i.attrs[], "link_args") { + if attr::contains_name(&i.attrs[..], "link_args") { self.gate_feature("link_args", i.span, "the `link_args` attribute is not portable \ across platforms, it is recommended to \ @@ -460,17 +460,17 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } ast::ItemFn(..) => { - if attr::contains_name(&i.attrs[], "plugin_registrar") { + if attr::contains_name(&i.attrs[..], "plugin_registrar") { self.gate_feature("plugin_registrar", i.span, "compiler plugins are experimental and possibly buggy"); } - if attr::contains_name(&i.attrs[], "start") { + if attr::contains_name(&i.attrs[..], "start") { self.gate_feature("start", i.span, "a #[start] function is an experimental \ feature whose signature may change \ over time"); } - if attr::contains_name(&i.attrs[], "main") { + if attr::contains_name(&i.attrs[..], "main") { self.gate_feature("main", i.span, "declaration of a nonstandard #[main] \ function may change over time, for now \ @@ -479,7 +479,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } ast::ItemStruct(..) => { - if attr::contains_name(&i.attrs[], "simd") { + if attr::contains_name(&i.attrs[..], "simd") { self.gate_feature("simd", i.span, "SIMD types are experimental and possibly buggy"); } @@ -505,7 +505,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { removed in the future"); } - if attr::contains_name(&i.attrs[], + if attr::contains_name(&i.attrs[..], "old_orphan_check") { self.gate_feature( "old_orphan_check", @@ -513,7 +513,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { "the new orphan check rules will eventually be strictly enforced"); } - if attr::contains_name(&i.attrs[], + if attr::contains_name(&i.attrs[..], "old_impl_check") { self.gate_feature("old_impl_check", i.span, @@ -528,7 +528,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } fn visit_foreign_item(&mut self, i: &ast::ForeignItem) { - if attr::contains_name(&i.attrs[], "linkage") { + if attr::contains_name(&i.attrs, "linkage") { self.gate_feature("linkage", i.span, "the `linkage` attribute is experimental \ and not portable across platforms") diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index e8bdcd62b58..3a7fa54edbd 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -28,7 +28,6 @@ #![feature(collections)] #![feature(core)] #![feature(env)] -#![feature(hash)] #![feature(int_uint)] #![feature(old_io)] #![feature(libc)] diff --git a/src/libsyntax/owned_slice.rs b/src/libsyntax/owned_slice.rs index 0f9a56baa17..f5201d4a8bc 100644 --- a/src/libsyntax/owned_slice.rs +++ b/src/libsyntax/owned_slice.rs @@ -10,7 +10,7 @@ use std::default::Default; use std::fmt; -use std::iter::FromIterator; +use std::iter::{IntoIterator, FromIterator}; use std::ops::Deref; use std::vec; use serialize::{Encodable, Decodable, Encoder, Decoder}; @@ -77,8 +77,8 @@ impl<T: Clone> Clone for OwnedSlice<T> { } 
impl<T> FromIterator<T> for OwnedSlice<T> { - fn from_iter<I: Iterator<Item=T>>(iter: I) -> OwnedSlice<T> { - OwnedSlice::from_vec(iter.collect()) + fn from_iter<I: IntoIterator<Item=T>>(iter: I) -> OwnedSlice<T> { + OwnedSlice::from_vec(iter.into_iter().collect()) } } diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index b17fc7fe82e..1f06db60027 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -61,7 +61,7 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle { pub fn strip_doc_comment_decoration(comment: &str) -> String { /// remove whitespace-only lines from the start/end of lines - fn vertical_trim(lines: Vec<String> ) -> Vec<String> { + fn vertical_trim(lines: Vec<String>) -> Vec<String> { let mut i = 0; let mut j = lines.len(); // first line of all-stars should be omitted @@ -82,7 +82,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { while j > i && lines[j - 1].trim().is_empty() { j -= 1; } - return lines[i..j].iter().map(|x| (*x).clone()).collect(); + lines[i..j].iter().cloned().collect() } /// remove a "[ \t]*\*" block from each line, if possible @@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. - if is_doc_comment(&line[]) { + if is_doc_comment(&line[..]) { break; } lines.push(line); @@ -224,7 +224,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<usize> { fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> , s: String, col: CharPos) { let len = s.len(); - let s1 = match all_whitespace(&s[], col) { + let s1 = match all_whitespace(&s[..], col) { Some(col) => { if col < len { (&s[col..len]).to_string() @@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader, rdr.bump(); rdr.bump(); } - if is_block_doc_comment(&curr_line[]) { + if is_block_doc_comment(&curr_line[..]) { return } assert!(!curr_line.contains_char('\n')); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 38ba0b38df5..fd08cbd161b 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -16,14 +16,13 @@ use ext::tt::transcribe::tt_next_token; use parse::token; use parse::token::{str_to_ident}; -use std::borrow::IntoCow; +use std::borrow::{IntoCow, Cow}; use std::char; use std::fmt; use std::mem::replace; use std::num; use std::rc::Rc; use std::str; -use std::string::CowString; pub use ext::tt::transcribe::{TtReader, new_tt_reader, new_tt_reader_with_doc_flag}; @@ -196,7 +195,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.fatal_span_(from_pos, to_pos, &m[]); + self.fatal_span_(from_pos, to_pos, &m[..]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -205,7 +204,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.err_span_(from_pos, to_pos, &m[]); + self.err_span_(from_pos, to_pos, &m[..]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -215,7 +214,7 @@ impl<'a> StringReader<'a> { let from = self.byte_offset(from_pos).to_usize(); let to = self.byte_offset(to_pos).to_usize(); m.push_str(&self.filemap.src[from..to]); - self.fatal_span_(from_pos, to_pos, &m[]); + self.fatal_span_(from_pos, to_pos, &m[..]); } /// Advance peek_tok and peek_span to refer to the next 
token, and @@ -278,7 +277,7 @@ impl<'a> StringReader<'a> { /// Converts CRLF to LF in the given string, raising an error on bare CR. fn translate_crlf<'b>(&self, start: BytePos, - s: &'b str, errmsg: &'b str) -> CowString<'b> { + s: &'b str, errmsg: &'b str) -> Cow<'b, str> { let mut i = 0; while i < s.len() { let str::CharRange { ch, next } = s.char_range_at(i); @@ -556,7 +555,7 @@ impl<'a> StringReader<'a> { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") } else { string.into_cow() }; - token::DocComment(token::intern(&string[])) + token::DocComment(token::intern(&string[..])) } else { token::Comment }; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 6ea23cf3f04..7ed48bdbb92 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -258,7 +258,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) unreachable!() } }; - match str::from_utf8(&bytes[]).ok() { + match str::from_utf8(&bytes[..]).ok() { Some(s) => { return string_to_filemap(sess, s.to_string(), path.as_str().unwrap().to_string()) @@ -398,7 +398,7 @@ pub fn char_lit(lit: &str) -> (char, isize) { } let msg = format!("lexer should have rejected a bad character escape {}", lit); - let msg2 = &msg[]; + let msg2 = &msg[..]; fn esc(len: usize, lit: &str) -> Option<(char, isize)> { num::from_str_radix(&lit[2..len], 16).ok() @@ -662,7 +662,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::<String>(); - let mut s = &s2[]; + let mut s = &s2[..]; debug!("integer_lit: {}, {:?}", s, suffix); @@ -819,7 +819,7 @@ mod test { #[test] fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); - let tts: &[ast::TokenTree] = &tts[]; + let tts: &[ast::TokenTree] = &tts[..]; match tts { [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)), ast::TtToken(_, token::Not), @@ -1114,24 +1114,24 @@ mod test { let use_s = "use foo::bar::baz;"; let vitem = string_to_item(use_s.to_string()).unwrap(); let vitem_s = item_to_string(&*vitem); - assert_eq!(&vitem_s[], use_s); + assert_eq!(&vitem_s[..], use_s); let use_s = "use foo::bar as baz;"; let vitem = string_to_item(use_s.to_string()).unwrap(); let vitem_s = item_to_string(&*vitem); - assert_eq!(&vitem_s[], use_s); + assert_eq!(&vitem_s[..], use_s); } #[test] fn parse_extern_crate() { let ex_s = "extern crate foo;"; let vitem = string_to_item(ex_s.to_string()).unwrap(); let vitem_s = item_to_string(&*vitem); - assert_eq!(&vitem_s[], ex_s); + assert_eq!(&vitem_s[..], ex_s); let ex_s = "extern crate \"foo\" as bar;"; let vitem = string_to_item(ex_s.to_string()).unwrap(); let vitem_s = item_to_string(&*vitem); - assert_eq!(&vitem_s[], ex_s); + assert_eq!(&vitem_s[..], ex_s); } fn get_spans_of_pat_idents(src: &str) -> Vec<Span> { @@ -1203,19 +1203,19 @@ mod test { let source = "/// doc comment\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(&doc[], "/// doc comment"); + assert_eq!(&doc[..], "/// doc comment"); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap(); let docs = item.attrs.iter().filter(|a| &a.name()[] == "doc") .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; - assert_eq!(&docs[], b); + assert_eq!(&docs[..], b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(&doc[], "/** doc comment\n * with CRLF */"); + assert_eq!(&doc[..], "/** doc comment\n * with CRLF */"); } #[test] @@ -1235,7 +1235,7 @@ mod test { let span = tts.iter().rev().next().unwrap().get_span(); match sess.span_diagnostic.cm.span_to_snippet(span) { - Ok(s) => assert_eq!(&s[], "{ body }"), + Ok(s) => assert_eq!(&s[..], "{ body }"), Err(_) => panic!("could not get snippet"), } } diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 1df2e762ee7..8480772ce6c 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -28,6 +28,7 @@ pub enum ObsoleteSyntax { ProcExpr, ClosureType, ClosureKind, + EmptyIndex, } pub trait ParserObsoleteMethods { @@ -40,7 +41,8 @@ pub trait ParserObsoleteMethods { sp: Span, kind: ObsoleteSyntax, kind_str: &str, - desc: &str); + desc: &str, + error: bool); fn is_obsolete_ident(&mut self, ident: &str) -> bool; fn eat_obsolete_ident(&mut self, ident: &str) -> bool; } @@ -48,35 +50,46 @@ pub trait ParserObsoleteMethods { impl<'a> ParserObsoleteMethods for parser::Parser<'a> { /// Reports an obsolete syntax non-fatal error. fn obsolete(&mut self, sp: Span, kind: ObsoleteSyntax) { - let (kind_str, desc) = match kind { + let (kind_str, desc, error) = match kind { ObsoleteSyntax::ForSized => ( "for Sized?", "no longer required. 
Traits (and their `Self` type) do not have the `Sized` bound \ by default", + true, ), ObsoleteSyntax::ProcType => ( "the `proc` type", "use unboxed closures instead", + true, ), ObsoleteSyntax::ProcExpr => ( "`proc` expression", "use a `move ||` expression instead", + true, ), ObsoleteSyntax::ClosureType => ( "`|usize| -> bool` closure type", - "use unboxed closures instead, no type annotation needed" + "use unboxed closures instead, no type annotation needed", + true, ), ObsoleteSyntax::ClosureKind => ( "`:`, `&mut:`, or `&:`", - "rely on inference instead" + "rely on inference instead", + true, ), ObsoleteSyntax::Sized => ( "`Sized? T` for removing the `Sized` bound", - "write `T: ?Sized` instead" + "write `T: ?Sized` instead", + true, + ), + ObsoleteSyntax::EmptyIndex => ( + "[]", + "write `[..]` instead", + false, // warning for now ), }; - self.report(sp, kind, kind_str, desc); + self.report(sp, kind, kind_str, desc, error); } /// Reports an obsolete syntax non-fatal error, and returns @@ -90,9 +103,13 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> { sp: Span, kind: ObsoleteSyntax, kind_str: &str, - desc: &str) { - self.span_err(sp, - &format!("obsolete syntax: {}", kind_str)[]); + desc: &str, + error: bool) { + if error { + self.span_err(sp, &format!("obsolete syntax: {}", kind_str)[]); + } else { + self.span_warn(sp, &format!("obsolete syntax: {}", kind_str)[]); + } if !self.obsolete_set.contains(&kind) { self.sess diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 407740e580d..370201e5382 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -240,9 +240,8 @@ macro_rules! maybe_whole { fn maybe_append(mut lhs: Vec<Attribute>, rhs: Option<Vec<Attribute>>) -> Vec<Attribute> { - match rhs { - Some(ref attrs) => lhs.extend(attrs.iter().map(|a| a.clone())), - None => {} + if let Some(ref attrs) = rhs { + lhs.extend(attrs.iter().cloned()) } lhs } @@ -362,7 +361,7 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(t); let last_span = self.last_span; self.span_fatal(last_span, &format!("unexpected token: `{}`", - token_str)[]); + token_str)); } pub fn unexpected(&mut self) -> ! { @@ -381,7 +380,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(&format!("expected `{}`, found `{}`", token_str, - this_token_str)[]) + this_token_str)) } } else { self.expect_one_of(slice::ref_slice(t), &[]); @@ -422,7 +421,7 @@ impl<'a> Parser<'a> { expected.push_all(&*self.expected_tokens); expected.sort_by(|a, b| a.to_string().cmp(&b.to_string())); expected.dedup(); - let expect = tokens_to_string(&expected[]); + let expect = tokens_to_string(&expected[..]); let actual = self.this_token_to_string(); self.fatal( &(if expected.len() > 1 { @@ -436,7 +435,7 @@ impl<'a> Parser<'a> { (format!("expected {}, found `{}`", expect, actual)) - }[]) + })[..] ) } } @@ -467,9 +466,9 @@ impl<'a> Parser<'a> { debug!("commit_expr {:?}", e); if let ExprPath(..) = e.node { // might be unit-struct construction; check for recoverableinput error. 
- let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>(); + let mut expected = edible.iter().cloned().collect::<Vec<_>>(); expected.push_all(inedible); - self.check_for_erroneous_unit_struct_expecting(&expected[]); + self.check_for_erroneous_unit_struct_expecting(&expected[..]); } self.expect_one_of(edible, inedible) } @@ -485,10 +484,9 @@ impl<'a> Parser<'a> { if self.last_token .as_ref() .map_or(false, |t| t.is_ident() || t.is_path()) { - let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>(); - expected.push_all(&inedible[]); - self.check_for_erroneous_unit_struct_expecting( - &expected[]); + let mut expected = edible.iter().cloned().collect::<Vec<_>>(); + expected.push_all(&inedible); + self.check_for_erroneous_unit_struct_expecting(&expected); } self.expect_one_of(edible, inedible) } @@ -511,7 +509,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(&format!("expected ident, found `{}`", - token_str)[]) + token_str)) } } } @@ -599,7 +597,7 @@ impl<'a> Parser<'a> { let span = self.span; self.span_err(span, &format!("expected identifier, found keyword `{}`", - token_str)[]); + token_str)); } } @@ -608,7 +606,7 @@ impl<'a> Parser<'a> { if self.token.is_reserved_keyword() { let token_str = self.this_token_to_string(); self.fatal(&format!("`{}` is a reserved keyword", - token_str)[]) + token_str)) } } @@ -734,7 +732,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(&format!("expected `{}`, found `{}`", gt_str, - this_token_str)[]) + this_token_str)) } } } @@ -1364,7 +1362,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = p.parse_inner_attrs_and_block(); let mut attrs = attrs; - attrs.push_all(&inner_attrs[]); + attrs.push_all(&inner_attrs[..]); ProvidedMethod(P(ast::Method { attrs: attrs, id: ast::DUMMY_NODE_ID, @@ -1383,7 +1381,7 @@ impl<'a> Parser<'a> { _ => { let token_str = p.this_token_to_string(); p.fatal(&format!("expected `;` or `{{`, found `{}`", - token_str)[]) + token_str)[..]) } } } @@ -1551,7 +1549,7 @@ impl<'a> Parser<'a> { } else { let this_token_str = self.this_token_to_string(); let msg = format!("expected type, found `{}`", this_token_str); - self.fatal(&msg[]); + self.fatal(&msg[..]); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1699,14 +1697,14 @@ impl<'a> Parser<'a> { token::StrRaw(s, n) => { (true, LitStr( - token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())[]), + token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())), ast::RawStr(n))) } token::Binary(i) => (true, LitBinary(parse::binary_lit(i.as_str()))), token::BinaryRaw(i, _) => (true, - LitBinary(Rc::new(i.as_str().as_bytes().iter().map(|&x| x).collect()))), + LitBinary(Rc::new(i.as_str().as_bytes().iter().cloned().collect()))), }; if suffix_illegal { @@ -1944,7 +1942,7 @@ impl<'a> Parser<'a> { }; } _ => { - self.fatal(&format!("expected a lifetime name")[]); + self.fatal(&format!("expected a lifetime name")); } } } @@ -1982,7 +1980,7 @@ impl<'a> Parser<'a> { let msg = format!("expected `,` or `>` after lifetime \ name, found `{}`", this_token_str); - self.fatal(&msg[]); + self.fatal(&msg[..]); } } } @@ -2497,7 +2495,7 @@ impl<'a> Parser<'a> { let last_span = self.last_span; let fstr = n.as_str(); self.span_err(last_span, - &format!("unexpected token: `{}`", n.as_str())[]); + &format!("unexpected token: `{}`", n.as_str())); if fstr.chars().all(|x| "0123456789.".contains_char(x)) { let float = match fstr.parse::<f64>().ok() { Some(f) => f, @@ -2506,7 +2504,7 @@ impl<'a> Parser<'a> { 
self.span_help(last_span, &format!("try parenthesizing the first index; e.g., `(foo.{}){}`", float.trunc() as usize, - &float.fract().to_string()[1..])[]); + &float.fract().to_string()[1..])); } self.abort_if_errors(); @@ -2552,8 +2550,9 @@ impl<'a> Parser<'a> { parameters: ast::PathParameters::none(), } }).collect(); + let span = mk_sp(lo, hi); let path = ast::Path { - span: mk_sp(lo, hi), + span: span, global: true, segments: segments, }; @@ -2562,10 +2561,8 @@ impl<'a> Parser<'a> { let ix = self.mk_expr(bracket_pos, hi, range); let index = self.mk_index(e, ix); e = self.mk_expr(lo, hi, index); - // Enable after snapshot. - // self.span_warn(e.span, "deprecated slicing syntax: `[]`"); - // self.span_note(e.span, - // "use `&expr[..]` to construct a slice of the whole of expr"); + + self.obsolete(span, ObsoleteSyntax::EmptyIndex); } else { let ix = self.parse_expr(); hi = self.span.hi; @@ -2639,7 +2636,7 @@ impl<'a> Parser<'a> { match self.token { token::SubstNt(name, _) => self.fatal(&format!("unknown macro variable `{}`", - token::get_ident(name))[]), + token::get_ident(name))), _ => {} } } @@ -2701,7 +2698,7 @@ impl<'a> Parser<'a> { }; let token_str = p.this_token_to_string(); p.fatal(&format!("incorrect close delimiter: `{}`", - token_str)[]) + token_str)) }, /* we ought to allow different depths of unquotation */ token::Dollar | token::SubstNt(..) if p.quote_depth > 0 => { @@ -2822,7 +2819,7 @@ impl<'a> Parser<'a> { let this_token_to_string = self.this_token_to_string(); self.span_err(span, &format!("expected expression, found `{}`", - this_token_to_string)[]); + this_token_to_string)); let box_span = mk_sp(lo, self.last_span.hi); self.span_help(box_span, "perhaps you meant `box() (foo)` instead?"); @@ -3275,7 +3272,7 @@ impl<'a> Parser<'a> { if self.token != token::CloseDelim(token::Brace) { let token_str = self.this_token_to_string(); self.fatal(&format!("expected `{}`, found `{}`", "}", - token_str)[]) + token_str)) } etc = true; break; @@ -3576,7 +3573,7 @@ impl<'a> Parser<'a> { let span = self.span; let tok_str = self.this_token_to_string(); self.span_fatal(span, - &format!("expected identifier, found `{}`", tok_str)[]); + &format!("expected identifier, found `{}`", tok_str)); } let ident = self.parse_ident(); let last_span = self.last_span; @@ -3673,7 +3670,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; if self.check_keyword(keywords::Let) { - check_expected_item(self, &item_attrs[]); + check_expected_item(self, &item_attrs[..]); self.expect_keyword(keywords::Let); let decl = self.parse_let(); P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) @@ -3682,7 +3679,7 @@ impl<'a> Parser<'a> { && self.look_ahead(1, |t| *t == token::Not) { // it's a macro invocation: - check_expected_item(self, &item_attrs[]); + check_expected_item(self, &item_attrs[..]); // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... 
@@ -3710,7 +3707,7 @@ impl<'a> Parser<'a> { let tok_str = self.this_token_to_string(); self.fatal(&format!("expected {}`(` or `{{`, found `{}`", ident_str, - tok_str)[]) + tok_str)) }, }; @@ -3758,7 +3755,7 @@ impl<'a> Parser<'a> { } } else { let found_attrs = !item_attrs.is_empty(); - let item_err = Parser::expected_item_err(&item_attrs[]); + let item_err = Parser::expected_item_err(&item_attrs[..]); match self.parse_item_(item_attrs, false) { Ok(i) => { let hi = i.span.hi; @@ -3795,7 +3792,7 @@ impl<'a> Parser<'a> { let sp = self.span; let tok = self.this_token_to_string(); self.span_fatal_help(sp, - &format!("expected `{{`, found `{}`", tok)[], + &format!("expected `{{`, found `{}`", tok), "place this code inside a block"); } @@ -3830,13 +3827,13 @@ impl<'a> Parser<'a> { while self.token != token::CloseDelim(token::Brace) { // parsing items even when they're not allowed lets us give // better error messages and recover more gracefully. - attributes_box.push_all(&self.parse_outer_attributes()[]); + attributes_box.push_all(&self.parse_outer_attributes()); match self.token { token::Semi => { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(&attributes_box[])); + Parser::expected_item_err(&attributes_box[..])); attributes_box = Vec::new(); } self.bump(); // empty @@ -3928,7 +3925,7 @@ impl<'a> Parser<'a> { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(&attributes_box[])); + Parser::expected_item_err(&attributes_box[..])); } let hi = self.span.hi; @@ -4383,7 +4380,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(&format!("expected `self`, found `{}`", - token_str)[]) + token_str)) } } } @@ -4404,7 +4401,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(&format!("expected `Self`, found `{}`", - token_str)[]) + token_str)) } } } @@ -4539,7 +4536,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(&format!("expected `,` or `)`, found `{}`", - token_str)[]) + token_str)) } } } @@ -4712,7 +4709,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let body_span = body.span; let mut new_attrs = attrs; - new_attrs.push_all(&inner_attrs[]); + new_attrs.push_all(&inner_attrs[..]); (ast::MethDecl(ident, generics, abi, @@ -4942,7 +4939,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(&format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name.clone()))[]); + token::get_ident(class_name.clone()))); } self.bump(); @@ -4950,7 +4947,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.fatal(&format!("expected `where`, or `{}` after struct \ name, found `{}`", "{", - token_str)[]); + token_str)); } fields @@ -4981,7 +4978,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(&format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name.clone()))[]); + token::get_ident(class_name.clone()))); } self.parse_where_clause(generics); @@ -4996,7 +4993,7 @@ impl<'a> Parser<'a> { } else { let token_str = self.this_token_to_string(); self.fatal(&format!("expected `where`, `{}`, `(`, or `;` after struct \ - name, found `{}`", "{", token_str)[]); + name, found `{}`", "{", token_str)); } } @@ -5016,7 +5013,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal_help(span, 
&format!("expected `,`, or `}}`, found `{}`", - token_str)[], + token_str), "struct fields should be separated by commas") } } @@ -5088,7 +5085,7 @@ impl<'a> Parser<'a> { // Parse all of the items up to closing or an attribute. let mut attrs = first_item_attrs; - attrs.push_all(&self.parse_outer_attributes()[]); + attrs.push_all(&self.parse_outer_attributes()); let mut items = vec![]; loop { @@ -5108,14 +5105,14 @@ impl<'a> Parser<'a> { while self.token != term { let mut attrs = mem::replace(&mut attrs, vec![]); - attrs.push_all(&self.parse_outer_attributes()[]); + attrs.push_all(&self.parse_outer_attributes()); debug!("parse_mod_items: parse_item_(attrs={:?})", attrs); match self.parse_item_(attrs, true /* macros allowed */) { Ok(item) => items.push(item), Err(_) => { let token_str = self.this_token_to_string(); self.fatal(&format!("expected item, found `{}`", - token_str)[]) + token_str)) } } } @@ -5124,7 +5121,7 @@ impl<'a> Parser<'a> { // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(&attrs[])); + Parser::expected_item_err(&attrs[..])); } ast::Mod { @@ -5203,8 +5200,8 @@ impl<'a> Parser<'a> { let mod_name = mod_string.to_string(); let default_path_str = format!("{}.rs", mod_name); let secondary_path_str = format!("{}/mod.rs", mod_name); - let default_path = dir_path.join(&default_path_str[]); - let secondary_path = dir_path.join(&secondary_path_str[]); + let default_path = dir_path.join(&default_path_str[..]); + let secondary_path = dir_path.join(&secondary_path_str[..]); let default_exists = default_path.exists(); let secondary_exists = secondary_path.exists(); @@ -5219,13 +5216,13 @@ impl<'a> Parser<'a> { &format!("maybe move this module `{0}` \ to its own directory via \ `{0}/mod.rs`", - this_module)[]); + this_module)); if default_exists || secondary_exists { self.span_note(id_sp, &format!("... or maybe `use` the module \ `{}` instead of possibly \ redeclaring it", - mod_name)[]); + mod_name)); } self.abort_if_errors(); } @@ -5236,12 +5233,12 @@ impl<'a> Parser<'a> { (false, false) => { self.span_fatal_help(id_sp, &format!("file not found for module `{}`", - mod_name)[], + mod_name), &format!("name the file either {} or {} inside \ the directory {:?}", default_path_str, secondary_path_str, - dir_path.display())[]); + dir_path.display())); } (true, true) => { self.span_fatal_help( @@ -5250,7 +5247,7 @@ impl<'a> Parser<'a> { and {}", mod_name, default_path_str, - secondary_path_str)[], + secondary_path_str), "delete or rename one of them to remove the ambiguity"); } } @@ -5272,11 +5269,11 @@ impl<'a> Parser<'a> { let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); for p in &included_mod_stack[i.. 
len] { - err.push_str(&p.display().as_cow()[]); + err.push_str(&p.display().as_cow()); err.push_str(" -> "); } - err.push_str(&path.display().as_cow()[]); - self.span_fatal(id_sp, &err[]); + err.push_str(&path.display().as_cow()); + self.span_fatal(id_sp, &err[..]); } None => () } @@ -5381,7 +5378,7 @@ impl<'a> Parser<'a> { self.span_help(span, &format!("perhaps you meant to enclose the crate name `{}` in \ a string?", - the_ident.as_str())[]); + the_ident.as_str())); None } else { None @@ -5407,7 +5404,7 @@ impl<'a> Parser<'a> { self.span_fatal(span, &format!("expected extern crate name but \ found `{}`", - token_str)[]); + token_str)); } }; @@ -5505,7 +5502,7 @@ impl<'a> Parser<'a> { self.span_err(start_span, &format!("unit-like struct variant should be written \ without braces, as `{},`", - token::get_ident(ident))[]); + token::get_ident(ident))); } kind = StructVariantKind(struct_def); } else if self.check(&token::OpenDelim(token::Paren)) { @@ -5583,7 +5580,7 @@ impl<'a> Parser<'a> { &format!("illegal ABI: expected one of [{}], \ found `{}`", abi::all_names().connect(", "), - the_string)[]); + the_string)); None } } @@ -5663,7 +5660,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal(span, &format!("expected `{}` or `fn`, found `{}`", "{", - token_str)[]); + token_str)); } if self.eat_keyword_noexpect(keywords::Virtual) { @@ -5772,7 +5769,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Mod) { // MODULE ITEM let (ident, item_, extra_attrs) = - self.parse_item_mod(&attrs[]); + self.parse_item_mod(&attrs[..]); let last_span = self.last_span; let item = self.mk_item(lo, last_span.hi, @@ -6057,7 +6054,7 @@ impl<'a> Parser<'a> { fn parse_foreign_items(&mut self, first_item_attrs: Vec<Attribute>) -> Vec<P<ForeignItem>> { let mut attrs = first_item_attrs; - attrs.push_all(&self.parse_outer_attributes()[]); + attrs.push_all(&self.parse_outer_attributes()); let mut foreign_items = Vec::new(); loop { match self.parse_foreign_item(attrs) { @@ -6078,7 +6075,7 @@ impl<'a> Parser<'a> { if !attrs.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(&attrs[])); + Parser::expected_item_err(&attrs[..])); } foreign_items diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 0747a97fa37..433c013591c 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -482,7 +482,7 @@ macro_rules! declare_special_idents_and_keywords {( $(init_vec.push($si_str);)* $(init_vec.push($sk_str);)* $(init_vec.push($rk_str);)* - interner::StrInterner::prefill(&init_vec[]) + interner::StrInterner::prefill(&init_vec[..]) } }} @@ -644,7 +644,7 @@ impl BytesContainer for InternedString { // of `BytesContainer`, which is itself a workaround for the lack of // DST. 
unsafe { - let this = &self[]; + let this = &self[..]; mem::transmute::<&[u8],&[u8]>(this.container_as_bytes()) } } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 6c6cf186e70..1593bfb97fe 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -539,8 +539,8 @@ impl Printer { pub fn print(&mut self, token: Token, l: isize) -> old_io::IoResult<()> { debug!("print {} {} (remaining line space={})", tok_str(&token), l, self.space); - debug!("{}", buf_str(&self.token[], - &self.size[], + debug!("{}", buf_str(&self.token, + &self.size, self.left, self.right, 6)); @@ -607,7 +607,7 @@ impl Printer { assert_eq!(l, len); // assert!(l <= space); self.space -= len; - self.print_str(&s[]) + self.print_str(&s[..]) } Token::Eof => { // Eof should never get here. diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 4b021f2434f..f26578e7401 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -134,7 +134,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap, try!(s.print_attribute(&fake_attr)); } - try!(s.print_mod(&krate.module, &krate.attrs[])); + try!(s.print_mod(&krate.module, &krate.attrs)); try!(s.print_remaining_comments()); eof(&mut s.s) } @@ -602,7 +602,7 @@ impl<'a> State<'a> { pub fn synth_comment(&mut self, text: String) -> IoResult<()> { try!(word(&mut self.s, "/*")); try!(space(&mut self.s)); - try!(word(&mut self.s, &text[])); + try!(word(&mut self.s, &text[..])); try!(space(&mut self.s)); word(&mut self.s, "*/") } @@ -701,7 +701,7 @@ impl<'a> State<'a> { } ast::TyTup(ref elts) => { try!(self.popen()); - try!(self.commasep(Inconsistent, &elts[], + try!(self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(&**ty))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -734,10 +734,10 @@ impl<'a> State<'a> { } ast::TyObjectSum(ref ty, ref bounds) => { try!(self.print_type(&**ty)); - try!(self.print_bounds("+", &bounds[])); + try!(self.print_bounds("+", &bounds[..])); } ast::TyPolyTraitRef(ref bounds) => { - try!(self.print_bounds("", &bounds[])); + try!(self.print_bounds("", &bounds[..])); } ast::TyQPath(ref qpath) => { try!(self.print_qpath(&**qpath, false)) @@ -765,7 +765,7 @@ impl<'a> State<'a> { item: &ast::ForeignItem) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(&item.attrs[])); + try!(self.print_outer_attributes(&item.attrs)); match item.node { ast::ForeignItemFn(ref decl, ref generics) => { try!(self.print_fn(&**decl, None, abi::Rust, item.ident, generics, @@ -776,7 +776,7 @@ impl<'a> State<'a> { } ast::ForeignItemStatic(ref t, m) => { try!(self.head(&visibility_qualified(item.vis, - "static")[])); + "static"))); if m { try!(self.word_space("mut")); } @@ -793,7 +793,7 @@ impl<'a> State<'a> { fn print_associated_type(&mut self, typedef: &ast::AssociatedType) -> IoResult<()> { - try!(self.print_outer_attributes(&typedef.attrs[])); + try!(self.print_outer_attributes(&typedef.attrs)); try!(self.word_space("type")); try!(self.print_ty_param(&typedef.ty_param)); word(&mut self.s, ";") @@ -812,12 +812,12 @@ impl<'a> State<'a> { pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(&item.attrs[])); + try!(self.print_outer_attributes(&item.attrs)); try!(self.ann.pre(self, NodeItem(item))); match item.node { ast::ItemExternCrate(ref optional_path) => { 
try!(self.head(&visibility_qualified(item.vis, - "extern crate")[])); + "extern crate"))); if let Some((ref p, style)) = *optional_path { try!(self.print_string(p, style)); try!(space(&mut self.s)); @@ -831,7 +831,7 @@ impl<'a> State<'a> { } ast::ItemUse(ref vp) => { try!(self.head(&visibility_qualified(item.vis, - "use")[])); + "use"))); try!(self.print_view_path(&**vp)); try!(word(&mut self.s, ";")); try!(self.end()); // end inner head-block @@ -839,7 +839,7 @@ impl<'a> State<'a> { } ast::ItemStatic(ref ty, m, ref expr) => { try!(self.head(&visibility_qualified(item.vis, - "static")[])); + "static"))); if m == ast::MutMutable { try!(self.word_space("mut")); } @@ -856,7 +856,7 @@ impl<'a> State<'a> { } ast::ItemConst(ref ty, ref expr) => { try!(self.head(&visibility_qualified(item.vis, - "const")[])); + "const"))); try!(self.print_ident(item.ident)); try!(self.word_space(":")); try!(self.print_type(&**ty)); @@ -879,28 +879,28 @@ impl<'a> State<'a> { item.vis )); try!(word(&mut self.s, " ")); - try!(self.print_block_with_attrs(&**body, &item.attrs[])); + try!(self.print_block_with_attrs(&**body, &item.attrs)); } ast::ItemMod(ref _mod) => { try!(self.head(&visibility_qualified(item.vis, - "mod")[])); + "mod"))); try!(self.print_ident(item.ident)); try!(self.nbsp()); try!(self.bopen()); - try!(self.print_mod(_mod, &item.attrs[])); + try!(self.print_mod(_mod, &item.attrs)); try!(self.bclose(item.span)); } ast::ItemForeignMod(ref nmod) => { try!(self.head("extern")); - try!(self.word_nbsp(&nmod.abi.to_string()[])); + try!(self.word_nbsp(&nmod.abi.to_string())); try!(self.bopen()); - try!(self.print_foreign_mod(nmod, &item.attrs[])); + try!(self.print_foreign_mod(nmod, &item.attrs)); try!(self.bclose(item.span)); } ast::ItemTy(ref ty, ref params) => { try!(self.ibox(indent_unit)); try!(self.ibox(0)); - try!(self.word_nbsp(&visibility_qualified(item.vis, "type")[])); + try!(self.word_nbsp(&visibility_qualified(item.vis, "type"))); try!(self.print_ident(item.ident)); try!(self.print_generics(params)); try!(self.end()); // end the inner ibox @@ -922,7 +922,7 @@ impl<'a> State<'a> { )); } ast::ItemStruct(ref struct_def, ref generics) => { - try!(self.head(&visibility_qualified(item.vis,"struct")[])); + try!(self.head(&visibility_qualified(item.vis,"struct"))); try!(self.print_struct(&**struct_def, generics, item.ident, item.span)); } @@ -963,7 +963,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); try!(self.bopen()); - try!(self.print_inner_attributes(&item.attrs[])); + try!(self.print_inner_attributes(&item.attrs)); for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref meth) => { @@ -983,18 +983,17 @@ impl<'a> State<'a> { try!(self.word_nbsp("trait")); try!(self.print_ident(item.ident)); try!(self.print_generics(generics)); - let bounds: Vec<_> = bounds.iter().map(|b| b.clone()).collect(); let mut real_bounds = Vec::with_capacity(bounds.len()); - for b in bounds { - if let TraitTyParamBound(ref ptr, ast::TraitBoundModifier::Maybe) = b { + for b in bounds.iter() { + if let TraitTyParamBound(ref ptr, ast::TraitBoundModifier::Maybe) = *b { try!(space(&mut self.s)); try!(self.word_space("for ?")); try!(self.print_trait_ref(&ptr.trait_ref)); } else { - real_bounds.push(b); + real_bounds.push(b.clone()); } } - try!(self.print_bounds(":", &real_bounds[])); + try!(self.print_bounds(":", &real_bounds[..])); try!(self.print_where_clause(generics)); try!(word(&mut self.s, " ")); try!(self.bopen()); @@ -1012,7 +1011,7 @@ impl<'a> State<'a> { try!(self.print_ident(item.ident)); 
try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(&tts[])); + try!(self.print_tts(&tts[..])); try!(self.pclose()); try!(word(&mut self.s, ";")); try!(self.end()); @@ -1050,12 +1049,12 @@ impl<'a> State<'a> { generics: &ast::Generics, ident: ast::Ident, span: codemap::Span, visibility: ast::Visibility) -> IoResult<()> { - try!(self.head(&visibility_qualified(visibility, "enum")[])); + try!(self.head(&visibility_qualified(visibility, "enum"))); try!(self.print_ident(ident)); try!(self.print_generics(generics)); try!(self.print_where_clause(generics)); try!(space(&mut self.s)); - self.print_variants(&enum_definition.variants[], span) + self.print_variants(&enum_definition.variants, span) } pub fn print_variants(&mut self, @@ -1065,7 +1064,7 @@ impl<'a> State<'a> { for v in variants { try!(self.space_if_not_bol()); try!(self.maybe_print_comment(v.span.lo)); - try!(self.print_outer_attributes(&v.node.attrs[])); + try!(self.print_outer_attributes(&v.node.attrs)); try!(self.ibox(indent_unit)); try!(self.print_variant(&**v)); try!(word(&mut self.s, ",")); @@ -1093,7 +1092,7 @@ impl<'a> State<'a> { if !struct_def.fields.is_empty() { try!(self.popen()); try!(self.commasep( - Inconsistent, &struct_def.fields[], + Inconsistent, &struct_def.fields, |s, field| { match field.node.kind { ast::NamedField(..) => panic!("unexpected named field"), @@ -1123,7 +1122,7 @@ impl<'a> State<'a> { ast::NamedField(ident, visibility) => { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(field.span.lo)); - try!(self.print_outer_attributes(&field.node.attrs[])); + try!(self.print_outer_attributes(&field.node.attrs)); try!(self.print_visibility(visibility)); try!(self.print_ident(ident)); try!(self.word_nbsp(":")); @@ -1147,7 +1146,7 @@ impl<'a> State<'a> { pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { match *tt { ast::TtToken(_, ref tk) => { - try!(word(&mut self.s, &token_to_string(tk)[])); + try!(word(&mut self.s, &token_to_string(tk))); match *tk { parse::token::DocComment(..) 
=> { hardbreak(&mut self.s) @@ -1156,11 +1155,11 @@ impl<'a> State<'a> { } } ast::TtDelimited(_, ref delimed) => { - try!(word(&mut self.s, &token_to_string(&delimed.open_token())[])); + try!(word(&mut self.s, &token_to_string(&delimed.open_token()))); try!(space(&mut self.s)); - try!(self.print_tts(&delimed.tts[])); + try!(self.print_tts(&delimed.tts)); try!(space(&mut self.s)); - word(&mut self.s, &token_to_string(&delimed.close_token())[]) + word(&mut self.s, &token_to_string(&delimed.close_token())) }, ast::TtSequence(_, ref seq) => { try!(word(&mut self.s, "$(")); @@ -1170,7 +1169,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, ")")); match seq.separator { Some(ref tk) => { - try!(word(&mut self.s, &token_to_string(tk)[])); + try!(word(&mut self.s, &token_to_string(tk))); } None => {}, } @@ -1210,7 +1209,7 @@ impl<'a> State<'a> { if !args.is_empty() { try!(self.popen()); try!(self.commasep(Consistent, - &args[], + &args[..], |s, arg| s.print_type(&*arg.ty))); try!(self.pclose()); } @@ -1234,7 +1233,7 @@ impl<'a> State<'a> { pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(m.span.lo)); - try!(self.print_outer_attributes(&m.attrs[])); + try!(self.print_outer_attributes(&m.attrs)); try!(self.print_ty_fn(m.abi, m.unsafety, &*m.decl, @@ -1263,7 +1262,7 @@ impl<'a> State<'a> { pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(meth.span.lo)); - try!(self.print_outer_attributes(&meth.attrs[])); + try!(self.print_outer_attributes(&meth.attrs)); match meth.node { ast::MethDecl(ident, ref generics, @@ -1281,7 +1280,7 @@ impl<'a> State<'a> { Some(&explicit_self.node), vis)); try!(word(&mut self.s, " ")); - self.print_block_with_attrs(&**body, &meth.attrs[]) + self.print_block_with_attrs(&**body, &meth.attrs) }, ast::MethMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _), ..}) => { @@ -1290,7 +1289,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "! 
")); try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(&tts[])); + try!(self.print_tts(&tts[..])); try!(self.pclose()); try!(word(&mut self.s, ";")); self.end() @@ -1552,7 +1551,7 @@ impl<'a> State<'a> { fn print_expr_vec(&mut self, exprs: &[P<ast::Expr>]) -> IoResult<()> { try!(self.ibox(indent_unit)); try!(word(&mut self.s, "[")); - try!(self.commasep_exprs(Inconsistent, &exprs[])); + try!(self.commasep_exprs(Inconsistent, &exprs[..])); try!(word(&mut self.s, "]")); self.end() } @@ -1578,7 +1577,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "{")); try!(self.commasep_cmnt( Consistent, - &fields[], + &fields[..], |s, field| { try!(s.ibox(indent_unit)); try!(s.print_ident(field.ident.node)); @@ -1607,7 +1606,7 @@ impl<'a> State<'a> { fn print_expr_tup(&mut self, exprs: &[P<ast::Expr>]) -> IoResult<()> { try!(self.popen()); - try!(self.commasep_exprs(Inconsistent, &exprs[])); + try!(self.commasep_exprs(Inconsistent, &exprs[..])); if exprs.len() == 1 { try!(word(&mut self.s, ",")); } @@ -1672,22 +1671,22 @@ impl<'a> State<'a> { try!(self.print_expr_box(place, &**expr)); } ast::ExprVec(ref exprs) => { - try!(self.print_expr_vec(&exprs[])); + try!(self.print_expr_vec(&exprs[..])); } ast::ExprRepeat(ref element, ref count) => { try!(self.print_expr_repeat(&**element, &**count)); } ast::ExprStruct(ref path, ref fields, ref wth) => { - try!(self.print_expr_struct(path, &fields[], wth)); + try!(self.print_expr_struct(path, &fields[..], wth)); } ast::ExprTup(ref exprs) => { - try!(self.print_expr_tup(&exprs[])); + try!(self.print_expr_tup(&exprs[..])); } ast::ExprCall(ref func, ref args) => { - try!(self.print_expr_call(&**func, &args[])); + try!(self.print_expr_call(&**func, &args[..])); } ast::ExprMethodCall(ident, ref tys, ref args) => { - try!(self.print_expr_method_call(ident, &tys[], &args[])); + try!(self.print_expr_method_call(ident, &tys[..], &args[..])); } ast::ExprBinary(op, ref lhs, ref rhs) => { try!(self.print_expr_binary(op, &**lhs, &**rhs)); @@ -1875,11 +1874,11 @@ impl<'a> State<'a> { try!(self.print_string(&a.asm, a.asm_str_style)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, &a.outputs[], + try!(self.commasep(Inconsistent, &a.outputs, |s, &(ref co, ref o, is_rw)| { match co.slice_shift_char() { Some(('=', operand)) if is_rw => { - try!(s.print_string(&format!("+{}", operand)[], + try!(s.print_string(&format!("+{}", operand), ast::CookedStr)) } _ => try!(s.print_string(&co, ast::CookedStr)) @@ -1892,7 +1891,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, &a.inputs[], + try!(self.commasep(Inconsistent, &a.inputs, |s, &(ref co, ref o)| { try!(s.print_string(&co, ast::CookedStr)); try!(s.popen()); @@ -1903,7 +1902,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, &a.clobbers[], + try!(self.commasep(Inconsistent, &a.clobbers, |s, co| { try!(s.print_string(&co, ast::CookedStr)); Ok(()) @@ -1977,7 +1976,7 @@ impl<'a> State<'a> { pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> { if self.encode_idents_with_hygiene { let encoded = ident.encode_with_hygiene(); - try!(word(&mut self.s, &encoded[])) + try!(word(&mut self.s, &encoded[..])) } else { try!(word(&mut self.s, &token::get_ident(ident))) } @@ -1985,7 +1984,7 @@ impl<'a> State<'a> { } pub fn print_usize(&mut self, i: usize) -> IoResult<()> { - word(&mut self.s, &i.to_string()[]) + word(&mut self.s, &i.to_string()) } pub fn print_name(&mut self, 
name: ast::Name) -> IoResult<()> { @@ -2075,7 +2074,7 @@ impl<'a> State<'a> { } try!(self.commasep( Inconsistent, - &data.types[], + &data.types, |s, ty| s.print_type(&**ty))); comma = true; } @@ -2098,7 +2097,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "(")); try!(self.commasep( Inconsistent, - &data.inputs[], + &data.inputs, |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ")")); @@ -2151,7 +2150,7 @@ impl<'a> State<'a> { Some(ref args) => { if !args.is_empty() { try!(self.popen()); - try!(self.commasep(Inconsistent, &args[], + try!(self.commasep(Inconsistent, &args[..], |s, p| s.print_pat(&**p))); try!(self.pclose()); } @@ -2163,7 +2162,7 @@ impl<'a> State<'a> { try!(self.nbsp()); try!(self.word_space("{")); try!(self.commasep_cmnt( - Consistent, &fields[], + Consistent, &fields[..], |s, f| { try!(s.cbox(indent_unit)); if !f.node.is_shorthand { @@ -2184,7 +2183,7 @@ impl<'a> State<'a> { ast::PatTup(ref elts) => { try!(self.popen()); try!(self.commasep(Inconsistent, - &elts[], + &elts[..], |s, p| s.print_pat(&**p))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -2212,7 +2211,7 @@ impl<'a> State<'a> { ast::PatVec(ref before, ref slice, ref after) => { try!(word(&mut self.s, "[")); try!(self.commasep(Inconsistent, - &before[], + &before[..], |s, p| s.print_pat(&**p))); if let Some(ref p) = *slice { if !before.is_empty() { try!(self.word_space(",")); } @@ -2226,7 +2225,7 @@ impl<'a> State<'a> { if !after.is_empty() { try!(self.word_space(",")); } } try!(self.commasep(Inconsistent, - &after[], + &after[..], |s, p| s.print_pat(&**p))); try!(word(&mut self.s, "]")); } @@ -2243,7 +2242,7 @@ impl<'a> State<'a> { } try!(self.cbox(indent_unit)); try!(self.ibox(0)); - try!(self.print_outer_attributes(&arm.attrs[])); + try!(self.print_outer_attributes(&arm.attrs)); let mut first = true; for p in &arm.pats { if first { @@ -2475,7 +2474,7 @@ impl<'a> State<'a> { ints.push(i); } - try!(self.commasep(Inconsistent, &ints[], |s, &idx| { + try!(self.commasep(Inconsistent, &ints[..], |s, &idx| { if idx < generics.lifetimes.len() { let lifetime = &generics.lifetimes[idx]; s.print_lifetime_def(lifetime) @@ -2492,7 +2491,7 @@ impl<'a> State<'a> { pub fn print_ty_param(&mut self, param: &ast::TyParam) -> IoResult<()> { try!(self.print_ident(param.ident)); - try!(self.print_bounds(":", ¶m.bounds[])); + try!(self.print_bounds(":", ¶m.bounds)); match param.default { Some(ref default) => { try!(space(&mut self.s)); @@ -2562,7 +2561,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, &name)); } ast::MetaNameValue(ref name, ref value) => { - try!(self.word_space(&name[])); + try!(self.word_space(&name[..])); try!(self.word_space("=")); try!(self.print_literal(value)); } @@ -2570,7 +2569,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, &name)); try!(self.popen()); try!(self.commasep(Consistent, - &items[], + &items[..], |s, i| s.print_meta_item(&**i))); try!(self.pclose()); } @@ -2606,7 +2605,7 @@ impl<'a> State<'a> { try!(self.print_path(path, false)); try!(word(&mut self.s, "::{")); } - try!(self.commasep(Inconsistent, &idents[], |s, w| { + try!(self.commasep(Inconsistent, &idents[..], |s, w| { match w.node { ast::PathListIdent { name, .. 
} => { s.print_ident(name) @@ -2753,7 +2752,7 @@ impl<'a> State<'a> { try!(self.maybe_print_comment(lit.span.lo)); match self.next_lit(lit.span.lo) { Some(ref ltrl) => { - return word(&mut self.s, &(*ltrl).lit[]); + return word(&mut self.s, &(*ltrl).lit); } _ => () } @@ -2763,33 +2762,33 @@ impl<'a> State<'a> { let mut res = String::from_str("b'"); res.extend(ascii::escape_default(byte).map(|c| c as char)); res.push('\''); - word(&mut self.s, &res[]) + word(&mut self.s, &res[..]) } ast::LitChar(ch) => { let mut res = String::from_str("'"); res.extend(ch.escape_default()); res.push('\''); - word(&mut self.s, &res[]) + word(&mut self.s, &res[..]) } ast::LitInt(i, t) => { match t { ast::SignedIntLit(st, ast::Plus) => { word(&mut self.s, - &ast_util::int_ty_to_string(st, Some(i as i64))[]) + &ast_util::int_ty_to_string(st, Some(i as i64))) } ast::SignedIntLit(st, ast::Minus) => { let istr = ast_util::int_ty_to_string(st, Some(-(i as i64))); word(&mut self.s, - &format!("-{}", istr)[]) + &format!("-{}", istr)) } ast::UnsignedIntLit(ut) => { word(&mut self.s, &ast_util::uint_ty_to_string(ut, Some(i))) } ast::UnsuffixedIntLit(ast::Plus) => { - word(&mut self.s, &format!("{}", i)[]) + word(&mut self.s, &format!("{}", i)) } ast::UnsuffixedIntLit(ast::Minus) => { - word(&mut self.s, &format!("-{}", i)[]) + word(&mut self.s, &format!("-{}", i)) } } } @@ -2798,9 +2797,9 @@ impl<'a> State<'a> { &format!( "{}{}", &f, - &ast_util::float_ty_to_string(t)[])[]) + &ast_util::float_ty_to_string(t))) } - ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, &f[]), + ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, &f[..]), ast::LitBool(val) => { if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") } } @@ -2810,7 +2809,7 @@ impl<'a> State<'a> { escaped.extend(ascii::escape_default(ch as u8) .map(|c| c as char)); } - word(&mut self.s, &format!("b\"{}\"", escaped)[]) + word(&mut self.s, &format!("b\"{}\"", escaped)) } } } @@ -2851,7 +2850,7 @@ impl<'a> State<'a> { comments::Mixed => { assert_eq!(cmnt.lines.len(), 1); try!(zerobreak(&mut self.s)); - try!(word(&mut self.s, &cmnt.lines[0][])); + try!(word(&mut self.s, &cmnt.lines[0])); zerobreak(&mut self.s) } comments::Isolated => { @@ -2860,7 +2859,7 @@ impl<'a> State<'a> { // Don't print empty lines because they will end up as trailing // whitespace if !line.is_empty() { - try!(word(&mut self.s, &line[])); + try!(word(&mut self.s, &line[..])); } try!(hardbreak(&mut self.s)); } @@ -2869,13 +2868,13 @@ impl<'a> State<'a> { comments::Trailing => { try!(word(&mut self.s, " ")); if cmnt.lines.len() == 1 { - try!(word(&mut self.s, &cmnt.lines[0][])); + try!(word(&mut self.s, &cmnt.lines[0])); hardbreak(&mut self.s) } else { try!(self.ibox(0)); for line in &cmnt.lines { if !line.is_empty() { - try!(word(&mut self.s, &line[])); + try!(word(&mut self.s, &line[..])); } try!(hardbreak(&mut self.s)); } @@ -2908,7 +2907,7 @@ impl<'a> State<'a> { string=st)) } }; - word(&mut self.s, &st[]) + word(&mut self.s, &st[..]) } pub fn next_comment(&mut self) -> Option<comments::Comment> { @@ -2939,7 +2938,7 @@ impl<'a> State<'a> { Some(abi::Rust) => Ok(()), Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(&abi.to_string()[]) + self.word_nbsp(&abi.to_string()) } None => Ok(()) } @@ -2950,7 +2949,7 @@ impl<'a> State<'a> { match opt_abi { Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(&abi.to_string()[]) + self.word_nbsp(&abi.to_string()) } None => Ok(()) } @@ -2965,7 +2964,7 @@ impl<'a> State<'a> { if abi != abi::Rust { 
try!(self.word_nbsp("extern")); - try!(self.word_nbsp(&abi.to_string()[])); + try!(self.word_nbsp(&abi.to_string())); } word(&mut self.s, "fn") diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs index 01f3839b039..adb5383a8fd 100644 --- a/src/libsyntax/ptr.rs +++ b/src/libsyntax/ptr.rs @@ -111,11 +111,18 @@ impl<T: Display> Display for P<T> { } } +#[cfg(stage0)] impl<S: Hasher, T: Hash<S>> Hash<S> for P<T> { fn hash(&self, state: &mut S) { (**self).hash(state); } } +#[cfg(not(stage0))] +impl<T: Hash> Hash for P<T> { + fn hash<H: Hasher>(&self, state: &mut H) { + (**self).hash(state); + } +} impl<T: 'static + Decodable> Decodable for P<T> { fn decode<D: Decoder>(d: &mut D) -> Result<P<T>, D::Error> { diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 98c193c7e6b..4e4a571ede7 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -45,16 +45,16 @@ fn no_prelude(attrs: &[ast::Attribute]) -> bool { attr::contains_name(attrs, "no_implicit_prelude") } -struct StandardLibraryInjector<'a> { - alt_std_name: Option<String> +struct StandardLibraryInjector { + alt_std_name: Option<String>, } -impl<'a> fold::Folder for StandardLibraryInjector<'a> { +impl fold::Folder for StandardLibraryInjector { fn fold_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { // The name to use in `extern crate "name" as std;` let actual_crate_name = match self.alt_std_name { - Some(ref s) => token::intern_and_get_ident(&s[]), + Some(ref s) => token::intern_and_get_ident(&s[..]), None => token::intern_and_get_ident("std"), }; @@ -80,9 +80,10 @@ fn inject_crates_ref(krate: ast::Crate, alt_std_name: Option<String>) -> ast::Cr fold.fold_crate(krate) } -struct PreludeInjector<'a>; +struct PreludeInjector; -impl<'a> fold::Folder for PreludeInjector<'a> { + +impl fold::Folder for PreludeInjector { fn fold_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { // only add `use std::prelude::*;` if there wasn't a // `#![no_implicit_prelude]` at the crate level. diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 6511dffa6bf..7b1fc91e45b 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -119,7 +119,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { self.cx.path.push(ident); } debug!("current path: {}", - ast_util::path_name_i(&self.cx.path[])); + ast_util::path_name_i(&self.cx.path)); if is_test_fn(&self.cx, &*i) || is_bench_fn(&self.cx, &*i) { match i.node { @@ -274,8 +274,8 @@ fn strip_test_functions(krate: ast::Crate) -> ast::Crate { // When not compiling with --test we should not compile the // #[test] functions config::strip_items(krate, |attrs| { - !attr::contains_name(&attrs[], "test") && - !attr::contains_name(&attrs[], "bench") + !attr::contains_name(&attrs[..], "test") && + !attr::contains_name(&attrs[..], "bench") }) } @@ -563,7 +563,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> { fn is_test_crate(krate: &ast::Crate) -> bool { match attr::find_crate_name(&krate.attrs[]) { - Some(ref s) if "test" == &s[] => true, + Some(ref s) if "test" == &s[..] 
=> true, _ => false } } @@ -603,11 +603,11 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> { // creates $name: $expr let field = |name, expr| ecx.field_imm(span, ecx.ident_of(name), expr); - debug!("encoding {}", ast_util::path_name_i(&path[])); + debug!("encoding {}", ast_util::path_name_i(&path[..])); // path to the #[test] function: "foo::bar::baz" - let path_string = ast_util::path_name_i(&path[]); - let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[])); + let path_string = ast_util::path_name_i(&path[..]); + let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[..])); // self::test::StaticTestName($name_expr) let name_expr = ecx.expr_call(span, diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 51144267519..dffeac6f3f7 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -14,13 +14,13 @@ use ast::Name; -use std::borrow::BorrowFrom; +use std::borrow::Borrow; use std::cell::RefCell; use std::cmp::Ordering; use std::collections::HashMap; +#[cfg(stage0)] use std::collections::hash_map::Hasher; use std::fmt; use std::hash::Hash; -use std::collections::hash_map::Hasher; use std::ops::Deref; use std::rc::Rc; @@ -30,6 +30,7 @@ pub struct Interner<T> { } // when traits can extend traits, we should extend index<Name,T> to get [] +#[cfg(stage0)] impl<T: Eq + Hash<Hasher> + Clone + 'static> Interner<T> { pub fn new() -> Interner<T> { Interner { @@ -79,7 +80,71 @@ impl<T: Eq + Hash<Hasher> + Clone + 'static> Interner<T> { } pub fn find<Q: ?Sized>(&self, val: &Q) -> Option<Name> - where Q: BorrowFrom<T> + Eq + Hash<Hasher> { + where T: Borrow<Q>, Q: Eq + Hash<Hasher> { + let map = self.map.borrow(); + match (*map).get(val) { + Some(v) => Some(*v), + None => None, + } + } + + pub fn clear(&self) { + *self.map.borrow_mut() = HashMap::new(); + *self.vect.borrow_mut() = Vec::new(); + } +} +// when traits can extend traits, we should extend index<Name,T> to get [] +#[cfg(not(stage0))] +impl<T: Eq + Hash + Clone + 'static> Interner<T> { + pub fn new() -> Interner<T> { + Interner { + map: RefCell::new(HashMap::new()), + vect: RefCell::new(Vec::new()), + } + } + + pub fn prefill(init: &[T]) -> Interner<T> { + let rv = Interner::new(); + for v in init { + rv.intern((*v).clone()); + } + rv + } + + pub fn intern(&self, val: T) -> Name { + let mut map = self.map.borrow_mut(); + match (*map).get(&val) { + Some(&idx) => return idx, + None => (), + } + + let mut vect = self.vect.borrow_mut(); + let new_idx = Name((*vect).len() as u32); + (*map).insert(val.clone(), new_idx); + (*vect).push(val); + new_idx + } + + pub fn gensym(&self, val: T) -> Name { + let mut vect = self.vect.borrow_mut(); + let new_idx = Name((*vect).len() as u32); + // leave out of .map to avoid colliding + (*vect).push(val); + new_idx + } + + pub fn get(&self, idx: Name) -> T { + let vect = self.vect.borrow(); + (*vect)[idx.usize()].clone() + } + + pub fn len(&self) -> usize { + let vect = self.vect.borrow(); + (*vect).len() + } + + pub fn find<Q: ?Sized>(&self, val: &Q) -> Option<Name> + where T: Borrow<Q>, Q: Eq + Hash { let map = self.map.borrow(); match (*map).get(val) { Some(v) => Some(*v), @@ -110,34 +175,34 @@ impl Eq for RcStr {} impl Ord for RcStr { fn cmp(&self, other: &RcStr) -> Ordering { - self[].cmp(&other[]) + self[..].cmp(&other[..]) } } impl fmt::Debug for RcStr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::fmt::Debug; - self[].fmt(f) + self[..].fmt(f) } } impl 
fmt::Display for RcStr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::fmt::Display; - self[].fmt(f) + self[..].fmt(f) } } -impl BorrowFrom<RcStr> for str { - fn borrow_from(owned: &RcStr) -> &str { - &owned.string[] +impl Borrow<str> for RcStr { + fn borrow(&self) -> &str { + &self.string[..] } } impl Deref for RcStr { type Target = str; - fn deref(&self) -> &str { &self.string[] } + fn deref(&self) -> &str { &self.string[..] } } /// A StrInterner differs from Interner<String> in that it accepts @@ -210,8 +275,17 @@ impl StrInterner { self.vect.borrow().len() } + #[cfg(stage0)] + pub fn find<Q: ?Sized>(&self, val: &Q) -> Option<Name> + where RcStr: Borrow<Q>, Q: Eq + Hash<Hasher> { + match (*self.map.borrow()).get(val) { + Some(v) => Some(*v), + None => None, + } + } + #[cfg(not(stage0))] pub fn find<Q: ?Sized>(&self, val: &Q) -> Option<Name> - where Q: BorrowFrom<RcStr> + Eq + Hash<Hasher> { + where RcStr: Borrow<Q>, Q: Eq + Hash { match (*self.map.borrow()).get(val) { Some(v) => Some(*v), None => None, diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs index b2009a7e848..0a39d380904 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -11,7 +11,7 @@ use self::SmallVectorRepr::*; use self::IntoIterRepr::*; -use std::iter::FromIterator; +use std::iter::{IntoIterator, FromIterator}; use std::mem; use std::slice; use std::vec; @@ -30,7 +30,7 @@ enum SmallVectorRepr<T> { } impl<T> FromIterator<T> for SmallVector<T> { - fn from_iter<I: Iterator<Item=T>>(iter: I) -> SmallVector<T> { + fn from_iter<I: IntoIterator<Item=T>>(iter: I) -> SmallVector<T> { let mut v = SmallVector::zero(); v.extend(iter); v @@ -38,7 +38,7 @@ impl<T> FromIterator<T> for SmallVector<T> { } impl<T> Extend<T> for SmallVector<T> { - fn extend<I: Iterator<Item=T>>(&mut self, iter: I) { + fn extend<I: IntoIterator<Item=T>>(&mut self, iter: I) { for val in iter { self.push(val); } |
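The bulk of the parser and pretty-printer changes above are a mechanical migration from the old full-range index form `&expr[]` to the explicit slice form `&expr[..]`, together with `.cloned()` replacing `.map(|x| x.clone())` on iterators of references. The snippet below is an illustrative sketch of the new idioms, not code from the commit:

```rust
fn main() {
    let attrs = vec!["inline", "doc", "test"];

    // Full-range slicing: `&attrs[..]` is the form this commit migrates to.
    // The bare `&attrs[]` form is now reported by the parser as the
    // `EmptyIndex` obsolete syntax.
    let all: &[&str] = &attrs[..];

    // `.cloned()` replaces the older `.map(|x| x.clone())` pattern.
    let copied: Vec<&str> = all.iter().cloned().collect();

    assert_eq!(copied.len(), 3);
}
```

Note that `ObsoleteSyntax::EmptyIndex` is registered with `false` ("warning for now") in obsolete.rs, and `report` now takes an `error: bool` so that it calls `span_warn` rather than `span_err` in that case; existing `expr[]` code therefore keeps compiling while the syntax is phased out.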
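In util/interner.rs the lookup API moves from the removed `BorrowFrom` trait to `std::borrow::Borrow`, with `#[cfg(stage0)]` and `#[cfg(not(stage0))]` copies of the impls kept side by side until the next snapshot. A minimal sketch of what the `Borrow<str>` impl buys, using a hypothetical `Key` type in place of the interner's `RcStr`:

```rust
use std::borrow::Borrow;
use std::collections::HashMap;

// Hypothetical stand-in for the interner's RcStr wrapper.
#[derive(Clone, PartialEq, Eq, Hash)]
struct Key(String);

// With `Borrow<str>` implemented (as the commit now does for RcStr),
// a map keyed by the owned type can be queried with a plain `&str`,
// matching the new `find` bound `where T: Borrow<Q>, Q: Eq + Hash`.
impl Borrow<str> for Key {
    fn borrow(&self) -> &str {
        &self.0[..]
    }
}

fn main() {
    let mut map: HashMap<Key, u32> = HashMap::new();
    map.insert(Key(String::from("fn")), 0);
    assert_eq!(map.get("fn"), Some(&0));
}
```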
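Finally, util/small_vector.rs relaxes the `FromIterator` and `Extend` impls to take `I: IntoIterator<Item=T>` rather than `I: Iterator<Item=T>`. A rough standalone sketch of the difference in calling convention, shown with `Vec` for illustration rather than the crate-internal `SmallVector`:

```rust
fn main() {
    // `collect` via FromIterator.
    let mut v: Vec<u32> = (1..4).collect();

    // With the `IntoIterator` bound, any iterable can be passed to `extend`
    // directly; callers no longer have to produce an `Iterator` themselves.
    v.extend(vec![4, 5]);

    // An explicit iterator still works, of course.
    v.extend([6u32].iter().cloned());

    assert_eq!(v, vec![1, 2, 3, 4, 5, 6]);
}
```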
