| author | Patrick Walton <pcwalton@mimiga.net> | 2013-06-27 17:41:35 -0700 |
|---|---|---|
| committer | Patrick Walton <pcwalton@mimiga.net> | 2013-07-17 14:56:42 -0700 |
| commit | b4e674f6e662bc80f2e7a5a1a9834f2152f08d32 | |
| tree | 1b567620d7ea1641fa58338b8f6e5c68bb324248 /src/libsyntax | |
| parent | 8c082658bed1877d5741f7badceb8efc3015598d | |
librustc: Add a lint mode for unnecessary `copy` and remove a bunch of them.
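The pattern removed throughout this patch is an explicit `copy` of a value whose type is already implicitly copyable, where using the value directly suffices. A minimal before/after sketch, taken from the `ast_util.rs` hunk below and written in the pre-1.0 dialect this tree uses (illustrative only; it will not compile with a modern rustc):

```rust
// Before: redundant `copy` of an implicitly copyable ident,
// which the new lint now flags.
pub fn path_to_ident(p: &Path) -> ident { copy *p.idents.last() }

// After: the value is used directly, no `copy` needed.
pub fn path_to_ident(p: &Path) -> ident {
    *p.idents.last()
}
```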
Diffstat (limited to 'src/libsyntax')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/ast_util.rs | 10 |
| -rw-r--r-- | src/libsyntax/attr.rs | 2 |
| -rw-r--r-- | src/libsyntax/codemap.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/base.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/build.rs | 6 |
| -rw-r--r-- | src/libsyntax/ext/deriving/generic.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/pipes/liveness.rs | 12 |
| -rw-r--r-- | src/libsyntax/ext/pipes/mod.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/pipes/pipec.rs | 19 |
| -rw-r--r-- | src/libsyntax/ext/pipes/proto.rs | 5 |
| -rw-r--r-- | src/libsyntax/ext/trace_macros.rs | 8 |
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 5 |
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 2 |
| -rw-r--r-- | src/libsyntax/fold.rs | 26 |
| -rw-r--r-- | src/libsyntax/parse/lexer.rs | 18 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 8 |
| -rw-r--r-- | src/libsyntax/parse/obsolete.rs | 5 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 65 |
| -rw-r--r-- | src/libsyntax/print/pp.rs | 2 |
| -rw-r--r-- | src/libsyntax/print/pprust.rs | 14 |
| -rw-r--r-- | src/libsyntax/visit.rs | 18 |
21 files changed, 112 insertions, 121 deletions
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index 00e667d7b88..feadf3fdbf3 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -27,7 +27,9 @@ pub fn path_name_i(idents: &[ident]) -> ~str {
     idents.map(|i| token::interner_get(i.name)).connect("::")
 }

-pub fn path_to_ident(p: &Path) -> ident { copy *p.idents.last() }
+pub fn path_to_ident(p: &Path) -> ident {
+    *p.idents.last()
+}

 pub fn local_def(id: node_id) -> def_id {
     ast::def_id { crate: local_crate, node: id }
@@ -297,9 +299,9 @@ pub trait inlined_item_utils {
 impl inlined_item_utils for inlined_item {
     fn ident(&self) -> ident {
         match *self {
-            ii_item(i) => /* FIXME (#2543) */ copy i.ident,
-            ii_foreign(i) => /* FIXME (#2543) */ copy i.ident,
-            ii_method(_, _, m) => /* FIXME (#2543) */ copy m.ident,
+            ii_item(i) => i.ident,
+            ii_foreign(i) => i.ident,
+            ii_method(_, _, m) => m.ident,
         }
     }
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 055d5540f8d..18bef1ea17e 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -269,7 +269,7 @@ pub fn sort_meta_items(items: &[@ast::meta_item]) -> ~[@ast::meta_item] {
                     .. /*bad*/ copy **m
                 }
             }
-            _ => /*bad*/ copy *m
+            _ => *m
         }
     }
 }
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 7e89d040781..dcd9615ffd0 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -354,7 +354,7 @@ impl CodeMap {
     pub fn span_to_filename(&self, sp: span) -> FileName {
         let lo = self.lookup_char_pos(sp.lo);
-        return /* FIXME (#2543) */ copy lo.file.name;
+        lo.file.name
     }

     pub fn span_to_lines(&self, sp: span) -> @FileLines {
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index c9bd2986a42..0e464208de3 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -257,7 +257,7 @@ impl ExtCtxt {
             Some(@ExpnInfo {
                 call_site: span {lo: cs.lo, hi: cs.hi,
                                  expn_info: *self.backtrace},
-                callee: copy *callee});
+                callee: *callee});
         }
     }
 }
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 83fce24bef8..89290b78b72 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -347,7 +347,7 @@ impl AstBuilder for @ExtCtxt {
     fn strip_bounds(&self, generics: &Generics) -> Generics {
         let new_params = do generics.ty_params.map |ty_param| {
-            ast::TyParam { bounds: opt_vec::Empty, ..copy *ty_param }
+            ast::TyParam { bounds: opt_vec::Empty, ..*ty_param }
         };
         Generics {
             ty_params: new_params,
@@ -611,13 +611,13 @@
     }
     fn lambda0(&self, _span: span, blk: ast::blk) -> @ast::expr {
         let ext_cx = *self;
-        let blk_e = self.expr(copy blk.span, ast::expr_block(copy blk));
+        let blk_e = self.expr(blk.span, ast::expr_block(copy blk));
         quote_expr!(|| $blk_e )
     }

     fn lambda1(&self, _span: span, blk: ast::blk, ident: ast::ident)
               -> @ast::expr {
         let ext_cx = *self;
-        let blk_e = self.expr(copy blk.span, ast::expr_block(copy blk));
+        let blk_e = self.expr(blk.span, ast::expr_block(copy blk));
         quote_expr!(|$ident| $blk_e )
     }
diff --git a/src/libsyntax/ext/deriving/generic.rs b/src/libsyntax/ext/deriving/generic.rs
index 3bc16477c80..f90ee1f8d79 100644
--- a/src/libsyntax/ext/deriving/generic.rs
+++ b/src/libsyntax/ext/deriving/generic.rs
@@ -323,7 +323,7 @@ impl<'self> TraitDef<'self> {
         let mut trait_generics = self.generics.to_generics(cx, span, type_ident, generics);
         // Copy the lifetimes
         for generics.lifetimes.iter().advance |l| {
-            trait_generics.lifetimes.push(copy *l)
+            trait_generics.lifetimes.push(*l)
         };
         // Create the type parameters.
         for generics.ty_params.iter().advance |ty_param| {
diff --git a/src/libsyntax/ext/pipes/liveness.rs b/src/libsyntax/ext/pipes/liveness.rs
index b1f98d78fb3..6a8f3c89a2f 100644
--- a/src/libsyntax/ext/pipes/liveness.rs
+++ b/src/libsyntax/ext/pipes/liveness.rs
@@ -45,7 +45,7 @@ use extra::bitv::Bitv;
 pub fn analyze(proto: @mut protocol_, _cx: @ExtCtxt) {
     debug!("initializing colive analysis");
     let num_states = proto.num_states();
-    let mut colive: ~[~Bitv] = do (copy proto.states).iter().transform() |state| {
+    let mut colive: ~[~Bitv] = do proto.states.iter().transform() |state| {
         let mut bv = ~Bitv::new(num_states, false);
         for state.reachable |s| {
             bv.set(s.id, true);
@@ -85,10 +85,11 @@ pub fn analyze(proto: @mut protocol_, _cx: @ExtCtxt) {
     }

     if self_live.len() > 0 {
-        let states = self_live.map(|s| copy s.name).connect(" ");
+        let states = self_live.map(|s| s.name).connect(" ");

         debug!("protocol %s is unbounded due to loops involving: %s",
-               copy proto.name, states);
+               proto.name,
+               states);

         // Someday this will be configurable with a warning
         //cx.span_warn(empty_span(),
@@ -98,9 +99,8 @@ pub fn analyze(proto: @mut protocol_, _cx: @ExtCtxt) {
         //                      states));

         proto.bounded = Some(false);
-    }
-    else {
-        debug!("protocol %s is bounded. yay!", copy proto.name);
+    } else {
+        debug!("protocol %s is bounded. yay!", proto.name);
         proto.bounded = Some(true);
     }
 }
diff --git a/src/libsyntax/ext/pipes/mod.rs b/src/libsyntax/ext/pipes/mod.rs
index 165d3c39c6b..73c6c6d5fff 100644
--- a/src/libsyntax/ext/pipes/mod.rs
+++ b/src/libsyntax/ext/pipes/mod.rs
@@ -65,7 +65,7 @@ pub fn expand_proto(cx: @ExtCtxt, _sp: span, id: ast::ident,
                     tt: ~[ast::token_tree]) -> base::MacResult {
     let sess = cx.parse_sess();
     let cfg = cx.cfg();
-    let tt_rdr = new_tt_reader(copy cx.parse_sess().span_diagnostic,
+    let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
                                None,
                                copy tt);
     let rdr = tt_rdr as @reader;
diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs
index 478c0861990..e5581cada37 100644
--- a/src/libsyntax/ext/pipes/pipec.rs
+++ b/src/libsyntax/ext/pipes/pipec.rs
@@ -382,7 +382,7 @@ impl gen_init for protocol {
         cx.ty_path(path(~[cx.ident_of("super"),
                           cx.ident_of("__Buffer")],
-                        copy self.span)
+                        self.span)
                    .add_tys(cx.ty_vars_global(&params)), None)
     }

@@ -432,7 +432,7 @@ impl gen_init for protocol {
         let mut client_states = ~[];
         let mut server_states = ~[];

-        for (copy self.states).iter().advance |s| {
+        for self.states.iter().advance |s| {
             items.push_all_move(s.to_type_decls(cx));

             client_states.push_all_move(s.to_endpoint_decls(cx, send));
@@ -443,11 +443,11 @@ impl gen_init for protocol {
             items.push(self.gen_buffer_type(cx))
         }

-        items.push(cx.item_mod(copy self.span,
+        items.push(cx.item_mod(self.span,
                                cx.ident_of("client"),
                                ~[], ~[],
                                client_states));
-        items.push(cx.item_mod(copy self.span,
+        items.push(cx.item_mod(self.span,
                                cx.ident_of("server"),
                                ~[], ~[],
                                server_states));
@@ -455,12 +455,11 @@ impl gen_init for protocol {
         // XXX: Would be nice if our generated code didn't violate
         // Rust coding conventions
         let allows = cx.attribute(
-            copy self.span,
-            cx.meta_list(copy self.span,
+            self.span,
+            cx.meta_list(self.span,
                          @"allow",
-                         ~[cx.meta_word(copy self.span, @"non_camel_case_types"),
-                           cx.meta_word(copy self.span, @"unused_mut")]));
-        cx.item_mod(copy self.span, cx.ident_of(copy self.name),
-                    ~[allows], ~[], items)
+                         ~[cx.meta_word(self.span, @"non_camel_case_types"),
+                           cx.meta_word(self.span, @"unused_mut")]));
+        cx.item_mod(self.span, cx.ident_of(self.name), ~[allows], ~[], items)
     }
 }
diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs
index 5866b8a5af5..92e1b2bd09f 100644
--- a/src/libsyntax/ext/pipes/proto.rs
+++ b/src/libsyntax/ext/pipes/proto.rs
@@ -214,9 +214,8 @@ pub trait visitor<Tproto, Tstate, Tmessage> {
 pub fn visit<Tproto, Tstate, Tmessage, V: visitor<Tproto, Tstate, Tmessage>>(
     proto: protocol, visitor: V) -> Tproto {

-    // the copy keywords prevent recursive use of dvec
-    let states: ~[Tstate] = do (copy proto.states).iter().transform |&s| {
-        let messages: ~[Tmessage] = do (copy s.messages).iter().transform |m| {
+    let states: ~[Tstate] = do proto.states.iter().transform |&s| {
+        let messages: ~[Tmessage] = do s.messages.iter().transform |m| {
             let message(name, span, tys, this, next) = copy *m;
             visitor.visit_message(name, span, tys, this, next)
         }.collect();
diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs
index ba3b8f22e69..5c6032785e3 100644
--- a/src/libsyntax/ext/trace_macros.rs
+++ b/src/libsyntax/ext/trace_macros.rs
@@ -22,11 +22,9 @@ pub fn expand_trace_macros(cx: @ExtCtxt,
                            -> base::MacResult {
     let sess = cx.parse_sess();
     let cfg = cx.cfg();
-    let tt_rdr = new_tt_reader(
-        copy cx.parse_sess().span_diagnostic,
-        None,
-        tt.to_owned()
-    );
+    let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
+                               None,
+                               tt.to_owned());
     let rdr = tt_rdr as @reader;
     let rust_parser = Parser(
         sess,
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index a2e3d7bfeca..6b3ce1c9a2f 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -53,8 +53,9 @@ pub fn add_new_extension(cx: @ExtCtxt,

     // Parse the macro_rules! invocation (`none` is for no interpolations):
-    let arg_reader = new_tt_reader(copy cx.parse_sess().span_diagnostic,
-                                   None, copy arg);
+    let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,
+                                   None,
+                                   copy arg);
     let argument_map = parse_or_else(cx.parse_sess(),
                                      cx.cfg(),
                                      arg_reader as @reader,
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 92feaa154fe..94ecff178ea 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -285,7 +285,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
           }
           matched_seq(*) => {
             r.sp_diag.span_fatal(
-                copy r.cur_span, /* blame the macro writer */
+                r.cur_span, /* blame the macro writer */
                 fmt!("variable '%s' is still repeating at this depth",
                      ident_to_str(&ident)));
           }
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 059637ca736..11c29c73a2b 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -87,7 +87,7 @@ fn fold_meta_item_(mi: @meta_item, fld: @ast_fold) -> @meta_item {
                 )
             }
             meta_name_value(id, s) => {
-                meta_name_value(id, /* FIXME (#2543) */ copy s)
+                meta_name_value(id, s)
             }
         },
         span: fld.new_span(mi.span) }
@@ -258,11 +258,15 @@ fn noop_fold_struct_field(sf: @struct_field, fld: @ast_fold) -> @struct_field {

     let fold_attribute = |x| fold_attribute_(x, fld);

-    @spanned { node: ast::struct_field_ { kind: copy sf.node.kind,
-                                          id: sf.node.id,
-                                          ty: fld.fold_ty(&sf.node.ty),
-                                          attrs: sf.node.attrs.map(|e| fold_attribute(*e)) },
-               span: sf.span }
+    @spanned {
+        node: ast::struct_field_ {
+            kind: sf.node.kind,
+            id: sf.node.id,
+            ty: fld.fold_ty(&sf.node.ty),
+            attrs: sf.node.attrs.map(|e| fold_attribute(*e))
+        },
+        span: sf.span
+    }
 }

 pub fn noop_fold_item_underscore(i: &item_, fld: @ast_fold) -> item_ {
@@ -346,7 +350,7 @@ fn fold_trait_ref(p: &trait_ref, fld: @ast_fold) -> trait_ref {
 fn fold_struct_field(f: @struct_field, fld: @ast_fold) -> @struct_field {
     @spanned {
         node: ast::struct_field_ {
-            kind: copy f.node.kind,
+            kind: f.node.kind,
             id: fld.new_id(f.node.id),
             ty: fld.fold_ty(&f.node.ty),
             attrs: /* FIXME (#2543) */ copy f.node.attrs,
@@ -439,7 +443,7 @@ pub fn noop_fold_pat(p: &pat_, fld: @ast_fold) -> pat_ {
             let pth_ = fld.fold_path(pth);
             let fs = do fields.map |f| {
                 ast::field_pat {
-                    ident: /* FIXME (#2543) */ copy f.ident,
+                    ident: f.ident,
                     pat: fld.fold_pat(f.pat)
                 }
             };
@@ -743,7 +747,7 @@ fn noop_fold_variant(v: &variant_, fld: @ast_fold) -> variant_ {
         None => None
     };
     ast::variant_ {
-        name: /* FIXME (#2543) */ copy v.name,
+        name: v.name,
         attrs: attrs,
         kind: kind,
         id: fld.new_id(v.id),
@@ -753,7 +757,7 @@ fn noop_fold_variant(v: &variant_, fld: @ast_fold) -> variant_ {
 }

 fn noop_fold_ident(i: ident, _fld: @ast_fold) -> ident {
-    /* FIXME (#2543) */ copy i
+    i
 }

 fn noop_fold_path(p: &Path, fld: @ast_fold) -> Path {
@@ -837,7 +841,7 @@ impl ast_fold for AstFoldFns {
     fn fold_struct_field(@self, sf: @struct_field) -> @struct_field {
         @spanned {
             node: ast::struct_field_ {
-                kind: copy sf.node.kind,
+                kind: sf.node.kind,
                 id: sf.node.id,
                 ty: self.fold_ty(&sf.node.ty),
                 attrs: copy sf.node.attrs,
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 4a872832952..22e0a150a19 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -94,7 +94,7 @@ fn dup_string_reader(r: @mut StringReader) -> @mut StringReader {
         curr: r.curr,
         filemap: r.filemap,
         peek_tok: copy r.peek_tok,
-        peek_span: copy r.peek_span
+        peek_span: r.peek_span
     }
 }

@@ -103,20 +103,20 @@ impl reader for StringReader {
     // return the next token. EFFECT: advances the string_reader.
     fn next_token(@mut self) -> TokenAndSpan {
         let ret_val = TokenAndSpan {
-            tok: copy self.peek_tok,
-            sp: copy self.peek_span,
+            tok: /*bad*/copy self.peek_tok,
+            sp: self.peek_span,
         };
         string_advance_token(self);
         ret_val
     }
     fn fatal(@mut self, m: ~str) -> ! {
-        self.span_diagnostic.span_fatal(copy self.peek_span, m)
+        self.span_diagnostic.span_fatal(self.peek_span, m)
     }
     fn span_diag(@mut self) -> @span_handler { self.span_diagnostic }
     fn peek(@mut self) -> TokenAndSpan {
         TokenAndSpan {
-            tok: copy self.peek_tok,
-            sp: copy self.peek_span,
+            tok: /*bad*/copy self.peek_tok,
+            sp: self.peek_span,
         }
     }
     fn dup(@mut self) -> @reader { dup_string_reader(self) as @reader }
@@ -126,13 +126,13 @@ impl reader for TtReader {
     fn is_eof(@mut self) -> bool { self.cur_tok == token::EOF }
     fn next_token(@mut self) -> TokenAndSpan { tt_next_token(self) }
     fn fatal(@mut self, m: ~str) -> ! {
-        self.sp_diag.span_fatal(copy self.cur_span, m);
+        self.sp_diag.span_fatal(self.cur_span, m);
     }
     fn span_diag(@mut self) -> @span_handler { self.sp_diag }
     fn peek(@mut self) -> TokenAndSpan {
         TokenAndSpan {
             tok: copy self.cur_tok,
-            sp: copy self.cur_span,
+            sp: self.cur_span,
         }
     }
     fn dup(@mut self) -> @reader { dup_tt_reader(self) as @reader }
@@ -144,7 +144,7 @@ fn string_advance_token(r: @mut StringReader) {
     match (consume_whitespace_and_comments(r)) {
         Some(comment) => {
             r.peek_tok = copy comment.tok;
-            r.peek_span = copy comment.sp;
+            r.peek_span = comment.sp;
         },
         None => {
             if is_eof(r) {
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 40f1964bc9c..84cc49192ed 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -307,7 +307,7 @@ pub fn filemap_to_tts(sess: @mut ParseSess, filemap: @FileMap)
     // it appears to me that the cfg doesn't matter here... indeed,
     // parsing tt's probably shouldn't require a parser at all.
     let cfg = ~[];
-    let srdr = lexer::new_string_reader(copy sess.span_diagnostic, filemap);
+    let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap);
     let p1 = Parser(sess, cfg, srdr as @reader);
     p1.parse_all_token_trees()
 }
@@ -316,11 +316,7 @@
 pub fn tts_to_parser(sess: @mut ParseSess,
                      tts: ~[ast::token_tree],
                      cfg: ast::crate_cfg) -> Parser {
-    let trdr = lexer::new_tt_reader(
-        copy sess.span_diagnostic,
-        None,
-        tts
-    );
+    let trdr = lexer::new_tt_reader(sess.span_diagnostic, None, tts);
     Parser(sess, cfg, trdr as @reader)
 }
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index fff4c125af6..01ed6531273 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -329,8 +329,9 @@ impl ParserObsoleteMethods for Parser {
     pub fn try_parse_obsolete_priv_section(&self, attrs: &[attribute])
                                            -> bool {
-        if self.is_keyword(keywords::Priv) && self.look_ahead(1) == token::LBRACE {
-            self.obsolete(copy *self.span, ObsoletePrivSection);
+        if self.is_keyword(keywords::Priv) && self.look_ahead(1) ==
+                token::LBRACE {
+            self.obsolete(*self.span, ObsoletePrivSection);
             self.eat_keyword(keywords::Priv);
             self.bump();
             while *self.token != token::RBRACE {
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 9e9071d6b8c..497000a6cbf 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -236,8 +236,8 @@ pub fn Parser(sess: @mut ParseSess,
         sess: sess,
         cfg: cfg,
         token: @mut copy tok0.tok,
-        span: @mut copy tok0.sp,
-        last_span: @mut copy tok0.sp,
+        span: @mut tok0.sp,
+        last_span: @mut tok0.sp,
         buffer: @mut ([copy tok0, .. 4]),
         buffer_start: @mut 0,
         buffer_end: @mut 0,
@@ -530,7 +530,7 @@ impl Parser {
     // advance the parser by one token
    pub fn bump(&self) {
-        *self.last_span = copy *self.span;
+        *self.last_span = *self.span;
         let next = if *self.buffer_start == *self.buffer_end {
             self.reader.next_token()
         } else {
@@ -538,8 +538,8 @@ impl Parser {
             *self.buffer_start = (*self.buffer_start + 1) & 3;
             next
         };
-        *self.token = copy next.tok;
-        *self.span = copy next.sp;
+        *self.token = /*bad*/copy next.tok;
+        *self.span = next.sp;
         *self.tokens_consumed += 1u;
     }
     // EFFECT: replace the current token and span with the given one
@@ -565,7 +565,7 @@ impl Parser {
         return copy self.buffer[(*self.buffer_start + dist - 1) & 3].tok;
     }
     pub fn fatal(&self, m: &str) -> ! {
-        self.sess.span_diagnostic.span_fatal(*copy self.span, m)
+        self.sess.span_diagnostic.span_fatal(*self.span, m)
     }
     pub fn span_fatal(&self, sp: span, m: &str) -> ! {
         self.sess.span_diagnostic.span_fatal(sp, m)
@@ -574,10 +574,10 @@ impl Parser {
         self.sess.span_diagnostic.span_note(sp, m)
     }
     pub fn bug(&self, m: &str) -> ! {
-        self.sess.span_diagnostic.span_bug(*copy self.span, m)
+        self.sess.span_diagnostic.span_bug(*self.span, m)
     }
     pub fn warn(&self, m: &str) {
-        self.sess.span_diagnostic.span_warn(*copy self.span, m)
+        self.sess.span_diagnostic.span_warn(*self.span, m)
     }
     pub fn span_err(&self, sp: span, m: &str) {
         self.sess.span_diagnostic.span_err(sp, m)
@@ -608,7 +608,7 @@ impl Parser {
     pub fn get_lifetime(&self, tok: &token::Token) -> ast::ident {
         match *tok {
-            token::LIFETIME(ref ident) => copy *ident,
+            token::LIFETIME(ref ident) => *ident,
             _ => self.bug("not a lifetime"),
         }
     }
@@ -1259,7 +1259,7 @@ impl Parser {
             self.obsolete(*self.last_span, ObsoleteLifetimeNotation);
             match *self.token {
                 token::IDENT(sid, _) => {
-                    let span = copy self.span;
+                    let span = self.span;
                     self.bump();
                     Some(ast::Lifetime {
                         id: self.get_id(),
@@ -1347,7 +1347,7 @@ impl Parser {
     pub fn parse_lifetime(&self) -> ast::Lifetime {
         match *self.token {
             token::LIFETIME(i) => {
-                let span = copy self.span;
+                let span = self.span;
                 self.bump();
                 return ast::Lifetime {
                     id: self.get_id(),
@@ -1358,7 +1358,7 @@ impl Parser {

             // Also accept the (obsolete) syntax `foo/`
             token::IDENT(i, _) => {
-                let span = copy self.span;
+                let span = self.span;
                 self.bump();
                 self.expect(&token::BINOP(token::SLASH));
                 self.obsolete(*self.last_span, ObsoleteLifetimeNotation);
@@ -2981,7 +2981,7 @@ impl Parser {
         let lo = self.span.lo;

         if self.eat_keyword(keywords::Unsafe) {
-            self.obsolete(copy *self.span, ObsoleteUnsafeBlock);
+            self.obsolete(*self.span, ObsoleteUnsafeBlock);
         }

         self.expect(&token::LBRACE);
@@ -2996,7 +2996,7 @@ impl Parser {
         let lo = self.span.lo;

         if self.eat_keyword(keywords::Unsafe) {
-            self.obsolete(copy *self.span, ObsoleteUnsafeBlock);
+            self.obsolete(*self.span, ObsoleteUnsafeBlock);
         }
         self.expect(&token::LBRACE);
         let (inner, next) = self.parse_inner_attrs_and_next();
@@ -3581,7 +3581,7 @@ impl Parser {
             ty = self.parse_ty(false);
             opt_trait_ref
         } else if self.eat(&token::COLON) {
-            self.obsolete(copy *self.span, ObsoleteImplSyntax);
+            self.obsolete(*self.span, ObsoleteImplSyntax);
             Some(self.parse_trait_ref())
         } else {
             None
@@ -3626,7 +3626,7 @@ impl Parser {
         self.parse_region_param();
         let generics = self.parse_generics();
         if self.eat(&token::COLON) {
-            self.obsolete(copy *self.span, ObsoleteClassTraits);
+            self.obsolete(*self.span, ObsoleteClassTraits);
             let _ = self.parse_trait_ref_list(&token::LBRACE);
         }

@@ -3710,7 +3710,7 @@ impl Parser {
         let a_var = self.parse_name_and_ty(vis, attrs);
         match *self.token {
             token::SEMI => {
-                self.obsolete(copy *self.span, ObsoleteFieldTerminator);
+                self.obsolete(*self.span, ObsoleteFieldTerminator);
                 self.bump();
             }
             token::COMMA => {
@@ -3718,13 +3718,9 @@ impl Parser {
             }
             token::RBRACE => {}
             _ => {
-                self.span_fatal(
-                    copy *self.span,
-                    fmt!(
-                        "expected `,`, or '}' but found `%s`",
-                        self.this_token_to_str()
-                    )
-                );
+                self.span_fatal(*self.span,
+                                fmt!("expected `,`, or '}' but found `%s`",
+                                     self.this_token_to_str()));
             }
         }
         a_var
@@ -4043,26 +4039,19 @@ impl Parser {
             must_be_named_mod = true;
             self.expect_keyword(keywords::Mod);
         } else if *self.token != token::LBRACE {
-            self.span_fatal(
-                copy *self.span,
-                fmt!(
-                    "expected `{` or `mod` but found `%s`",
-                    self.this_token_to_str()
-                )
-            );
+            self.span_fatal(*self.span,
+                            fmt!("expected `{` or `mod` but found `%s`",
+                                 self.this_token_to_str()));
         }

         let (sort, ident) = match *self.token {
             token::IDENT(*) => (ast::named, self.parse_ident()),
             _ => {
                 if must_be_named_mod {
-                    self.span_fatal(
-                        copy *self.span,
-                        fmt!(
-                            "expected foreign module name but found `%s`",
-                            self.this_token_to_str()
-                        )
-                    );
+                    self.span_fatal(*self.span,
+                                    fmt!("expected foreign module name but \
+                                          found `%s`",
+                                         self.this_token_to_str()));
                 }

                 (ast::anonymous,
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 7cd3faf9a90..8889fe91cc5 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -424,7 +424,7 @@ impl Printer {
     pub fn check_stack(&mut self, k: int) {
         if !self.scan_stack_empty {
             let x = self.scan_top();
-            match copy self.token[x] {
+            match self.token[x] {
                 BEGIN(_) => {
                     if k > 0 {
                         self.size[self.scan_pop()] = self.size[x] +
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 67d5f4aa845..5b4a6d15a12 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -803,9 +803,17 @@ pub fn print_ty_method(s: @ps, m: &ast::ty_method) {
     hardbreak_if_not_bol(s);
     maybe_print_comment(s, m.span.lo);
     print_outer_attributes(s, m.attrs);
-    print_ty_fn(s, None, None, &None, m.purity, ast::Many,
-                &m.decl, Some(m.ident), &None, Some(&m.generics),
-                Some(/*bad*/ copy m.explicit_self.node));
+    print_ty_fn(s,
+                None,
+                None,
+                &None,
+                m.purity,
+                ast::Many,
+                &m.decl,
+                Some(m.ident),
+                &None,
+                Some(&m.generics),
+                Some(m.explicit_self.node));
     word(s.s, ";");
 }
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 09daa2e648a..5c5d154a445 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -361,18 +361,12 @@ pub fn visit_fn_decl<E: Copy>(fd: &fn_decl, (e, v): (E, vt<E>)) {
 // because it is not a default impl of any method, though I doubt that really
 // clarifies anything. - Niko
 pub fn visit_method_helper<E: Copy>(m: &method, (e, v): (E, vt<E>)) {
-    (v.visit_fn)(
-        &fk_method(
-            /* FIXME (#2543) */ copy m.ident,
-            &m.generics,
-            m
-        ),
-        &m.decl,
-        &m.body,
-        m.span,
-        m.id,
-        (e, v)
-    );
+    (v.visit_fn)(&fk_method(m.ident, &m.generics, m),
+                 &m.decl,
+                 &m.body,
+                 m.span,
+                 m.id,
+                 (e, v));
 }

 pub fn visit_fn<E: Copy>(fk: &fn_kind, decl: &fn_decl, body: &blk, _sp: span,
