20 files changed, 56 insertions, 57 deletions
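The change is mechanical: `TokenKind` and `Token` now derive `Copy`, so call sites can copy tokens (`*token`, `self.token`) instead of calling `.clone()`. A minimal sketch of why that works, using stand-in types rather than the real rustc definitions (the `keep` helper and the `u32` span are illustrative only):

// Stand-in types: the real rustc `Token`/`TokenKind` carry more data,
// but both are small and contain only `Copy` fields.
#[derive(Clone, Copy, PartialEq, Debug)]
enum TokenKind {
    Eq,
    Semi,
    Eof,
}

#[derive(Clone, Copy, PartialEq, Debug)]
struct Token {
    kind: TokenKind,
    span: u32, // rustc's `Span` is itself a small `Copy` type
}

// Hypothetical helper mirroring the pattern in the diff below.
fn keep(token: &Token, out: &mut Vec<Token>) {
    // Before: out.push(token.clone());
    // With `Copy`, dereferencing performs a bitwise copy: no allocation,
    // no destructor, and the borrow of `token` ends immediately.
    out.push(*token);
}

fn main() {
    let t = Token { kind: TokenKind::Eq, span: 0 };
    let mut out = Vec::new();
    keep(&t, &mut out);
    assert_eq!(out[0], t); // `t` is still usable; it was copied, not moved
}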
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index de80bb47aff..9d56cc83ac8 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -333,7 +333,7 @@ impl From<IdentIsRaw> for bool {
     }
 }
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     /// `=`
@@ -471,7 +471,7 @@ pub enum TokenKind {
     Eof,
 }
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index 3b457303cac..8260842fb87 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -482,7 +482,7 @@ impl TokenStream {
                 Delimiter::Invisible(InvisibleOrigin::FlattenToken),
                 TokenStream::token_alone(token::Lifetime(ident.name, is_raw), ident.span),
             ),
-            _ => TokenTree::Token(token.clone(), spacing),
+            _ => TokenTree::Token(*token, spacing),
         }
     }
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
index 30961a2b404..b663e959744 100644
--- a/compiler/rustc_expand/src/mbe/diagnostics.rs
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -160,7 +160,7 @@ impl<'dcx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'dcx, 'match
             .is_none_or(|failure| failure.is_better_position(*approx_position))
         {
             self.best_failure = Some(BestFailure {
-                token: token.clone(),
+                token: *token,
                 position_in_tokenstream: *approx_position,
                 msg,
                 remaining_matcher: self
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index d709fd79281..0065f83eb4e 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -179,7 +179,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
     for tt in tts {
         match tt {
             TokenTree::Token(token) => {
-                locs.push(MatcherLoc::Token { token: token.clone() });
+                locs.push(MatcherLoc::Token { token: *token });
             }
             TokenTree::Delimited(span, _, delimited) => {
                 let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
@@ -648,7 +648,7 @@ impl TtParser {
                 // There are no possible next positions AND we aren't waiting for the black-box
                 // parser: syntax error.
                 return Failure(T::build_failure(
-                    parser.token.clone(),
+                    parser.token,
                     parser.approx_token_stream_pos(),
                     "no rules expected this token in macro call",
                 ));
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 77ec598e62a..7ddd64d81d5 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -778,7 +778,7 @@ impl<'tt> FirstSets<'tt> {
                         // token could be the separator token itself.
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }
                         // Reverse scan: Sequence comes before `first`.
@@ -841,7 +841,7 @@ impl<'tt> FirstSets<'tt> {
                     // If the sequence contents can be empty, then the first
                     // token could be the separator token itself.
                     if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                        first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                        first.add_one_maybe(TtHandle::from_token(*sep));
                     }
                     assert!(first.maybe_empty);
@@ -917,7 +917,7 @@ impl<'tt> Clone for TtHandle<'tt> {
             // This variant *must* contain a `mbe::TokenTree::Token`, and not
            // any other variant of `mbe::TokenTree`.
             TtHandle::Token(mbe::TokenTree::Token(tok)) => {
-                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+                TtHandle::Token(mbe::TokenTree::Token(*tok))
             }
             _ => unreachable!(),
@@ -1093,7 +1093,7 @@ fn check_matcher_core<'tt>(
                 let mut new;
                 let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TtHandle::from_token(sep.clone()));
+                    new.add_one_maybe(TtHandle::from_token(*sep));
                     &new
                 } else {
                     &suffix_first
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 0ea53627fe7..3f037259956 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -283,7 +283,7 @@ fn parse_tree<'a>(
         }
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
+        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token),
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -321,7 +321,7 @@ fn parse_kleene_op(
     match iter.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
             Some(op) => Ok(Ok((op, token.span))),
-            None => Ok(Err(token.clone())),
+            None => Ok(Err(*token)),
         },
         tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
     }
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index 79a5c7ea31b..39186319b1c 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -164,7 +164,7 @@ pub(super) fn transcribe<'a>(
                     if repeat_idx < repeat_len {
                         frame.idx = 0;
                         if let Some(sep) = sep {
-                            result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
+                            result.push(TokenTree::Token(*sep, Spacing::Alone));
                         }
                         continue;
                     }
@@ -438,7 +438,7 @@ pub(super) fn transcribe<'a>(
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             mbe::TokenTree::Token(token) => {
-                let mut token = token.clone();
+                let mut token = *token;
                 mut_visit::visit_token(&mut marker, &mut token);
                 let tt = TokenTree::Token(token, Spacing::Alone);
                 result.push(tt);
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index ff03b42484b..2bfa1ea4e05 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -376,7 +376,7 @@ pub(super) fn check_for_substitution(
             ascii_name,
         })
     };
-    (token.clone(), sugg)
+    (*token, sugg)
 }
 /// Extract string if found at current position with given delimiters
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index cff998fa137..f1bd6a22730 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -120,7 +120,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = iter::once(FlatToken::Token(self.start_token.clone()))
+        let tokens = iter::once(FlatToken::Token(self.start_token))
             .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
             .take(self.num_calls as usize);
@@ -186,7 +186,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
 impl<'a> Parser<'a> {
     pub(super) fn collect_pos(&self) -> CollectPos {
         CollectPos {
-            start_token: (self.token.clone(), self.token_spacing),
+            start_token: (self.token, self.token_spacing),
             cursor_snapshot: self.token_cursor.clone(),
             start_pos: self.num_bump_calls,
         }
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index ef044fe9d63..bb63e91aecf 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -322,7 +322,7 @@ impl<'a> Parser<'a> {
         let mut recovered_ident = None;
         // we take this here so that the correct original token is retained in
         // the diagnostic, regardless of eager recovery.
-        let bad_token = self.token.clone();
+        let bad_token = self.token;
         // suggest prepending a keyword in identifier position with `r#`
         let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
@@ -382,7 +382,7 @@ impl<'a> Parser<'a> {
         // if the previous token is a valid keyword
         // that might use a generic, then suggest a correct
         // generic placement (later on)
-        let maybe_keyword = self.prev_token.clone();
+        let maybe_keyword = self.prev_token;
         if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
             // if we have a valid keyword, attempt to parse generics
             // also obtain the keywords symbol
@@ -530,7 +530,7 @@ impl<'a> Parser<'a> {
             //   let y = 42;
             let guar = self.dcx().emit_err(ExpectedSemi {
                 span: self.token.span,
-                token: self.token.clone(),
+                token: self.token,
                 unexpected_token_label: None,
                 sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
             });
@@ -555,7 +555,7 @@ impl<'a> Parser<'a> {
             let span = self.prev_token.span.shrink_to_hi();
             let guar = self.dcx().emit_err(ExpectedSemi {
                 span,
-                token: self.token.clone(),
+                token: self.token,
                 unexpected_token_label: Some(self.token.span),
                 sugg: ExpectedSemiSugg::AddSemi(span),
             });
@@ -801,7 +801,7 @@ impl<'a> Parser<'a> {
         let span = self.prev_token.span.shrink_to_hi();
         let mut err = self.dcx().create_err(ExpectedSemi {
             span,
-            token: self.token.clone(),
+            token: self.token,
             unexpected_token_label: Some(self.token.span),
             sugg: ExpectedSemiSugg::AddSemi(span),
         });
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 83df18ae286..c4ba62a7a4c 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -344,7 +344,7 @@ impl<'a> Parser<'a> {
     fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
         self.dcx().emit_err(errors::FoundExprWouldBeStmt {
             span: self.token.span,
-            token: self.token.clone(),
+            token: self.token,
             suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
         });
     }
@@ -417,7 +417,7 @@ impl<'a> Parser<'a> {
         cur_op_span: Span,
     ) -> PResult<'a, P<Expr>> {
         let rhs = if self.is_at_start_of_range_notation_rhs() {
-            let maybe_lt = self.token.clone();
+            let maybe_lt = self.token;
             let attrs = self.parse_outer_attributes()?;
             Some(
                 self.parse_expr_assoc_with(Bound::Excluded(prec), attrs)
@@ -611,7 +611,7 @@ impl<'a> Parser<'a> {
     /// Recover on `not expr` in favor of `!expr`.
     fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        let negated_token = self.look_ahead(1, |t| t.clone());
+        let negated_token = self.look_ahead(1, |t| *t);
         let sub_diag = if negated_token.is_numeric_lit() {
             errors::NotAsNegationOperatorSub::SuggestNotBitwise
@@ -1606,7 +1606,7 @@ impl<'a> Parser<'a> {
     }
     fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
-        let maybe_eq_tok = self.prev_token.clone();
+        let maybe_eq_tok = self.prev_token;
         let (qself, path) = if self.eat_lt() {
             let lt_span = self.prev_token.span;
             let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| {
@@ -2067,7 +2067,7 @@ impl<'a> Parser<'a> {
         &mut self,
         mk_lit_char: impl FnOnce(Symbol, Span) -> L,
     ) -> PResult<'a, L> {
-        let token = self.token.clone();
+        let token = self.token;
         let err = |self_: &Self| {
             let msg = format!("unexpected token: {}", super::token_descr(&token));
             self_.dcx().struct_span_err(token.span, msg)
@@ -2368,7 +2368,7 @@ impl<'a> Parser<'a> {
     fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
-        let before = self.prev_token.clone();
+        let before = self.prev_token;
         let binder = if self.check_keyword(exp!(For)) {
             let lo = self.token.span;
             let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
@@ -2400,8 +2400,8 @@ impl<'a> Parser<'a> {
             FnRetTy::Default(_) => {
                 let restrictions =
                     self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
-                let prev = self.prev_token.clone();
-                let token = self.token.clone();
+                let prev = self.prev_token;
+                let token = self.token;
                 let attrs = self.parse_outer_attributes()?;
                 match self.parse_expr_res(restrictions, attrs) {
                     Ok((expr, _)) => expr,
@@ -2648,7 +2648,7 @@ impl<'a> Parser<'a> {
             }
         } else {
             let attrs = self.parse_outer_attributes()?; // For recovery.
-            let maybe_fatarrow = self.token.clone();
+            let maybe_fatarrow = self.token;
             let block = if self.check(exp!(OpenBrace)) {
                 self.parse_block()?
             } else if let Some(block) = recover_block_from_condition(self) {
@@ -3856,7 +3856,7 @@ impl<'a> Parser<'a> {
                 return Err(this.dcx().create_err(errors::ExpectedStructField {
                     span: this.look_ahead(1, |t| t.span),
                     ident_span: this.token.span,
-                    token: this.look_ahead(1, |t| t.clone()),
+                    token: this.look_ahead(1, |t| *t),
                 }));
             }
             let (ident, expr) = if is_shorthand {
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 83c0bceb20a..73dc5c17a20 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1739,8 +1739,7 @@ impl<'a> Parser<'a> {
             self.expect_semi()?;
             body
         } else {
-            let err =
-                errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
+            let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token);
             return Err(self.dcx().create_err(err));
         };
@@ -2310,7 +2309,7 @@ impl<'a> Parser<'a> {
             || self.token.is_keyword(kw::Union))
             && self.look_ahead(1, |t| t.is_ident())
         {
-            let kw_token = self.token.clone();
+            let kw_token = self.token;
             let kw_str = pprust::token_to_string(&kw_token);
             let item = self.parse_item(ForceCollect::No)?;
             let mut item = item.unwrap().span;
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index bb9393b8918..d7525a0d716 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -326,12 +326,12 @@ impl TokenCursor {
         // below can be removed.
         if let Some(tree) = self.curr.curr() {
             match tree {
-                &TokenTree::Token(ref token, spacing) => {
+                &TokenTree::Token(token, spacing) => {
                     debug_assert!(!matches!(
                         token.kind,
                         token::OpenDelim(_) | token::CloseDelim(_)
                     ));
-                    let res = (token.clone(), spacing);
+                    let res = (token, spacing);
                     self.curr.bump();
                     return res;
                 }
@@ -1490,7 +1490,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let prev_spacing = self.token_spacing;
                 self.bump();
-                TokenTree::Token(self.prev_token.clone(), prev_spacing)
+                TokenTree::Token(self.prev_token, prev_spacing)
             }
         }
     }
@@ -1676,7 +1676,7 @@ impl<'a> Parser<'a> {
         dbg_fmt.field("prev_token", &self.prev_token);
         let mut tokens = vec![];
         for i in 0..lookahead {
-            let tok = self.look_ahead(i, |tok| tok.kind.clone());
+            let tok = self.look_ahead(i, |tok| tok.kind);
             let is_eof = tok == TokenKind::Eof;
             tokens.push(tok);
             if is_eof {
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 576de776684..b6e89cd7fa4 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -168,7 +168,7 @@ impl<'a> Parser<'a> {
                 } else {
                     Err(self.dcx().create_err(UnexpectedNonterminal::Ident {
                         span: self.token.span,
-                        token: self.token.clone(),
+                        token: self.token,
                     }))
                 }
             }
@@ -191,7 +191,7 @@ impl<'a> Parser<'a> {
                 } else {
                     Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime {
                         span: self.token.span,
-                        token: self.token.clone(),
+                        token: self.token,
                     }))
                 }
             }
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 9612f71b2af..d5f469f9aa9 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -363,7 +363,7 @@ impl<'a> Parser<'a> {
             self.dcx().emit_err(TrailingVertNotAllowed {
                 span: self.token.span,
                 start: lo,
-                token: self.token.clone(),
+                token: self.token,
                 note_double_vert: matches!(self.token.kind, token::OrOr),
             });
             self.bump();
@@ -1519,8 +1519,8 @@ impl<'a> Parser<'a> {
                 etc = PatFieldsRest::Rest;
                 let mut etc_sp = self.token.span;
                 if first_etc_and_maybe_comma_span.is_none() {
-                    if let Some(comma_tok) = self
-                        .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None })
+                    if let Some(comma_tok) =
+                        self.look_ahead(1, |&t| if t == token::Comma { Some(t) } else { None })
                     {
                         let nw_span = self
                             .psess
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 9c6830c3672..30fb96c6ea9 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -393,8 +393,8 @@ impl<'a> Parser<'a> {
         } else {
             // `(T, U) -> R`
-            let prev_token_before_parsing = self.prev_token.clone();
-            let token_before_parsing = self.token.clone();
+            let prev_token_before_parsing = self.prev_token;
+            let token_before_parsing = self.token;
             let mut snapshot = None;
             if self.may_recover()
                 && prev_token_before_parsing == token::PathSep
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 953bc303012..824e930aa88 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -685,7 +685,7 @@ impl<'a> Parser<'a> {
             return Ok((AttrVec::new(), block));
         }
-        let maybe_ident = self.prev_token.clone();
+        let maybe_ident = self.prev_token;
         self.maybe_recover_unexpected_block_label(loop_header);
         if !self.eat(exp!(OpenBrace)) {
             return self.error_block_no_opening_brace();
         }
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index 49ae6cb9b72..2f958f4d492 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -2554,7 +2554,7 @@ fn look(p: &Parser<'_>, dist: usize, kind: rustc_ast::token::TokenKind) {
     // Do the `assert_eq` outside the closure so that `track_caller` works.
     // (`#![feature(closure_track_caller)]` + `#[track_caller]` on the closure
     // doesn't give the line number in the test below if the assertion fails.)
-    let tok = p.look_ahead(dist, |tok| tok.clone());
+    let tok = p.look_ahead(dist, |tok| *tok);
     assert_eq!(kind, tok.kind);
 }
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 93705da22c4..d0cff42a209 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -547,7 +547,7 @@ impl<'a> Parser<'a> {
             // Recovery
             mutbl = Mutability::Mut;
-            let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing);
+            let (dyn_tok, dyn_tok_sp) = (self.token, self.token_spacing);
             self.bump();
             self.bump_with((dyn_tok, dyn_tok_sp));
         }
@@ -886,7 +886,7 @@ impl<'a> Parser<'a> {
     /// ```
     fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
         let lo = self.token.span;
-        let leading_token = self.prev_token.clone();
+        let leading_token = self.prev_token;
         let has_parens = self.eat(exp!(OpenParen));
         let bound = if self.token.is_lifetime() {
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
index ddf3d2ce96a..1e16aace304 100644
--- a/src/tools/rustfmt/src/macros.rs
+++ b/src/tools/rustfmt/src/macros.rs
@@ -858,18 +858,18 @@ impl MacroArgParser {
         };
         self.result.push(ParsedMacroArg {
-            kind: MacroArgKind::Repeat(delim, inner, another, self.last_tok.clone()),
+            kind: MacroArgKind::Repeat(delim, inner, another, self.last_tok),
         });
         Some(())
     }
-    fn update_buffer(&mut self, t: &Token) {
+    fn update_buffer(&mut self, t: Token) {
         if self.buf.is_empty() {
-            self.start_tok = t.clone();
+            self.start_tok = t;
         } else {
             let needs_space = match next_space(&self.last_tok.kind) {
-                SpaceState::Ident => ident_like(t),
-                SpaceState::Punctuation => !ident_like(t),
+                SpaceState::Ident => ident_like(&t),
+                SpaceState::Punctuation => !ident_like(&t),
                 SpaceState::Always => true,
                 SpaceState::Never => false,
             };
@@ -878,7 +878,7 @@ impl MacroArgParser {
             }
         }
-        self.buf.push_str(&pprust::token_to_string(t));
+        self.buf.push_str(&pprust::token_to_string(&t));
     }
     fn need_space_prefix(&self) -> bool {
@@ -937,7 +937,7 @@ impl MacroArgParser {
             ) if self.is_meta_var => {
                 self.add_meta_variable(&mut iter)?;
             }
-            TokenTree::Token(ref t, _) => self.update_buffer(t),
+            &TokenTree::Token(t, _) => self.update_buffer(t),
             &TokenTree::Delimited(_dspan, _spacing, delimited, ref tts) => {
                 if !self.buf.is_empty() {
                     if next_space(&self.last_tok.kind) == SpaceState::Always {
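The rustfmt hunks above also show the follow-on API change: once the token type is `Copy`, helpers such as `update_buffer` can take it by value rather than by reference. A rough sketch of that signature change, with a made-up `ArgBuffer` type standing in for rustfmt's `MacroArgParser`:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Token {
    text: char, // placeholder payload; the real token is richer
}

struct ArgBuffer {
    buf: String,
    start_tok: Option<Token>,
}

impl ArgBuffer {
    // Before: fn update_buffer(&mut self, t: &Token) { self.start_tok = Some(t.clone()); ... }
    // With a `Copy` token, taking `t` by value is as cheap as passing a reference,
    // and callers no longer need to keep a borrow alive across the call.
    fn update_buffer(&mut self, t: Token) {
        if self.buf.is_empty() {
            self.start_tok = Some(t);
        }
        self.buf.push(t.text);
    }
}

fn main() {
    let mut b = ArgBuffer { buf: String::new(), start_tok: None };
    let t = Token { text: 'x' };
    b.update_buffer(t);
    b.update_buffer(t); // `t` can be reused freely: each call copies it
    assert_eq!(b.buf, "xx");
    assert_eq!(b.start_tok, Some(t));
}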
