Diffstat (limited to 'src')
-rw-r--r--  src/libsyntax/attr/mod.rs               | 10
-rw-r--r--  src/libsyntax/ext/base.rs               |  5
-rw-r--r--  src/libsyntax/ext/expand.rs             |  4
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs     | 10
-rw-r--r--  src/libsyntax/ext/tt/quoted.rs          | 10
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs      |  8
-rw-r--r--  src/libsyntax/parse/attr.rs             |  2
-rw-r--r--  src/libsyntax/parse/literal.rs          |  2
-rw-r--r--  src/libsyntax/parse/mod.rs              | 14
-rw-r--r--  src/libsyntax/parse/parser.rs           | 14
-rw-r--r--  src/libsyntax/parse/token.rs            |  8
-rw-r--r--  src/libsyntax/tokenstream.rs            | 18
-rw-r--r--  src/libsyntax_ext/assert.rs             |  2
-rw-r--r--  src/libsyntax_ext/deriving/custom.rs    |  2
-rw-r--r--  src/libsyntax_ext/proc_macro_server.rs  | 16
15 files changed, 63 insertions(+), 62 deletions(-)
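Every hunk below makes the same mechanical change: the argument order of
TokenTree::token (the public constructor in tokenstream.rs and the crate-local
one in ext/tt/quoted.rs) is swapped from (span, kind) to (kind, span), so call
sites now read in the same order as Token::new(kind, span), which the
constructor forwards to. A minimal sketch of the old and new call shapes; the
token::Comma / DUMMY_SP pairing is illustrative, not taken from any one hunk:

    // before this diff: span first, then the token kind
    let tt = TokenTree::token(DUMMY_SP, token::Comma);

    // after this diff: kind first, then the span, mirroring Token::new(kind, span)
    let tt = TokenTree::token(token::Comma, DUMMY_SP);
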
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index cc16bac320d..8c9bed57bfd 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -465,10 +465,10 @@ impl MetaItem {
                 let mod_sep_span = Span::new(last_pos,
                                              segment.ident.span.lo(),
                                              segment.ident.span.ctxt());
-                idents.push(TokenTree::token(mod_sep_span, token::ModSep).into());
+                idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
             }
-            idents.push(TokenTree::token(segment.ident.span,
-                                         TokenKind::from_ast_ident(segment.ident)).into());
+            idents.push(TokenTree::token(TokenKind::from_ast_ident(segment.ident),
+                                         segment.ident.span).into());
             last_pos = segment.ident.span.hi();
         }
         self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
@@ -532,7 +532,7 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::token(span, token::Eq).into()];
+                let mut vec = vec![TokenTree::token(token::Eq, span).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }
@@ -540,7 +540,7 @@ impl MetaItemKind {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::token(span, token::Comma).into());
+                        tokens.push(TokenTree::token(token::Comma, span).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 3b24837e365..61c736662c7 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -268,8 +268,9 @@ impl<F> TTMacroExpander for F
                 if let tokenstream::TokenTree::Token(token) = tt {
                     if let token::Interpolated(nt) = &token.kind {
                         if let token::NtIdent(ident, is_raw) = **nt {
-                            *tt = tokenstream::TokenTree::token(ident.span,
-                                                                token::Ident(ident.name, is_raw));
+                            *tt = tokenstream::TokenTree::token(
+                                token::Ident(ident.name, is_raw), ident.span
+                            );
                         }
                     }
                 }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 4396b9be9bb..7cd847eac46 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -585,14 +585,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
+                let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
                     Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
                     Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
                     Annotatable::Expr(expr) => token::NtExpr(expr),
-                }))).into();
+                })), DUMMY_SP).into();
                 let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
                 let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
                 let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 77f53c35b0b..d25339a78f4 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -270,7 +270,7 @@ pub fn compile(
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
             ],
             separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +279,7 @@ pub fn compile(
         })),
         // to phase into semicolon-termination instead of semicolon-separation
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
+            tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -613,7 +613,7 @@ impl FirstSets {
 
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -663,7 +663,7 @@ impl FirstSets {
 
                             if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                             subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
+                                first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
                             }
 
                             assert!(first.maybe_empty);
@@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
+                    new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
                     &new
                 } else {
                     &suffix_first
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 582d87b911d..b4672fb4a58 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -40,7 +40,7 @@ impl Delimited {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(open_span, self.open_token())
+        TokenTree::token(self.open_token(), open_span)
     }
 
     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -50,7 +50,7 @@ impl Delimited {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(close_span, self.close_token())
+        TokenTree::token(self.close_token(), close_span)
     }
 }
 
@@ -153,7 +153,7 @@ impl TokenTree {
         }
     }
 
-    crate fn token(span: Span, kind: TokenKind) -> TokenTree {
+    crate fn token(kind: TokenKind, span: Span) -> TokenTree {
         TokenTree::Token(Token::new(kind, span))
     }
 }
@@ -325,7 +325,7 @@ where
                 let (ident, is_raw) = token.ident().unwrap();
                 let span = ident.span.with_lo(span.lo());
                 if ident.name == kw::Crate && !is_raw {
-                    TokenTree::token(span, token::Ident(kw::DollarCrate, is_raw))
+                    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                 } else {
                     TokenTree::MetaVar(span, ident)
                 }
@@ -342,7 +342,7 @@ where
             }
 
             // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::token(span, token::Dollar),
+            None => TokenTree::token(token::Dollar, span),
         },
 
         // `tree` is an arbitrary token. Keep it.
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 1dbb0638df1..b382893ce4e 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -119,7 +119,7 @@ pub fn transcribe(
                             Some((tt, _)) => tt.span(),
                             None => DUMMY_SP,
                         };
-                        result.push(TokenTree::token(prev_span, sep).into());
+                        result.push(TokenTree::token(sep, prev_span).into());
                     }
                     continue;
                 }
@@ -225,7 +225,7 @@ pub fn transcribe(
                             result.push(tt.clone().into());
                         } else {
                             sp = sp.apply_mark(cx.current_expansion.mark);
-                            let token = TokenTree::token(sp, token::Interpolated(nt.clone()));
+                            let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
                             result.push(token.into());
                         }
                     } else {
@@ -241,8 +241,8 @@ pub fn transcribe(
                     let ident =
                         Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    result.push(TokenTree::token(sp, token::Dollar).into());
-                    result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into());
+                    result.push(TokenTree::token(token::Dollar, sp).into());
+                    result.push(TokenTree::token(token::TokenKind::from_ast_ident(ident), sp).into());
                 }
             }
 
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 8040168a67e..d83b76f4d23 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -157,7 +157,7 @@ impl<'a> Parser<'a> {
                self.check(&token::OpenDelim(DelimToken::Brace)) {
                    self.parse_token_tree().into()
             } else if self.eat(&token::Eq) {
-                let eq = TokenTree::token(self.prev_span, token::Eq);
+                let eq = TokenTree::token(token::Eq, self.prev_span);
                 let mut is_interpolated_expr = false;
                 if let token::Interpolated(nt) = &self.token.kind {
                     if let token::NtExpr(..) = **nt {
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index 7b213655abd..4979a4dd27f 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -261,7 +261,7 @@ impl Lit {
             token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
-        TokenTree::token(self.span, token).into()
+        TokenTree::token(token, self.span).into()
     }
 }
 
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 2b82767d7e9..8d3518d0373 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -476,23 +476,23 @@ mod tests {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
             let expected = TokenStream::new(vec![
-                TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false)).into(),
-                TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false)).into(),
+                TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(6, 7), token::Ident(Name::intern("b"), false)).into(),
-                        TokenTree::token(sp(8, 9), token::Colon).into(),
-                        TokenTree::token(sp(10, 13), token::Ident(sym::i32, false)).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
+                        TokenTree::token(token::Colon, sp(8, 9)).into(),
+                        TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
                     ]).into(),
                 ).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(17, 18), token::Ident(Name::intern("b"), false)).into(),
-                        TokenTree::token(sp(18, 19), token::Semi).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
+                        TokenTree::token(token::Semi, sp(18, 19)).into(),
                     ]).into(),
                 ).into()
             ]);
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 57a49d1524d..e9e908eb858 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -362,11 +362,11 @@ impl TokenCursor {
             delim_span,
             token::Bracket,
             [
-                TokenTree::token(sp, token::Ident(sym::doc, false)),
-                TokenTree::token(sp, token::Eq),
-                TokenTree::token(sp, token::TokenKind::lit(
+                TokenTree::token(token::Ident(sym::doc, false), sp),
+                TokenTree::token(token::Eq, sp),
+                TokenTree::token(token::TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
-                )),
+                ), sp),
             ]
             .iter().cloned().collect::<TokenStream>().into(),
         );
@@ -375,10 +375,10 @@ impl TokenCursor {
             delim_span,
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-                [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
+                [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::token(sp, token::Pound), body]
+                [TokenTree::token(token::Pound, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
         )));
@@ -4344,7 +4344,7 @@ impl<'a> Parser<'a> {
                     };
                     TokenStream::new(vec![
                         args.into(),
-                        TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
+                        TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
                         body.into(),
                     ])
                 } else {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index ba7c88e7000..58c30a07e3e 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -763,10 +763,10 @@ impl Nonterminal {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                Some(TokenTree::token(ident.span, Ident(ident.name, is_raw)).into())
+                Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
             }
             Nonterminal::NtLifetime(ident) => {
-                Some(TokenTree::token(ident.span, Lifetime(ident.name)).into())
+                Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
             }
             Nonterminal::NtTT(ref tt) => {
                 Some(tt.clone().into())
@@ -852,7 +852,7 @@ fn prepend_attrs(sess: &ParseSess,
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
             let token = Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(ident.span, token));
+            brackets.push(tokenstream::TokenTree::token(token, ident.span));
 
         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
@@ -866,7 +866,7 @@ fn prepend_attrs(sess: &ParseSess,
         // The span we list here for `#` and for `[ ... ]` are both wrong in
         // that it encompasses more than each token, but it hopefully is "good
         // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(Pound, attr.span));
         let delim_span = DelimSpan::from_single(attr.span);
         builder.push(tokenstream::TokenTree::Delimited(
             delim_span, DelimToken::Bracket, brackets.build().into()));
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index bb80c1a1b3f..b4643229285 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -138,7 +138,7 @@ impl TokenTree {
         TokenStream::new(vec![(self, Joint)])
     }
 
-    pub fn token(span: Span, kind: TokenKind) -> TokenTree {
+    pub fn token(kind: TokenKind, span: Span) -> TokenTree {
         TokenTree::Token(Token::new(kind, span))
     }
 
@@ -149,7 +149,7 @@ impl TokenTree {
         } else {
             span.with_hi(span.lo() + BytePos(delim.len() as u32))
         };
-        TokenTree::token(open_span, token::OpenDelim(delim))
+        TokenTree::token(token::OpenDelim(delim), open_span)
     }
 
     /// Returns the closing delimiter as a token tree.
@@ -159,7 +159,7 @@ impl TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(delim.len() as u32))
         };
-        TokenTree::token(close_span, token::CloseDelim(delim))
+        TokenTree::token(token::CloseDelim(delim), close_span)
     }
 }
 
@@ -212,7 +212,7 @@ impl TokenStream {
                         _ => continue,
                     };
                     let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::token(sp, token::Comma), NonJoint);
+                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
                     suggestion = Some((pos, comma, sp));
                 }
             }
@@ -433,7 +433,7 @@ impl TokenStreamBuilder {
                     let last_stream = self.0.pop().unwrap();
                     self.push_all_but_last_tree(&last_stream);
                     let glued_span = last_token.span.to(token.span);
-                    let glued_tt = TokenTree::token(glued_span, glued_tok);
+                    let glued_tt = TokenTree::token(glued_tok, glued_span);
                     let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
                     self.0.push(glued_tokenstream);
                     self.push_all_but_first_tree(&stream);
@@ -660,7 +660,7 @@ mod tests {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::token(sp(0, 1), token::Ident(Name::intern("a"), false)).into();
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(0, 1)).into();
             let test2 = string_to_ts("foo(bar::baz)");
 
             assert_eq!(test0.is_empty(), true);
@@ -673,9 +673,9 @@ mod tests {
     fn test_dotdotdot() {
         with_default_globals(|| {
             let mut builder = TokenStreamBuilder::new();
-            builder.push(TokenTree::token(sp(0, 1), token::Dot).joint());
-            builder.push(TokenTree::token(sp(1, 2), token::Dot).joint());
-            builder.push(TokenTree::token(sp(2, 3), token::Dot));
+            builder.push(TokenTree::token(token::Dot, sp(0, 1)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(1, 2)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(2, 3)));
             let stream = builder.build();
             assert!(stream.eq_unspanned(&string_to_ts("...")));
             assert_eq!(stream.trees().count(), 1);
diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs
index e5e422c4d9c..ce1e3276af3 100644
--- a/src/libsyntax_ext/assert.rs
+++ b/src/libsyntax_ext/assert.rs
@@ -30,11 +30,11 @@ pub fn expand_assert<'cx>(
         path: Path::from_ident(Ident::new(sym::panic, sp)),
         tts: custom_message.unwrap_or_else(|| {
             TokenStream::from(TokenTree::token(
-                DUMMY_SP,
                 TokenKind::lit(token::Str, Symbol::intern(&format!(
                     "assertion failed: {}",
                     pprust::expr_to_string(&cond_expr).escape_debug()
                 )), None),
+                DUMMY_SP,
             ))
         }).into(),
         delim: MacDelimiter::Parenthesis,
diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs
index 3deab97db88..98465d75e46 100644
--- a/src/libsyntax_ext/deriving/custom.rs
+++ b/src/libsyntax_ext/deriving/custom.rs
@@ -69,7 +69,7 @@ impl MultiItemModifier for ProcMacroDerive {
         MarkAttrs(&self.attrs).visit_item(&item);
 
         let token = token::Interpolated(Lrc::new(token::NtItem(item)));
-        let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();
+        let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();
 
         let server = proc_macro_server::Rustc::new(ecx);
         let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index ff2835c70f7..00a420d3fa8 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -163,7 +163,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                     TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                 ]
                 .into_iter()
-                .map(|kind| tokenstream::TokenTree::token(span, kind))
+                .map(|kind| tokenstream::TokenTree::token(kind, span))
                 .collect();
                 stack.push(TokenTree::Group(Group {
                     delimiter: Delimiter::Bracket,
@@ -210,7 +210,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                 .into();
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
-                return tokenstream::TokenTree::token(span, Ident(sym, is_raw)).into();
+                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },
@@ -219,8 +219,8 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::token(span, minus);
-                let b = tokenstream::TokenTree::token(span, integer);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(integer, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal {
@@ -230,12 +230,12 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::token(span, minus);
-                let b = tokenstream::TokenTree::token(span, float);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(float, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal { lit, span }) => {
-                return tokenstream::TokenTree::token(span, Literal(lit)).into()
+                return tokenstream::TokenTree::token(Literal(lit), span).into()
             }
         };
 
@@ -265,7 +265,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
             _ => unreachable!(),
         };
 
-        let tree = tokenstream::TokenTree::token(span, kind);
+        let tree = tokenstream::TokenTree::token(kind, span);
         TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
     }
 }