Diffstat (limited to 'compiler/rustc_expand/src')
 compiler/rustc_expand/src/config.rs            |  4
 compiler/rustc_expand/src/mbe/macro_rules.rs   |  4
 compiler/rustc_expand/src/mbe/metavar_expr.rs  |  6
 compiler/rustc_expand/src/mbe/quoted.rs        | 16
 compiler/rustc_expand/src/mbe/transcribe.rs    | 53
 compiler/rustc_expand/src/parse/tests.rs       | 36
 compiler/rustc_expand/src/proc_macro.rs        |  4
 compiler/rustc_expand/src/proc_macro_server.rs | 49
 compiler/rustc_expand/src/tokenstream/tests.rs | 17
 9 files changed, 89 insertions(+), 100 deletions(-)
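
The change throughout this diff is mechanical: spacing information, previously stored next to each tree in the stream as a `(TokenTree, Spacing)` pair (the `TreeAndSpacing` alias), now lives in a second field of `TokenTree::Token`, so every pattern on that variant gains an extra `, _` or `, spacing`. The following is an illustrative sketch only, not part of the commit; it assumes a nightly toolchain with `rustc_private` and the `rustc_ast`/`rustc_span` crates as of this commit, and the `describe` helper is hypothetical:

    // Sketch only; not part of the commit. Assumes a nightly toolchain with
    // `#![feature(rustc_private)]` and these internal crates on the sysroot.
    #![feature(rustc_private)]
    extern crate rustc_ast;
    extern crate rustc_span;

    use rustc_ast::token::{Token, TokenKind};
    use rustc_ast::tokenstream::{Spacing, TokenTree};
    use rustc_span::DUMMY_SP;

    // Before: `TokenTree::Token(token)` held only the token; spacing was a
    // separate element of the stream. After: the variant carries it, so a
    // match either binds the spacing or discards it with `_`.
    fn describe(tree: &TokenTree) -> &'static str {
        match tree {
            TokenTree::Token(Token { kind: TokenKind::Pound, .. }, _) => "a `#` token",
            TokenTree::Token(_, Spacing::Joint) => "a token glued to its successor",
            TokenTree::Token(_, Spacing::Alone) => "a token followed by whitespace",
            TokenTree::Delimited(..) => "a delimited group",
        }
    }

    fn main() {
        // `token_alone` is one of the new constructors used below in
        // transcribe.rs, the tests, and the proc-macro server.
        let tt = TokenTree::token_alone(TokenKind::Pound, DUMMY_SP);
        println!("{}", describe(&tt));
    }
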
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 2b941ec6809..dfa3bff6903 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -401,7 +401,7 @@ impl<'a> StripUnconfigured<'a> {
         // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
         // for `attr` when we expand it to `#[attr]`
         let mut orig_trees = orig_tokens.into_trees();
-        let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }) = orig_trees.next().unwrap() else {
+        let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }, _) = orig_trees.next().unwrap() else {
             panic!("Bad tokens for attribute {:?}", attr);
         };
         let pound_span = pound_token.span;
@@ -409,7 +409,7 @@ impl<'a> StripUnconfigured<'a> {
         let mut trees = vec![(AttrAnnotatedTokenTree::Token(pound_token), Spacing::Alone)];
         if attr.style == AttrStyle::Inner {
             // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
-            let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }) = orig_trees.next().unwrap() else {
+            let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) = orig_trees.next().unwrap() else {
                 panic!("Bad tokens for attribute {:?}", attr);
             };
             trees.push((AttrAnnotatedTokenTree::Token(bang_token), Spacing::Alone));
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 3d44c408d8f..f7e1575afbf 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -481,7 +481,7 @@ pub fn compile_declarative_macro(
             .map(|m| {
                 if let MatchedTokenTree(ref tt) = *m {
                     let tt = mbe::quoted::parse(
-                        tt.clone().into(),
+                        TokenStream::new(vec![tt.clone()]),
                         true,
                         &sess.parse_sess,
                         def.id,
@@ -505,7 +505,7 @@ pub fn compile_declarative_macro(
             .map(|m| {
                 if let MatchedTokenTree(ref tt) = *m {
                     return mbe::quoted::parse(
-                        tt.clone().into(),
+                        TokenStream::new(vec![tt.clone()]),
                         false,
                         &sess.parse_sess,
                         def.id,
diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs
index 45c462bc425..fc808401a5e 100644
--- a/compiler/rustc_expand/src/mbe/metavar_expr.rs
+++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs
@@ -106,7 +106,7 @@ fn parse_depth<'sess>(
     let Some(tt) = iter.next() else { return Ok(0) };
     let TokenTree::Token(token::Token {
         kind: token::TokenKind::Literal(lit), ..
-    }) = tt else {
+    }, _) = tt else {
         return Err(sess.span_diagnostic.struct_span_err(
             span,
             "meta-variable expression depth must be a literal"
@@ -130,7 +130,7 @@ fn parse_ident<'sess>(
     sess: &'sess ParseSess,
     span: Span,
 ) -> PResult<'sess, Ident> {
-    if let Some(tt) = iter.next() && let TokenTree::Token(token) = tt {
+    if let Some(tt) = iter.next() && let TokenTree::Token(token, _) = tt {
         if let Some((elem, false)) = token.ident() {
             return Ok(elem);
         }
@@ -153,7 +153,7 @@ fn parse_ident<'sess>(
 /// Tries to move the iterator forward returning `true` if there is a comma. If not, then the
 /// iterator is not modified and the result is `false`.
 fn try_eat_comma(iter: &mut CursorRef<'_>) -> bool {
-    if let Some(TokenTree::Token(token::Token { kind: token::Comma, .. })) = iter.look_ahead(0) {
+    if let Some(TokenTree::Token(token::Token { kind: token::Comma, .. }, _)) = iter.look_ahead(0) {
         let _ = iter.next();
         return true;
     }
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 707cb73f097..ee17d54f629 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -56,9 +56,9 @@ pub(super) fn parse(
         match tree {
             TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
                 let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => {
+                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => {
                         match trees.next() {
-                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
+                            Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
                                 Some((frag, _)) => {
                                     let span = token.span.with_lo(start_sp.lo());
 
@@ -146,7 +146,7 @@ fn parse_tree(
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
         // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => {
+        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => {
             // FIXME: Handle `Invisible`-delimited groups in a more systematic way
             // during parsing.
             let mut next = outer_trees.next();
@@ -217,7 +217,7 @@ fn parse_tree(
 
                 // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate`
                 // special metavariable that names the crate of the invocation.
-                Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
+                Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => {
                     let (ident, is_raw) = token.ident().unwrap();
                     let span = ident.span.with_lo(span.lo());
                     if ident.name == kw::Crate && !is_raw {
@@ -228,7 +228,7 @@ fn parse_tree(
                 }
 
                 // `tree` is followed by another `$`. This is an escaped `$`.
-                Some(tokenstream::TokenTree::Token(Token { kind: token::Dollar, span })) => {
+                Some(tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _)) => {
                     if parsing_patterns {
                         span_dollar_dollar_or_metavar_in_the_lhs_err(
                             sess,
@@ -241,7 +241,7 @@ fn parse_tree(
                 }
 
                 // `tree` is followed by some other token. This is an error.
-                Some(tokenstream::TokenTree::Token(token)) => {
+                Some(tokenstream::TokenTree::Token(token, _)) => {
                     let msg = format!(
                         "expected identifier, found `{}`",
                         pprust::token_to_string(&token),
@@ -256,7 +256,7 @@ fn parse_tree(
         }
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
+        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -291,7 +291,7 @@ fn parse_kleene_op(
     span: Span,
 ) -> Result<Result<(KleeneOp, Span), Token>, Span> {
     match input.next() {
-        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
+        Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(&token) {
             Some(op) => Ok(Ok((op, token.span))),
             None => Ok(Err(token)),
         },
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index 3037855ae28..e47ea83ac38 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -3,7 +3,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree,
 use crate::mbe::{self, MetaVarExpr};
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
+use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{pluralize, PResult};
 use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
@@ -105,7 +105,7 @@ pub(super) fn transcribe<'a>(
     //
     // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
     // again, and we are done transcribing.
-    let mut result: Vec<TreeAndSpacing> = Vec::new();
+    let mut result: Vec<TokenTree> = Vec::new();
     let mut result_stack = Vec::new();
     let mut marker = Marker(cx.current_expansion.id, transparency);
 
@@ -123,7 +123,7 @@ pub(super) fn transcribe<'a>(
                 if repeat_idx < repeat_len {
                     *idx = 0;
                     if let Some(sep) = sep {
-                        result.push(TokenTree::Token(sep.clone()).into());
+                        result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
                     }
                     continue;
                 }
@@ -150,7 +150,7 @@ pub(super) fn transcribe<'a>(
                     // Step back into the parent Delimited.
                     let tree = TokenTree::Delimited(span, delim, TokenStream::new(result));
                     result = result_stack.pop().unwrap();
-                    result.push(tree.into());
+                    result.push(tree);
                 }
             }
             continue;
@@ -227,15 +227,15 @@ pub(super) fn transcribe<'a>(
                             // `tt`s are emitted into the output stream directly as "raw tokens",
                             // without wrapping them into groups.
                             let token = tt.clone();
-                            result.push(token.into());
+                            result.push(token);
                         }
                         MatchedNonterminal(ref nt) => {
                             // Other variables are emitted into the output stream as groups with
                             // `Delimiter::Invisible` to maintain parsing priorities.
                             // `Interpolated` is currently used for such groups in rustc parser.
                             marker.visit_span(&mut sp);
-                            let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
-                            result.push(token.into());
+                            let token = TokenTree::token_alone(token::Interpolated(nt.clone()), sp);
+                            result.push(token);
                         }
                         MatchedSeq(..) => {
                             // We were unable to descend far enough. This is an error.
@@ -250,8 +250,11 @@ pub(super) fn transcribe<'a>(
                     // with modified syntax context. (I believe this supports nested macros).
                     marker.visit_span(&mut sp);
                     marker.visit_ident(&mut original_ident);
-                    result.push(TokenTree::token(token::Dollar, sp).into());
-                    result.push(TokenTree::Token(Token::from_ast_ident(original_ident)).into());
+                    result.push(TokenTree::token_alone(token::Dollar, sp));
+                    result.push(TokenTree::Token(
+                        Token::from_ast_ident(original_ident),
+                        Spacing::Alone,
+                    ));
                 }
             }
 
@@ -281,8 +284,8 @@ pub(super) fn transcribe<'a>(
             mbe::TokenTree::Token(token) => {
                 let mut token = token.clone();
                 mut_visit::visit_token(&mut token, &mut marker);
-                let tt = TokenTree::Token(token);
-                result.push(tt.into());
+                let tt = TokenTree::Token(token, Spacing::Alone);
+                result.push(tt);
             }
 
             // There should be no meta-var declarations in the invocation of a macro.
@@ -532,7 +535,7 @@ fn transcribe_metavar_expr<'a>(
     interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
     marker: &mut Marker,
     repeats: &[(usize, usize)],
-    result: &mut Vec<TreeAndSpacing>,
+    result: &mut Vec<TokenTree>,
     sp: &DelimSpan,
 ) -> PResult<'a, ()> {
     let mut visited_span = || {
@@ -544,11 +547,11 @@ fn transcribe_metavar_expr<'a>(
         MetaVarExpr::Count(original_ident, depth_opt) => {
             let matched = matched_from_ident(cx, original_ident, interp)?;
             let count = count_repetitions(cx, depth_opt, matched, &repeats, sp)?;
-            let tt = TokenTree::token(
+            let tt = TokenTree::token_alone(
                 TokenKind::lit(token::Integer, sym::integer(count), None),
                 visited_span(),
             );
-            result.push(tt.into());
+            result.push(tt);
         }
         MetaVarExpr::Ignore(original_ident) => {
             // Used to ensure that `original_ident` is present in the LHS
@@ -556,25 +559,19 @@ fn transcribe_metavar_expr<'a>(
         }
         MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) {
             Some((index, _)) => {
-                result.push(
-                    TokenTree::token(
-                        TokenKind::lit(token::Integer, sym::integer(*index), None),
-                        visited_span(),
-                    )
-                    .into(),
-                );
+                result.push(TokenTree::token_alone(
+                    TokenKind::lit(token::Integer, sym::integer(*index), None),
+                    visited_span(),
+                ));
             }
             None => return Err(out_of_bounds_err(cx, repeats.len(), sp.entire(), "index")),
         },
         MetaVarExpr::Length(depth) => match repeats.iter().nth_back(depth) {
             Some((_, length)) => {
-                result.push(
-                    TokenTree::token(
-                        TokenKind::lit(token::Integer, sym::integer(*length), None),
-                        visited_span(),
-                    )
-                    .into(),
-                );
+                result.push(TokenTree::token_alone(
+                    TokenKind::lit(token::Integer, sym::integer(*length), None),
+                    visited_span(),
+                ));
             }
             None => return Err(out_of_bounds_err(cx, repeats.len(), sp.entire(), "length")),
         },
diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs
index 8da78792758..a3c631d3318 100644
--- a/compiler/rustc_expand/src/parse/tests.rs
+++ b/compiler/rustc_expand/src/parse/tests.rs
@@ -66,23 +66,23 @@ fn string_to_tts_macro() {
 
         match tts {
             [
-                TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }),
-                TokenTree::Token(Token { kind: token::Not, .. }),
-                TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }),
+                TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }, _),
+                TokenTree::Token(Token { kind: token::Not, .. }, _),
+                TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }, _),
                 TokenTree::Delimited(_, macro_delim, macro_tts),
             ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
                 let tts = &macro_tts.trees().collect::<Vec<_>>();
                 match &tts[..] {
                     [
                         TokenTree::Delimited(_, first_delim, first_tts),
-                        TokenTree::Token(Token { kind: token::FatArrow, .. }),
+                        TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
                         TokenTree::Delimited(_, second_delim, second_tts),
                     ] if macro_delim == &Delimiter::Parenthesis => {
                         let tts = &first_tts.trees().collect::<Vec<_>>();
                         match &tts[..] {
                             [
-                                TokenTree::Token(Token { kind: token::Dollar, .. }),
-                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
+                                TokenTree::Token(Token { kind: token::Dollar, .. }, _),
+                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _),
                             ] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {
                             }
                             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@@ -90,8 +90,8 @@ fn string_to_tts_macro() {
                         let tts = &second_tts.trees().collect::<Vec<_>>();
                         match &tts[..] {
                             [
-                                TokenTree::Token(Token { kind: token::Dollar, .. }),
-                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
+                                TokenTree::Token(Token { kind: token::Dollar, .. }, _),
+                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _),
                             ] if second_delim == &Delimiter::Parenthesis
                                 && name.as_str() == "a" => {}
                             _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
@@ -111,29 +111,27 @@ fn string_to_tts_1() {
         let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
         let expected = TokenStream::new(vec![
-            TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
-            TokenTree::token(token::Ident(Symbol::intern("a"), false), sp(3, 4)).into(),
+            TokenTree::token_alone(token::Ident(kw::Fn, false), sp(0, 2)),
+            TokenTree::token_alone(token::Ident(Symbol::intern("a"), false), sp(3, 4)),
             TokenTree::Delimited(
                 DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                 Delimiter::Parenthesis,
                 TokenStream::new(vec![
-                    TokenTree::token(token::Ident(Symbol::intern("b"), false), sp(6, 7)).into(),
-                    TokenTree::token(token::Colon, sp(8, 9)).into(),
-                    TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
+                    TokenTree::token_alone(token::Ident(Symbol::intern("b"), false), sp(6, 7)),
+                    TokenTree::token_alone(token::Colon, sp(8, 9)),
+                    TokenTree::token_alone(token::Ident(sym::i32, false), sp(10, 13)),
                 ])
                 .into(),
-            )
-            .into(),
+            ),
             TokenTree::Delimited(
                 DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                 Delimiter::Brace,
                 TokenStream::new(vec![
-                    TokenTree::token(token::Ident(Symbol::intern("b"), false), sp(17, 18)).into(),
-                    TokenTree::token(token::Semi, sp(18, 19)).into(),
+                    TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(17, 18)),
+                    TokenTree::token_alone(token::Semi, sp(18, 19)),
                 ])
                 .into(),
-            )
-            .into(),
+            ),
         ]);
 
         assert_eq!(tts, expected);
diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs
index 9e1cd299fd6..de20bb85972 100644
--- a/compiler/rustc_expand/src/proc_macro.rs
+++ b/compiler/rustc_expand/src/proc_macro.rs
@@ -4,7 +4,7 @@ use crate::proc_macro_server;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token;
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::TokenStream;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::ErrorGuaranteed;
 use rustc_parse::parser::ForceCollect;
@@ -94,7 +94,7 @@ impl MultiItemModifier for DeriveProcMacro {
                 Annotatable::Stmt(stmt) => token::NtStmt(stmt),
                 _ => unreachable!(),
             };
-            TokenTree::token(token::Interpolated(Lrc::new(nt)), DUMMY_SP).into()
+            TokenStream::token_alone(token::Interpolated(Lrc::new(nt)), DUMMY_SP)
         } else {
             item.to_tokens()
         };
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 176c77ca6ed..7d9a4aed0bf 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -92,9 +92,8 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
         let mut trees = Vec::with_capacity(stream.len().next_power_of_two());
         let mut cursor = stream.into_trees();
 
-        while let Some((tree, spacing)) = cursor.next_with_spacing() {
-            let joint = spacing == Joint;
-            let Token { kind, span } = match tree {
+        while let Some(tree) = cursor.next() {
+            let (Token { kind, span }, joint) = match tree {
                 tokenstream::TokenTree::Delimited(span, delim, tts) => {
                     let delimiter = pm::Delimiter::from_internal(delim);
                     trees.push(TokenTree::Group(Group {
@@ -108,7 +107,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                     }));
                     continue;
                 }
-                tokenstream::TokenTree::Token(token) => token,
+                tokenstream::TokenTree::Token(token, spacing) => (token, spacing == Joint),
             };
 
             let mut op = |s: &str| {
@@ -194,7 +193,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                         TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                     ]
                     .into_iter()
-                    .map(|kind| tokenstream::TokenTree::token(kind, span))
+                    .map(|kind| tokenstream::TokenTree::token_alone(kind, span))
                     .collect();
                     trees.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
                     if attr_style == ast::AttrStyle::Inner {
@@ -246,16 +245,15 @@ impl ToInternal<TokenStream> for (TokenTree<TokenStream, Span, Symbol>, &mut Rus
         let (ch, joint, span) = match tree {
             TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
             TokenTree::Group(Group { delimiter, stream, span: DelimSpan { open, close, .. } }) => {
-                return tokenstream::TokenTree::Delimited(
+                return tokenstream::TokenStream::delimited(
                     tokenstream::DelimSpan { open, close },
                     delimiter.to_internal(),
                     stream.unwrap_or_default(),
-                )
-                .into();
+                );
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
                 rustc.sess().symbol_gallery.insert(sym, span);
-                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
+                return tokenstream::TokenStream::token_alone(Ident(sym, is_raw), span);
             }
             TokenTree::Literal(self::Literal {
                 kind: self::LitKind::Integer,
@@ -266,8 +264,8 @@ impl ToInternal<TokenStream> for (TokenTree<TokenStream, Span, Symbol>, &mut Rus
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::token(minus, span);
-                let b = tokenstream::TokenTree::token(integer, span);
+                let a = tokenstream::TokenTree::token_alone(minus, span);
+                let b = tokenstream::TokenTree::token_alone(integer, span);
                 return [a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal {
@@ -279,16 +277,15 @@ impl ToInternal<TokenStream> for (TokenTree<TokenStream, Span, Symbol>, &mut Rus
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::token(minus, span);
-                let b = tokenstream::TokenTree::token(float, span);
+                let a = tokenstream::TokenTree::token_alone(minus, span);
+                let b = tokenstream::TokenTree::token_alone(float, span);
                 return [a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal { kind, symbol, suffix, span }) => {
-                return tokenstream::TokenTree::token(
+                return tokenstream::TokenStream::token_alone(
                     TokenKind::lit(kind.to_internal(), symbol, suffix),
                     span,
-                )
-                .into();
+                );
             }
         };
 
@@ -318,8 +315,11 @@ impl ToInternal<TokenStream> for (TokenTree<TokenStream, Span, Symbol>, &mut Rus
             _ => unreachable!(),
         };
 
-        let tree = tokenstream::TokenTree::token(kind, span);
-        TokenStream::new(vec![(tree, if joint { Joint } else { Alone })])
+        if joint {
+            tokenstream::TokenStream::token_joint(kind, span)
+        } else {
+            tokenstream::TokenStream::token_alone(kind, span)
+        }
     }
 }
 
@@ -486,12 +486,11 @@ impl server::TokenStream for Rustc<'_, '_> {
         // We don't use `TokenStream::from_ast` as the tokenstream currently cannot
         // be recovered in the general case.
         match &expr.kind {
-            ast::ExprKind::Lit(l) if l.token.kind == token::Bool => {
-                Ok(tokenstream::TokenTree::token(token::Ident(l.token.symbol, false), l.span)
-                    .into())
-            }
+            ast::ExprKind::Lit(l) if l.token.kind == token::Bool => Ok(
+                tokenstream::TokenStream::token_alone(token::Ident(l.token.symbol, false), l.span),
+            ),
             ast::ExprKind::Lit(l) => {
-                Ok(tokenstream::TokenTree::token(token::Literal(l.token), l.span).into())
+                Ok(tokenstream::TokenStream::token_alone(token::Literal(l.token), l.span))
             }
             ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
                 ast::ExprKind::Lit(l) => match l.token {
@@ -499,8 +498,8 @@ impl server::TokenStream for Rustc<'_, '_> {
                         Ok(Self::TokenStream::from_iter([
                             // FIXME: The span of the `-` token is lost when
                             // parsing, so we cannot faithfully recover it here.
-                            tokenstream::TokenTree::token(token::BinOp(token::Minus), e.span),
-                            tokenstream::TokenTree::token(token::Literal(l.token), l.span),
+                            tokenstream::TokenTree::token_alone(token::BinOp(token::Minus), e.span),
+                            tokenstream::TokenTree::token_alone(token::Literal(l.token), l.span),
                         ]))
                     }
                     _ => Err(()),
diff --git a/compiler/rustc_expand/src/tokenstream/tests.rs b/compiler/rustc_expand/src/tokenstream/tests.rs
index e4a4db204d9..eed69681011 100644
--- a/compiler/rustc_expand/src/tokenstream/tests.rs
+++ b/compiler/rustc_expand/src/tokenstream/tests.rs
@@ -1,7 +1,7 @@
 use crate::tests::string_to_stream;
 
 use rustc_ast::token;
-use rustc_ast::tokenstream::{Spacing, TokenStream, TokenStreamBuilder, TokenTree};
+use rustc_ast::tokenstream::{TokenStream, TokenStreamBuilder};
 use rustc_span::create_default_session_globals_then;
 use rustc_span::{BytePos, Span, Symbol};
 
@@ -13,10 +13,6 @@ fn sp(a: u32, b: u32) -> Span {
     Span::with_root_ctxt(BytePos(a), BytePos(b))
 }
 
-fn joint(tree: TokenTree) -> TokenStream {
-    TokenStream::new(vec![(tree, Spacing::Joint)])
-}
-
 #[test]
 fn test_concat() {
     create_default_session_globals_then(|| {
@@ -90,9 +86,8 @@ fn test_diseq_1() {
 #[test]
 fn test_is_empty() {
     create_default_session_globals_then(|| {
-        let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
-        let test1: TokenStream =
-            TokenTree::token(token::Ident(Symbol::intern("a"), false), sp(0, 1)).into();
+        let test0 = TokenStream::default();
+        let test1 = TokenStream::token_alone(token::Ident(Symbol::intern("a"), false), sp(0, 1));
         let test2 = string_to_ts("foo(bar::baz)");
 
         assert_eq!(test0.is_empty(), true);
@@ -105,9 +100,9 @@ fn test_is_empty() {
 fn test_dotdotdot() {
     create_default_session_globals_then(|| {
         let mut builder = TokenStreamBuilder::new();
-        builder.push(joint(TokenTree::token(token::Dot, sp(0, 1))));
-        builder.push(joint(TokenTree::token(token::Dot, sp(1, 2))));
-        builder.push(TokenTree::token(token::Dot, sp(2, 3)));
+        builder.push(TokenStream::token_joint(token::Dot, sp(0, 1)));
+        builder.push(TokenStream::token_joint(token::Dot, sp(1, 2)));
+        builder.push(TokenStream::token_alone(token::Dot, sp(2, 3)));
         let stream = builder.build();
         assert!(stream.eq_unspanned(&string_to_ts("...")));
         assert_eq!(stream.trees().count(), 1);
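
Taken together, the hunks above also replace the old `TokenTree::token(kind, span)` constructor plus `.into()` conversions with constructors that name both the spacing and the level they build at: `TokenTree::token_alone`/`token_joint` for single trees, and `TokenStream::token_alone`/`token_joint`/`delimited` for one-item streams. A short sketch, under the same `rustc_private` assumptions as the earlier example and again not part of the commit, of how the updated `test_dotdotdot` style composes:

    // Sketch only; not part of the commit.
    #![feature(rustc_private)]
    extern crate rustc_ast;
    extern crate rustc_span;

    use rustc_ast::token::TokenKind;
    use rustc_ast::tokenstream::{TokenStream, TokenStreamBuilder, TokenTree};
    use rustc_span::{create_default_session_globals_then, DUMMY_SP};

    fn main() {
        create_default_session_globals_then(|| {
            // Tree-level constructors: the spacing is part of the name now.
            let dots = vec![
                TokenTree::token_joint(TokenKind::Dot, DUMMY_SP),
                TokenTree::token_joint(TokenKind::Dot, DUMMY_SP),
                TokenTree::token_alone(TokenKind::Dot, DUMMY_SP),
            ];
            // Plain `TokenStream::new` keeps the three dots as three trees.
            assert_eq!(TokenStream::new(dots).trees().count(), 3);

            // Stream-level constructors feed `TokenStreamBuilder`, which glues
            // adjacent joint tokens, so the dots collapse into one `...` token,
            // exactly as the updated `test_dotdotdot` asserts.
            let mut builder = TokenStreamBuilder::new();
            builder.push(TokenStream::token_joint(TokenKind::Dot, DUMMY_SP));
            builder.push(TokenStream::token_joint(TokenKind::Dot, DUMMY_SP));
            builder.push(TokenStream::token_alone(TokenKind::Dot, DUMMY_SP));
            assert_eq!(builder.build().trees().count(), 1);
        });
    }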