diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index b2207f2281620..0550f53a96fb3 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -44,6 +44,12 @@ pub enum TokenTree {
     Delimited(DelimSpan, DelimToken, TokenStream),
 }
 
+#[derive(Copy, Clone)]
+pub enum CanSynthesizeMissingTokens {
+    Yes,
+    No,
+}
+
 // Ensure all fields of `TokenTree` is `Send` and `Sync`.
 #[cfg(parallel_compiler)]
 fn _dummy()
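
Not part of the patch: a minimal, self-contained sketch of the design choice here. A dedicated two-variant enum is used instead of a bare `bool` so the intent stays legible at every call site; the `describe` function below is hypothetical.

```rust
#[derive(Copy, Clone)]
pub enum CanSynthesizeMissingTokens {
    Yes,
    No,
}

// Hypothetical consumer, only to show the call-site readability win over
// a `synthesize: bool` parameter.
fn describe(mode: CanSynthesizeMissingTokens) -> &'static str {
    match mode {
        // Lossy fallback allowed: pretty-print the AST node and re-lex it.
        CanSynthesizeMissingTokens::Yes => "may synthesize fake tokens",
        // Real captured tokens required; their absence is a compiler bug.
        CanSynthesizeMissingTokens::No => "must use captured tokens",
    }
}

fn main() {
    println!("{}", describe(CanSynthesizeMissingTokens::Yes));
    println!("{}", describe(CanSynthesizeMissingTokens::No));
}
```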
diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs
index 2e1b5a74a7b7e..82616c21891bb 100644
--- a/compiler/rustc_ast_lowering/src/lib.rs
+++ b/compiler/rustc_ast_lowering/src/lib.rs
@@ -37,7 +37,7 @@
 
 use rustc_ast::node_id::NodeMap;
 use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
-use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, DelimSpan, TokenStream, TokenTree};
 use rustc_ast::visit::{self, AssocCtxt, Visitor};
 use rustc_ast::walk_list;
 use rustc_ast::{self as ast, *};
@@ -206,7 +206,8 @@ pub trait ResolverAstLowering {
     ) -> LocalDefId;
 }
 
-type NtToTokenstream = fn(&Nonterminal, &ParseSess, Span) -> TokenStream;
+type NtToTokenstream =
+    fn(&Nonterminal, &ParseSess, Span, CanSynthesizeMissingTokens) -> TokenStream;
 
 /// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
 /// and if so, what meaning it has.
@@ -393,6 +394,47 @@ enum AnonymousLifetimeMode {
     PassThrough,
 }
 
+struct TokenStreamLowering<'a> {
+    parse_sess: &'a ParseSess,
+    synthesize_tokens: CanSynthesizeMissingTokens,
+    nt_to_tokenstream: NtToTokenstream,
+}
+
+impl<'a> TokenStreamLowering<'a> {
+    fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
+        tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect()
+    }
+
+    fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
+        match tree {
+            TokenTree::Token(token) => self.lower_token(token),
+            TokenTree::Delimited(span, delim, tts) => {
+                TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into()
+            }
+        }
+    }
+
+    fn lower_token(&mut self, token: Token) -> TokenStream {
+        match token.kind {
+            token::Interpolated(nt) => {
+                let tts = (self.nt_to_tokenstream)(
+                    &nt,
+                    self.parse_sess,
+                    token.span,
+                    self.synthesize_tokens,
+                );
+                TokenTree::Delimited(
+                    DelimSpan::from_single(token.span),
+                    DelimToken::NoDelim,
+                    self.lower_token_stream(tts),
+                )
+                .into()
+            }
+            _ => TokenTree::Token(token).into(),
+        }
+    }
+}
+
 struct ImplTraitTypeIdVisitor<'a> {
     ids: &'a mut SmallVec<[NodeId; 1]>,
 }
@@ -955,40 +997,49 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         match *args {
             MacArgs::Empty => MacArgs::Empty,
             MacArgs::Delimited(dspan, delim, ref tokens) => {
-                MacArgs::Delimited(dspan, delim, self.lower_token_stream(tokens.clone()))
-            }
-            MacArgs::Eq(eq_span, ref tokens) => {
-                MacArgs::Eq(eq_span, self.lower_token_stream(tokens.clone()))
-            }
-        }
-    }
-
-    fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
-        tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect()
-    }
-
-    fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
-        match tree {
-            TokenTree::Token(token) => self.lower_token(token),
-            TokenTree::Delimited(span, delim, tts) => {
-                TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into()
+                // This is either a non-key-value attribute, or a `macro_rules!` body.
+                // We either have no nonterminals present (in the case of an attribute),
+                // or have tokens available for all nonterminals in the case of a nested
+                // `macro_rules!`, e.g.:
+                //
+                // ```rust
+                // macro_rules! outer {
+                //     ($e:expr) => {
+                //         macro_rules! inner {
+                //             () => { $e }
+                //         }
+                //     }
+                // }
+                // ```
+                //
+                // In both cases, we don't want to synthesize any tokens.
+                MacArgs::Delimited(
+                    dspan,
+                    delim,
+                    self.lower_token_stream(tokens.clone(), CanSynthesizeMissingTokens::No),
+                )
             }
+            // This is an inert key-value attribute - it will never be visible to macros
+            // after it gets lowered to HIR. Therefore, we can synthesize tokens with fake
+            // spans to handle nonterminals in `#[doc]` (e.g. `#[doc = $e]`).
+            MacArgs::Eq(eq_span, ref tokens) => MacArgs::Eq(
+                eq_span,
+                self.lower_token_stream(tokens.clone(), CanSynthesizeMissingTokens::Yes),
+            ),
         }
     }
 
-    fn lower_token(&mut self, token: Token) -> TokenStream {
-        match token.kind {
-            token::Interpolated(nt) => {
-                let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
-                TokenTree::Delimited(
-                    DelimSpan::from_single(token.span),
-                    DelimToken::NoDelim,
-                    self.lower_token_stream(tts),
-                )
-                .into()
-            }
-            _ => TokenTree::Token(token).into(),
+    fn lower_token_stream(
+        &self,
+        tokens: TokenStream,
+        synthesize_tokens: CanSynthesizeMissingTokens,
+    ) -> TokenStream {
+        TokenStreamLowering {
+            parse_sess: &self.sess.parse_sess,
+            synthesize_tokens,
+            nt_to_tokenstream: self.nt_to_tokenstream,
         }
+        .lower_token_stream(tokens)
     }
 
     /// Given an associated type constraint like one of these:
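
To make the two lowering modes above concrete, here is a self-contained sketch (ordinary Rust source, not compiler code) of the two shapes the comments describe: a nested `macro_rules!` body, which must keep its real tokens because the inner macro expands later, and an inert `#[doc = $e]` attribute, where synthesized fake-span tokens are acceptable because nothing re-inspects them after lowering to HIR.

```rust
// `MacArgs::Eq` case: the attribute value comes from a nonterminal, but the
// attribute is inert after lowering, so fake spans are fine.
macro_rules! with_doc {
    ($text:expr) => {
        #[doc = $text]
        pub struct Documented;
    };
}

with_doc!("documentation built from an expression nonterminal");

// `MacArgs::Delimited` case: the nested `macro_rules!` body must keep real
// tokens for `$e`, since `inner!` is expanded only later.
macro_rules! outer {
    ($e:expr) => {
        macro_rules! inner {
            () => { $e };
        }
    };
}

outer!(40 + 2);

fn main() {
    assert_eq!(inner!(), 42);
}
```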
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index 335f3b7a9a011..774a0764d114f 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -2,8 +2,8 @@ use crate::expand::{self, AstFragment, Invocation};
 use crate::module::DirectoryOwnership;
 
 use rustc_ast::ptr::P;
-use rustc_ast::token;
-use rustc_ast::tokenstream::TokenStream;
+use rustc_ast::token::{self, Nonterminal};
+use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream};
 use rustc_ast::visit::{AssocCtxt, Visitor};
 use rustc_ast::{self as ast, Attribute, NodeId, PatKind};
 use rustc_attr::{self as attr, Deprecation, HasAttrs, Stability};
@@ -119,8 +119,8 @@ impl Annotatable {
         }
     }
 
-    crate fn into_tokens(self, sess: &ParseSess) -> TokenStream {
-        let nt = match self {
+    crate fn into_nonterminal(self) -> Nonterminal {
+        match self {
             Annotatable::Item(item) => token::NtItem(item),
             Annotatable::TraitItem(item) | Annotatable::ImplItem(item) => {
                 token::NtItem(P(item.and_then(ast::AssocItem::into_item)))
@@ -137,8 +137,11 @@ impl Annotatable {
             | Annotatable::Param(..)
             | Annotatable::StructField(..)
             | Annotatable::Variant(..) => panic!("unexpected annotatable"),
-        };
-        nt_to_tokenstream(&nt, sess, DUMMY_SP)
+        }
+    }
+
+    crate fn into_tokens(self, sess: &ParseSess) -> TokenStream {
+        nt_to_tokenstream(&self.into_nonterminal(), sess, DUMMY_SP, CanSynthesizeMissingTokens::No)
     }
 
     pub fn expect_item(self) -> P<ast::Item> {
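
Rough stand-alone sketch of the `into_nonterminal`/`into_tokens` split, with toy stand-ins for the compiler types: exposing the intermediate `Nonterminal` lets the new inner-attribute fallback in `expand.rs` pretty-print it directly instead of going through token conversion.

```rust
// Toy stand-ins; the real types are rustc_ast::token::Nonterminal and
// rustc_expand::base::Annotatable.
#[derive(Debug)]
enum Nonterminal {
    Item(String),
}

struct Annotatable(String);

impl Annotatable {
    // New: expose the intermediate nonterminal so callers can choose what
    // to do with it (convert to tokens, or pretty-print and reparse).
    fn into_nonterminal(self) -> Nonterminal {
        Nonterminal::Item(self.0)
    }

    // Old entry point, now a thin wrapper over the method above.
    fn into_tokens(self) -> Vec<String> {
        let Nonterminal::Item(src) = self.into_nonterminal();
        src.split_whitespace().map(str::to_owned).collect()
    }
}

fn main() {
    let item = Annotatable("struct S ;".to_string());
    assert_eq!(item.into_tokens(), ["struct", "S", ";"]);
}
```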
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 563783c5b795d..1193f66651ce9 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -29,6 +29,7 @@ use smallvec::SmallVec;
 pub struct StripUnconfigured<'a> {
     pub sess: &'a Session,
     pub features: Option<&'a Features>,
+    pub modified: bool,
 }
 
 fn get_features(
@@ -199,7 +200,7 @@ fn get_features(
 
 // `cfg_attr`-process the crate's attributes and compute the crate's features.
 pub fn features(sess: &Session, mut krate: ast::Crate) -> (ast::Crate, Features) {
-    let mut strip_unconfigured = StripUnconfigured { sess, features: None };
+    let mut strip_unconfigured = StripUnconfigured { sess, features: None, modified: false };
 
     let unconfigured_attrs = krate.attrs.clone();
     let diag = &sess.parse_sess.span_diagnostic;
@@ -243,7 +244,12 @@ const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
 impl<'a> StripUnconfigured<'a> {
     pub fn configure<T: HasAttrs>(&mut self, mut node: T) -> Option<T> {
         self.process_cfg_attrs(&mut node);
-        self.in_cfg(node.attrs()).then_some(node)
+        if self.in_cfg(node.attrs()) {
+            Some(node)
+        } else {
+            self.modified = true;
+            None
+        }
     }
 
     /// Parse and expand all `cfg_attr` attributes into a list of attributes
@@ -270,6 +276,9 @@ impl<'a> StripUnconfigured<'a> {
             return vec![attr];
         }
 
+        // A `#[cfg_attr]` either gets removed or replaced with new attributes.
+        self.modified = true;
+
         let (cfg_predicate, expanded_attrs) = match self.parse_cfg_attr(&attr) {
             None => return vec![],
             Some(r) => r,
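
For illustration (not from the patch), both attribute forms below now flip `modified` to `true` while this item is configured: the `#[cfg(any())]` field is stripped, and any `#[cfg_attr]` is rewritten whether or not its predicate holds.

```rust
pub struct Example {
    // Removed by cfg-stripping: `any()` with no predicates is always false.
    #[cfg(any())]
    removed: u32,
    // `cfg_attr` is always replaced (with the expanded attribute or with
    // nothing), so its mere presence counts as a modification.
    #[cfg_attr(debug_assertions, allow(dead_code))]
    kept: u32,
}

fn main() {
    let e = Example { kept: 1 };
    let _ = e.kept;
}
```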
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 2da5bde028fc1..5d40d478b963d 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -12,7 +12,7 @@ use rustc_ast::ptr::P;
 use rustc_ast::token;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::visit::{self, AssocCtxt, Visitor};
-use rustc_ast::{self as ast, AttrItem, Block, LitKind, NodeId, PatKind, Path};
+use rustc_ast::{self as ast, AttrItem, AttrStyle, Block, LitKind, NodeId, PatKind, Path};
 use rustc_ast::{ItemKind, MacArgs, MacCallStmt, MacStmtStyle, StmtKind, Unsafe};
 use rustc_ast_pretty::pprust;
 use rustc_attr::{self as attr, is_builtin_attr, HasAttrs};
@@ -522,12 +522,29 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         item.visit_attrs(|attrs| attrs.retain(|a| !a.has_name(sym::derive)));
                         (item, Vec::new())
                     } else {
-                        let mut item = StripUnconfigured {
+                        let mut visitor = StripUnconfigured {
                             sess: self.cx.sess,
                             features: self.cx.ecfg.features,
-                        }
-                        .fully_configure(item);
+                            modified: false,
+                        };
+                        let mut item = visitor.fully_configure(item);
                         item.visit_attrs(|attrs| attrs.retain(|a| !a.has_name(sym::derive)));
+                        if visitor.modified && !derives.is_empty() {
+                            // Erase the tokens if cfg-stripping modified the item.
+                            // This will cause us to synthesize fake tokens
+                            // when `nt_to_tokenstream` is called on this item.
+                            match &mut item {
+                                Annotatable::Item(item) => item.tokens = None,
+                                Annotatable::Stmt(stmt) => {
+                                    if let StmtKind::Item(item) = &mut stmt.kind {
+                                        item.tokens = None
+                                    } else {
+                                        panic!("Unexpected stmt {:?}", stmt);
+                                    }
+                                }
+                                _ => panic!("Unexpected annotatable {:?}", item),
+                            }
+                        }
 
                         invocations.reserve(derives.len());
                         let derive_placeholders = derives
@@ -622,7 +639,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 
         let invocations = {
             let mut collector = InvocationCollector {
-                cfg: StripUnconfigured { sess: &self.cx.sess, features: self.cx.ecfg.features },
+                cfg: StripUnconfigured {
+                    sess: &self.cx.sess,
+                    features: self.cx.ecfg.features,
+                    modified: false,
+                },
                 cx: self.cx,
                 invocations: Vec::new(),
                 monotonic: self.monotonic,
@@ -716,7 +737,15 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 SyntaxExtensionKind::Attr(expander) => {
                     self.gate_proc_macro_input(&item);
                     self.gate_proc_macro_attr_item(span, &item);
-                    let tokens = item.into_tokens(&self.cx.sess.parse_sess);
+                    let tokens = match attr.style {
+                        AttrStyle::Outer => item.into_tokens(&self.cx.sess.parse_sess),
+                        // FIXME: Properly collect tokens for inner attributes
+                        AttrStyle::Inner => rustc_parse::fake_token_stream(
+                            &self.cx.sess.parse_sess,
+                            &item.into_nonterminal(),
+                            span,
+                        ),
+                    };
                     let attr_item = attr.unwrap_normal_item();
                     if let MacArgs::Eq(..) = attr_item.args {
                         self.cx.span_err(span, "key-value macro attributes are not supported");
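
A self-contained sketch of the two new decisions in `expand.rs`, using toy stand-ins for the compiler types (`Item`, `AttrStyle`, and both functions below are simplifications, not the real API):

```rust
#[derive(Debug)]
struct Item {
    tokens: Option<Vec<String>>,
}

enum AttrStyle {
    Outer,
    Inner,
}

// Decision 1: if cfg-stripping changed the item and derives still need to
// run, the cached tokens are stale; dropping them forces the fake-token
// fallback when `nt_to_tokenstream` runs later.
fn maybe_erase_tokens(item: &mut Item, cfg_modified: bool, derive_count: usize) {
    if cfg_modified && derive_count > 0 {
        item.tokens = None;
    }
}

// Decision 2: outer attributes have collected tokens; inner attributes do
// not yet (the FIXME above), so they fall back to pretty-print + reparse.
fn tokens_for_attr(style: AttrStyle, item: &Item, pretty: &str) -> Vec<String> {
    match style {
        AttrStyle::Outer => item.tokens.clone().expect("outer attrs have tokens"),
        AttrStyle::Inner => pretty.split_whitespace().map(str::to_owned).collect(),
    }
}

fn main() {
    let mut item = Item { tokens: Some(vec!["struct".into(), "S".into(), ";".into()]) };
    assert_eq!(tokens_for_attr(AttrStyle::Outer, &item, ""), ["struct", "S", ";"]);
    maybe_erase_tokens(&mut item, true, 1);
    assert!(item.tokens.is_none());
    assert_eq!(tokens_for_attr(AttrStyle::Inner, &item, "struct S ;"), ["struct", "S", ";"]);
}
```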
diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs
index 36707a1ae272d..e8e098b621295 100644
--- a/compiler/rustc_expand/src/proc_macro.rs
+++ b/compiler/rustc_expand/src/proc_macro.rs
@@ -3,7 +3,7 @@ use crate::proc_macro_server;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token;
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream, TokenTree};
 use rustc_ast::{self as ast, *};
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{struct_span_err, Applicability, ErrorReported};
@@ -94,7 +94,12 @@ impl MultiItemModifier for ProcMacroDerive {
         let input = if item.pretty_printing_compatibility_hack() {
             TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into()
         } else {
-            nt_to_tokenstream(&item, &ecx.sess.parse_sess, DUMMY_SP)
+            nt_to_tokenstream(
+                &item,
+                &ecx.sess.parse_sess,
+                DUMMY_SP,
+                CanSynthesizeMissingTokens::Yes,
+            )
         };
 
         let server = proc_macro_server::Rustc::new(ecx);
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 4cfb188783ba1..02ae842675f37 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -2,7 +2,8 @@ use crate::base::ExtCtxt;
 
 use rustc_ast as ast;
 use rustc_ast::token;
-use rustc_ast::tokenstream::{self, DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
+use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens};
+use rustc_ast::tokenstream::{DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::Diagnostic;
@@ -178,7 +179,7 @@ impl FromInternal<(TreeAndSpacing, &'_ ParseSess, &'_ mut Vec<Self>)>
                 {
                     TokenTree::Ident(Ident::new(sess, name.name, is_raw, name.span))
                 } else {
-                    let stream = nt_to_tokenstream(&nt, sess, span);
+                    let stream = nt_to_tokenstream(&nt, sess, span, CanSynthesizeMissingTokens::No);
                     TokenTree::Group(Group {
                         delimiter: Delimiter::None,
                         stream,
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 44999c9b63ab6..9abffbacfc3b3 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -7,22 +7,18 @@
 
 use rustc_ast as ast;
 use rustc_ast::attr::HasAttrs;
-use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{self, LazyTokenStream, TokenStream, TokenTree};
+use rustc_ast::token::{self, Nonterminal};
+use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens, LazyTokenStream, TokenStream};
 use rustc_ast_pretty::pprust;
-use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diagnostic, FatalError, Level, PResult};
 use rustc_session::parse::ParseSess;
-use rustc_span::{symbol::kw, FileName, SourceFile, Span, DUMMY_SP};
+use rustc_span::{FileName, SourceFile, Span};
 
-use smallvec::SmallVec;
-use std::cell::RefCell;
-use std::mem;
 use std::path::Path;
 use std::str;
 
-use tracing::{debug, info};
+use tracing::debug;
 
 pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
 
@@ -237,7 +233,12 @@ pub fn parse_in<'a, T>(
 // NOTE(Centril): The following probably shouldn't be here but it acknowledges the
 // fact that architecturally, we are using parsing (read on below to understand why).
 
-pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream {
+pub fn nt_to_tokenstream(
+    nt: &Nonterminal,
+    sess: &ParseSess,
+    span: Span,
+    synthesize_tokens: CanSynthesizeMissingTokens,
+) -> TokenStream {
     // A `Nonterminal` is often a parsed AST item. At this point we now
     // need to convert the parsed AST to an actual token stream, e.g.
     // un-parse it basically.
@@ -255,9 +256,11 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
         |tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
 
     let tokens = match *nt {
-        Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
+        Nonterminal::NtItem(ref item) => {
+            prepend_attrs(sess, &item.attrs, nt, span, item.tokens.as_ref())
+        }
         Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
-        Nonterminal::NtStmt(ref stmt) => prepend_attrs(stmt.attrs(), stmt.tokens()),
+        Nonterminal::NtStmt(ref stmt) => prepend_attrs(sess, stmt.attrs(), nt, span, stmt.tokens()),
         Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
         Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
         Nonterminal::NtIdent(ident, is_raw) => {
@@ -274,376 +277,45 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
             if expr.tokens.is_none() {
                 debug!("missing tokens for expr {:?}", expr);
             }
-            prepend_attrs(&expr.attrs, expr.tokens.as_ref())
+            prepend_attrs(sess, &expr.attrs, nt, span, expr.tokens.as_ref())
         }
     };
 
-    // Caches the stringification of 'good' `TokenStreams` which passed
-    // `tokenstream_probably_equal_for_proc_macro`. This allows us to avoid
-    // repeatedly stringifying and comparing the same `TokenStream` for deeply
-    // nested nonterminals.
-    //
-    // We cache by the strinification instead of the `TokenStream` to avoid
-    // needing to implement `Hash` for `TokenStream`. Note that it's possible to
-    // have two distinct `TokenStream`s that stringify to the same result
-    // (e.g. if they differ only in hygiene information). However, any
-    // information lost during the stringification process is also intentionally
-    // ignored by `tokenstream_probably_equal_for_proc_macro`, so it's fine
-    // that a single cache entry may 'map' to multiple distinct `TokenStream`s.
-    //
-    // This is a temporary hack to prevent compilation blowup on certain inputs.
-    // The entire pretty-print/retokenize process will be removed soon.
-    thread_local! {
-        static GOOD_TOKEN_CACHE: RefCell<FxHashSet<String>> = Default::default();
-    }
-
-    // FIXME(#43081): Avoid this pretty-print + reparse hack
-    // Pretty-print the AST struct without inserting any parenthesis
-    // beyond those explicitly written by the user (e.g. `ExpnKind::Paren`).
-    // The resulting stream may have incorrect precedence, but it's only
-    // ever used for a comparison against the capture tokenstream.
-    let source = pprust::nonterminal_to_string_no_extra_parens(nt);
-    let filename = FileName::macro_expansion_source_code(&source);
-    let reparsed_tokens = parse_stream_from_source_str(filename, source.clone(), sess, Some(span));
-
-    // During early phases of the compiler the AST could get modified
-    // directly (e.g., attributes added or removed) and the internal cache
-    // of tokens my not be invalidated or updated. Consequently if the
-    // "lossless" token stream disagrees with our actual stringification
-    // (which has historically been much more battle-tested) then we go
-    // with the lossy stream anyway (losing span information).
-    //
-    // Note that the comparison isn't `==` here to avoid comparing spans,
-    // but it *also* is a "probable" equality which is a pretty weird
-    // definition. We mostly want to catch actual changes to the AST
-    // like a `#[cfg]` being processed or some weird `macro_rules!`
-    // expansion.
-    //
-    // What we *don't* want to catch is the fact that a user-defined
-    // literal like `0xf` is stringified as `15`, causing the cached token
-    // stream to not be literal `==` token-wise (ignoring spans) to the
-    // token stream we got from stringification.
-    //
-    // Instead the "probably equal" check here is "does each token
-    // recursively have the same discriminant?" We basically don't look at
-    // the token values here and assume that such fine grained token stream
-    // modifications, including adding/removing typically non-semantic
-    // tokens such as extra braces and commas, don't happen.
     if let Some(tokens) = tokens {
-        if GOOD_TOKEN_CACHE.with(|cache| cache.borrow().contains(&source)) {
-            return tokens;
-        }
-
-        // Compare with a non-relaxed delim match to start.
-        if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess, false) {
-            GOOD_TOKEN_CACHE.with(|cache| cache.borrow_mut().insert(source.clone()));
-            return tokens;
-        }
-
-        // The check failed. This time, we pretty-print the AST struct with parenthesis
-        // inserted to preserve precedence. This may cause `None`-delimiters in the captured
-        // token stream to match up with inserted parenthesis in the reparsed stream.
-        let source_with_parens = pprust::nonterminal_to_string(nt);
-        let filename_with_parens = FileName::macro_expansion_source_code(&source_with_parens);
-
-        if GOOD_TOKEN_CACHE.with(|cache| cache.borrow().contains(&source_with_parens)) {
-            return tokens;
-        }
-
-        let reparsed_tokens_with_parens = parse_stream_from_source_str(
-            filename_with_parens,
-            source_with_parens,
-            sess,
-            Some(span),
-        );
-
-        // Compare with a relaxed delim match - we want inserted parenthesis in the
-        // reparsed stream to match `None`-delimiters in the original stream.
-        if tokenstream_probably_equal_for_proc_macro(
-            &tokens,
-            &reparsed_tokens_with_parens,
-            sess,
-            true,
-        ) {
-            GOOD_TOKEN_CACHE.with(|cache| cache.borrow_mut().insert(source.clone()));
-            return tokens;
-        }
-
-        info!(
-            "cached tokens found, but they're not \"probably equal\", \
-                going with stringified version"
-        );
-        info!("cached   tokens: {}", pprust::tts_to_string(&tokens));
-        info!("reparsed tokens: {}", pprust::tts_to_string(&reparsed_tokens_with_parens));
-
-        info!("cached   tokens debug: {:?}", tokens);
-        info!("reparsed tokens debug: {:?}", reparsed_tokens_with_parens);
-    }
-    reparsed_tokens
-}
-
-// See comments in `Nonterminal::to_tokenstream` for why we care about
-// *probably* equal here rather than actual equality
-//
-// This is otherwise the same as `eq_unspanned`, only recursing with a
-// different method.
-pub fn tokenstream_probably_equal_for_proc_macro(
-    tokens: &TokenStream,
-    reparsed_tokens: &TokenStream,
-    sess: &ParseSess,
-    relaxed_delim_match: bool,
-) -> bool {
-    // When checking for `probably_eq`, we ignore certain tokens that aren't
-    // preserved in the AST. Because they are not preserved, the pretty
-    // printer arbitrarily adds or removes them when printing as token
-    // streams, making a comparison between a token stream generated from an
-    // AST and a token stream which was parsed into an AST more reliable.
-    fn semantic_tree(tree: &TokenTree) -> bool {
-        if let TokenTree::Token(token) = tree {
-            if let
-                // The pretty printer tends to add trailing commas to
-                // everything, and in particular, after struct fields.
-                | token::Comma
-                // The pretty printer collapses many semicolons into one.
-                | token::Semi
-                // We don't preserve leading `|` tokens in patterns, so
-                // we ignore them entirely
-                | token::BinOp(token::BinOpToken::Or)
-                // We don't preserve trailing '+' tokens in trait bounds,
-                // so we ignore them entirely
-                | token::BinOp(token::BinOpToken::Plus)
-                // The pretty printer can turn `$crate` into `::crate_name`
-                | token::ModSep = token.kind {
-                return false;
-            }
-        }
-        true
-    }
-
-    // When comparing two `TokenStream`s, we ignore the `IsJoint` information.
-    //
-    // However, `rustc_parse::lexer::tokentrees::TokenStreamBuilder` will
-    // use `Token.glue` on adjacent tokens with the proper `IsJoint`.
-    // Since we are ignoreing `IsJoint`, a 'glued' token (e.g. `BinOp(Shr)`)
-    // and its 'split'/'unglued' compoenents (e.g. `Gt, Gt`) are equivalent
-    // when determining if two `TokenStream`s are 'probably equal'.
-    //
-    // Therefore, we use `break_two_token_op` to convert all tokens
-    // to the 'unglued' form (if it exists). This ensures that two
-    // `TokenStream`s which differ only in how their tokens are glued
-    // will be considered 'probably equal', which allows us to keep spans.
-    //
-    // This is important when the original `TokenStream` contained
-    // extra spaces (e.g. `f :: < Vec < _ > > ( ) ;'). These extra spaces
-    // will be omitted when we pretty-print, which can cause the original
-    // and reparsed `TokenStream`s to differ in the assignment of `IsJoint`,
-    // leading to some tokens being 'glued' together in one stream but not
-    // the other. See #68489 for more details.
-    fn break_tokens(tree: TokenTree) -> impl Iterator<Item = TokenTree> {
-        // In almost all cases, we should have either zero or one levels
-        // of 'unglueing'. However, in some unusual cases, we may need
-        // to iterate breaking tokens mutliple times. For example:
-        // '[BinOpEq(Shr)] => [Gt, Ge] -> [Gt, Gt, Eq]'
-        let mut token_trees: SmallVec<[_; 2]>;
-        if let TokenTree::Token(token) = tree {
-            let mut out = SmallVec::<[_; 2]>::new();
-            out.push(token);
-            // Iterate to fixpoint:
-            // * We start off with 'out' containing our initial token, and `temp` empty
-            // * If we are able to break any tokens in `out`, then `out` will have
-            //   at least one more element than 'temp', so we will try to break tokens
-            //   again.
-            // * If we cannot break any tokens in 'out', we are done
-            loop {
-                let mut temp = SmallVec::<[_; 2]>::new();
-                let mut changed = false;
-
-                for token in out.into_iter() {
-                    if let Some((first, second)) = token.kind.break_two_token_op() {
-                        temp.push(Token::new(first, DUMMY_SP));
-                        temp.push(Token::new(second, DUMMY_SP));
-                        changed = true;
-                    } else {
-                        temp.push(token);
-                    }
-                }
-                out = temp;
-                if !changed {
-                    break;
-                }
-            }
-            token_trees = out.into_iter().map(TokenTree::Token).collect();
-        } else {
-            token_trees = SmallVec::new();
-            token_trees.push(tree);
-        }
-        token_trees.into_iter()
-    }
-
-    fn expand_token(tree: TokenTree, sess: &ParseSess) -> impl Iterator<Item = TokenTree> {
-        // When checking tokenstreams for 'probable equality', we are comparing
-        // a captured (from parsing) `TokenStream` to a reparsed tokenstream.
-        // The reparsed Tokenstream will never have `None`-delimited groups,
-        // since they are only ever inserted as a result of macro expansion.
-        // Therefore, inserting a `None`-delimtied group here (when we
-        // convert a nested `Nonterminal` to a tokenstream) would cause
-        // a mismatch with the reparsed tokenstream.
-        //
-        // Note that we currently do not handle the case where the
-        // reparsed stream has a `Parenthesis`-delimited group
-        // inserted. This will cause a spurious mismatch:
-        // issue #75734 tracks resolving this.
-
-        let expanded: SmallVec<[_; 1]> =
-            if let TokenTree::Token(Token { kind: TokenKind::Interpolated(nt), span }) = &tree {
-                nt_to_tokenstream(nt, sess, *span)
-                    .into_trees()
-                    .flat_map(|t| expand_token(t, sess))
-                    .collect()
-            } else {
-                // Filter before and after breaking tokens,
-                // since we may want to ignore both glued and unglued tokens.
-                std::iter::once(tree)
-                    .filter(semantic_tree)
-                    .flat_map(break_tokens)
-                    .filter(semantic_tree)
-                    .collect()
-            };
-        expanded.into_iter()
-    }
-
-    // Break tokens after we expand any nonterminals, so that we break tokens
-    // that are produced as a result of nonterminal expansion.
-    let tokens = tokens.trees().flat_map(|t| expand_token(t, sess));
-    let reparsed_tokens = reparsed_tokens.trees().flat_map(|t| expand_token(t, sess));
-
-    tokens.eq_by(reparsed_tokens, |t, rt| {
-        tokentree_probably_equal_for_proc_macro(&t, &rt, sess, relaxed_delim_match)
-    })
-}
-
-// See comments in `Nonterminal::to_tokenstream` for why we care about
-// *probably* equal here rather than actual equality
-//
-// This is otherwise the same as `eq_unspanned`, only recursing with a
-// different method.
-pub fn tokentree_probably_equal_for_proc_macro(
-    token: &TokenTree,
-    reparsed_token: &TokenTree,
-    sess: &ParseSess,
-    relaxed_delim_match: bool,
-) -> bool {
-    match (token, reparsed_token) {
-        (TokenTree::Token(token), TokenTree::Token(reparsed_token)) => {
-            token_probably_equal_for_proc_macro(token, reparsed_token)
-        }
-        (
-            TokenTree::Delimited(_, delim, tokens),
-            TokenTree::Delimited(_, reparsed_delim, reparsed_tokens),
-        ) if delim == reparsed_delim => tokenstream_probably_equal_for_proc_macro(
-            tokens,
-            reparsed_tokens,
-            sess,
-            relaxed_delim_match,
-        ),
-        (TokenTree::Delimited(_, DelimToken::NoDelim, tokens), reparsed_token) => {
-            if relaxed_delim_match {
-                if let TokenTree::Delimited(_, DelimToken::Paren, reparsed_tokens) = reparsed_token
-                {
-                    if tokenstream_probably_equal_for_proc_macro(
-                        tokens,
-                        reparsed_tokens,
-                        sess,
-                        relaxed_delim_match,
-                    ) {
-                        return true;
-                    }
-                }
-            }
-            tokens.len() == 1
-                && tokentree_probably_equal_for_proc_macro(
-                    &tokens.trees().next().unwrap(),
-                    reparsed_token,
-                    sess,
-                    relaxed_delim_match,
-                )
-        }
-        _ => false,
+        return tokens;
+    } else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
+        return fake_token_stream(sess, nt, span);
+    } else {
+        let pretty = rustc_ast_pretty::pprust::nonterminal_to_string_no_extra_parens(&nt);
+        panic!("Missing tokens at {:?} for nt {:?}", span, pretty);
     }
 }
 
-// See comments in `Nonterminal::to_tokenstream` for why we care about
-// *probably* equal here rather than actual equality
-fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
-    if mem::discriminant(&first.kind) != mem::discriminant(&other.kind) {
-        return false;
-    }
-    use rustc_ast::token::TokenKind::*;
-    match (&first.kind, &other.kind) {
-        (&Eq, &Eq)
-        | (&Lt, &Lt)
-        | (&Le, &Le)
-        | (&EqEq, &EqEq)
-        | (&Ne, &Ne)
-        | (&Ge, &Ge)
-        | (&Gt, &Gt)
-        | (&AndAnd, &AndAnd)
-        | (&OrOr, &OrOr)
-        | (&Not, &Not)
-        | (&Tilde, &Tilde)
-        | (&At, &At)
-        | (&Dot, &Dot)
-        | (&DotDot, &DotDot)
-        | (&DotDotDot, &DotDotDot)
-        | (&DotDotEq, &DotDotEq)
-        | (&Comma, &Comma)
-        | (&Semi, &Semi)
-        | (&Colon, &Colon)
-        | (&ModSep, &ModSep)
-        | (&RArrow, &RArrow)
-        | (&LArrow, &LArrow)
-        | (&FatArrow, &FatArrow)
-        | (&Pound, &Pound)
-        | (&Dollar, &Dollar)
-        | (&Question, &Question)
-        | (&Eof, &Eof) => true,
-
-        (&BinOp(a), &BinOp(b)) | (&BinOpEq(a), &BinOpEq(b)) => a == b,
-
-        (&OpenDelim(a), &OpenDelim(b)) | (&CloseDelim(a), &CloseDelim(b)) => a == b,
-
-        (&DocComment(a1, a2, a3), &DocComment(b1, b2, b3)) => a1 == b1 && a2 == b2 && a3 == b3,
-
-        (&Literal(a), &Literal(b)) => a == b,
-
-        (&Lifetime(a), &Lifetime(b)) => a == b,
-        (&Ident(a, b), &Ident(c, d)) => {
-            b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
-        }
-
-        (&Interpolated(..), &Interpolated(..)) => panic!("Unexpanded Interpolated!"),
-
-        _ => panic!("forgot to add a token?"),
-    }
+pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal, span: Span) -> TokenStream {
+    let source = pprust::nonterminal_to_string(nt);
+    let filename = FileName::macro_expansion_source_code(&source);
+    parse_stream_from_source_str(filename, source, sess, Some(span))
 }
 
 fn prepend_attrs(
+    sess: &ParseSess,
     attrs: &[ast::Attribute],
+    nt: &Nonterminal,
+    span: Span,
     tokens: Option<&tokenstream::LazyTokenStream>,
 ) -> Option<tokenstream::TokenStream> {
-    let tokens = tokens?.create_token_stream();
     if attrs.is_empty() {
-        return Some(tokens);
+        return Some(tokens?.create_token_stream());
     }
     let mut builder = tokenstream::TokenStreamBuilder::new();
     for attr in attrs {
         // FIXME: Correctly handle tokens for inner attributes.
         // For now, we fall back to reparsing the original AST node
         if attr.style == ast::AttrStyle::Inner {
-            return None;
+            return Some(fake_token_stream(sess, nt, span));
         }
         builder.push(attr.tokens());
     }
-    builder.push(tokens);
+    builder.push(tokens?.create_token_stream());
     Some(builder.build())
 }