
Commit f242d6c

Committed Apr 30, 2025
Auto merge of #127516 - nnethercote:simplify-LazyAttrTokenStream, r=petrochenkov
Simplify `LazyAttrTokenStream`

`LazyAttrTokenStream` is an unpleasant type: `Lrc<Box<dyn ToAttrTokenStream>>`. Why does it look like that?

- There are two `ToAttrTokenStream` impls: one for the lazy case, and one for the case where we already have an `AttrTokenStream`.
- The lazy case (`LazyAttrTokenStreamImpl`) is implemented in `rustc_parse`, but `LazyAttrTokenStream` is defined in `rustc_ast`, which does not depend on `rustc_parse`. The use of the trait lets `rustc_ast` implicitly depend on `rustc_parse`. This explains the `dyn`.
- `LazyAttrTokenStream` must have as small a `size_of` as possible, because it's used in many AST nodes. This explains the `Lrc<Box<_>>`, which keeps it to one word. (The `Box` is required because `Lrc<dyn _>` would be a fat pointer.)

This PR moves `LazyAttrTokenStreamImpl` (and a few other token stream things) from `rustc_parse` to `rustc_ast`. This lets us replace the `ToAttrTokenStream` trait with a two-variant enum and also remove the `Box`, changing `LazyAttrTokenStream` to `Lrc<LazyAttrTokenStreamInner>`. Plus it does a few cleanups.

r? `@petrochenkov`
2 parents 0fbb922 + 880e6f7 commit f242d6c
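
For concreteness, here is a minimal sketch of the before-and-after type shapes the message above describes. This is not the verbatim compiler source: the `Pending` field list is elided, and the variant names are inferred from the `new_direct`/`new_pending` constructors visible in the diffs below (`Lrc` is rustc's reference-counted pointer type).

    // Before: one machine word in every AST node that carries tokens, but a
    // trait object behind two levels of indirection.
    pub struct LazyAttrTokenStream(Lrc<Box<dyn ToAttrTokenStream>>);

    // After: still one machine word, with no `Box` and no trait object.
    pub struct LazyAttrTokenStream(Lrc<LazyAttrTokenStreamInner>);

    pub enum LazyAttrTokenStreamInner {
        // We already have an `AttrTokenStream` (the old eager impl).
        Direct(AttrTokenStream),
        // The lazy case, formerly `LazyAttrTokenStreamImpl` in `rustc_parse`.
        Pending { /* start token, cursor snapshot, replacements, ... */ },
    }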

14 files changed: +399 −378 lines changed

compiler/rustc_ast/src/lib.rs

Lines changed: 2 additions & 0 deletions
@@ -12,13 +12,15 @@
     test(attr(deny(warnings)))
 )]
 #![doc(rust_logo)]
+#![feature(array_windows)]
 #![feature(associated_type_defaults)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]
 #![feature(negative_impls)]
 #![feature(never_type)]
 #![feature(rustdoc_internals)]
 #![feature(stmt_expr_attributes)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end

 pub mod util {

compiler/rustc_ast/src/mut_visit.rs

Lines changed: 1 addition & 1 deletion
@@ -836,7 +836,7 @@ fn visit_lazy_tts_opt_mut<T: MutVisitor>(vis: &mut T, lazy_tts: Option<&mut Lazy
         if let Some(lazy_tts) = lazy_tts {
             let mut tts = lazy_tts.to_attr_token_stream();
             visit_attr_tts(vis, &mut tts);
-            *lazy_tts = LazyAttrTokenStream::new(tts);
+            *lazy_tts = LazyAttrTokenStream::new_direct(tts);
         }
     }
 }

compiler/rustc_ast/src/tokenstream.rs

Lines changed: 366 additions & 17 deletions
Large diffs are not rendered by default.
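
The new contents of `tokenstream.rs` are not rendered above, but the call sites in the rest of this commit pin down the constructor shapes. A hedged sketch of what the moved code plausibly exposes, with field types carried over from the old `LazyAttrTokenStreamImpl` (the `ThinVec` matches the `node_replacements` change in `attr_wrapper.rs` below; the real file may differ in detail):

    impl LazyAttrTokenStream {
        pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream {
            LazyAttrTokenStream(Lrc::new(LazyAttrTokenStreamInner::Direct(stream)))
        }

        pub fn new_pending(
            start_token: (Token, Spacing),
            cursor_snapshot: TokenCursor,
            num_calls: u32,
            break_last_token: u32,
            node_replacements: ThinVec<NodeReplacement>,
        ) -> LazyAttrTokenStream {
            LazyAttrTokenStream(Lrc::new(LazyAttrTokenStreamInner::Pending {
                start_token,
                cursor_snapshot,
                num_calls,
                break_last_token,
                node_replacements,
            }))
        }
    }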

compiler/rustc_attr_parsing/src/lib.rs

Lines changed: 1 addition & 0 deletions
@@ -80,6 +80,7 @@
 #![cfg_attr(bootstrap, feature(let_chains))]
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end

 #[macro_use]

compiler/rustc_builtin_macros/src/lib.rs

Lines changed: 1 addition & 0 deletions
@@ -18,6 +18,7 @@
 #![feature(rustdoc_internals)]
 #![feature(string_from_utf8_lossy_owned)]
 #![feature(try_blocks)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end

 extern crate proc_macro;

compiler/rustc_codegen_ssa/src/lib.rs

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@
 #![feature(string_from_utf8_lossy_owned)]
 #![feature(trait_alias)]
 #![feature(try_blocks)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end

 //! This crate contains codegen code that is used by all codegen backends (LLVM and others).

compiler/rustc_expand/src/config.rs

Lines changed: 4 additions & 4 deletions
@@ -162,7 +162,7 @@ pub(crate) fn attr_into_trace(mut attr: Attribute, trace_name: Symbol) -> Attrib
             let NormalAttr { item, tokens } = &mut **normal;
             item.path.segments[0].ident.name = trace_name;
             // This makes the trace attributes unobservable to token-based proc macros.
-            *tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::default()));
+            *tokens = Some(LazyAttrTokenStream::new_direct(AttrTokenStream::default()));
         }
         AttrKind::DocComment(..) => unreachable!(),
     }
@@ -192,7 +192,7 @@ impl<'a> StripUnconfigured<'a> {
         if self.config_tokens {
             if let Some(Some(tokens)) = node.tokens_mut() {
                 let attr_stream = tokens.to_attr_token_stream();
-                *tokens = LazyAttrTokenStream::new(self.configure_tokens(&attr_stream));
+                *tokens = LazyAttrTokenStream::new_direct(self.configure_tokens(&attr_stream));
             }
         }
     }
@@ -223,7 +223,7 @@ impl<'a> StripUnconfigured<'a> {
                 target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));

                 if self.in_cfg(&target.attrs) {
-                    target.tokens = LazyAttrTokenStream::new(
+                    target.tokens = LazyAttrTokenStream::new_direct(
                         self.configure_tokens(&target.tokens.to_attr_token_stream()),
                     );
                     Some(AttrTokenTree::AttrsTarget(target))
@@ -361,7 +361,7 @@ impl<'a> StripUnconfigured<'a> {
                 .to_attr_token_stream(),
         ));

-        let tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::new(trees)));
+        let tokens = Some(LazyAttrTokenStream::new_direct(AttrTokenStream::new(trees)));
         let attr = ast::attr::mk_attr_from_item(
             &self.sess.psess.attr_id_generator,
             item,

compiler/rustc_hir/src/lib.rs

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@
 #![feature(never_type)]
 #![feature(rustc_attrs)]
 #![feature(variant_count)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end

 extern crate self as rustc_hir;

compiler/rustc_middle/src/lib.rs

Lines changed: 1 addition & 0 deletions
@@ -61,6 +61,7 @@
 #![feature(try_trait_v2_yeet)]
 #![feature(type_alias_impl_trait)]
 #![feature(yeet_expr)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end

 #[cfg(test)]

compiler/rustc_parse/src/lib.rs

Lines changed: 1 addition & 1 deletion
@@ -5,13 +5,13 @@
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
 #![cfg_attr(bootstrap, feature(let_chains))]
-#![feature(array_windows)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
 #![feature(debug_closure_helpers)]
 #![feature(if_let_guard)]
 #![feature(iter_intersperse)]
 #![feature(string_from_utf8_lossy_owned)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end

 use std::path::{Path, PathBuf};

compiler/rustc_parse/src/parser/attr.rs

Lines changed: 2 additions & 2 deletions
@@ -1,5 +1,6 @@
 use rustc_ast as ast;
 use rustc_ast::token::{self, MetaVarKind};
+use rustc_ast::tokenstream::ParserRange;
 use rustc_ast::{Attribute, attr};
 use rustc_errors::codes::*;
 use rustc_errors::{Diag, PResult};
@@ -8,8 +9,7 @@ use thin_vec::ThinVec;
 use tracing::debug;

 use super::{
-    AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
-    UsePreAttrPos,
+    AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos,
 };
 use crate::{errors, exp, fluent_generated as fluent};

compiler/rustc_parse/src/parser/attr_wrapper.rs

Lines changed: 13 additions & 181 deletions
@@ -1,21 +1,18 @@
 use std::borrow::Cow;
-use std::{iter, mem};
+use std::mem;

-use rustc_ast::token::{Delimiter, Token};
+use rustc_ast::token::Token;
 use rustc_ast::tokenstream::{
-    AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing, DelimSpan, LazyAttrTokenStream,
-    Spacing, ToAttrTokenStream,
+    AttrsTarget, LazyAttrTokenStream, NodeRange, ParserRange, Spacing, TokenCursor,
 };
 use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::PResult;
 use rustc_session::parse::ParseSess;
-use rustc_span::{DUMMY_SP, Span, sym};
+use rustc_span::{DUMMY_SP, sym};
+use thin_vec::ThinVec;

-use super::{
-    Capturing, FlatToken, ForceCollect, NodeRange, NodeReplacement, Parser, ParserRange,
-    TokenCursor, Trailing,
-};
+use super::{Capturing, ForceCollect, Parser, Trailing};

 // When collecting tokens, this fully captures the start point. Usually its
 // just after outer attributes, but occasionally it's before.
@@ -94,95 +91,6 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
     })
 }

-// From a value of this type we can reconstruct the `TokenStream` seen by the
-// `f` callback passed to a call to `Parser::collect_tokens`, by
-// replaying the getting of the tokens. This saves us producing a `TokenStream`
-// if it is never needed, e.g. a captured `macro_rules!` argument that is never
-// passed to a proc macro. In practice, token stream creation happens rarely
-// compared to calls to `collect_tokens` (see some statistics in #78736) so we
-// are doing as little up-front work as possible.
-//
-// This also makes `Parser` very cheap to clone, since
-// there is no intermediate collection buffer to clone.
-struct LazyAttrTokenStreamImpl {
-    start_token: (Token, Spacing),
-    cursor_snapshot: TokenCursor,
-    num_calls: u32,
-    break_last_token: u32,
-    node_replacements: Box<[NodeReplacement]>,
-}
-
-impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
-    fn to_attr_token_stream(&self) -> AttrTokenStream {
-        // The token produced by the final call to `{,inlined_}next` was not
-        // actually consumed by the callback. The combination of chaining the
-        // initial token and using `take` produces the desired result - we
-        // produce an empty `TokenStream` if no calls were made, and omit the
-        // final token otherwise.
-        let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = iter::once(FlatToken::Token(self.start_token))
-            .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
-            .take(self.num_calls as usize);
-
-        if self.node_replacements.is_empty() {
-            make_attr_token_stream(tokens, self.break_last_token)
-        } else {
-            let mut tokens: Vec<_> = tokens.collect();
-            let mut node_replacements = self.node_replacements.to_vec();
-            node_replacements.sort_by_key(|(range, _)| range.0.start);
-
-            #[cfg(debug_assertions)]
-            for [(node_range, tokens), (next_node_range, next_tokens)] in
-                node_replacements.array_windows()
-            {
-                assert!(
-                    node_range.0.end <= next_node_range.0.start
-                        || node_range.0.end >= next_node_range.0.end,
-                    "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
-                    node_range,
-                    tokens,
-                    next_node_range,
-                    next_tokens,
-                );
-            }
-
-            // Process the replace ranges, starting from the highest start
-            // position and working our way back. If have tokens like:
-            //
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // Then we will generate replace ranges for both
-            // the `#[cfg(FALSE)] field: bool` and the entire
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // By starting processing from the replace range with the greatest
-            // start position, we ensure that any (outer) replace range which
-            // encloses another (inner) replace range will fully overwrite the
-            // inner range's replacement.
-            for (node_range, target) in node_replacements.into_iter().rev() {
-                assert!(
-                    !node_range.0.is_empty(),
-                    "Cannot replace an empty node range: {:?}",
-                    node_range.0
-                );
-
-                // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
-                // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
-                // total length of `tokens` constant throughout the replacement process, allowing
-                // us to do all replacements without adjusting indices.
-                let target_len = target.is_some() as usize;
-                tokens.splice(
-                    (node_range.0.start as usize)..(node_range.0.end as usize),
-                    target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
-                        iter::repeat(FlatToken::Empty).take(node_range.0.len() - target_len),
-                    ),
-                );
-            }
-            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
-        }
-    }
-}
-
 impl<'a> Parser<'a> {
     pub(super) fn collect_pos(&self) -> CollectPos {
         CollectPos {
@@ -387,10 +295,10 @@

         // This is hot enough for `deep-vector` that checking the conditions for an empty iterator
         // is measurably faster than actually executing the iterator.
-        let node_replacements: Box<[_]> = if parser_replacements_start == parser_replacements_end
+        let node_replacements = if parser_replacements_start == parser_replacements_end
             && inner_attr_parser_replacements.is_empty()
         {
-            Box::new([])
+            ThinVec::new()
         } else {
             // Grab any replace ranges that occur *inside* the current AST node. Convert them
             // from `ParserRange` form to `NodeRange` form. We will perform the actual
@@ -429,13 +337,13 @@
         // - `attrs`: includes the outer and the inner attr.
        // - `tokens`: lazy tokens for `g` (with its inner attr deleted).

-        let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
-            start_token: collect_pos.start_token,
-            cursor_snapshot: collect_pos.cursor_snapshot,
+        let tokens = LazyAttrTokenStream::new_pending(
+            collect_pos.start_token,
+            collect_pos.cursor_snapshot,
             num_calls,
-            break_last_token: self.break_last_token,
+            self.break_last_token,
             node_replacements,
-        });
+        );
         let mut tokens_used = false;

         // If in "definite capture mode" we need to register a replace range
@@ -483,71 +391,6 @@
     }
 }

-/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
-/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
-/// close delims.
-fn make_attr_token_stream(
-    iter: impl Iterator<Item = FlatToken>,
-    break_last_token: u32,
-) -> AttrTokenStream {
-    #[derive(Debug)]
-    struct FrameData {
-        // This is `None` for the first frame, `Some` for all others.
-        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
-        inner: Vec<AttrTokenTree>,
-    }
-    // The stack always has at least one element. Storing it separately makes for shorter code.
-    let mut stack_top = FrameData { open_delim_sp: None, inner: vec![] };
-    let mut stack_rest = vec![];
-    for flat_token in iter {
-        match flat_token {
-            FlatToken::Token((token @ Token { kind, span }, spacing)) => {
-                if let Some(delim) = kind.open_delim() {
-                    stack_rest.push(mem::replace(
-                        &mut stack_top,
-                        FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] },
-                    ));
-                } else if let Some(delim) = kind.close_delim() {
-                    let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
-                    let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
-                    assert!(
-                        open_delim.eq_ignoring_invisible_origin(&delim),
-                        "Mismatched open/close delims: open={open_delim:?} close={span:?}"
-                    );
-                    let dspan = DelimSpan::from_pair(open_sp, span);
-                    let dspacing = DelimSpacing::new(open_spacing, spacing);
-                    let stream = AttrTokenStream::new(frame_data.inner);
-                    let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
-                    stack_top.inner.push(delimited);
-                } else {
-                    stack_top.inner.push(AttrTokenTree::Token(token, spacing))
-                }
-            }
-            FlatToken::AttrsTarget(target) => {
-                stack_top.inner.push(AttrTokenTree::AttrsTarget(target))
-            }
-            FlatToken::Empty => {}
-        }
-    }
-
-    if break_last_token > 0 {
-        let last_token = stack_top.inner.pop().unwrap();
-        if let AttrTokenTree::Token(last_token, spacing) = last_token {
-            let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap();
-
-            // Tokens are always ASCII chars, so we can use byte arithmetic here.
-            let mut first_span = last_token.span.shrink_to_lo();
-            first_span =
-                first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token));
-
-            stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing));
-        } else {
-            panic!("Unexpected last token {last_token:?}")
-        }
-    }
-    AttrTokenStream::new(stack_top.inner)
-}
-
 /// Tokens are needed if:
 /// - any non-single-segment attributes (other than doc comments) are present,
 ///   e.g. `rustfmt::skip`; or
@@ -562,14 +405,3 @@ fn needs_tokens(attrs: &[ast::Attribute]) -> bool {
         }
     })
 }
-
-// Some types are used a lot. Make sure they don't unintentionally get bigger.
-#[cfg(target_pointer_width = "64")]
-mod size_asserts {
-    use rustc_data_structures::static_assert_size;
-
-    use super::*;
-    // tidy-alphabetical-start
-    static_assert_size!(LazyAttrTokenStreamImpl, 96);
-    // tidy-alphabetical-end
-}
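
The subtle part of the `to_attr_token_stream` code deleted above (and moved into `rustc_ast`) is the length-preserving splice: each replaced range is refilled with exactly as many elements as it previously held, so the indices of every other range stay valid while ranges are processed in reverse order. A self-contained sketch of the idea, outside the compiler (all names here are illustrative, not rustc API):

    use std::iter;

    // Replace `range` with zero or one replacement values plus padding, so the
    // vector's length (and therefore every other range's indices) never changes.
    fn splice_keep_len<T: Clone>(
        tokens: &mut Vec<T>,
        range: std::ops::Range<usize>,
        replacement: Option<T>,
        pad: T,
    ) {
        let pad_len = range.len() - replacement.is_some() as usize;
        tokens.splice(range, replacement.into_iter().chain(iter::repeat(pad).take(pad_len)));
    }

    fn main() {
        let mut v = vec!['a', 'b', 'c', 'd', 'e'];
        // Like the removed loop, process the range with the greater start first.
        splice_keep_len(&mut v, 3..5, Some('X'), '.');
        splice_keep_len(&mut v, 0..2, None, '.');
        assert_eq!(v, ['.', '.', 'c', 'X', '.']);
    }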

compiler/rustc_parse/src/parser/mod.rs

Lines changed: 4 additions & 172 deletions
@@ -12,7 +12,6 @@ pub mod token_type;
 mod ty;

 use std::assert_matches::debug_assert_matches;
-use std::ops::Range;
 use std::{fmt, mem, slice};

 use attr_wrapper::{AttrWrapper, UsePreAttrPos};
@@ -25,7 +24,9 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::{
     self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
 };
-use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{
+    ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor,
+};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
@@ -37,7 +38,7 @@ use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
 use rustc_index::interval::IntervalSet;
 use rustc_session::parse::ParseSess;
-use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
+use rustc_span::{Ident, Span, Symbol, kw, sym};
 use thin_vec::ThinVec;
 use token_type::TokenTypeSet;
 pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
@@ -187,57 +188,6 @@ struct ClosureSpans {
     body: Span,
 }

-/// A token range within a `Parser`'s full token stream.
-#[derive(Clone, Debug)]
-struct ParserRange(Range<u32>);
-
-/// A token range within an individual AST node's (lazy) token stream, i.e.
-/// relative to that node's first token. Distinct from `ParserRange` so the two
-/// kinds of range can't be mixed up.
-#[derive(Clone, Debug)]
-struct NodeRange(Range<u32>);
-
-/// Indicates a range of tokens that should be replaced by an `AttrsTarget`
-/// (replacement) or be replaced by nothing (deletion). This is used in two
-/// places during token collection.
-///
-/// 1. Replacement. During the parsing of an AST node that may have a
-///    `#[derive]` attribute, when we parse a nested AST node that has `#[cfg]`
-///    or `#[cfg_attr]`, we replace the entire inner AST node with
-///    `FlatToken::AttrsTarget`. This lets us perform eager cfg-expansion on an
-///    `AttrTokenStream`.
-///
-/// 2. Deletion. We delete inner attributes from all collected token streams,
-///    and instead track them through the `attrs` field on the AST node. This
-///    lets us manipulate them similarly to outer attributes. When we create a
-///    `TokenStream`, the inner attributes are inserted into the proper place
-///    in the token stream.
-///
-/// Each replacement starts off in `ParserReplacement` form but is converted to
-/// `NodeReplacement` form when it is attached to a single AST node, via
-/// `LazyAttrTokenStreamImpl`.
-type ParserReplacement = (ParserRange, Option<AttrsTarget>);
-
-/// See the comment on `ParserReplacement`.
-type NodeReplacement = (NodeRange, Option<AttrsTarget>);
-
-impl NodeRange {
-    // Converts a range within a parser's tokens to a range within a
-    // node's tokens beginning at `start_pos`.
-    //
-    // For example, imagine a parser with 50 tokens in its token stream, a
-    // function that spans `ParserRange(20..40)` and an inner attribute within
-    // that function that spans `ParserRange(30..35)`. We would find the inner
-    // attribute's range within the function's tokens by subtracting 20, which
-    // is the position of the function's start token. This gives
-    // `NodeRange(10..15)`.
-    fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
-        assert!(!parser_range.is_empty());
-        assert!(parser_range.start >= start_pos);
-        NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
-    }
-}
-
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
 /// we will never need an `AttrTokenStream`.
@@ -260,104 +210,6 @@ struct CaptureState {
     seen_attrs: IntervalSet<AttrId>,
 }

-#[derive(Clone, Debug)]
-struct TokenTreeCursor {
-    stream: TokenStream,
-    /// Points to the current token tree in the stream. In `TokenCursor::curr`,
-    /// this can be any token tree. In `TokenCursor::stack`, this is always a
-    /// `TokenTree::Delimited`.
-    index: usize,
-}
-
-impl TokenTreeCursor {
-    #[inline]
-    fn new(stream: TokenStream) -> Self {
-        TokenTreeCursor { stream, index: 0 }
-    }
-
-    #[inline]
-    fn curr(&self) -> Option<&TokenTree> {
-        self.stream.get(self.index)
-    }
-
-    fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
-        self.stream.get(self.index + n)
-    }
-
-    #[inline]
-    fn bump(&mut self) {
-        self.index += 1;
-    }
-}
-
-/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
-/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
-/// use this type to emit them as a linear sequence. But a linear sequence is
-/// what the parser expects, for the most part.
-#[derive(Clone, Debug)]
-struct TokenCursor {
-    // Cursor for the current (innermost) token stream. The index within the
-    // cursor can point to any token tree in the stream (or one past the end).
-    // The delimiters for this token stream are found in `self.stack.last()`;
-    // if that is `None` we are in the outermost token stream which never has
-    // delimiters.
-    curr: TokenTreeCursor,
-
-    // Token streams surrounding the current one. The index within each cursor
-    // always points to a `TokenTree::Delimited`.
-    stack: Vec<TokenTreeCursor>,
-}
-
-impl TokenCursor {
-    fn next(&mut self) -> (Token, Spacing) {
-        self.inlined_next()
-    }
-
-    /// This always-inlined version should only be used on hot code paths.
-    #[inline(always)]
-    fn inlined_next(&mut self) -> (Token, Spacing) {
-        loop {
-            // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
-            // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
-            // below can be removed.
-            if let Some(tree) = self.curr.curr() {
-                match tree {
-                    &TokenTree::Token(token, spacing) => {
-                        debug_assert!(!token.kind.is_delim());
-                        let res = (token, spacing);
-                        self.curr.bump();
-                        return res;
-                    }
-                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
-                        let trees = TokenTreeCursor::new(tts.clone());
-                        self.stack.push(mem::replace(&mut self.curr, trees));
-                        if !delim.skip() {
-                            return (Token::new(delim.as_open_token_kind(), sp.open), spacing.open);
-                        }
-                        // No open delimiter to return; continue on to the next iteration.
-                    }
-                };
-            } else if let Some(parent) = self.stack.pop() {
-                // We have exhausted this token stream. Move back to its parent token stream.
-                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
-                    panic!("parent should be Delimited")
-                };
-                self.curr = parent;
-                self.curr.bump(); // move past the `Delimited`
-                if !delim.skip() {
-                    return (Token::new(delim.as_close_token_kind(), span.close), spacing.close);
-                }
-                // No close delimiter to return; continue on to the next iteration.
-            } else {
-                // We have exhausted the outermost token stream. The use of
-                // `Spacing::Alone` is arbitrary and immaterial, because the
-                // `Eof` token's spacing is never used.
-                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
-            }
-        }
-    }
-}
-
 /// A sequence separator.
 #[derive(Debug)]
 struct SeqSep<'a> {
@@ -1742,26 +1594,6 @@ impl<'a> Parser<'a> {
     }
 }

-/// A helper struct used when building an `AttrTokenStream` from
-/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
-/// are stored as `FlatToken::Token`. A vector of `FlatToken`s
-/// is then 'parsed' to build up an `AttrTokenStream` with nested
-/// `AttrTokenTree::Delimited` tokens.
-#[derive(Debug, Clone)]
-enum FlatToken {
-    /// A token - this holds both delimiter (e.g. '{' and '}')
-    /// and non-delimiter tokens
-    Token((Token, Spacing)),
-    /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
-    /// directly into the constructed `AttrTokenStream` as an
-    /// `AttrTokenTree::AttrsTarget`.
-    AttrsTarget(AttrsTarget),
-    /// A special 'empty' token that is ignored during the conversion
-    /// to an `AttrTokenStream`. This is used to simplify the
-    /// handling of replace ranges.
-    Empty,
-}
-
 // Metavar captures of various kinds.
 #[derive(Clone, Debug)]
 pub enum ParseNtResult {
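
The removed `NodeRange::new` above documents its arithmetic with a worked example; here is the same computation as a tiny runnable program (plain structs standing in for the rustc types):

    use std::ops::Range;

    struct ParserRange(Range<u32>);
    struct NodeRange(Range<u32>);

    // Re-base a parser-relative token range to a node-relative one by
    // subtracting the position of the node's first token.
    fn node_range(ParserRange(r): ParserRange, start_pos: u32) -> NodeRange {
        assert!(!r.is_empty() && r.start >= start_pos);
        NodeRange((r.start - start_pos)..(r.end - start_pos))
    }

    fn main() {
        // A function at ParserRange(20..40) containing an inner attribute at
        // ParserRange(30..35): the attribute sits at NodeRange(10..15).
        let NodeRange(r) = node_range(ParserRange(30..35), 20);
        assert_eq!(r, 10..15);
    }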

compiler/rustc_resolve/src/lib.rs

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@
 #![feature(iter_intersperse)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end

 use std::cell::{Cell, RefCell};
