Commit 71f462d

Authored Aug 15, 2024
Unrolled build for rust-lang#129065
Rollup merge of rust-lang#129065 - nnethercote:PartialEq-TokenKind, r=spastorino

Use `impl PartialEq<TokenKind> for Token` more.

This lets us compare a `Token` with a `TokenKind`. It's used a lot, but can be used even more, avoiding the need for some `.kind` uses.

r? `@spastorino`
2 parents d2b5aa6 + 7923b20 commit 71f462d
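
What makes the call-site rewrites below possible is an existing `impl PartialEq<TokenKind> for Token` that lives alongside `Token` in `rustc_ast::token` and compares only the token's kind (the span plays no part). The snippet below is a minimal, self-contained sketch of that pattern, not the real rustc definitions: `Token`, `TokenKind`, and `Span` here are simplified stand-ins, and the impl body is assumed to delegate to `kind` equality, which is what the `+` lines throughout this diff rely on.

```rust
// Minimal sketch, assuming the impl delegates to `kind` equality.
// These are simplified stand-ins, not the rustc_ast definitions.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TokenKind {
    Comma,
    Eof,
    Pound,
}

#[derive(Clone, Copy, Debug)]
struct Span {
    lo: u32,
    hi: u32,
}

#[derive(Clone, Copy, Debug)]
struct Token {
    kind: TokenKind,
    span: Span,
}

// Comparing a whole `Token` against a bare `TokenKind` only looks at the kind;
// the span takes no part in the comparison.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, rhs: &TokenKind) -> bool {
        self.kind == *rhs
    }
}

fn main() {
    let token = Token { kind: TokenKind::Comma, span: Span { lo: 0, hi: 1 } };
    assert!(token.span.lo <= token.span.hi);

    // Old style, as on the `-` lines of this diff:
    assert!(token.kind == TokenKind::Comma);
    // New style, as on the `+` lines:
    assert!(token == TokenKind::Comma);
    // Through a reference, as in the `look_ahead(.., |t| *t == ...)` call sites:
    let t: &Token = &token;
    assert!(*t == TokenKind::Comma);
    assert!(*t != TokenKind::Eof && *t != TokenKind::Pound);
}
```

The same impl is what lets the `look_ahead(.., |t| *t == ...)` closures in the diff compare a dereferenced `&Token` directly against a `TokenKind` instead of projecting through `.kind`.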

File tree: 16 files changed, +122 lines added, -129 lines removed

‎compiler/rustc_builtin_macros/src/asm.rs

Lines changed: 2 additions & 2 deletions
@@ -328,7 +328,7 @@ pub fn parse_asm_args<'a>(
  /// Otherwise, the suggestion will be incorrect.
  fn err_duplicate_option(p: &Parser<'_>, symbol: Symbol, span: Span) {
  // Tool-only output
- let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span };
+ let full_span = if p.token == token::Comma { span.to(p.token.span) } else { span };
  p.dcx().emit_err(errors::AsmOptAlreadyprovided { span, symbol, full_span });
  }

@@ -338,7 +338,7 @@ fn err_duplicate_option(p: &Parser<'_>, symbol: Symbol, span: Span) {
  /// Otherwise, the suggestion will be incorrect.
  fn err_unsupported_option(p: &Parser<'_>, symbol: Symbol, span: Span) {
  // Tool-only output
- let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span };
+ let full_span = if p.token == token::Comma { span.to(p.token.span) } else { span };
  p.dcx().emit_err(errors::GlobalAsmUnsupportedOption { span, symbol, full_span });
  }

‎compiler/rustc_expand/src/mbe/macro_rules.rs

Lines changed: 1 addition & 1 deletion
@@ -1154,7 +1154,7 @@ fn check_matcher_core<'tt>(
  && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
  && matches!(
  next_token,
- TokenTree::Token(token) if token.kind == BinOp(token::BinOpToken::Or)
+ TokenTree::Token(token) if *token == BinOp(token::BinOpToken::Or)
  )
  {
  // It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.

‎compiler/rustc_lint/src/builtin.rs

Lines changed: 1 addition & 1 deletion
@@ -1853,7 +1853,7 @@ impl KeywordIdents {
  if !prev_dollar {
  self.check_ident_token(cx, UnderMacro(true), ident);
  }
- } else if token.kind == TokenKind::Dollar {
+ } else if *token == TokenKind::Dollar {
  prev_dollar = true;
  continue;
  }

‎compiler/rustc_parse/src/lexer/tokentrees.rs

Lines changed: 1 addition & 1 deletion
@@ -229,7 +229,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
  } else {
  let this_spacing = if next_tok.is_punct() {
  Spacing::Joint
- } else if next_tok.kind == token::Eof {
+ } else if next_tok == token::Eof {
  Spacing::Alone
  } else {
  Spacing::JointHidden

‎compiler/rustc_parse/src/parser/attr.rs

Lines changed: 3 additions & 3 deletions
@@ -162,7 +162,7 @@ impl<'a> Parser<'a> {
  }
  loop {
  // skip any other attributes, we want the item
- if snapshot.token.kind == token::Pound {
+ if snapshot.token == token::Pound {
  if let Err(err) = snapshot.parse_attribute(InnerAttrPolicy::Permitted) {
  err.cancel();
  return Some(replacement_span);

@@ -343,7 +343,7 @@ impl<'a> Parser<'a> {

  // Presumably, the majority of the time there will only be one attr.
  let mut expanded_attrs = Vec::with_capacity(1);
- while self.token.kind != token::Eof {
+ while self.token != token::Eof {
  let lo = self.token.span;
  let item = self.parse_attr_item(ForceCollect::Yes)?;
  expanded_attrs.push((item, lo.to(self.prev_token.span)));

@@ -359,7 +359,7 @@ impl<'a> Parser<'a> {
  pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::NestedMetaItem>> {
  // Presumably, the majority of the time there will only be one attr.
  let mut nmis = ThinVec::with_capacity(1);
- while self.token.kind != token::Eof {
+ while self.token != token::Eof {
  nmis.push(self.parse_meta_item_inner()?);
  if !self.eat(&token::Comma) {
  break;

‎compiler/rustc_parse/src/parser/diagnostics.rs

Lines changed: 26 additions & 29 deletions
@@ -474,8 +474,8 @@ impl<'a> Parser<'a> {
  // If this isn't the case however, and the suggestion is a token the
  // content of which is the same as the found token's, we remove it as well.
  if !eq {
- if let TokenType::Token(kind) = &token {
- if kind == &self.token.kind {
+ if let TokenType::Token(kind) = token {
+ if self.token == *kind {
  return false;
  }
  }

@@ -506,7 +506,7 @@ impl<'a> Parser<'a> {
  } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
  // The current token is in the same line as the prior token, not recoverable.
  } else if [token::Comma, token::Colon].contains(&self.token.kind)
- && self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis)
+ && self.prev_token == token::CloseDelim(Delimiter::Parenthesis)
  {
  // Likely typo: The current token is on a new line and is expected to be
  // `.`, `;`, `?`, or an operator after a close delimiter token.

@@ -518,7 +518,7 @@ impl<'a> Parser<'a> {
  // https://github.com/rust-lang/rust/issues/72253
  } else if self.look_ahead(1, |t| {
  t == &token::CloseDelim(Delimiter::Brace)
- || t.can_begin_expr() && t.kind != token::Colon
+ || t.can_begin_expr() && *t != token::Colon
  }) && [token::Comma, token::Colon].contains(&self.token.kind)
  {
  // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is

@@ -562,7 +562,7 @@ impl<'a> Parser<'a> {
  }
  }

- if self.token.kind == TokenKind::EqEq
+ if self.token == TokenKind::EqEq
  && self.prev_token.is_ident()
  && expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Eq)))
  {

@@ -655,9 +655,9 @@ impl<'a> Parser<'a> {
  // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
  // that in the parser requires unbounded lookahead, so we only add a hint to the existing
  // error rather than replacing it entirely.
- if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No)
+ if ((self.prev_token == TokenKind::Ident(sym::c, IdentIsRaw::No)
  && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
- || (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No)
+ || (self.prev_token == TokenKind::Ident(sym::cr, IdentIsRaw::No)
  && matches!(
  &self.token.kind,
  TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound

@@ -673,7 +673,7 @@ impl<'a> Parser<'a> {
  // `pub` may be used for an item or `pub(crate)`
  if self.prev_token.is_ident_named(sym::public)
  && (self.token.can_begin_item()
- || self.token.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
+ || self.token == TokenKind::OpenDelim(Delimiter::Parenthesis))
  {
  err.span_suggestion_short(
  self.prev_token.span,

@@ -772,7 +772,7 @@ impl<'a> Parser<'a> {
  ),
  );
  if self.token == token::Pound
- && self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Bracket))
+ && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
  {
  // We have
  // #[attr]

@@ -867,7 +867,7 @@ impl<'a> Parser<'a> {
  let str_span = self.prev_token.span;
  let mut span = self.token.span;
  let mut count = 0;
- while self.token.kind == TokenKind::Pound
+ while self.token == TokenKind::Pound
  && !sm.is_multiline(span.shrink_to_hi().until(self.token.span.shrink_to_lo()))
  {
  span = span.with_hi(self.token.span.hi());

@@ -1167,7 +1167,7 @@ impl<'a> Parser<'a> {
  return;
  }

- if token::PathSep == self.token.kind && segment.args.is_none() {
+ if self.token == token::PathSep && segment.args.is_none() {
  let snapshot = self.create_snapshot_for_diagnostic();
  self.bump();
  let lo = self.token.span;

@@ -1176,13 +1176,11 @@ impl<'a> Parser<'a> {
  let span = lo.to(self.prev_token.span);
  // Detect trailing `>` like in `x.collect::Vec<_>>()`.
  let mut trailing_span = self.prev_token.span.shrink_to_hi();
- while self.token.kind == token::BinOp(token::Shr)
- || self.token.kind == token::Gt
- {
+ while self.token == token::BinOp(token::Shr) || self.token == token::Gt {
  trailing_span = trailing_span.to(self.token.span);
  self.bump();
  }
- if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
+ if self.token == token::OpenDelim(Delimiter::Parenthesis) {
  // Recover from bad turbofish: `foo.collect::Vec<_>()`.
  segment.args = Some(AngleBracketedArgs { args, span }.into());

@@ -1430,7 +1428,7 @@ impl<'a> Parser<'a> {
  self.restore_snapshot(snapshot);
  }
  }
- return if token::PathSep == self.token.kind {
+ return if self.token == token::PathSep {
  // We have some certainty that this was a bad turbofish at this point.
  // `foo< bar >::`
  if let ExprKind::Binary(o, ..) = inner_op.kind

@@ -1462,7 +1460,7 @@ impl<'a> Parser<'a> {
  Err(self.dcx().create_err(err))
  }
  }
- } else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
+ } else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
  // We have high certainty that this was a bad turbofish at this point.
  // `foo< bar >(`
  if let ExprKind::Binary(o, ..) = inner_op.kind

@@ -1528,7 +1526,7 @@ impl<'a> Parser<'a> {
  ];
  self.consume_tts(1, &modifiers);

- if self.token.kind == token::Eof {
+ if self.token == token::Eof {
  // Not entirely sure that what we consumed were fn arguments, rollback.
  self.restore_snapshot(snapshot);
  Err(())

@@ -1811,7 +1809,7 @@ impl<'a> Parser<'a> {
  /// This function gets called in places where a semicolon is NOT expected and if there's a
  /// semicolon it emits the appropriate error and returns true.
  pub fn maybe_consume_incorrect_semicolon(&mut self, previous_item: Option<&Item>) -> bool {
- if self.token.kind != TokenKind::Semi {
+ if self.token != TokenKind::Semi {
  return false;
  }

@@ -2405,10 +2403,10 @@ impl<'a> Parser<'a> {
  modifier: &[(token::TokenKind, i64)],
  ) {
  while acc > 0 {
- if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
+ if let Some((_, val)) = modifier.iter().find(|(t, _)| self.token == *t) {
  acc += *val;
  }
- if self.token.kind == token::Eof {
+ if self.token == token::Eof {
  break;
  }
  self.bump();

@@ -2598,7 +2596,7 @@ impl<'a> Parser<'a> {
  }
  })
  .is_some()
- || self.token.kind == TokenKind::Dot;
+ || self.token == TokenKind::Dot;
  // This will be true when a trait object type `Foo +` or a path which was a `const fn` with
  // type params has been parsed.
  let was_op =

@@ -2617,7 +2615,7 @@ impl<'a> Parser<'a> {
  })() {
  Ok(expr) => {
  // Find a mistake like `MyTrait<Assoc == S::Assoc>`.
- if token::EqEq == snapshot.token.kind {
+ if snapshot.token == token::EqEq {
  err.span_suggestion(
  snapshot.token.span,
  "if you meant to use an associated type binding, replace `==` with `=`",

@@ -2627,7 +2625,7 @@ impl<'a> Parser<'a> {
  let guar = err.emit();
  let value = self.mk_expr_err(start.to(expr.span), guar);
  return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }));
- } else if token::Colon == snapshot.token.kind
+ } else if snapshot.token == token::Colon
  && expr.span.lo() == snapshot.token.span.hi()
  && matches!(expr.kind, ExprKind::Path(..))
  {

@@ -2642,8 +2640,7 @@ impl<'a> Parser<'a> {
  return Ok(GenericArg::Type(
  self.mk_ty(start.to(expr.span), TyKind::Err(guar)),
  ));
- } else if token::Comma == self.token.kind || self.token.kind.should_end_const_arg()
- {
+ } else if self.token == token::Comma || self.token.kind.should_end_const_arg() {
  // Avoid the following output by checking that we consumed a full const arg:
  // help: expressions must be enclosed in braces to be used as const generic
  // arguments

@@ -2846,8 +2843,8 @@ impl<'a> Parser<'a> {
  pub(crate) fn maybe_recover_unexpected_block_label(&mut self) -> bool {
  // Check for `'a : {`
  if !(self.check_lifetime()
- && self.look_ahead(1, |tok| tok.kind == token::Colon)
- && self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Brace)))
+ && self.look_ahead(1, |t| *t == token::Colon)
+ && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace)))
  {
  return false;
  }

@@ -3001,7 +2998,7 @@ impl<'a> Parser<'a> {
  // >>>>>>>
  let mut end = None;
  loop {
- if self.token.kind == TokenKind::Eof {
+ if self.token == TokenKind::Eof {
  break;
  }
  if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or))

‎compiler/rustc_parse/src/parser/expr.rs

Lines changed: 35 additions & 36 deletions
@@ -165,7 +165,7 @@ impl<'a> Parser<'a> {

  // Look for JS' `===` and `!==` and recover
  if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual)
- && self.token.kind == token::Eq
+ && self.token == token::Eq
  && self.prev_token.span.hi() == self.token.span.lo()
  {
  let sp = op.span.to(self.token.span);

@@ -190,7 +190,7 @@ impl<'a> Parser<'a> {

  // Look for PHP's `<>` and recover
  if op.node == AssocOp::Less
- && self.token.kind == token::Gt
+ && self.token == token::Gt
  && self.prev_token.span.hi() == self.token.span.lo()
  {
  let sp = op.span.to(self.token.span);

@@ -208,7 +208,7 @@ impl<'a> Parser<'a> {

  // Look for C++'s `<=>` and recover
  if op.node == AssocOp::LessEqual
- && self.token.kind == token::Gt
+ && self.token == token::Gt
  && self.prev_token.span.hi() == self.token.span.lo()
  {
  let sp = op.span.to(self.token.span);

@@ -882,7 +882,7 @@ impl<'a> Parser<'a> {
  let mut res = ensure_sufficient_stack(|| {
  loop {
  let has_question =
- if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
+ if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
  // We are using noexpect here because we don't expect a `?` directly after
  // a `return` which could be suggested otherwise.
  self.eat_noexpect(&token::Question)

@@ -894,20 +894,19 @@ impl<'a> Parser<'a> {
  e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
  continue;
  }
- let has_dot =
- if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
- // We are using noexpect here because we don't expect a `.` directly after
- // a `return` which could be suggested otherwise.
- self.eat_noexpect(&token::Dot)
- } else if self.token.kind == TokenKind::RArrow && self.may_recover() {
- // Recovery for `expr->suffix`.
- self.bump();
- let span = self.prev_token.span;
- self.dcx().emit_err(errors::ExprRArrowCall { span });
- true
- } else {
- self.eat(&token::Dot)
- };
+ let has_dot = if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
+ // We are using noexpect here because we don't expect a `.` directly after
+ // a `return` which could be suggested otherwise.
+ self.eat_noexpect(&token::Dot)
+ } else if self.token == TokenKind::RArrow && self.may_recover() {
+ // Recovery for `expr->suffix`.
+ self.bump();
+ let span = self.prev_token.span;
+ self.dcx().emit_err(errors::ExprRArrowCall { span });
+ true
+ } else {
+ self.eat(&token::Dot)
+ };
  if has_dot {
  // expr.f
  e = self.parse_dot_suffix_expr(lo, e)?;

@@ -1206,7 +1205,7 @@ impl<'a> Parser<'a> {
  }

  fn mk_expr_tuple_field_access(
- &mut self,
+ &self,
  lo: Span,
  ident_span: Span,
  base: P<Expr>,

@@ -1221,7 +1220,7 @@ impl<'a> Parser<'a> {

  /// Parse a function call expression, `expr(...)`.
  fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
- let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
+ let snapshot = if self.token == token::OpenDelim(Delimiter::Parenthesis) {
  Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
  } else {
  None

@@ -1585,7 +1584,7 @@ impl<'a> Parser<'a> {
  // Suggests using '<=' if there is an error parsing qpath when the previous token
  // is an '=' token. Only emits suggestion if the '<' token and '=' token are
  // directly adjacent (i.e. '=<')
- if maybe_eq_tok.kind == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() {
+ if maybe_eq_tok == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() {
  let eq_lt = maybe_eq_tok.span.to(lt_span);
  err.span_suggestion(eq_lt, "did you mean", "<=", Applicability::Unspecified);
  }

@@ -2230,7 +2229,7 @@ impl<'a> Parser<'a> {
  return Ok(());
  }

- if self.token.kind == token::Comma {
+ if self.token == token::Comma {
  if !self.psess.source_map().is_multiline(prev_span.until(self.token.span)) {
  return Ok(());
  }

@@ -2360,7 +2359,7 @@ impl<'a> Parser<'a> {
  None => {}
  }

- if self.token.kind == TokenKind::Semi
+ if self.token == TokenKind::Semi
  && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
  && self.may_recover()
  {

@@ -2557,7 +2556,7 @@ impl<'a> Parser<'a> {
  );
  } else {
  // Look for usages of '=>' where '>=' might be intended
- if maybe_fatarrow.kind == token::FatArrow {
+ if maybe_fatarrow == token::FatArrow {
  err.span_suggestion(
  maybe_fatarrow.span,
  "you might have meant to write a \"greater than or equal to\" comparison",

@@ -2606,7 +2605,7 @@ impl<'a> Parser<'a> {
  missing_let: None,
  comparison: None,
  };
- if self.prev_token.kind == token::BinOp(token::Or) {
+ if self.prev_token == token::BinOp(token::Or) {
  // This was part of a closure, the that part of the parser recover.
  return Err(self.dcx().create_err(err));
  } else {

@@ -2742,7 +2741,7 @@ impl<'a> Parser<'a> {
  }

  fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
- let begin_paren = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
+ let begin_paren = if self.token == token::OpenDelim(Delimiter::Parenthesis) {
  // Record whether we are about to parse `for (`.
  // This is used below for recovery in case of `for ( $stuff ) $block`
  // in which case we will suggest `for $stuff $block`.

@@ -2776,7 +2775,7 @@ impl<'a> Parser<'a> {
  return Err(err);
  }
  };
- return if self.token.kind == token::CloseDelim(Delimiter::Parenthesis) {
+ return if self.token == token::CloseDelim(Delimiter::Parenthesis) {
  // We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the
  // parser state and emit a targeted suggestion.
  let span = vec![start_span, self.token.span];

@@ -2995,7 +2994,7 @@ impl<'a> Parser<'a> {
  first_expr: &P<Expr>,
  arrow_span: Span,
  ) -> Option<(Span, ErrorGuaranteed)> {
- if self.token.kind != token::Semi {
+ if self.token != token::Semi {
  return None;
  }
  let start_snapshot = self.create_snapshot_for_diagnostic();

@@ -3024,18 +3023,18 @@ impl<'a> Parser<'a> {
  // We might have either a `,` -> `;` typo, or a block without braces. We need
  // a more subtle parsing strategy.
  loop {
- if self.token.kind == token::CloseDelim(Delimiter::Brace) {
+ if self.token == token::CloseDelim(Delimiter::Brace) {
  // We have reached the closing brace of the `match` expression.
  return Some(err(self, stmts));
  }
- if self.token.kind == token::Comma {
+ if self.token == token::Comma {
  self.restore_snapshot(start_snapshot);
  return None;
  }
  let pre_pat_snapshot = self.create_snapshot_for_diagnostic();
  match self.parse_pat_no_top_alt(None, None) {
  Ok(_pat) => {
- if self.token.kind == token::FatArrow {
+ if self.token == token::FatArrow {
  // Reached arm end.
  self.restore_snapshot(pre_pat_snapshot);
  return Some(err(self, stmts));

@@ -3286,7 +3285,7 @@ impl<'a> Parser<'a> {
  }

  fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Expr>>)> {
- if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
+ if self.token == token::OpenDelim(Delimiter::Parenthesis) {
  // Detect and recover from `($pat if $cond) => $arm`.
  let left = self.token.span;
  match self.parse_pat_allow_top_alt(

@@ -3344,7 +3343,7 @@ impl<'a> Parser<'a> {
  self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
  let msg = "you might have meant to start a match arm after the match guard";
  if self.eat(&token::CloseDelim(Delimiter::Brace)) {
- let applicability = if self.token.kind != token::FatArrow {
+ let applicability = if self.token != token::FatArrow {
  // We have high confidence that we indeed didn't have a struct
  // literal in the match guard, but rather we had some operation
  // that ended in a path, immediately followed by a block that was

@@ -3565,7 +3564,7 @@ impl<'a> Parser<'a> {
  && self.look_ahead(1, |t| {
  AssocOp::from_token(t).is_some()
  || matches!(t.kind, token::OpenDelim(_))
- || t.kind == token::Dot
+ || *t == token::Dot
  })
  {
  // Looks like they tried to write a shorthand, complex expression.

@@ -3850,11 +3849,11 @@ impl<'a> Parser<'a> {
  self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
  let res = f(this, attrs)?;
  let trailing = (this.restrictions.contains(Restrictions::STMT_EXPR)
- && this.token.kind == token::Semi)
+ && this.token == token::Semi)
  // FIXME: pass an additional condition through from the place
  // where we know we need a comma, rather than assuming that
  // `#[attr] expr,` always captures a trailing comma.
- || this.token.kind == token::Comma;
+ || this.token == token::Comma;
  Ok((res, trailing))
  })
  }

‎compiler/rustc_parse/src/parser/generics.rs

Lines changed: 1 addition & 1 deletion
@@ -393,7 +393,7 @@ impl<'a> Parser<'a> {

  if let Some(struct_) = struct_
  && self.may_recover()
- && self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
+ && self.token == token::OpenDelim(Delimiter::Parenthesis)
  {
  snapshot = Some((struct_, self.create_snapshot_for_diagnostic()));
  };

‎compiler/rustc_parse/src/parser/item.rs

Lines changed: 19 additions & 19 deletions
@@ -354,7 +354,7 @@ impl<'a> Parser<'a> {
  fn is_reuse_path_item(&mut self) -> bool {
  // no: `reuse ::path` for compatibility reasons with macro invocations
  self.token.is_keyword(kw::Reuse)
- && self.look_ahead(1, |t| t.is_path_start() && t.kind != token::PathSep)
+ && self.look_ahead(1, |t| t.is_path_start() && *t != token::PathSep)
  }

  /// Are we sure this could not possibly be a macro invocation?

@@ -499,7 +499,7 @@ impl<'a> Parser<'a> {
  let mut err = self.dcx().struct_span_err(end.span, msg);
  if end.is_doc_comment() {
  err.span_label(end.span, "this doc comment doesn't document anything");
- } else if self.token.kind == TokenKind::Semi {
+ } else if self.token == TokenKind::Semi {
  err.span_suggestion_verbose(
  self.token.span,
  "consider removing this semicolon",

@@ -777,12 +777,12 @@ impl<'a> Parser<'a> {
  && self
  .span_to_snippet(self.prev_token.span)
  .is_ok_and(|snippet| snippet == "}")
- && self.token.kind == token::Semi;
+ && self.token == token::Semi;
  let mut semicolon_span = self.token.span;
  if !is_unnecessary_semicolon {
  // #105369, Detect spurious `;` before assoc fn body
  is_unnecessary_semicolon = self.token == token::OpenDelim(Delimiter::Brace)
- && self.prev_token.kind == token::Semi;
+ && self.prev_token == token::Semi;
  semicolon_span = self.prev_token.span;
  }
  // We have to bail or we'll potentially never make progress.

@@ -1194,7 +1194,7 @@ impl<'a> Parser<'a> {
  // FIXME: This recovery should be tested better.
  if safety == Safety::Default
  && self.token.is_keyword(kw::Unsafe)
- && self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Brace))
+ && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
  {
  self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err().emit();
  safety = Safety::Unsafe(self.token.span);

@@ -1258,7 +1258,7 @@ impl<'a> Parser<'a> {
  && self.is_keyword_ahead(1, &[kw::Extern])
  && self.look_ahead(
  2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize),
- |t| t.kind == token::OpenDelim(Delimiter::Brace),
+ |t| *t == token::OpenDelim(Delimiter::Brace),
  )
  }

@@ -1343,7 +1343,7 @@ impl<'a> Parser<'a> {
  ) -> PResult<'a, (Ident, StaticItem)> {
  let ident = self.parse_ident()?;

- if self.token.kind == TokenKind::Lt && self.may_recover() {
+ if self.token == TokenKind::Lt && self.may_recover() {
  let generics = self.parse_generics()?;
  self.dcx().emit_err(errors::StaticWithGenerics { span: generics.span });
  }

@@ -1914,7 +1914,7 @@ impl<'a> Parser<'a> {
  let mut err = self.dcx().struct_span_err(sp, msg);

  if self.token.is_ident()
- || (self.token.kind == TokenKind::Pound
+ || (self.token == TokenKind::Pound
  && (self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Bracket))))
  {
  // This is likely another field, TokenKind::Pound is used for `#[..]`

@@ -1937,8 +1937,8 @@ impl<'a> Parser<'a> {
  fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> {
  if let Err(err) = self.expect(&token::Colon) {
  let sm = self.psess.source_map();
- let eq_typo = self.token.kind == token::Eq && self.look_ahead(1, |t| t.is_path_start());
- let semi_typo = self.token.kind == token::Semi
+ let eq_typo = self.token == token::Eq && self.look_ahead(1, |t| t.is_path_start());
+ let semi_typo = self.token == token::Semi
  && self.look_ahead(1, |t| {
  t.is_path_start()
  // We check that we are in a situation like `foo; bar` to avoid bad suggestions

@@ -1974,7 +1974,7 @@ impl<'a> Parser<'a> {
  attrs: AttrVec,
  ) -> PResult<'a, FieldDef> {
  let name = self.parse_field_ident(adt_ty, lo)?;
- if self.token.kind == token::Not {
+ if self.token == token::Not {
  if let Err(mut err) = self.unexpected() {
  // Encounter the macro invocation
  err.subdiagnostic(MacroExpandsToAdtField { adt_ty });

@@ -1983,10 +1983,10 @@ impl<'a> Parser<'a> {
  }
  self.expect_field_ty_separator()?;
  let ty = self.parse_ty_for_field_def()?;
- if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) {
+ if self.token == token::Colon && self.look_ahead(1, |t| *t != token::Colon) {
  self.dcx().emit_err(errors::SingleColonStructType { span: self.token.span });
  }
- if self.token.kind == token::Eq {
+ if self.token == token::Eq {
  self.bump();
  let const_expr = self.parse_expr_anon_const()?;
  let sp = ty.span.shrink_to_hi().to(const_expr.value.span);

@@ -2064,7 +2064,7 @@ impl<'a> Parser<'a> {
  .parse_ident_common(false)
  // Cancel this error, we don't need it.
  .map_err(|err| err.cancel())
- && self.token.kind == TokenKind::Colon
+ && self.token == TokenKind::Colon
  {
  err.span_suggestion(
  removal_span,

@@ -2367,12 +2367,12 @@ impl<'a> Parser<'a> {
  match self.expected_one_of_not_found(&[], expected) {
  Ok(error_guaranteed) => Ok(error_guaranteed),
  Err(mut err) => {
- if self.token.kind == token::CloseDelim(Delimiter::Brace) {
+ if self.token == token::CloseDelim(Delimiter::Brace) {
  // The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
  // the AST for typechecking.
  err.span_label(ident_span, "while parsing this `fn`");
  Ok(err.emit())
- } else if self.token.kind == token::RArrow
+ } else if self.token == token::RArrow
  && let Some(fn_params_end) = fn_params_end
  {
  // Instead of a function body, the parser has encountered a right arrow

@@ -2445,7 +2445,7 @@ impl<'a> Parser<'a> {
  fn_params_end: Option<Span>,
  ) -> PResult<'a, Option<P<Block>>> {
  let has_semi = if req_body {
- self.token.kind == TokenKind::Semi
+ self.token == TokenKind::Semi
  } else {
  // Only include `;` in list of expected tokens if body is not required
  self.check(&TokenKind::Semi)

@@ -2458,7 +2458,7 @@ impl<'a> Parser<'a> {
  } else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
  self.parse_block_common(self.token.span, BlockCheckMode::Default, false)
  .map(|(attrs, body)| (attrs, Some(body)))?
- } else if self.token.kind == token::Eq {
+ } else if self.token == token::Eq {
  // Recover `fn foo() = $expr;`.
  self.bump(); // `=`
  let eq_sp = self.prev_token.span;

@@ -2761,7 +2761,7 @@ impl<'a> Parser<'a> {
  pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
  let mut first_param = true;
  // Parse the arguments, starting out with `self` being allowed...
- if self.token.kind != TokenKind::OpenDelim(Delimiter::Parenthesis)
+ if self.token != TokenKind::OpenDelim(Delimiter::Parenthesis)
  // might be typo'd trait impl, handled elsewhere
  && !self.token.is_keyword(kw::For)
  {

‎compiler/rustc_parse/src/parser/mod.rs

Lines changed: 4 additions & 4 deletions
@@ -527,7 +527,7 @@ impl<'a> Parser<'a> {
  } else if inedible.contains(&self.token.kind) {
  // leave it in the input
  Ok(Recovered::No)
- } else if self.token.kind != token::Eof
+ } else if self.token != token::Eof
  && self.last_unexpected_token_span == Some(self.token.span)
  {
  FatalError.raise();

@@ -756,7 +756,7 @@ impl<'a> Parser<'a> {
  /// compound tokens like multi-character operators in process.
  /// Returns `true` if the token was eaten.
  fn break_and_eat(&mut self, expected: TokenKind) -> bool {
- if self.token.kind == expected {
+ if self.token == expected {
  self.bump();
  return true;
  }

@@ -882,7 +882,7 @@ impl<'a> Parser<'a> {
  let token_str = pprust::token_kind_to_string(t);

  match self.current_closure.take() {
- Some(closure_spans) if self.token.kind == TokenKind::Semi => {
+ Some(closure_spans) if self.token == TokenKind::Semi => {
  // Finding a semicolon instead of a comma
  // after a closure body indicates that the
  // closure body may be a block but the user

@@ -910,7 +910,7 @@ impl<'a> Parser<'a> {
  // If this was a missing `@` in a binding pattern
  // bail with a suggestion
  // https://github.com/rust-lang/rust/issues/72373
- if self.prev_token.is_ident() && self.token.kind == token::DotDot {
+ if self.prev_token.is_ident() && self.token == token::DotDot {
  let msg = format!(
  "if you meant to bind the contents of the rest of the array \
  pattern into `{}`, use `@`",

‎compiler/rustc_parse/src/parser/pat.rs

Lines changed: 10 additions & 10 deletions
@@ -369,17 +369,17 @@ impl<'a> Parser<'a> {
  .and_then(|(ident, _)| ident.name.as_str().chars().next())
  .is_some_and(char::is_lowercase)
  })
- && self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Parenthesis));
+ && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Parenthesis));

  // Check for operators.
  // `|` is excluded as it is used in pattern alternatives and lambdas,
  // `?` is included for error propagation,
  // `[` is included for indexing operations,
  // `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. `tests/ui/parser/pat-lt-bracket-7.rs`)
  let has_trailing_operator = matches!(self.token.kind, token::BinOp(op) if op != BinOpToken::Or)
- || self.token.kind == token::Question
- || (self.token.kind == token::OpenDelim(Delimiter::Bracket)
- && self.look_ahead(1, |tok| tok.kind != token::CloseDelim(Delimiter::Bracket)));
+ || self.token == token::Question
+ || (self.token == token::OpenDelim(Delimiter::Bracket)
+ && self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket)));

  if !has_trailing_method && !has_trailing_operator {
  // Nothing to recover here.

@@ -413,7 +413,7 @@ impl<'a> Parser<'a> {
  let is_bound = is_end_bound
  // is_start_bound: either `..` or `)..`
  || self.token.is_range_separator()
- || self.token.kind == token::CloseDelim(Delimiter::Parenthesis)
+ || self.token == token::CloseDelim(Delimiter::Parenthesis)
  && self.look_ahead(1, Token::is_range_separator);

  // Check that `parse_expr_assoc_with` didn't eat a rhs.

@@ -450,7 +450,7 @@ impl<'a> Parser<'a> {
  lo = self.token.span;
  }

- let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd {
+ let pat = if self.check(&token::BinOp(token::And)) || self.token == token::AndAnd {
  self.parse_pat_deref(expected)?
  } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
  self.parse_pat_tuple_or_parens()?

@@ -625,7 +625,7 @@ impl<'a> Parser<'a> {
  ///
  /// [and]: https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/pattern-matching
  fn recover_intersection_pat(&mut self, lhs: P<Pat>) -> PResult<'a, P<Pat>> {
- if self.token.kind != token::At {
+ if self.token != token::At {
  // Next token is not `@` so it's not going to be an intersection pattern.
  return Ok(lhs);
  }

@@ -958,14 +958,14 @@ impl<'a> Parser<'a> {
  self.check_inline_const(dist)
  || self.look_ahead(dist, |t| {
  t.is_path_start() // e.g. `MY_CONST`;
- || t.kind == token::Dot // e.g. `.5` for recovery;
+ || *t == token::Dot // e.g. `.5` for recovery;
  || matches!(t.kind, token::Literal(..) | token::BinOp(token::Minus))
  || t.is_bool_lit()
  || t.is_whole_expr()
  || t.is_lifetime() // recover `'a` instead of `'a'`
  || (self.may_recover() // recover leading `(`
- && t.kind == token::OpenDelim(Delimiter::Parenthesis)
- && self.look_ahead(dist + 1, |t| t.kind != token::OpenDelim(Delimiter::Parenthesis))
+ && *t == token::OpenDelim(Delimiter::Parenthesis)
+ && self.look_ahead(dist + 1, |t| *t != token::OpenDelim(Delimiter::Parenthesis))
  && self.is_pat_range_end_start(dist + 1))
  })
  }

‎compiler/rustc_parse/src/parser/path.rs

Lines changed: 4 additions & 4 deletions
@@ -358,9 +358,9 @@ impl<'a> Parser<'a> {
  })?;
  let span = lo.to(self.prev_token.span);
  AngleBracketedArgs { args, span }.into()
- } else if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
+ } else if self.token == token::OpenDelim(Delimiter::Parenthesis)
  // FIXME(return_type_notation): Could also recover `...` here.
- && self.look_ahead(1, |tok| tok.kind == token::DotDot)
+ && self.look_ahead(1, |t| *t == token::DotDot)
  {
  self.bump(); // (
  self.bump(); // ..

@@ -384,7 +384,7 @@ impl<'a> Parser<'a> {
  let token_before_parsing = self.token.clone();
  let mut snapshot = None;
  if self.may_recover()
- && prev_token_before_parsing.kind == token::PathSep
+ && prev_token_before_parsing == token::PathSep
  && (style == PathStyle::Expr && self.token.can_begin_expr()
  || style == PathStyle::Pat && self.token.can_begin_pattern())
  {

@@ -393,7 +393,7 @@ impl<'a> Parser<'a> {

  let (inputs, _) = match self.parse_paren_comma_seq(|p| p.parse_ty()) {
  Ok(output) => output,
- Err(mut error) if prev_token_before_parsing.kind == token::PathSep => {
+ Err(mut error) if prev_token_before_parsing == token::PathSep => {
  error.span_label(
  prev_token_before_parsing.span.to(token_before_parsing.span),
  "while parsing this parenthesized list of type arguments starting here",

‎compiler/rustc_parse/src/parser/stmt.rs

Lines changed: 2 additions & 2 deletions
@@ -68,7 +68,7 @@ impl<'a> Parser<'a> {
  self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
  this.expect_keyword(kw::Let)?;
  let local = this.parse_local(attrs)?;
- let trailing = capture_semi && this.token.kind == token::Semi;
+ let trailing = capture_semi && this.token == token::Semi;
  Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing))
  })?
  } else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {

@@ -760,7 +760,7 @@ impl<'a> Parser<'a> {
  )
  ),
  );
- let suggest_eq = if self.token.kind == token::Dot
+ let suggest_eq = if self.token == token::Dot
  && let _ = self.bump()
  && let mut snapshot = self.create_snapshot_for_diagnostic()
  && let Ok(_) = snapshot

‎compiler/rustc_parse/src/parser/ty.rs

Lines changed: 8 additions & 9 deletions
@@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
  let mut trailing_plus = false;
  let (ts, trailing) = self.parse_paren_comma_seq(|p| {
  let ty = p.parse_ty()?;
- trailing_plus = p.prev_token.kind == TokenKind::BinOp(token::Plus);
+ trailing_plus = p.prev_token == TokenKind::BinOp(token::Plus);
  Ok(ty)
  })?;

@@ -499,8 +499,8 @@ impl<'a> Parser<'a> {
  let elt_ty = match self.parse_ty() {
  Ok(ty) => ty,
  Err(err)
- if self.look_ahead(1, |t| t.kind == token::CloseDelim(Delimiter::Bracket))
- | self.look_ahead(1, |t| t.kind == token::Semi) =>
+ if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Bracket))
+ | self.look_ahead(1, |t| *t == token::Semi) =>
  {
  // Recover from `[LIT; EXPR]` and `[LIT]`
  self.bump();

@@ -601,7 +601,7 @@ impl<'a> Parser<'a> {
  let span_start = self.token.span;
  let ast::FnHeader { ext, safety, constness, coroutine_kind } =
  self.parse_fn_front_matter(&inherited_vis, Case::Sensitive)?;
- if self.may_recover() && self.token.kind == TokenKind::Lt {
+ if self.may_recover() && self.token == TokenKind::Lt {
  self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?;
  }
  let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?;

@@ -681,7 +681,7 @@ impl<'a> Parser<'a> {
  // Always parse bounds greedily for better error recovery.
  let bounds = self.parse_generic_bounds()?;

- *impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
+ *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);

  Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
  }

@@ -727,8 +727,7 @@ impl<'a> Parser<'a> {
  self.check_keyword(kw::Dyn)
  && (self.token.uninterpolated_span().at_least_rust_2018()
  || self.look_ahead(1, |t| {
- (can_begin_dyn_bound_in_edition_2015(t)
- || t.kind == TokenKind::BinOp(token::Star))
+ (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star))
  && !can_continue_type_after_non_fn_ident(t)
  }))
  }

@@ -750,7 +749,7 @@ impl<'a> Parser<'a> {

  // Always parse bounds greedily for better error recovery.
  let bounds = self.parse_generic_bounds()?;
- *impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
+ *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
  Ok(TyKind::TraitObject(bounds, syntax))
  }

@@ -1060,7 +1059,7 @@ impl<'a> Parser<'a> {
  }

  let mut path = if self.token.is_keyword(kw::Fn)
- && self.look_ahead(1, |tok| tok.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
+ && self.look_ahead(1, |t| *t == TokenKind::OpenDelim(Delimiter::Parenthesis))
  && let Some(path) = self.recover_path_from_fn()
  {
  path

‎src/tools/clippy/clippy_dev/src/new_lint.rs

Lines changed: 2 additions & 2 deletions
@@ -470,7 +470,7 @@ fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str>
  });

  // Find both the last lint declaration (declare_clippy_lint!) and the lint pass impl
- while let Some(LintDeclSearchResult { content, .. }) = iter.find(|result| result.token_kind == TokenKind::Ident) {
+ while let Some(LintDeclSearchResult { content, .. }) = iter.find(|result| result.token == TokenKind::Ident) {
  let mut iter = iter
  .by_ref()
  .filter(|t| !matches!(t.token_kind, TokenKind::Whitespace | TokenKind::LineComment { .. }));

@@ -480,7 +480,7 @@ fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str>
  // matches `!{`
  match_tokens!(iter, Bang OpenBrace);
  if let Some(LintDeclSearchResult { range, .. }) =
- iter.find(|result| result.token_kind == TokenKind::CloseBrace)
+ iter.find(|result| result.token == TokenKind::CloseBrace)
  {
  last_decl_curly_offset = Some(range.end);
  }

‎src/tools/rustfmt/src/parse/macros/mod.rs

Lines changed: 3 additions & 5 deletions
@@ -84,9 +84,7 @@ pub(crate) struct ParsedMacroArgs {
  fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
  for &keyword in RUST_KW.iter() {
  if parser.token.is_keyword(keyword)
- && parser.look_ahead(1, |t| {
- t.kind == TokenKind::Eof || t.kind == TokenKind::Comma
- })
+ && parser.look_ahead(1, |t| *t == TokenKind::Eof || *t == TokenKind::Comma)
  {
  parser.bump();
  return Some(MacroArg::Keyword(

@@ -131,7 +129,7 @@ pub(crate) fn parse_macro_args(
  Some(arg) => {
  args.push(arg);
  parser.bump();
- if parser.token.kind == TokenKind::Eof && args.len() == 2 {
+ if parser.token == TokenKind::Eof && args.len() == 2 {
  vec_with_semi = true;
  break;
  }

@@ -150,7 +148,7 @@ pub(crate) fn parse_macro_args(

  parser.bump();

- if parser.token.kind == TokenKind::Eof {
+ if parser.token == TokenKind::Eof {
  trailing_comma = true;
  break;
  }
