rustc_parse/parser/
mod.rs

1pub mod asm;
2pub mod attr;
3mod attr_wrapper;
4mod diagnostics;
5mod expr;
6mod generics;
7mod item;
8mod nonterminal;
9mod pat;
10mod path;
11mod stmt;
12pub mod token_type;
13mod ty;
14
15use std::assert_matches::debug_assert_matches;
16use std::{fmt, mem, slice};
17
18use attr_wrapper::{AttrWrapper, UsePreAttrPos};
19pub use diagnostics::AttemptLocalParseRecovery;
20pub(crate) use expr::ForbiddenLetReason;
21pub(crate) use item::FnParseMode;
22pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
23use path::PathStyle;
24use rustc_ast::ptr::P;
25use rustc_ast::token::{
26    self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
27};
28use rustc_ast::tokenstream::{
29    ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor,
30};
31use rustc_ast::util::case::Case;
32use rustc_ast::{
33    self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
34    DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
35    Visibility, VisibilityKind,
36};
37use rustc_ast_pretty::pprust;
38use rustc_data_structures::fx::FxHashMap;
39use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
40use rustc_index::interval::IntervalSet;
41use rustc_session::parse::ParseSess;
42use rustc_span::{Ident, Span, Symbol, kw, sym};
43use thin_vec::ThinVec;
44use token_type::TokenTypeSet;
45pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
46use tracing::debug;
47
48use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
49use crate::exp;
50
// Unit tests for the parser itself.
#[cfg(test)]
mod tests;

// Ideally, these tests would be in `rustc_ast`. But they depend on having a
// parser, so they are here.
#[cfg(test)]
mod tokenstream {
    mod tests;
}
60
bitflags::bitflags! {
    /// Restrictions applied while parsing.
    ///
    /// The parser maintains a bitset of restrictions it will honor while
    /// parsing. This is essentially used as a way of tracking state of what
    /// is being parsed and to change behavior based on that. Multiple
    /// restrictions may be in effect simultaneously.
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        /// Restricts expressions for use in statement position.
        ///
        /// When expressions are used in various places, like statements or
        /// match arms, this is used to stop parsing once certain tokens are
        /// reached.
        ///
        /// For example, `if true {} & 1` with `STMT_EXPR` in effect is parsed
        /// as two separate expression statements (`if` and a reference to 1).
        /// Otherwise it is parsed as a bitwise AND where `if` is on the left
        /// and 1 is on the right.
        const STMT_EXPR         = 1 << 0;
        /// Do not allow struct literals.
        ///
        /// There are several places in the grammar where we don't want to
        /// allow struct literals because they can require lookahead, or
        /// otherwise could be ambiguous or cause confusion. For example,
        /// `if Foo {} {}` isn't clear if it is `Foo{}` struct literal, or
        /// just `Foo` is the condition, followed by a consequent block,
        /// followed by an empty block.
        ///
        /// See [RFC 92](https://rust-lang.github.io/rfcs/0092-struct-grammar.html).
        const NO_STRUCT_LITERAL = 1 << 1;
        /// Used to provide better error messages for const generic arguments.
        ///
        /// An un-braced const generic argument is limited to a very small
        /// subset of expressions. This is used to detect the situation where
        /// an expression outside of that subset is used, and to suggest to
        /// wrap the expression in braces.
        const CONST_EXPR        = 1 << 2;
        /// Allows `let` expressions.
        ///
        /// `let pattern = scrutinee` is parsed as an expression, but it is
        /// only allowed in let chains (`if` and `while` conditions).
        /// Otherwise it is not an expression (note that `let` in statement
        /// positions is treated as a `StmtKind::Let` statement, which has a
        /// slightly different grammar).
        const ALLOW_LET         = 1 << 3;
        /// Used to detect a missing `=>` in a match guard.
        ///
        /// This is used for error handling in a match guard to give a better
        /// error message if the `=>` is missing. It is set when parsing the
        /// guard expression.
        const IN_IF_GUARD       = 1 << 4;
        /// Used to detect the incorrect use of expressions in patterns.
        ///
        /// This is used for error handling while parsing a pattern. During
        /// error recovery, this will be set to try to parse the pattern as an
        /// expression, but halts parsing the expression when reaching certain
        /// tokens like `=`.
        const IS_PAT            = 1 << 5;
    }
}
121
/// How error recovery should treat a `;` while recovering.
///
/// NOTE(review): the variants are consumed by recovery code outside this
/// file (`diagnostics.rs`) — confirm exact semantics there. The names
/// suggest: stop at the `;` (`Break`), skip past it (`Ignore`), or treat it
/// like a `,` in comma-separated contexts (`Comma`).
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}
128
/// How error recovery should treat a block (`{ ... }`) boundary.
///
/// NOTE(review): consumed by recovery code outside this file — confirm exact
/// semantics in `diagnostics.rs`.
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}
134
/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    /// Collect tokens unconditionally.
    Yes,
    /// Do not force collection (tokens may still be collected for other
    /// reasons, e.g. attributes).
    No,
}
142
/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
///
/// The recovery fires only when all of the following hold:
/// - qpath recovery is allowed at this call site,
/// - the parser's recovery mode permits it (`may_recover`),
/// - the current token is a `ty` metavariable sequence, and
/// - the token immediately after that sequence's closing delimiter is `::`.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            && let Some(mv_kind) = $self.token.is_metavar_seq()
            && let token::MetaVarKind::Ty { .. } = mv_kind
            && $self.check_noexpect_past_close_delim(&token::PathSep)
        {
            // Reparse the type, then move to recovery.
            let ty = $self
                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
                .expect("metavar seq ty");

            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
162
/// Whether the parser is allowed to recover from broken code.
///
/// Queried via [`Parser::may_recover`]; recovery is forbidden when parsing
/// macro arguments (see the `recovery` field on [`Parser`]).
#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}
168
/// The Rust parser: consumes a `TokenStream` and produces AST nodes.
#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token.
    token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    /// Whether tokens should be captured with `cfg`/`cfg_attr` processing in
    /// mind. NOTE(review): exact semantics are defined by the token-collection
    /// machinery in `attr_wrapper.rs` — confirm there.
    pub capture_cfg: bool,
    /// The currently active parsing restrictions (see [`Restrictions`]).
    restrictions: Restrictions,
    /// The set of token types expected at the current position, accumulated
    /// for use in "expected ..., found ..." diagnostics.
    expected_token_types: TokenTypeSet,
    /// Cursor over the token stream being parsed.
    token_cursor: TokenCursor,
    // The number of calls to `bump`, i.e. the position in the token stream.
    num_bump_calls: u32,
    // During parsing we may sometimes need to "unglue" a glued token into two
    // or three component tokens (e.g. `>>` into `>` and `>`, or `>>=` into `>`
    // and `>` and `=`), so the parser can consume them one at a time. This
    // process bypasses the normal capturing mechanism (e.g. `num_bump_calls`
    // will not be incremented), since the "unglued" tokens do not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming all the component tokens, this is not an issue,
    // because we'll end up capturing the single "glued" token.
    //
    // However, sometimes we may want to capture not all of the original
    // token. For example, capturing the `Vec<u8>` in `Option<Vec<u8>>`
    // requires us to unglue the trailing `>>` token. The `break_last_token`
    // field is used to track these tokens. They get appended to the captured
    // stream when we evaluate a `LazyAttrTokenStream`.
    //
    // This value is always 0, 1, or 2. It can only reach 2 when splitting
    // `>>=` or `<<=`.
    break_last_token: u32,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u16,
    /// Current depth of angle-bracket nesting.
    /// NOTE(review): the nesting limit appears to be enforced in path parsing —
    /// confirm in `path.rs`.
    angle_bracket_nesting: u16,

    /// The span of the last token that triggered an "unexpected token" error.
    /// Hitting the same span again makes `expect_one_of` raise a fatal error
    /// instead of erroring repeatedly.
    last_unexpected_token_span: Option<Span>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
    /// State used by `Parser::collect_tokens` (see [`CaptureState`]).
    capture_state: CaptureState,
    /// This allows us to recover when the user forgets to add braces around
    /// multiple statements in the closure body.
    current_closure: Option<ClosureSpans>,
    /// Whether the parser is allowed to do recovery.
    /// This is disabled when parsing macro arguments, see #103534
    recovery: Recovery,
}
222
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
// nonterminals. Make sure it doesn't unintentionally get bigger. We only check a few arches
// though, because `TokenTypeSet(u128)` alignment varies on others, changing the total size.
// (This is a compile-time check; bump the number deliberately when adding a field.)
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
228
/// Stores span information about a closure.
#[derive(Clone, Debug)]
struct ClosureSpans {
    /// Span of the entire closure expression.
    whole_closure: Span,
    /// Span of the `|` closing the parameter list.
    closing_pipe: Span,
    /// Span of the closure body.
    body: Span,
}
236
/// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where
/// we will never need an `AttrTokenStream`.
#[derive(Copy, Clone, Debug)]
enum Capturing {
    /// We aren't performing any capturing - this is the default mode.
    No,
    /// We are capturing tokens.
    Yes,
}
247
// This state is used by `Parser::collect_tokens`.
#[derive(Clone, Debug)]
struct CaptureState {
    /// Whether token capturing is currently active.
    capturing: Capturing,
    /// Replacements to apply when materializing the captured stream.
    /// NOTE(review): semantics defined by `ParserReplacement` in
    /// `rustc_ast::tokenstream` — confirm there.
    parser_replacements: Vec<ParserReplacement>,
    /// For each inner attribute seen, the parser range it covers.
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    // `IntervalSet` is good for perf because attrs are mostly added to this
    // set in contiguous ranges.
    seen_attrs: IntervalSet<AttrId>,
}
258
/// A sequence separator.
#[derive(Debug)]
struct SeqSep<'a> {
    /// The separator token. `None` means the sequence has no separator.
    sep: Option<ExpTokenPair<'a>>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}
267
268impl<'a> SeqSep<'a> {
269    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
270        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
271    }
272
273    fn none() -> SeqSep<'a> {
274        SeqSep { sep: None, trailing_sep_allowed: false }
275    }
276}
277
/// Whether the item just parsed is followed by a type.
///
/// NOTE(review): consumers live elsewhere in the parser — confirm exact
/// usage (likely disambiguation around `:` in item positions).
#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}
283
/// Whether a parsed sequence ended with a trailing separator.
#[derive(Copy, Clone, Debug)]
enum Trailing {
    No,
    Yes,
}
289
290impl From<bool> for Trailing {
291    fn from(b: bool) -> Trailing {
292        if b { Trailing::Yes } else { Trailing::No }
293    }
294}
295
/// A diagnostic-friendly classification of a token, used by [`token_descr`]
/// to produce better "expected ..., found ..." wording.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    // Expanded metavariables are wrapped in invisible delimiters which aren't
    // pretty-printed. In error messages we must handle these specially
    // otherwise we get confusing things in messages like "expected `(`, found
    // ``". It's better to say e.g. "expected `(`, found type metavariable".
    MetaVar(MetaVarKind),
}
309
310impl TokenDescription {
311    pub(super) fn from_token(token: &Token) -> Option<Self> {
312        match token.kind {
313            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
314            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
315            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
316            token::DocComment(..) => Some(TokenDescription::DocComment),
317            token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
318                Some(TokenDescription::MetaVar(kind))
319            }
320            _ => None,
321        }
322    }
323}
324
325pub fn token_descr(token: &Token) -> String {
326    let s = pprust::token_to_string(token).to_string();
327
328    match (TokenDescription::from_token(token), &token.kind) {
329        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
330        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
331        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
332        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
333        // Deliberately doesn't print `s`, which is empty.
334        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
335        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
336        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
337        (None, _) => format!("`{s}`"),
338    }
339}
340
341impl<'a> Parser<'a> {
    /// Creates a new parser over `stream`.
    ///
    /// `subparser_name` is `Some` when this parser is parsing a macro call's
    /// input rather than ordinary Rust code (see the field docs).
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            // Dummy tokens; replaced by the initial `bump` below.
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make parser point to the first token.
        parser.bump();

        // Change this from 1 back to 0 after the bump. This eases debugging of
        // `Parser::collect_tokens` because 0-indexed token positions are nicer
        // than 1-indexed token positions.
        parser.num_bump_calls = 0;

        parser
    }
382
    /// Builder-style setter for the parser's recovery mode,
    /// e.g. `Parser::new(..).recovery(Recovery::Forbidden)`.
    #[inline]
    pub fn recovery(mut self, recovery: Recovery) -> Self {
        self.recovery = recovery;
        self
    }
388
389    #[inline]
390    fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
391        let old = mem::replace(&mut self.recovery, recovery);
392        let res = f(self);
393        self.recovery = old;
394        res
395    }
396
    /// Whether the parser is allowed to recover from broken code.
    ///
    /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
    /// is not allowed. All recovery done by the parser must be gated behind this check.
    ///
    /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
    /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
    ///
    /// See [`Recovery`] and the `recovery` field.
    #[inline]
    fn may_recover(&self) -> bool {
        matches!(self.recovery, Recovery::Allowed)
    }
408
    /// Version of [`unexpected`](Parser::unexpected) that "returns" any type in the `Ok`
    /// (both those functions never return "Ok", and so can lie like that in the type).
    ///
    /// Never actually returns `Ok`: the rare recovered path raises a fatal
    /// error instead.
    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // We can get `Ok(true)` from `recover_closing_delimiter`
            // which is called in `expected_one_of_not_found`.
            Ok(_) => FatalError.raise(),
        }
    }
419
    /// Signals an error for the current (unexpected) token; never returns
    /// `Ok(())`. See [`Parser::unexpected_any`].
    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }
423
    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
        if self.expected_token_types.is_empty() {
            // No other tokens were expected here, so the simpler
            // error/recovery path suffices.
            if self.token == *exp.tok {
                self.bump();
                Ok(Recovered::No)
            } else {
                self.unexpected_try_recover(exp.tok)
            }
        } else {
            // Fold `exp` into the accumulated expectations for diagnostics.
            self.expect_one_of(slice::from_ref(&exp), &[])
        }
    }
437
    /// Expect next token to be edible or inedible token. If edible,
    /// then consume it; if inedible, then return without consuming
    /// anything. Signal a fatal error if next token is unexpected.
    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair<'_>],
        inedible: &[ExpTokenPair<'_>],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
            // leave it in the input
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            // We already errored on this exact token; erroring again here
            // could loop forever, so raise a fatal error instead.
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }
461
    /// Parses an identifier, emitting (but recovering from) an error if it is
    /// a reserved word used as an identifier.
    // Public for rustfmt usage.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }
466
467    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
468        let (ident, is_raw) = self.ident_or_err(recover)?;
469
470        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
471            let err = self.expected_ident_found_err();
472            if recover {
473                err.emit();
474            } else {
475                return Err(err);
476            }
477        }
478        self.bump();
479        Ok(ident)
480    }
481
482    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
483        match self.token.ident() {
484            Some(ident) => Ok(ident),
485            None => self.expected_ident_found(recover),
486        }
487    }
488
    /// Checks if the next token is `tok`, and returns `true` if so.
    ///
    /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
    /// encountered.
    #[inline]
    fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
        let is_present = self.token == *exp.tok;
        if !is_present {
            // Record the expectation so diagnostics can list it.
            self.expected_token_types.insert(exp.token_type);
        }
        is_present
    }
501
    /// Like [`Parser::check`], but without recording `tok` as an expected
    /// token (so it never shows up in "expected ..." diagnostics).
    #[inline]
    #[must_use]
    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }
507
    // Check the first token after the delimiter that closes the current
    // delimited sequence. (Panics if used in the outermost token stream, which
    // has no delimiters.) It uses a clone of the relevant tree cursor to skip
    // past the entire `TokenTree::Delimited` in a single step, avoiding the
    // need for unbounded token lookahead.
    //
    // Primarily used when `self.token` matches `OpenInvisible(_))`, to look
    // ahead through the current metavar expansion.
    //
    // Returns `false` when there is no tree after the closing delimiter, or
    // when that tree is not a token matching `tok`.
    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
        tree_cursor.bump();
        matches!(
            tree_cursor.curr(),
            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
        )
    }
524
525    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
526    ///
527    /// the main purpose of this function is to reduce the cluttering of the suggestions list
528    /// which using the normal eat method could introduce in some cases.
529    #[inline]
530    #[must_use]
531    fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
532        let is_present = self.check_noexpect(tok);
533        if is_present {
534            self.bump()
535        }
536        is_present
537    }
538
539    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
540    #[inline]
541    #[must_use]
542    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
543        let is_present = self.check(exp);
544        if is_present {
545            self.bump()
546        }
547        is_present
548    }
549
    /// If the next token is the given keyword, returns `true` without eating it.
    /// An expectation is also added for diagnostics purposes.
    #[inline]
    #[must_use]
    fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
        let is_keyword = self.token.is_keyword(exp.kw);
        if !is_keyword {
            // Record the expectation so diagnostics can list it.
            self.expected_token_types.insert(exp.token_type);
        }
        is_keyword
    }
561
562    #[inline]
563    #[must_use]
564    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
565        if self.check_keyword(exp) {
566            true
567        } else if case == Case::Insensitive
568            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
569            // Do an ASCII case-insensitive match, because all keywords are ASCII.
570            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
571        {
572            true
573        } else {
574            false
575        }
576    }
577
578    /// If the next token is the given keyword, eats it and returns `true`.
579    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
580    // Public for rustc_builtin_macros and rustfmt usage.
581    #[inline]
582    #[must_use]
583    pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
584        let is_keyword = self.check_keyword(exp);
585        if is_keyword {
586            self.bump();
587        }
588        is_keyword
589    }
590
    /// Eats a keyword, optionally ignoring the case.
    /// If the case differs (and is ignored) an error is issued.
    /// This is useful for recovery.
    #[inline]
    #[must_use]
    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.eat_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            // Do an ASCII case-insensitive match, because all keywords are ASCII.
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            // The keyword matched except for its case: report the miscasing
            // but still accept and consume the token.
            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
            self.bump();
            true
        } else {
            false
        }
    }
611
612    /// If the next token is the given keyword, eats it and returns `true`.
613    /// Otherwise, returns `false`. No expectation is added.
614    // Public for rustc_builtin_macros usage.
615    #[inline]
616    #[must_use]
617    pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
618        let is_keyword = self.token.is_keyword(kw);
619        if is_keyword {
620            self.bump();
621        }
622        is_keyword
623    }
624
625    /// If the given word is not a keyword, signals an error.
626    /// If the next token is not the given word, signals an error.
627    /// Otherwise, eats it.
628    pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
629        if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
630    }
631
    /// Consume a sequence produced by a metavar expansion, if present.
    ///
    /// Convenience wrapper around `eat_metavar_seq_with_matcher` for the
    /// common case of matching one exact `MetaVarKind`.
    fn eat_metavar_seq<T>(
        &mut self,
        mv_kind: MetaVarKind,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
    }
640
641    /// A slightly more general form of `eat_metavar_seq`, for use with the
642    /// `MetaVarKind` variants that have parameters, where an exact match isn't
643    /// desired.
644    fn eat_metavar_seq_with_matcher<T>(
645        &mut self,
646        match_mv_kind: impl Fn(MetaVarKind) -> bool,
647        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
648    ) -> Option<T> {
649        if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
650            && match_mv_kind(mv_kind)
651        {
652            self.bump();
653
654            // Recovery is disabled when parsing macro arguments, so it must
655            // also be disabled when reparsing pasted macro arguments,
656            // otherwise we get inconsistent results (e.g. #137874).
657            let res = self.with_recovery(Recovery::Forbidden, |this| f(this));
658
659            let res = match res {
660                Ok(res) => res,
661                Err(err) => {
662                    // This can occur in unusual error cases, e.g. #139445.
663                    err.delay_as_bug();
664                    return None;
665                }
666            };
667
668            if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
669                && match_mv_kind(mv_kind)
670            {
671                self.bump();
672                Some(res)
673            } else {
674                // This can occur when invalid syntax is passed to a decl macro. E.g. see #139248,
675                // where the reparse attempt of an invalid expr consumed the trailing invisible
676                // delimiter.
677                self.dcx()
678                    .span_delayed_bug(self.token.span, "no close delim with reparsing {mv_kind:?}");
679                None
680            }
681        } else {
682            None
683        }
684    }
685
686    /// Is the given keyword `kw` followed by a non-reserved identifier?
687    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
688        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
689    }
690
691    #[inline]
692    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
693        if !ok {
694            self.expected_token_types.insert(token_type);
695        }
696        ok
697    }
698
    /// Checks whether the next token is an identifier, recording `Ident` as
    /// expected otherwise.
    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }
702
    /// Checks whether the next token can start a path, recording `Path` as
    /// expected otherwise.
    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }
706
    /// Checks whether the next token can begin a type, recording `Type` as
    /// expected otherwise.
    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }
710
    /// Checks whether the next token can begin a const generic argument,
    /// recording `Const` as expected otherwise.
    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }
714
715    fn check_const_closure(&self) -> bool {
716        self.is_keyword_ahead(0, &[kw::Const])
717            && self.look_ahead(1, |t| match &t.kind {
718                // async closures do not work with const closures, so we do not parse that here.
719                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
720                | token::OrOr
721                | token::Or => true,
722                _ => false,
723            })
724    }
725
726    fn check_inline_const(&self, dist: usize) -> bool {
727        self.is_keyword_ahead(dist, &[kw::Const])
728            && self.look_ahead(dist + 1, |t| match &t.kind {
729                token::OpenBrace => true,
730                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
731                _ => false,
732            })
733    }
734
    /// Checks to see if the next token is either `+` or `+=`.
    /// Otherwise returns `false`. Records `Plus` as expected on a miss.
    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }
741
    /// Eats the expected token if it's present, possibly breaking
    /// compound tokens like multi-character operators in the process.
    /// Returns `true` if the token was eaten.
    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        if self.token == *exp.tok {
            // Exact match: nothing to break apart.
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == *exp.tok => {
                // The current glued token starts with the expected token:
                // split it, consume the first component, and leave the
                // remainder as the new current token.
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `bump` will set this field to 0.
                self.break_last_token += 1;
                // Use the spacing of the glued token as the spacing of the
                // unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
773
    /// Eats `+` possibly breaking tokens like `+=` in the process.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(exp!(Plus))
    }
778
779    /// Eats `&` possibly breaking tokens like `&&` in process.
780    /// Signals an error if `&` is not eaten.
781    fn expect_and(&mut self) -> PResult<'a, ()> {
782        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
783    }
784
785    /// Eats `|` possibly breaking tokens like `||` in process.
786    /// Signals an error if `|` was not eaten.
787    fn expect_or(&mut self) -> PResult<'a, ()> {
788        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
789    }
790
    /// Eats `<` possibly breaking tokens like `<<` in the process.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(exp!(Lt));
        if ate {
            // See doc comment for `unmatched_angle_bracket_count`.
            self.unmatched_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }
801
802    /// Eats `<` possibly breaking tokens like `<<` in process.
803    /// Signals an error if `<` was not eaten.
804    fn expect_lt(&mut self) -> PResult<'a, ()> {
805        if self.eat_lt() { Ok(()) } else { self.unexpected() }
806    }
807
    /// Eats `>` possibly breaking tokens like `>>` in the process.
    /// Signals an error if `>` was not eaten.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Gt)) {
            // See doc comment for `unmatched_angle_bracket_count`.
            // Guard against underflow: the count may already be zero if the
            // `>` had no matching `<` tracked here.
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }
822
823    /// Checks if the next token is contained within `closes`, and returns `true` if so.
824    fn expect_any_with_type(
825        &mut self,
826        closes_expected: &[ExpTokenPair<'_>],
827        closes_not_expected: &[&TokenKind],
828    ) -> bool {
829        closes_expected.iter().any(|&close| self.check(close))
830            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
831    }
832
    /// Parses a sequence until the specified delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    ///
    /// Returns the parsed elements, whether the sequence ended with a
    /// trailing separator, and whether error recovery took place. When the
    /// expected separator is missing or malformed, several recoveries are
    /// attempted before giving up (see the inline comments below).
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep<'_>,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            // Also stop at EOF or any closing delimiter, even an unexpected
            // one; the caller deals with unbalanced delimiters.
            if self.token.kind.is_close_delim_or_eof() {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    // no separator for the first element
                    first = false;
                } else {
                    // check for separator
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            // `expect` already emitted an error while
                            // recovering; stop parsing the sequence.
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(exp.tok);

                            match self.current_closure.take() {
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    // Finding a semicolon instead of a comma
                                    // after a closure body indicates that the
                                    // closure body may be a block but the user
                                    // forgot to put braces around its
                                    // statements.

                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // Attempt to keep parsing if it was a similar separator.
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            // If this was a missing `@` in a binding pattern
                            // bail with a suggestion
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to keep parsing if it was an omitted separator.
                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    for xx in &e.children {
                                        // Propagate the help message from sub error `e` to main
                                        // error `expect_err`.
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        // We will try to recover in
                                        // `maybe_recover_struct_lit_bad_delims`.
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // A separator immediately followed by a closer is a trailing
            // separator; accept it only when the caller allows one.
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }
973
974    fn recover_missing_braces_around_closure_body(
975        &mut self,
976        closure_spans: ClosureSpans,
977        mut expect_err: Diag<'_>,
978    ) -> PResult<'a, ()> {
979        let initial_semicolon = self.token.span;
980
981        while self.eat(exp!(Semi)) {
982            let _ = self
983                .parse_stmt_without_recovery(false, ForceCollect::No, false)
984                .unwrap_or_else(|e| {
985                    e.cancel();
986                    None
987                });
988        }
989
990        expect_err
991            .primary_message("closure bodies that contain statements must be surrounded by braces");
992
993        let preceding_pipe_span = closure_spans.closing_pipe;
994        let following_token_span = self.token.span;
995
996        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
997        first_note.push_span_label(
998            initial_semicolon,
999            "this `;` turns the preceding closure into a statement",
1000        );
1001        first_note.push_span_label(
1002            closure_spans.body,
1003            "this expression is a statement because of the trailing semicolon",
1004        );
1005        expect_err.span_note(first_note, "statement found outside of a block");
1006
1007        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
1008        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
1009        second_note.push_span_label(
1010            following_token_span,
1011            "...but likely you meant the closure to end here",
1012        );
1013        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");
1014
1015        expect_err.span(vec![preceding_pipe_span, following_token_span]);
1016
1017        let opening_suggestion_str = " {".to_string();
1018        let closing_suggestion_str = "}".to_string();
1019
1020        expect_err.multipart_suggestion(
1021            "try adding braces",
1022            vec![
1023                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
1024                (following_token_span.shrink_to_lo(), closing_suggestion_str),
1025            ],
1026            Applicability::MaybeIncorrect,
1027        );
1028
1029        expect_err.emit();
1030
1031        Ok(())
1032    }
1033
    /// Parses a sequence, not including the delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    ///
    /// Convenience wrapper around `parse_seq_to_before_tokens` for the common
    /// case of a single expected closing token.
    fn parse_seq_to_before_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
    }
1045
1046    /// Parses a sequence, including only the closing delimiter. The function
1047    /// `f` must consume tokens until reaching the next separator or
1048    /// closing bracket.
1049    fn parse_seq_to_end<T>(
1050        &mut self,
1051        close: ExpTokenPair<'_>,
1052        sep: SeqSep<'_>,
1053        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1054    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
1055        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
1056        if matches!(recovered, Recovered::No) && !self.eat(close) {
1057            self.dcx().span_delayed_bug(
1058                self.token.span,
1059                "recovered but `parse_seq_to_before_end` did not give us the close token",
1060            );
1061        }
1062        Ok((val, trailing))
1063    }
1064
    /// Parses a sequence, including both delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    ///
    /// Expects (and consumes) `open` first, then delegates the rest —
    /// elements, separators, and the `close` token — to `parse_seq_to_end`.
    fn parse_unspanned_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.expect(open)?;
        self.parse_seq_to_end(close, sep, f)
    }
1078
    /// Parses a comma-separated sequence, including both delimiters.
    /// The function `f` must consume tokens until reaching the next separator or
    /// closing bracket. A trailing comma before `close` is permitted.
    fn parse_delim_comma_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
    }
1090
    /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
    /// The function `f` must consume tokens until reaching the next separator or
    /// closing bracket. A trailing comma is permitted.
    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
    }
1100
    /// Advance the parser by one token using provided token as the next one.
    /// Out-of-line wrapper around `inlined_bump_with`; prefer this on
    /// non-hot paths to avoid code bloat.
    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }
1105
1106    /// This always-inlined version should only be used on hot code paths.
1107    #[inline(always)]
1108    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
1109        // Update the current and previous tokens.
1110        self.prev_token = mem::replace(&mut self.token, next_token);
1111        self.token_spacing = next_spacing;
1112
1113        // Diagnostics.
1114        self.expected_token_types.clear();
1115    }
1116
    /// Advance the parser by one token.
    pub fn bump(&mut self) {
        // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
        // than `.0`/`.1` access.
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        // We got a token from the underlying cursor and no longer need to
        // worry about an unglued token. See `break_and_eat` for more details.
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        // The cursor should never yield an invisible delimiter whose origin
        // says it is to be skipped.
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
        ));
        self.inlined_bump_with(next)
    }
1137
    /// Look-ahead `dist` tokens of `self.token` and get access to that token there.
    /// When `dist == 0` then the current token is looked at. `Eof` will be
    /// returned if the look-ahead is any distance past the end of the tokens.
    ///
    /// The `dist == 0` and `dist == 1` cases are handled with dedicated fast
    /// paths; everything else clones the token cursor and walks forward.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        // Typically around 98% of the `dist > 0` cases have `dist == 1`, so we
        // have a fast special case for that.
        if dist == 1 {
            // The index is zero because the tree cursor's index always points
            // to the next token to be gotten.
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    // Indexing stayed within the current token tree.
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            if !delim.skip() {
                                return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    // The tree cursor lookahead went (one) past the end of the
                    // current token tree. Try to return a close delimiter.
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        // We are not in the outermost token stream, so we have
                        // delimiters. Also, those delimiters are not skipped.
                        return looker(&Token::new(delim.as_close_token_kind(), span.close));
                    }
                }
            }
        }

        // Just clone the token cursor and use `next`, skipping delimiters as
        // necessary. Slow but simple.
        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            // Invisible delimiters marked as skippable do not count towards
            // the lookahead distance.
            if matches!(
                token.kind,
                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }
1195
1196    /// Like `lookahead`, but skips over token trees rather than tokens. Useful
1197    /// when looking past possible metavariable pasting sites.
1198    pub fn tree_look_ahead<R>(
1199        &self,
1200        dist: usize,
1201        looker: impl FnOnce(&TokenTree) -> R,
1202    ) -> Option<R> {
1203        assert_ne!(dist, 0);
1204        self.token_cursor.curr.look_ahead(dist - 1).map(looker)
1205    }
1206
1207    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
1208    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
1209        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
1210    }
1211
    /// Parses a coroutine qualifier: `async`, `gen`, `async gen`, or nothing.
    ///
    /// `gen` (alone or after `async`) is only eaten when the relevant token's
    /// span is at least Rust 2024.
    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token_uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
            // error if edition <= 2024, like we do with async and edition <= 2018?
            if self.token_uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token_uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token_uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }
1246
1247    /// Parses fn unsafety: `unsafe`, `safe` or nothing.
1248    fn parse_safety(&mut self, case: Case) -> Safety {
1249        if self.eat_keyword_case(exp!(Unsafe), case) {
1250            Safety::Unsafe(self.prev_token_uninterpolated_span())
1251        } else if self.eat_keyword_case(exp!(Safe), case) {
1252            Safety::Safe(self.prev_token_uninterpolated_span())
1253        } else {
1254            Safety::Default
1255        }
1256    }
1257
    /// Parses constness: `const` or nothing.
    /// Thin wrapper over `parse_constness_` with `is_closure = false`.
    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }
1262
1263    /// Parses constness for closures (case sensitive, feature-gated)
1264    fn parse_closure_constness(&mut self) -> Const {
1265        let constness = self.parse_constness_(Case::Sensitive, true);
1266        if let Const::Yes(span) = constness {
1267            self.psess.gated_spans.gate(sym::const_closures, span);
1268        }
1269        constness
1270    }
1271
1272    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
1273        // Avoid const blocks and const closures to be parsed as const items
1274        if (self.check_const_closure() == is_closure)
1275            && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
1276            && self.eat_keyword_case(exp!(Const), case)
1277        {
1278            Const::Yes(self.prev_token_uninterpolated_span())
1279        } else {
1280            Const::No
1281        }
1282    }
1283
1284    /// Parses inline const expressions.
1285    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
1286        self.expect_keyword(exp!(Const))?;
1287        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
1288        let anon_const = AnonConst {
1289            id: DUMMY_NODE_ID,
1290            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
1291        };
1292        let blk_span = anon_const.value.span;
1293        let kind = if pat {
1294            let guar = self
1295                .dcx()
1296                .struct_span_err(blk_span, "`inline_const_pat` has been removed")
1297                .with_help("use a named `const`-item or an `if`-guard instead")
1298                .emit();
1299            ExprKind::Err(guar)
1300        } else {
1301            ExprKind::ConstBlock(anon_const)
1302        };
1303        Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs))
1304    }
1305
1306    /// Parses mutability (`mut` or nothing).
1307    fn parse_mutability(&mut self) -> Mutability {
1308        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
1309    }
1310
1311    /// Parses reference binding mode (`ref`, `ref mut`, or nothing).
1312    fn parse_byref(&mut self) -> ByRef {
1313        if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
1314    }
1315
1316    /// Possibly parses mutability (`const` or `mut`).
1317    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
1318        if self.eat_keyword(exp!(Mut)) {
1319            Some(Mutability::Mut)
1320        } else if self.eat_keyword(exp!(Const)) {
1321            Some(Mutability::Not)
1322        } else {
1323            None
1324        }
1325    }
1326
1327    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
1328        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
1329        {
1330            if let Some(suffix) = suffix {
1331                self.expect_no_tuple_index_suffix(self.token.span, suffix);
1332            }
1333            self.bump();
1334            Ok(Ident::new(symbol, self.prev_token.span))
1335        } else {
1336            self.parse_ident_common(true)
1337        }
1338    }
1339
1340    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
1341        if let Some(args) = self.parse_delim_args_inner() {
1342            Ok(P(args))
1343        } else {
1344            self.unexpected_any()
1345        }
1346    }
1347
1348    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
1349        Ok(if let Some(args) = self.parse_delim_args_inner() {
1350            AttrArgs::Delimited(args)
1351        } else if self.eat(exp!(Eq)) {
1352            let eq_span = self.prev_token.span;
1353            AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
1354        } else {
1355            AttrArgs::Empty
1356        })
1357    }
1358
1359    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
1360        let delimited = self.check(exp!(OpenParen))
1361            || self.check(exp!(OpenBracket))
1362            || self.check(exp!(OpenBrace));
1363
1364        delimited.then(|| {
1365            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
1366                unreachable!()
1367            };
1368            DelimArgs { dspan, delim, tokens }
1369        })
1370    }
1371
    /// Parses a single token tree from the input.
    ///
    /// For a delimited group, returns the pre-built `TokenTree::Delimited`
    /// from the cursor stack and advances the parser past its closing
    /// delimiter; for any other token, returns a one-token tree.
    pub fn parse_token_tree(&mut self) -> TokenTree {
        if self.token.kind.open_delim().is_some() {
            // Clone the `TokenTree::Delimited` that we are currently
            // within. That's what we are going to return.
            let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
            debug_assert_matches!(tree, TokenTree::Delimited(..));

            // Advance the token cursor through the entire delimited
            // sequence. After getting the `OpenDelim` we are *within* the
            // delimited sequence, i.e. at depth `d`. After getting the
            // matching `CloseDelim` we are *after* the delimited sequence,
            // i.e. at depth `d - 1`.
            let target_depth = self.token_cursor.stack.len() - 1;
            loop {
                // Advance one token at a time, so `TokenCursor::next()`
                // can capture these tokens if necessary.
                self.bump();
                if self.token_cursor.stack.len() == target_depth {
                    debug_assert!(self.token.kind.close_delim().is_some());
                    break;
                }
            }

            // Consume close delimiter
            self.bump();
            tree
        } else {
            assert!(!self.token.kind.is_close_delim_or_eof());
            let prev_spacing = self.token_spacing;
            self.bump();
            TokenTree::Token(self.prev_token, prev_spacing)
        }
    }
1406
1407    pub fn parse_tokens(&mut self) -> TokenStream {
1408        let mut result = Vec::new();
1409        loop {
1410            if self.token.kind.is_close_delim_or_eof() {
1411                break;
1412            } else {
1413                result.push(self.parse_token_tree());
1414            }
1415        }
1416        TokenStream::new(result)
1417    }
1418
1419    /// Evaluates the closure with restrictions in place.
1420    ///
1421    /// Afters the closure is evaluated, restrictions are reset.
1422    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
1423        let old = self.restrictions;
1424        self.restrictions = res;
1425        let res = f(self);
1426        self.restrictions = old;
1427        res
1428    }
1429
    /// Parses `pub` and `pub(in path)` plus shortcuts `pub(crate)` for `pub(in crate)`, `pub(self)`
    /// for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field), and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
    ///
    /// Absence of `pub` yields `VisibilityKind::Inherited` with an empty span.
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        // A `vis` metavariable expansion supplies an already-parsed visibility.
        if let Some(vis) = self
            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
        {
            return Ok(vis);
        }

        if !self.eat_keyword(exp!(Pub)) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseParen)
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
1503
1504    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
1505    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1506        self.bump(); // `(`
1507        let path = self.parse_path(PathStyle::Mod)?;
1508        self.expect(exp!(CloseParen))?; // `)`
1509
1510        let path_str = pprust::path_to_string(&path);
1511        self.dcx()
1512            .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
1513
1514        Ok(())
1515    }
1516
1517    /// Parses `extern string_literal?`.
1518    fn parse_extern(&mut self, case: Case) -> Extern {
1519        if self.eat_keyword_case(exp!(Extern), case) {
1520            let mut extern_span = self.prev_token.span;
1521            let abi = self.parse_abi();
1522            if let Some(abi) = abi {
1523                extern_span = extern_span.to(abi.span);
1524            }
1525            Extern::from_abi(abi, extern_span)
1526        } else {
1527            Extern::None
1528        }
1529    }
1530
1531    /// Parses a string literal as an ABI spec.
1532    fn parse_abi(&mut self) -> Option<StrLit> {
1533        match self.parse_str_lit() {
1534            Ok(str_lit) => Some(str_lit),
1535            Err(Some(lit)) => match lit.kind {
1536                ast::LitKind::Err(_) => None,
1537                _ => {
1538                    self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
1539                    None
1540                }
1541            },
1542            Err(None) => None,
1543        }
1544    }
1545
1546    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
1547        &mut self,
1548        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
1549    ) -> PResult<'a, R> {
1550        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
1551        // `ForceCollect::Yes`
1552        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
1553            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
1554        })
1555    }
1556
1557    /// Checks for `::` or, potentially, `:::` and then look ahead after it.
1558    fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
1559        if self.check(exp!(PathSep)) {
1560            if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
1561                debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
1562                self.look_ahead(2, looker)
1563            } else {
1564                self.look_ahead(1, looker)
1565            }
1566        } else {
1567            false
1568        }
1569    }
1570
1571    /// `::{` or `::*`
1572    fn is_import_coupler(&mut self) -> bool {
1573        self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star))
1574    }
1575
1576    // Debug view of the parser's token stream, up to `{lookahead}` tokens.
1577    // Only used when debugging.
1578    #[allow(unused)]
1579    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
1580        fmt::from_fn(move |f| {
1581            let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of
1582
1583            // we don't need N spans, but we want at least one, so print all of prev_token
1584            dbg_fmt.field("prev_token", &self.prev_token);
1585            let mut tokens = vec![];
1586            for i in 0..lookahead {
1587                let tok = self.look_ahead(i, |tok| tok.kind);
1588                let is_eof = tok == TokenKind::Eof;
1589                tokens.push(tok);
1590                if is_eof {
1591                    // Don't look ahead past EOF.
1592                    break;
1593                }
1594            }
1595            dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
1596            dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);
1597
1598            // some fields are interesting for certain values, as they relate to macro parsing
1599            if let Some(subparser) = self.subparser_name {
1600                dbg_fmt.field("subparser_name", &subparser);
1601            }
1602            if let Recovery::Forbidden = self.recovery {
1603                dbg_fmt.field("recovery", &self.recovery);
1604            }
1605
1606            // imply there's "more to know" than this view
1607            dbg_fmt.finish_non_exhaustive()
1608        })
1609    }
1610
    /// Empties the set of token types recorded as expected at the current
    /// position (`self.expected_token_types`).
    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }
1614
    /// Returns the number of `bump` calls made so far, which serves as an
    /// approximate position within the token stream.
    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
1618
    /// For interpolated `self.token`, returns a span of the fragment to which
    /// the interpolated token refers. For all other tokens this is just a
    /// regular span. It is particularly important to use this for identifiers
    /// and lifetimes for which spans affect name resolution and edition
    /// checks. Note that keywords are also identifiers, so they should use
    /// this if they keep spans or perform edition checks.
    pub fn token_uninterpolated_span(&self) -> Span {
        match &self.token.kind {
            // Interpolated identifier/lifetime: use the captured ident's span.
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            // Invisible metavar delimiter: use the span of the token just
            // after it, i.e. the first token of the captured fragment.
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span),
            _ => self.token.span,
        }
    }
1632
    /// Like `token_uninterpolated_span`, but works on `self.prev_token`.
    pub fn prev_token_uninterpolated_span(&self) -> Span {
        match &self.prev_token.kind {
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            // If the previous token was an invisible metavar open delimiter,
            // `look_ahead(0, ..)` (the current token) is the first token of
            // the captured fragment.
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span),
            _ => self.prev_token.span,
        }
    }
1641}
1642
// Metavar captures of various kinds.
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    /// A single token tree (e.g. a `tt` capture).
    Tt(TokenTree),
    /// An identifier, with a flag recording whether it was written raw
    /// (`r#ident`).
    Ident(Ident, IdentIsRaw),
    /// A lifetime; the raw flag covers raw lifetimes (`'r#lt`).
    Lifetime(Ident, IdentIsRaw),
    Item(P<ast::Item>),
    Block(P<ast::Block>),
    Stmt(P<ast::Stmt>),
    /// A pattern, plus which `pat` metavar flavor captured it.
    Pat(P<ast::Pat>, NtPatKind),
    /// An expression, plus which `expr` metavar flavor captured it.
    Expr(P<ast::Expr>, NtExprKind),
    /// A literal, represented as an expression.
    Literal(P<ast::Expr>),
    Ty(P<ast::Ty>),
    /// An attribute's meta item (e.g. a `meta` capture).
    Meta(P<ast::AttrItem>),
    Path(P<ast::Path>),
    Vis(P<ast::Visibility>),
}
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy