// rustc_parse/parser/mod.rs

1pub mod attr;
2mod attr_wrapper;
3mod diagnostics;
4mod expr;
5mod generics;
6mod item;
7mod nonterminal;
8mod pat;
9mod path;
10mod stmt;
11pub mod token_type;
12mod ty;
13
14// Parsers for non-functionlike builtin macros are defined in rustc_parse so they can be used by
15// both rustc_builtin_macros and rustfmt.
16pub mod asm;
17pub mod cfg_select;
18
19use std::assert_matches::debug_assert_matches;
20use std::{fmt, mem, slice};
21
22use attr_wrapper::{AttrWrapper, UsePreAttrPos};
23pub use diagnostics::AttemptLocalParseRecovery;
24pub(crate) use expr::ForbiddenLetReason;
25pub(crate) use item::FnParseMode;
26pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
27use path::PathStyle;
28use rustc_ast::ptr::P;
29use rustc_ast::token::{
30    self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
31};
32use rustc_ast::tokenstream::{
33    ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor,
34};
35use rustc_ast::util::case::Case;
36use rustc_ast::{
37    self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
38    DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
39    Visibility, VisibilityKind,
40};
41use rustc_ast_pretty::pprust;
42use rustc_data_structures::fx::FxHashMap;
43use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
44use rustc_index::interval::IntervalSet;
45use rustc_session::parse::ParseSess;
46use rustc_span::{Ident, Span, Symbol, kw, sym};
47use thin_vec::ThinVec;
48use token_type::TokenTypeSet;
49pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
50use tracing::debug;
51
52use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
53use crate::exp;
54
// Unit tests for this module.
#[cfg(test)]
mod tests;

// Ideally, these tests would be in `rustc_ast`. But they depend on having a
// parser, so they are here.
#[cfg(test)]
mod tokenstream {
    mod tests;
}
64
bitflags::bitflags! {
    /// Restrictions applied while parsing.
    ///
    /// The parser maintains a bitset of restrictions it will honor while
    /// parsing. This is essentially used as a way of tracking state of what
    /// is being parsed and to change behavior based on that.
    ///
    /// (The `bitflags!` macro generates a `u8`-backed set type; restrictions
    /// can therefore be combined with `|` and queried with `contains`.)
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        /// Restricts expressions for use in statement position.
        ///
        /// When expressions are used in various places, like statements or
        /// match arms, this is used to stop parsing once certain tokens are
        /// reached.
        ///
        /// For example, `if true {} & 1` with `STMT_EXPR` in effect is parsed
        /// as two separate expression statements (`if` and a reference to 1).
        /// Otherwise it is parsed as a bitwise AND where `if` is on the left
        /// and 1 is on the right.
        const STMT_EXPR         = 1 << 0;
        /// Do not allow struct literals.
        ///
        /// There are several places in the grammar where we don't want to
        /// allow struct literals because they can require lookahead, or
        /// otherwise could be ambiguous or cause confusion. For example,
        /// `if Foo {} {}` isn't clear if it is `Foo{}` struct literal, or
        /// just `Foo` is the condition, followed by a consequent block,
        /// followed by an empty block.
        ///
        /// See [RFC 92](https://rust-lang.github.io/rfcs/0092-struct-grammar.html).
        const NO_STRUCT_LITERAL = 1 << 1;
        /// Used to provide better error messages for const generic arguments.
        ///
        /// An un-braced const generic argument is limited to a very small
        /// subset of expressions. This is used to detect the situation where
        /// an expression outside of that subset is used, and to suggest to
        /// wrap the expression in braces.
        const CONST_EXPR        = 1 << 2;
        /// Allows `let` expressions.
        ///
        /// `let pattern = scrutinee` is parsed as an expression, but it is
        /// only allowed in let chains (`if` and `while` conditions).
        /// Otherwise it is not an expression (note that `let` in statement
        /// positions is treated as a `StmtKind::Let` statement, which has a
        /// slightly different grammar).
        const ALLOW_LET         = 1 << 3;
        /// Used to detect a missing `=>` in a match guard.
        ///
        /// This is used for error handling in a match guard to give a better
        /// error message if the `=>` is missing. It is set when parsing the
        /// guard expression.
        const IN_IF_GUARD       = 1 << 4;
        /// Used to detect the incorrect use of expressions in patterns.
        ///
        /// This is used for error handling while parsing a pattern. During
        /// error recovery, this will be set to try to parse the pattern as an
        /// expression, but halts parsing the expression when reaching certain
        /// tokens like `=`.
        const IS_PAT            = 1 << 5;
    }
}
125
/// How to treat semicolons during statement-level error recovery.
/// NOTE(review): the consumers of this enum are outside this chunk (likely
/// `diagnostics`/`stmt`) -- confirm the exact semantics of each variant there.
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}
132
/// How to treat block boundaries during error recovery.
/// NOTE(review): used by recovery code outside this chunk -- confirm variant
/// semantics at the use sites.
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}
138
/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}
146
/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
///
/// This is a macro (rather than a method) because it `return`s from the
/// *caller* when recovery fires; `$self` is the `Parser` in scope at the
/// invocation site.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        // Only fire when recovery is permitted both by the caller and by the
        // parser itself, and a `ty` metavariable is directly followed by `::`
        // (checked past the metavar's closing invisible delimiter).
        if $allow_qpath_recovery
            && $self.may_recover()
            && let Some(mv_kind) = $self.token.is_metavar_seq()
            && let token::MetaVarKind::Ty { .. } = mv_kind
            && $self.check_noexpect_past_close_delim(&token::PathSep)
        {
            // Reparse the type, then move to recovery.
            let ty = $self
                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
                .expect("metavar seq ty");

            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
166
/// Whether the parser may recover from broken code.
///
/// See [`Parser::may_recover`] and the `Parser::recovery` field: recovery is
/// disabled while parsing macro arguments (#103534) and while reparsing
/// pasted macro arguments.
#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}
172
/// The Rust parser. Cloned frequently (e.g. when speculatively matching
/// declarative macro rules), so its size is asserted below.
#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token.
    token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    /// NOTE(review): presumably controls token capturing in the presence of
    /// `cfg`/`cfg_attr` attributes (see `attr_wrapper`) -- confirm at use sites.
    pub capture_cfg: bool,
    /// The [`Restrictions`] currently in effect.
    restrictions: Restrictions,
    /// Token types that would have been accepted at the current position;
    /// accumulated by the `check*`/`eat*` methods and used to build
    /// "expected ..." diagnostics.
    expected_token_types: TokenTypeSet,
    /// Cursor over the `TokenStream` being parsed.
    token_cursor: TokenCursor,
    // The number of calls to `bump`, i.e. the position in the token stream.
    num_bump_calls: u32,
    // During parsing we may sometimes need to "unglue" a glued token into two
    // or three component tokens (e.g. `>>` into `>` and `>`, or `>>=` into `>`
    // and `>` and `=`), so the parser can consume them one at a time. This
    // process bypasses the normal capturing mechanism (e.g. `num_bump_calls`
    // will not be incremented), since the "unglued" tokens due not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming all the component tokens, this is not an issue,
    // because we'll end up capturing the single "glued" token.
    //
    // However, sometimes we may want to capture not all of the original
    // token. For example, capturing the `Vec<u8>` in `Option<Vec<u8>>`
    // requires us to unglue the trailing `>>` token. The `break_last_token`
    // field is used to track these tokens. They get appended to the captured
    // stream when we evaluate a `LazyAttrTokenStream`.
    //
    // This value is always 0, 1, or 2. It can only reach 2 when splitting
    // `>>=` or `<<=`.
    break_last_token: u32,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u16,
    /// NOTE(review): apparently the current depth of angle-bracketed generic
    /// argument lists; maintained by path-parsing code outside this chunk.
    angle_bracket_nesting: u16,

    /// Span of the last token we reported as unexpected, used by
    /// `expect_one_of` to avoid erroring twice on the same token.
    last_unexpected_token_span: Option<Span>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
    /// State for `Parser::collect_tokens` (see [`CaptureState`]).
    capture_state: CaptureState,
    /// This allows us to recover when the user forget to add braces around
    /// multiple statements in the closure body.
    current_closure: Option<ClosureSpans>,
    /// Whether the parser is allowed to do recovery.
    /// This is disabled when parsing macro arguments, see #103534
    recovery: Recovery,
}
226
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
// nonterminals. Make sure it doesn't unintentionally get bigger. We only check a few arches
// though, because `TokenTypeSet(u128)` alignment varies on others, changing the total size.
// (Compile-time assertion: adding a field that grows `Parser` past 288 bytes fails the build.)
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
232
/// Stores span information about a closure.
#[derive(Clone, Debug)]
struct ClosureSpans {
    /// Span of the entire closure expression.
    whole_closure: Span,
    /// Span of the `|` closing the parameter list.
    closing_pipe: Span,
    /// Span of the closure body. NOTE(review): exact extents are set by the
    /// closure-parsing code outside this chunk -- confirm there.
    body: Span,
}
240
/// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where
/// we will never need an `AttrTokenStream`.
#[derive(Copy, Clone, Debug)]
enum Capturing {
    /// We aren't performing any capturing - this is the default mode.
    No,
    /// We are capturing tokens.
    Yes,
}
251
// This state is used by `Parser::collect_tokens`.
#[derive(Clone, Debug)]
struct CaptureState {
    /// Whether capturing is currently active.
    capturing: Capturing,
    /// Pending replacements to apply to the captured token stream.
    parser_replacements: Vec<ParserReplacement>,
    /// For each inner attribute, the parser range it covers.
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    // `IntervalSet` is good for perf because attrs are mostly added to this
    // set in contiguous ranges.
    seen_attrs: IntervalSet<AttrId>,
}
262
/// A sequence separator.
#[derive(Debug)]
struct SeqSep<'a> {
    /// The separator token. `None` means the elements are juxtaposed with no
    /// separator at all (see `SeqSep::none`).
    sep: Option<ExpTokenPair<'a>>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}
271
272impl<'a> SeqSep<'a> {
273    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
274        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
275    }
276
277    fn none() -> SeqSep<'a> {
278        SeqSep { sep: None, trailing_sep_allowed: false }
279    }
280}
281
/// Whether the thing being parsed is followed by a type.
/// NOTE(review): consumers are outside this chunk (likely visibility/item
/// parsing) -- confirm the exact meaning at the use sites.
#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}
287
/// Whether a parsed sequence ended with a trailing separator
/// (returned by `parse_seq_to_before_tokens` and friends).
#[derive(Copy, Clone, Debug)]
enum Trailing {
    No,
    Yes,
}
293
294impl From<bool> for Trailing {
295    fn from(b: bool) -> Trailing {
296        if b { Trailing::Yes } else { Trailing::No }
297    }
298}
299
/// A categorization of tokens that deserve a special mention in diagnostics
/// (see `token_descr`). `None`-worthy ordinary tokens are not represented.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    // Expanded metavariables are wrapped in invisible delimiters which aren't
    // pretty-printed. In error messages we must handle these specially
    // otherwise we get confusing things in messages like "expected `(`, found
    // ``". It's better to say e.g. "expected `(`, found type metavariable".
    MetaVar(MetaVarKind),
}
313
impl TokenDescription {
    /// Classifies `token` for diagnostics, or returns `None` for ordinary
    /// tokens needing no special description.
    ///
    /// The arm order matters: the guarded `_ if ...` arms (reserved
    /// identifiers and keywords) are tried before the structural matches.
    pub(super) fn from_token(token: &Token) -> Option<Self> {
        match token.kind {
            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
            token::DocComment(..) => Some(TokenDescription::DocComment),
            token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
                Some(TokenDescription::MetaVar(kind))
            }
            _ => None,
        }
    }
}
328
329pub fn token_descr(token: &Token) -> String {
330    let s = pprust::token_to_string(token).to_string();
331
332    match (TokenDescription::from_token(token), &token.kind) {
333        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
334        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
335        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
336        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
337        // Deliberately doesn't print `s`, which is empty.
338        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
339        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
340        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
341        (None, _) => format!("`{s}`"),
342    }
343}
344
345impl<'a> Parser<'a> {
    /// Creates a new parser over `stream`, positioned at the first token.
    ///
    /// `subparser_name` is `Some` when the parser is parsing a macro call's
    /// interior rather than ordinary Rust source (see the field docs).
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            // Dummy tokens: replaced by the `bump` below.
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make parser point to the first token.
        parser.bump();

        // Change this from 1 back to 0 after the bump. This eases debugging of
        // `Parser::collect_tokens` because 0-indexed token positions are nicer
        // than 1-indexed token positions.
        parser.num_bump_calls = 0;

        parser
    }
386
387    #[inline]
388    pub fn recovery(mut self, recovery: Recovery) -> Self {
389        self.recovery = recovery;
390        self
391    }
392
393    #[inline]
394    fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
395        let old = mem::replace(&mut self.recovery, recovery);
396        let res = f(self);
397        self.recovery = old;
398        res
399    }
400
401    /// Whether the parser is allowed to recover from broken code.
402    ///
403    /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
404    /// is not allowed. All recovery done by the parser must be gated behind this check.
405    ///
406    /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
407    /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
408    #[inline]
409    fn may_recover(&self) -> bool {
410        matches!(self.recovery, Recovery::Allowed)
411    }
412
413    /// Version of [`unexpected`](Parser::unexpected) that "returns" any type in the `Ok`
414    /// (both those functions never return "Ok", and so can lie like that in the type).
415    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
416        match self.expect_one_of(&[], &[]) {
417            Err(e) => Err(e),
418            // We can get `Ok(true)` from `recover_closing_delimiter`
419            // which is called in `expected_one_of_not_found`.
420            Ok(_) => FatalError.raise(),
421        }
422    }
423
    /// Like [`Parser::unexpected_any`], but with the "return" type fixed to
    /// `()` for use in statement-like positions. Never actually returns `Ok`.
    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }
427
428    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
429    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
430        if self.expected_token_types.is_empty() {
431            if self.token == *exp.tok {
432                self.bump();
433                Ok(Recovered::No)
434            } else {
435                self.unexpected_try_recover(exp.tok)
436            }
437        } else {
438            self.expect_one_of(slice::from_ref(&exp), &[])
439        }
440    }
441
    /// Expect next token to be edible or inedible token. If edible,
    /// then consume it; if inedible, then return without consuming
    /// anything. Signal a fatal error if next token is unexpected.
    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair<'_>],
        inedible: &[ExpTokenPair<'_>],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
            // leave it in the input
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            // We already reported this very token as unexpected; abort rather
            // than emitting a duplicate error.
            FatalError.raise();
        } else {
            // Report the mismatch; `expected_one_of_not_found` may recover
            // (e.g. by inserting a missing closing delimiter).
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }
465
    // Public for rustfmt usage.
    /// Parses an identifier, emitting an error (but recovering) if it is a
    /// reserved, non-raw identifier.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }
470
471    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
472        let (ident, is_raw) = self.ident_or_err(recover)?;
473
474        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
475            let err = self.expected_ident_found_err();
476            if recover {
477                err.emit();
478            } else {
479                return Err(err);
480            }
481        }
482        self.bump();
483        Ok(ident)
484    }
485
486    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
487        match self.token.ident() {
488            Some(ident) => Ok(ident),
489            None => self.expected_ident_found(recover),
490        }
491    }
492
493    /// Checks if the next token is `tok`, and returns `true` if so.
494    ///
495    /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
496    /// encountered.
497    #[inline]
498    fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
499        let is_present = self.token == *exp.tok;
500        if !is_present {
501            self.expected_token_types.insert(exp.token_type);
502        }
503        is_present
504    }
505
    /// Like `check`, but does not record anything in `expected_token_types`
    /// (hence "noexpect"): a pure comparison with no diagnostic side effect.
    #[inline]
    #[must_use]
    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }
511
    // Check the first token after the delimiter that closes the current
    // delimited sequence. (Panics if used in the outermost token stream, which
    // has no delimiters.) It uses a clone of the relevant tree cursor to skip
    // past the entire `TokenTree::Delimited` in a single step, avoiding the
    // need for unbounded token lookahead.
    //
    // Primarily used when `self.token` matches `OpenInvisible(_))`, to look
    // ahead through the current metavar expansion.
    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
        // Clone the parent cursor so the real parser position is untouched.
        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
        tree_cursor.bump();
        matches!(
            tree_cursor.curr(),
            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
        )
    }
528
529    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
530    ///
531    /// the main purpose of this function is to reduce the cluttering of the suggestions list
532    /// which using the normal eat method could introduce in some cases.
533    #[inline]
534    #[must_use]
535    fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
536        let is_present = self.check_noexpect(tok);
537        if is_present {
538            self.bump()
539        }
540        is_present
541    }
542
543    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
544    #[inline]
545    #[must_use]
546    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
547        let is_present = self.check(exp);
548        if is_present {
549            self.bump()
550        }
551        is_present
552    }
553
554    /// If the next token is the given keyword, returns `true` without eating it.
555    /// An expectation is also added for diagnostics purposes.
556    #[inline]
557    #[must_use]
558    fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
559        let is_keyword = self.token.is_keyword(exp.kw);
560        if !is_keyword {
561            self.expected_token_types.insert(exp.token_type);
562        }
563        is_keyword
564    }
565
566    #[inline]
567    #[must_use]
568    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
569        if self.check_keyword(exp) {
570            true
571        } else if case == Case::Insensitive
572            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
573            // Do an ASCII case-insensitive match, because all keywords are ASCII.
574            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
575        {
576            true
577        } else {
578            false
579        }
580    }
581
582    /// If the next token is the given keyword, eats it and returns `true`.
583    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
584    // Public for rustc_builtin_macros and rustfmt usage.
585    #[inline]
586    #[must_use]
587    pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
588        let is_keyword = self.check_keyword(exp);
589        if is_keyword {
590            self.bump();
591        }
592        is_keyword
593    }
594
595    /// Eats a keyword, optionally ignoring the case.
596    /// If the case differs (and is ignored) an error is issued.
597    /// This is useful for recovery.
598    #[inline]
599    #[must_use]
600    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
601        if self.eat_keyword(exp) {
602            true
603        } else if case == Case::Insensitive
604            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
605            // Do an ASCII case-insensitive match, because all keywords are ASCII.
606            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
607        {
608            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
609            self.bump();
610            true
611        } else {
612            false
613        }
614    }
615
616    /// If the next token is the given keyword, eats it and returns `true`.
617    /// Otherwise, returns `false`. No expectation is added.
618    // Public for rustc_builtin_macros usage.
619    #[inline]
620    #[must_use]
621    pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
622        let is_keyword = self.token.is_keyword(kw);
623        if is_keyword {
624            self.bump();
625        }
626        is_keyword
627    }
628
629    /// If the given word is not a keyword, signals an error.
630    /// If the next token is not the given word, signals an error.
631    /// Otherwise, eats it.
632    pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
633        if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
634    }
635
    /// Consume a sequence produced by a metavar expansion, if present.
    /// Delegates to `eat_metavar_seq_with_matcher` with an exact-kind match.
    fn eat_metavar_seq<T>(
        &mut self,
        mv_kind: MetaVarKind,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
    }
644
645    /// A slightly more general form of `eat_metavar_seq`, for use with the
646    /// `MetaVarKind` variants that have parameters, where an exact match isn't
647    /// desired.
648    fn eat_metavar_seq_with_matcher<T>(
649        &mut self,
650        match_mv_kind: impl Fn(MetaVarKind) -> bool,
651        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
652    ) -> Option<T> {
653        if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
654            && match_mv_kind(mv_kind)
655        {
656            self.bump();
657
658            // Recovery is disabled when parsing macro arguments, so it must
659            // also be disabled when reparsing pasted macro arguments,
660            // otherwise we get inconsistent results (e.g. #137874).
661            let res = self.with_recovery(Recovery::Forbidden, |this| f(this));
662
663            let res = match res {
664                Ok(res) => res,
665                Err(err) => {
666                    // This can occur in unusual error cases, e.g. #139445.
667                    err.delay_as_bug();
668                    return None;
669                }
670            };
671
672            if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
673                && match_mv_kind(mv_kind)
674            {
675                self.bump();
676                Some(res)
677            } else {
678                // This can occur when invalid syntax is passed to a decl macro. E.g. see #139248,
679                // where the reparse attempt of an invalid expr consumed the trailing invisible
680                // delimiter.
681                self.dcx()
682                    .span_delayed_bug(self.token.span, "no close delim with reparsing {mv_kind:?}");
683                None
684            }
685        } else {
686            None
687        }
688    }
689
    /// Is the given keyword `kw` followed by a non-reserved identifier?
    /// (Pure lookahead: consumes nothing.)
    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_non_reserved_ident())
    }
694
695    #[inline]
696    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
697        if !ok {
698            self.expected_token_types.insert(token_type);
699        }
700        ok
701    }
702
    /// Checks for an identifier, recording `TokenType::Ident` as expected on a miss.
    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }
706
    /// Checks for the start of a path, recording `TokenType::Path` as expected on a miss.
    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }
710
    /// Checks for the start of a type, recording `TokenType::Type` as expected on a miss.
    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }
714
    /// Checks for the start of a const generic argument, recording
    /// `TokenType::Const` as expected on a miss.
    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }
718
719    fn check_const_closure(&self) -> bool {
720        self.is_keyword_ahead(0, &[kw::Const])
721            && self.look_ahead(1, |t| match &t.kind {
722                // async closures do not work with const closures, so we do not parse that here.
723                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
724                | token::OrOr
725                | token::Or => true,
726                _ => false,
727            })
728    }
729
730    fn check_inline_const(&self, dist: usize) -> bool {
731        self.is_keyword_ahead(dist, &[kw::Const])
732            && self.look_ahead(dist + 1, |t| match &t.kind {
733                token::OpenBrace => true,
734                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
735                _ => false,
736            })
737    }
738
    /// Checks to see if the next token is either `+` or `+=`.
    /// Otherwise returns `false`. Records `TokenType::Plus` as expected on a miss.
    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }
745
    /// Eats the expected token if it's present possibly breaking
    /// compound tokens like multi-character operators in process.
    /// Returns `true` if the token was eaten.
    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        // Exact match: just eat it.
        if self.token == *exp.tok {
            self.bump();
            return true;
        }
        // Try splitting off the first character of a compound token
        // (e.g. `>` out of `>>`).
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == *exp.tok => {
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `bump` will set this field to 0.
                self.break_last_token += 1;
                // Use the spacing of the glued token as the spacing of the
                // unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
777
    /// Eats `+` possibly breaking tokens like `+=` in process.
    /// Returns `true` if the `+` was eaten.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(exp!(Plus))
    }
782
783    /// Eats `&` possibly breaking tokens like `&&` in process.
784    /// Signals an error if `&` is not eaten.
785    fn expect_and(&mut self) -> PResult<'a, ()> {
786        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
787    }
788
789    /// Eats `|` possibly breaking tokens like `||` in process.
790    /// Signals an error if `|` was not eaten.
791    fn expect_or(&mut self) -> PResult<'a, ()> {
792        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
793    }
794
795    /// Eats `<` possibly breaking tokens like `<<` in process.
796    fn eat_lt(&mut self) -> bool {
797        let ate = self.break_and_eat(exp!(Lt));
798        if ate {
799            // See doc comment for `unmatched_angle_bracket_count`.
800            self.unmatched_angle_bracket_count += 1;
801            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
802        }
803        ate
804    }
805
    /// Eats `<` possibly breaking tokens like `<<` in process.
    /// Signals an error if `<` was not eaten.
    /// (Delegates to `eat_lt`, which also maintains `unmatched_angle_bracket_count`.)
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }
811
812    /// Eats `>` possibly breaking tokens like `>>` in process.
813    /// Signals an error if `>` was not eaten.
814    fn expect_gt(&mut self) -> PResult<'a, ()> {
815        if self.break_and_eat(exp!(Gt)) {
816            // See doc comment for `unmatched_angle_bracket_count`.
817            if self.unmatched_angle_bracket_count > 0 {
818                self.unmatched_angle_bracket_count -= 1;
819                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
820            }
821            Ok(())
822        } else {
823            self.unexpected()
824        }
825    }
826
827    /// Checks if the next token is contained within `closes`, and returns `true` if so.
828    fn expect_any_with_type(
829        &mut self,
830        closes_expected: &[ExpTokenPair<'_>],
831        closes_not_expected: &[&TokenKind],
832    ) -> bool {
833        closes_expected.iter().any(|&close| self.check(close))
834            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
835    }
836
    /// Parses a sequence until the specified delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    ///
    /// Returns the parsed elements, whether a trailing separator was present,
    /// and whether error recovery happened while looking for a separator.
    /// None of the close tokens are consumed.
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep<'_>,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            // Stop at any close delimiter or EOF, even one we weren't asked
            // for, rather than looping forever on malformed input.
            if self.token.kind.is_close_delim_or_eof() {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    // no separator for the first element
                    first = false;
                } else {
                    // check for separator
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(exp.tok);

                            match self.current_closure.take() {
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    // Finding a semicolon instead of a comma
                                    // after a closure body indicates that the
                                    // closure body may be a block but the user
                                    // forgot to put braces around its
                                    // statements.

                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // Attempt to keep parsing if it was a similar separator.
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            // If this was a missing `@` in a binding pattern
                            // bail with a suggestion
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to keep parsing if it was an omitted separator.
                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    for xx in &e.children {
                                        // Propagate the help message from sub error `e` to main
                                        // error `expect_err`.
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        // We will try to recover in
                                        // `maybe_recover_struct_lit_bad_delims`.
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // A close token right after a separator means the separator was a
            // trailing one (if allowed).
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }
977
    /// Error recovery for `|x| stmt; stmt;` inside a sequence: the user likely
    /// meant a braced closure body. Skips the trailing statements, then turns
    /// `expect_err` into a "closure bodies ... must be surrounded by braces"
    /// diagnostic with notes and a braces suggestion, and emits it.
    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        // Consume the statements that followed the closure so parsing can
        // resume at a sensible point; their own parse errors are cancelled.
        while self.eat(exp!(Semi)) {
            let _ = self
                .parse_stmt_without_recovery(false, ForceCollect::No, false)
                .unwrap_or_else(|e| {
                    e.cancel();
                    None
                });
        }

        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        // First note: point at the `;` and the statement-ified closure body.
        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        // Second note: contrast what was parsed with what was likely intended.
        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        // Re-point the primary span at the closing `|` and the recovery point.
        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }
1037
    /// Parses a sequence, not including the delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    ///
    /// The close token itself is *not* consumed; see
    /// [`Parser::parse_seq_to_end`] for a variant that also eats it.
    fn parse_seq_to_before_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
    }
1049
1050    /// Parses a sequence, including only the closing delimiter. The function
1051    /// `f` must consume tokens until reaching the next separator or
1052    /// closing bracket.
1053    fn parse_seq_to_end<T>(
1054        &mut self,
1055        close: ExpTokenPair<'_>,
1056        sep: SeqSep<'_>,
1057        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1058    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
1059        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
1060        if matches!(recovered, Recovered::No) && !self.eat(close) {
1061            self.dcx().span_delayed_bug(
1062                self.token.span,
1063                "recovered but `parse_seq_to_before_end` did not give us the close token",
1064            );
1065        }
1066        Ok((val, trailing))
1067    }
1068
    /// Parses a sequence, including both delimiters. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    ///
    /// Errors if the opening delimiter is not the next token.
    fn parse_unspanned_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.expect(open)?;
        self.parse_seq_to_end(close, sep, f)
    }
1082
    /// Parses a comma-separated sequence, including both delimiters.
    /// A trailing comma is permitted.
    /// The function `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_delim_comma_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
    }
1094
    /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
    /// A trailing comma is permitted.
    /// The function `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
    }
1104
    /// Advance the parser by one token using provided token as the next one.
    /// Out-of-line wrapper around `inlined_bump_with`, presumably for use on
    /// non-hot call sites (see the note on `inlined_bump_with`).
    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }
1109
    /// This always-inlined version should only be used on hot code paths.
    ///
    /// Note: unlike `bump`, this does not reset `break_last_token`;
    /// `break_and_eat` relies on that when it advances onto the second half
    /// of a broken token.
    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        // Update the current and previous tokens.
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // Diagnostics.
        self.expected_token_types.clear();
    }
1120
    /// Advance the parser by one token, pulling the next token from the
    /// underlying token cursor.
    pub fn bump(&mut self) {
        // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
        // than `.0`/`.1` access.
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        // We got a token from the underlying cursor and no longer need to
        // worry about an unglued token. See `break_and_eat` for more details.
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        // Skippable invisible delimiters should have been filtered out by the
        // cursor before they reach the parser.
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
        ));
        self.inlined_bump_with(next)
    }
1141
    /// Look-ahead `dist` tokens of `self.token` and get access to that token there.
    /// When `dist == 0` then the current token is looked at. `Eof` will be
    /// returned if the look-ahead is any distance past the end of the tokens.
    ///
    /// `dist == 0` and `dist == 1` are handled by fast paths; larger distances
    /// fall back to cloning the token cursor and stepping it forward.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        // Typically around 98% of the `dist > 0` cases have `dist == 1`, so we
        // have a fast special case for that.
        if dist == 1 {
            // The index is zero because the tree cursor's index always points
            // to the next token to be gotten.
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    // Indexing stayed within the current token tree.
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            if !delim.skip() {
                                return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    // The tree cursor lookahead went (one) past the end of the
                    // current token tree. Try to return a close delimiter.
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        // We are not in the outermost token stream, so we have
                        // delimiters. Also, those delimiters are not skipped.
                        return looker(&Token::new(delim.as_close_token_kind(), span.close));
                    }
                }
            }
        }

        // Just clone the token cursor and use `next`, skipping delimiters as
        // necessary. Slow but simple. (Skippable invisible delimiters don't
        // count towards `dist`, matching what `bump` would see.)
        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            if matches!(
                token.kind,
                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }
1199
    /// Like `look_ahead`, but skips over token trees rather than tokens. Useful
    /// when looking past possible metavariable pasting sites.
    ///
    /// `dist` must be non-zero. Returns `None` if the look-ahead runs past the
    /// end of the current token stream.
    pub fn tree_look_ahead<R>(
        &self,
        dist: usize,
        looker: impl FnOnce(&TokenTree) -> R,
    ) -> Option<R> {
        assert_ne!(dist, 0);
        self.token_cursor.curr.look_ahead(dist - 1).map(looker)
    }
1210
1211    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
1212    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
1213        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
1214    }
1215
    /// Parses coroutine-ness: `async`, `gen`, `async gen`, or nothing.
    /// `gen` (and the `gen` in `async gen`) is only eaten when the relevant
    /// token span is edition 2024 or later.
    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token_uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
            // error if edition <= 2024, like we do with async and edition <= 2018?
            if self.token_uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token_uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token_uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }
1250
    /// Parses fn unsafety: `unsafe`, `safe` or nothing.
    /// The span stored in the result is that of the eaten keyword.
    fn parse_safety(&mut self, case: Case) -> Safety {
        if self.eat_keyword_case(exp!(Unsafe), case) {
            Safety::Unsafe(self.prev_token_uninterpolated_span())
        } else if self.eat_keyword_case(exp!(Safe), case) {
            Safety::Safe(self.prev_token_uninterpolated_span())
        } else {
            Safety::Default
        }
    }
1261
    /// Parses constness: `const` or nothing.
    /// (Non-closure variant of `parse_constness_`.)
    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }
1266
    /// Parses constness for closures (case sensitive, feature-gated).
    /// A parsed `const` is gated behind the `const_closures` feature.
    fn parse_closure_constness(&mut self) -> Const {
        let constness = self.parse_constness_(Case::Sensitive, true);
        if let Const::Yes(span) = constness {
            self.psess.gated_spans.gate(sym::const_closures, span);
        }
        constness
    }
1275
    /// Shared implementation of `parse_constness` and `parse_closure_constness`.
    /// Only eats `const` when the closure-ness of the lookahead matches
    /// `is_closure`, and never when `const` is followed by a block (which
    /// would be a const block / const item instead).
    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        // Avoid const blocks and const closures to be parsed as const items
        if (self.check_const_closure() == is_closure)
            && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token_uninterpolated_span())
        } else {
            Const::No
        }
    }
1287
    /// Parses inline const expressions: `const { ... }`.
    ///
    /// `span` is the caller-supplied start span for the resulting expression;
    /// `pat` indicates the const block appeared in pattern position, which is
    /// rejected with an error (the expression is still constructed, as
    /// `ExprKind::Err`).
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
        };
        let blk_span = anon_const.value.span;
        let kind = if pat {
            let guar = self
                .dcx()
                .struct_span_err(blk_span, "const blocks cannot be used as patterns")
                .with_help(
                    "use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead",
                )
                .emit();
            ExprKind::Err(guar)
        } else {
            ExprKind::ConstBlock(anon_const)
        };
        Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs))
    }
1311
1312    /// Parses mutability (`mut` or nothing).
1313    fn parse_mutability(&mut self) -> Mutability {
1314        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
1315    }
1316
1317    /// Parses reference binding mode (`ref`, `ref mut`, or nothing).
1318    fn parse_byref(&mut self) -> ByRef {
1319        if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
1320    }
1321
1322    /// Possibly parses mutability (`const` or `mut`).
1323    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
1324        if self.eat_keyword(exp!(Mut)) {
1325            Some(Mutability::Mut)
1326        } else if self.eat_keyword(exp!(Const)) {
1327            Some(Mutability::Not)
1328        } else {
1329            None
1330        }
1331    }
1332
1333    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
1334        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
1335        {
1336            if let Some(suffix) = suffix {
1337                self.expect_no_tuple_index_suffix(self.token.span, suffix);
1338            }
1339            self.bump();
1340            Ok(Ident::new(symbol, self.prev_token.span))
1341        } else {
1342            self.parse_ident_common(true)
1343        }
1344    }
1345
1346    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
1347        if let Some(args) = self.parse_delim_args_inner() {
1348            Ok(P(args))
1349        } else {
1350            self.unexpected_any()
1351        }
1352    }
1353
1354    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
1355        Ok(if let Some(args) = self.parse_delim_args_inner() {
1356            AttrArgs::Delimited(args)
1357        } else if self.eat(exp!(Eq)) {
1358            let eq_span = self.prev_token.span;
1359            AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
1360        } else {
1361            AttrArgs::Empty
1362        })
1363    }
1364
1365    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
1366        let delimited = self.check(exp!(OpenParen))
1367            || self.check(exp!(OpenBracket))
1368            || self.check(exp!(OpenBrace));
1369
1370        delimited.then(|| {
1371            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
1372                unreachable!()
1373            };
1374            DelimArgs { dspan, delim, tokens }
1375        })
1376    }
1377
    /// Parses a single token tree from the input.
    ///
    /// For a non-delimited token, the current token must not be a close
    /// delimiter or EOF (asserted below).
    pub fn parse_token_tree(&mut self) -> TokenTree {
        if self.token.kind.open_delim().is_some() {
            // Clone the `TokenTree::Delimited` that we are currently
            // within. That's what we are going to return.
            let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
            debug_assert_matches!(tree, TokenTree::Delimited(..));

            // Advance the token cursor through the entire delimited
            // sequence. After getting the `OpenDelim` we are *within* the
            // delimited sequence, i.e. at depth `d`. After getting the
            // matching `CloseDelim` we are *after* the delimited sequence,
            // i.e. at depth `d - 1`.
            let target_depth = self.token_cursor.stack.len() - 1;
            loop {
                // Advance one token at a time, so `TokenCursor::next()`
                // can capture these tokens if necessary.
                self.bump();
                if self.token_cursor.stack.len() == target_depth {
                    debug_assert!(self.token.kind.close_delim().is_some());
                    break;
                }
            }

            // Consume close delimiter
            self.bump();
            tree
        } else {
            assert!(!self.token.kind.is_close_delim_or_eof());
            let prev_spacing = self.token_spacing;
            self.bump();
            TokenTree::Token(self.prev_token, prev_spacing)
        }
    }
1412
1413    pub fn parse_tokens(&mut self) -> TokenStream {
1414        let mut result = Vec::new();
1415        loop {
1416            if self.token.kind.is_close_delim_or_eof() {
1417                break;
1418            } else {
1419                result.push(self.parse_token_tree());
1420            }
1421        }
1422        TokenStream::new(result)
1423    }
1424
1425    /// Evaluates the closure with restrictions in place.
1426    ///
1427    /// Afters the closure is evaluated, restrictions are reset.
1428    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
1429        let old = self.restrictions;
1430        self.restrictions = res;
1431        let res = f(self);
1432        self.restrictions = old;
1433        res
1434    }
1435
    /// Parses `pub` and `pub(in path)` plus shortcuts `pub(crate)` for `pub(in crate)`, `pub(self)`
    /// for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition) then
    /// it's not a tuple struct field, and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        // A `vis` metavariable fragment pastes an already-parsed visibility.
        if let Some(vis) = self
            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
        {
            return Ok(vis);
        }

        if !self.eat_keyword(exp!(Pub)) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseParen)
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
1509
1510    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
1511    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1512        self.bump(); // `(`
1513        let path = self.parse_path(PathStyle::Mod)?;
1514        self.expect(exp!(CloseParen))?; // `)`
1515
1516        let path_str = pprust::path_to_string(&path);
1517        self.dcx()
1518            .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
1519
1520        Ok(())
1521    }
1522
1523    /// Parses `extern string_literal?`.
1524    fn parse_extern(&mut self, case: Case) -> Extern {
1525        if self.eat_keyword_case(exp!(Extern), case) {
1526            let mut extern_span = self.prev_token.span;
1527            let abi = self.parse_abi();
1528            if let Some(abi) = abi {
1529                extern_span = extern_span.to(abi.span);
1530            }
1531            Extern::from_abi(abi, extern_span)
1532        } else {
1533            Extern::None
1534        }
1535    }
1536
1537    /// Parses a string literal as an ABI spec.
1538    fn parse_abi(&mut self) -> Option<StrLit> {
1539        match self.parse_str_lit() {
1540            Ok(str_lit) => Some(str_lit),
1541            Err(Some(lit)) => match lit.kind {
1542                ast::LitKind::Err(_) => None,
1543                _ => {
1544                    self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
1545                    None
1546                }
1547            },
1548            Err(None) => None,
1549        }
1550    }
1551
    /// Runs `f` and collects the tokens it consumed into the resulting AST
    /// node, passing an empty `AttrWrapper` so no outer attributes are
    /// involved. Collection is unconditional (`ForceCollect::Yes`) and no
    /// trailing token is claimed (`Trailing::No`).
    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
        // `ForceCollect::Yes`
        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
        })
    }
1562
1563    /// Checks for `::` or, potentially, `:::` and then look ahead after it.
1564    fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
1565        if self.check(exp!(PathSep)) {
1566            if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
1567                debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
1568                self.look_ahead(2, looker)
1569            } else {
1570                self.look_ahead(1, looker)
1571            }
1572        } else {
1573            false
1574        }
1575    }
1576
1577    /// `::{` or `::*`
1578    fn is_import_coupler(&mut self) -> bool {
1579        self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star))
1580    }
1581
1582    // Debug view of the parser's token stream, up to `{lookahead}` tokens.
1583    // Only used when debugging.
1584    #[allow(unused)]
1585    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
1586        fmt::from_fn(move |f| {
1587            let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of
1588
1589            // we don't need N spans, but we want at least one, so print all of prev_token
1590            dbg_fmt.field("prev_token", &self.prev_token);
1591            let mut tokens = vec![];
1592            for i in 0..lookahead {
1593                let tok = self.look_ahead(i, |tok| tok.kind);
1594                let is_eof = tok == TokenKind::Eof;
1595                tokens.push(tok);
1596                if is_eof {
1597                    // Don't look ahead past EOF.
1598                    break;
1599                }
1600            }
1601            dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
1602            dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);
1603
1604            // some fields are interesting for certain values, as they relate to macro parsing
1605            if let Some(subparser) = self.subparser_name {
1606                dbg_fmt.field("subparser_name", &subparser);
1607            }
1608            if let Recovery::Forbidden = self.recovery {
1609                dbg_fmt.field("recovery", &self.recovery);
1610            }
1611
1612            // imply there's "more to know" than this view
1613            dbg_fmt.finish_non_exhaustive()
1614        })
1615    }
1616
    /// Empties the parser's accumulated set of expected token types.
    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }
1620
    /// Returns the number of `bump` calls made so far, which serves as an
    /// approximate position within the token stream.
    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
1624
    /// For interpolated `self.token`, returns a span of the fragment to which
    /// the interpolated token refers. For all other tokens this is just a
    /// regular span. It is particularly important to use this for identifiers
    /// and lifetimes for which spans affect name resolution and edition
    /// checks. Note that keywords are also identifiers, so they should use
    /// this if they keep spans or perform edition checks.
    pub fn token_uninterpolated_span(&self) -> Span {
        match &self.token.kind {
            // Interpolated ident/lifetime: use the span of the carried
            // identifier, not the token's own span.
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            // Invisible open delimiter of a metavar: use the span of the next
            // token, i.e. the first token inside the invisible delimiters.
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span),
            _ => self.token.span,
        }
    }
1638
    /// Like `token_uninterpolated_span`, but works on `self.prev_token`.
    pub fn prev_token_uninterpolated_span(&self) -> Span {
        match &self.prev_token.kind {
            // Interpolated ident/lifetime: use the span of the carried
            // identifier, not the token's own span.
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            // Invisible open delimiter of a metavar: `look_ahead(0)` is the
            // current token, i.e. the one just after `prev_token`.
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span),
            _ => self.prev_token.span,
        }
    }
1647}
1648
// Metavar captures of various kinds.
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    /// A raw token tree capture.
    Tt(TokenTree),
    /// An identifier, with its rawness (`r#`) recorded.
    Ident(Ident, IdentIsRaw),
    /// A lifetime, with its rawness recorded.
    Lifetime(Ident, IdentIsRaw),
    /// A captured item.
    Item(P<ast::Item>),
    /// A captured block.
    Block(P<ast::Block>),
    /// A captured statement.
    Stmt(P<ast::Stmt>),
    /// A captured pattern, plus which pattern flavor was requested.
    Pat(P<ast::Pat>, NtPatKind),
    /// A captured expression, plus which expression flavor was requested.
    Expr(P<ast::Expr>, NtExprKind),
    /// A captured literal (represented as an expression).
    Literal(P<ast::Expr>),
    /// A captured type.
    Ty(P<ast::Ty>),
    /// A captured meta item (attribute contents).
    Meta(P<ast::AttrItem>),
    /// A captured path.
    Path(P<ast::Path>),
    /// A captured visibility.
    Vis(P<ast::Visibility>),
}
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy