pub mod attr;
mod attr_wrapper;
mod diagnostics;
mod expr;
mod generics;
mod item;
mod nonterminal;
mod pat;
mod path;
mod stmt;
pub mod token_type;
mod ty;

pub mod asm;
pub mod cfg_select;

use std::assert_matches::debug_assert_matches;
use std::{fmt, mem, slice};

use attr_wrapper::{AttrWrapper, UsePreAttrPos};
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
pub(crate) use item::{FnContext, FnParseMode};
pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
pub use path::PathStyle;
use rustc_ast::token::{
    self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
};
use rustc_ast::tokenstream::{
    ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor,
};
use rustc_ast::util::case::Case;
use rustc_ast::{
    self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
    DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
    Visibility, VisibilityKind,
};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
use rustc_index::interval::IntervalSet;
use rustc_session::parse::ParseSess;
use rustc_span::{Ident, Span, Symbol, kw, sym};
use thin_vec::ThinVec;
use token_type::TokenTypeSet;
pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
use tracing::debug;

use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
use crate::exp;

#[cfg(test)]
mod tests;

#[cfg(test)]
mod tokenstream {
    mod tests;
}

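// Restrictions applied while parsing expressions, used to disambiguate the
// grammar (e.g. `NO_STRUCT_LITERAL` forbids struct literals in positions such
// as `if` conditions where a `{` would be ambiguous).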
bitflags::bitflags! {
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        const STMT_EXPR = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR = 1 << 2;
        const ALLOW_LET = 1 << 3;
        const IN_IF_GUARD = 1 << 4;
        const IS_PAT = 1 << 5;
    }
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}

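/// Whether token collection should be forced even when no attributes that
/// would require the collected tokens are present.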
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}

#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            && let Some(mv_kind) = $self.token.is_metavar_seq()
            && let token::MetaVarKind::Ty { .. } = mv_kind
            && $self.check_noexpect_past_close_delim(&token::PathSep)
        {
            let ty = $self
                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
                .expect("metavar seq ty");

            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}

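/// Whether the parser is allowed to perform error recovery.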
#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}

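/// The Rust parser: a cursor over a `TokenStream` together with the state
/// needed for diagnostics, error recovery, and token collection.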
#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing of the current token.
    token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    /// Whether token capturing is forced for attribute targets so that
    /// `#[cfg]`/`#[cfg_attr]` attributes can be reprocessed from the captured
    /// tokens.
    pub capture_cfg: bool,
    restrictions: Restrictions,
    /// The token types expected so far, reported when the next token is not
    /// one of them.
    expected_token_types: TokenTypeSet,
    token_cursor: TokenCursor,
    /// The number of calls to `bump`, used as an approximate position in the
    /// token stream during token collection.
    num_bump_calls: u32,
    /// Incremented when a compound token is split by `break_and_eat` (e.g.
    /// `>>` into `>` and `>`), and reset on every `bump`. Used by token
    /// collection.
    break_last_token: u32,
    /// The number of `<` tokens eaten without a matching `>`; used to detect
    /// mismatched angle brackets during recovery.
    unmatched_angle_bracket_count: u16,
    angle_bracket_nesting: u16,

    /// The span of the last unexpected token, used to avoid reporting errors
    /// for the same token twice.
    last_unexpected_token_span: Option<Span>,
    /// If this parser is parsing a sub-stream (e.g. the tokens of an attribute
    /// or macro argument), its name, used in diagnostics.
    subparser_name: Option<&'static str>,
    /// State used while collecting tokens for attribute targets.
    capture_state: CaptureState,
    /// Spans of the most recently parsed closure, used to recover a closure
    /// body that is missing braces.
    current_closure: Option<ClosureSpans>,
    /// Whether the parser is allowed to apply error recovery.
    recovery: Recovery,
}

#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);

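/// Spans recorded for the most recently parsed closure, used to recover from
/// a closure body whose statements are missing surrounding braces (see
/// `recover_missing_braces_around_closure_body`).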
#[derive(Clone, Debug)]
struct ClosureSpans {
    whole_closure: Span,
    closing_pipe: Span,
    body: Span,
}

#[derive(Copy, Clone, Debug)]
enum Capturing {
    No,
    Yes,
}

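/// Parser state used while collecting tokens for attribute targets.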
#[derive(Clone, Debug)]
struct CaptureState {
    capturing: Capturing,
    parser_replacements: Vec<ParserReplacement>,
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    seen_attrs: IntervalSet<AttrId>,
}

#[derive(Debug)]
struct SeqSep<'a> {
    sep: Option<ExpTokenPair<'a>>,
    trailing_sep_allowed: bool,
}

impl<'a> SeqSep<'a> {
    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
    }

    fn none() -> SeqSep<'a> {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}

#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}

#[derive(Copy, Clone, Debug)]
pub enum Trailing {
    No,
    Yes,
}

impl From<bool> for Trailing {
    fn from(b: bool) -> Trailing {
        if b { Trailing::Yes } else { Trailing::No }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    MetaVar(MetaVarKind),
}

impl TokenDescription {
    pub(super) fn from_token(token: &Token) -> Option<Self> {
        match token.kind {
            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
            token::DocComment(..) => Some(TokenDescription::DocComment),
            token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
                Some(TokenDescription::MetaVar(kind))
            }
            _ => None,
        }
    }
}

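/// Returns a description of the given token for use in diagnostics, e.g.
/// "keyword `fn`" or "doc comment".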
pub fn token_descr(token: &Token) -> String {
    let s = pprust::token_to_string(token).to_string();

    match (TokenDescription::from_token(token), &token.kind) {
        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
        (None, _) => format!("`{s}`"),
    }
}

impl<'a> Parser<'a> {
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make the parser point at the first token.
        parser.bump();

        // Don't count the initial bump, so token positions used for token
        // collection are zero-based.
        parser.num_bump_calls = 0;

        parser
    }

    #[inline]
    pub fn recovery(mut self, recovery: Recovery) -> Self {
        self.recovery = recovery;
        self
    }

    #[inline]
    fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = mem::replace(&mut self.recovery, recovery);
        let res = f(self);
        self.recovery = old;
        res
    }

    #[inline]
    fn may_recover(&self) -> bool {
        matches!(self.recovery, Recovery::Allowed)
    }

    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            Ok(_) => FatalError.raise(),
        }
    }

    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }

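    /// Expects and consumes the token `exp`. Signals an error if the next
    /// token is not `exp`.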
    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
        if self.expected_token_types.is_empty() {
            if self.token == *exp.tok {
                self.bump();
                Ok(Recovered::No)
            } else {
                self.unexpected_try_recover(exp.tok)
            }
        } else {
            self.expect_one_of(slice::from_ref(&exp), &[])
        }
    }

    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair<'_>],
        inedible: &[ExpTokenPair<'_>],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }

    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }

    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
        let (ident, is_raw) = self.ident_or_err(recover)?;

        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
            let err = self.expected_ident_found_err();
            if recover {
                err.emit();
            } else {
                return Err(err);
            }
        }
        self.bump();
        Ok(ident)
    }

    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
        match self.token.ident() {
            Some(ident) => Ok(ident),
            None => self.expected_ident_found(recover),
        }
    }

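    /// Checks whether the current token is `exp` without consuming it. On
    /// failure, records `exp` as an expected token type for later diagnostics.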
    #[inline]
    pub fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
        let is_present = self.token == *exp.tok;
        if !is_present {
            self.expected_token_types.insert(exp.token_type);
        }
        is_present
    }

    #[inline]
    #[must_use]
    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }

    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
        tree_cursor.bump();
        matches!(
            tree_cursor.curr(),
            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
        )
    }

    #[inline]
    #[must_use]
    fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.check_noexpect(tok);
        if is_present {
            self.bump()
        }
        is_present
    }

    #[inline]
    #[must_use]
    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        let is_present = self.check(exp);
        if is_present {
            self.bump()
        }
        is_present
    }

    #[inline]
    #[must_use]
    fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
        let is_keyword = self.token.is_keyword(exp.kw);
        if !is_keyword {
            self.expected_token_types.insert(exp.token_type);
        }
        is_keyword
    }

    #[inline]
    #[must_use]
    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.check_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            true
        } else {
            false
        }
    }

    #[inline]
    #[must_use]
    pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
        let is_keyword = self.check_keyword(exp);
        if is_keyword {
            self.bump();
        }
        is_keyword
    }

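    /// Like `eat_keyword`, but when `case` is `Case::Insensitive` also accepts
    /// the keyword in the wrong case, emitting an error and eating it anyway.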
    #[inline]
    #[must_use]
    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.eat_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
            self.bump();
            true
        } else {
            false
        }
    }

    #[inline]
    #[must_use]
    pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
        let is_keyword = self.token.is_keyword(kw);
        if is_keyword {
            self.bump();
        }
        is_keyword
    }

    pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
        if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
    }

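    /// If the current token is an invisible-delimited sequence that originated
    /// from a metavariable of kind `mv_kind`, reparses its contents with `f`
    /// and consumes both invisible delimiters.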
    pub fn eat_metavar_seq<T>(
        &mut self,
        mv_kind: MetaVarKind,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
    }

    fn eat_metavar_seq_with_matcher<T>(
        &mut self,
        match_mv_kind: impl Fn(MetaVarKind) -> bool,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
            && match_mv_kind(mv_kind)
        {
            self.bump();

            let res = self.with_recovery(Recovery::Forbidden, |this| f(this));

            let res = match res {
                Ok(res) => res,
                Err(err) => {
                    err.delay_as_bug();
                    return None;
                }
            };

            if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
                && match_mv_kind(mv_kind)
            {
                self.bump();
                Some(res)
            } else {
                self.dcx().span_delayed_bug(
                    self.token.span,
                    format!("no close delim with reparsing {mv_kind:?}"),
                );
                None
            }
        } else {
            None
        }
    }

    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_non_reserved_ident())
    }

    #[inline]
    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
        if !ok {
            self.expected_token_types.insert(token_type);
        }
        ok
    }

    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }

    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
                | token::OrOr
                | token::Or => true,
                _ => false,
            })
    }

    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::OpenBrace => true,
                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
                _ => false,
            })
    }

    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }

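    /// Eats the expected token if it is present, possibly splitting a compound
    /// token (e.g. eating `&` out of `&&`, leaving the second `&` as the new
    /// current token). Returns `true` if the expected token was present.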
    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        if self.token == *exp.tok {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == *exp.tok => {
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                self.break_last_token += 1;
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }

    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(exp!(Plus))
    }

    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
    }

    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
    }

    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(exp!(Lt));
        if ate {
            self.unmatched_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }

    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }

    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Gt)) {
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }

    fn expect_any_with_type(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
    ) -> bool {
        closes_expected.iter().any(|&close| self.check(close))
            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
    }

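    /// Parses a sequence of items separated by `sep`, stopping (without
    /// consuming) at any of the given closing tokens. Returns the items,
    /// whether a trailing separator was seen, and whether error recovery
    /// occurred.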
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep<'_>,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            if self.token.kind.is_close_delim_or_eof() {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    first = false;
                } else {
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(exp.tok);

                            match self.current_closure.take() {
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    for xx in &e.children {
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }

    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        while self.eat(exp!(Semi)) {
            let _ = self
                .parse_stmt_without_recovery(false, ForceCollect::No, false)
                .unwrap_or_else(|e| {
                    e.cancel();
                    None
                });
        }

        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }

    fn parse_seq_to_before_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
    }

    fn parse_seq_to_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
        if matches!(recovered, Recovered::No) && !self.eat(close) {
            self.dcx().span_delayed_bug(
                self.token.span,
                "recovered but `parse_seq_to_before_end` did not give us the close token",
            );
        }
        Ok((val, trailing))
    }

    fn parse_unspanned_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.expect(open)?;
        self.parse_seq_to_end(close, sep, f)
    }

    fn parse_delim_comma_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
    }

    pub fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
    }

    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }

    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        self.expected_token_types.clear();
    }

    pub fn bump(&mut self) {
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
        ));
        self.inlined_bump_with(next)
    }

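    /// Applies `looker` to the token `dist` positions ahead of the current
    /// token (`dist == 0` is the current token) without advancing the parser.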
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        if dist == 1 {
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            if !delim.skip() {
                                return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        return looker(&Token::new(delim.as_close_token_kind(), span.close));
                    }
                }
            }
        }

        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            if matches!(
                token.kind,
                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }

    pub fn tree_look_ahead<R>(
        &self,
        dist: usize,
        looker: impl FnOnce(&TokenTree) -> R,
    ) -> Option<R> {
        assert_ne!(dist, 0);
        self.token_cursor.curr.look_ahead(dist - 1).map(looker)
    }

    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }

    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token_uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            if self.token_uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token_uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token_uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }

    fn parse_safety(&mut self, case: Case) -> Safety {
        if self.eat_keyword_case(exp!(Unsafe), case) {
            Safety::Unsafe(self.prev_token_uninterpolated_span())
        } else if self.eat_keyword_case(exp!(Safe), case) {
            Safety::Safe(self.prev_token_uninterpolated_span())
        } else {
            Safety::Default
        }
    }

    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }

    fn parse_closure_constness(&mut self) -> Const {
        let constness = self.parse_constness_(Case::Sensitive, true);
        if let Const::Yes(span) = constness {
            self.psess.gated_spans.gate(sym::const_closures, span);
        }
        constness
    }

    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        if (self.check_const_closure() == is_closure)
            && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token_uninterpolated_span())
        } else {
            Const::No
        }
    }

    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, Box<Expr>> {
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
        };
        let blk_span = anon_const.value.span;
        let kind = if pat {
            let guar = self
                .dcx()
                .struct_span_err(blk_span, "const blocks cannot be used as patterns")
                .with_help(
                    "use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead",
                )
                .emit();
            ExprKind::Err(guar)
        } else {
            ExprKind::ConstBlock(anon_const)
        };
        Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs))
    }

    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
    }

    fn parse_byref(&mut self) -> ByRef {
        if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
    }

    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
        if self.eat_keyword(exp!(Mut)) {
            Some(Mutability::Mut)
        } else if self.eat_keyword(exp!(Const)) {
            Some(Mutability::Not)
        } else {
            None
        }
    }

    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            if let Some(suffix) = suffix {
                self.expect_no_tuple_index_suffix(self.token.span, suffix);
            }
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(true)
        }
    }

    fn parse_delim_args(&mut self) -> PResult<'a, Box<DelimArgs>> {
        if let Some(args) = self.parse_delim_args_inner() {
            Ok(Box::new(args))
        } else {
            self.unexpected_any()
        }
    }

    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
        Ok(if let Some(args) = self.parse_delim_args_inner() {
            AttrArgs::Delimited(args)
        } else if self.eat(exp!(Eq)) {
            let eq_span = self.prev_token.span;
            let expr = self.parse_expr_force_collect()?;
            AttrArgs::Eq { eq_span, expr }
        } else {
            AttrArgs::Empty
        })
    }

    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
        let delimited = self.check(exp!(OpenParen))
            || self.check(exp!(OpenBracket))
            || self.check(exp!(OpenBrace));

        delimited.then(|| {
            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
                unreachable!()
            };
            DelimArgs { dspan, delim, tokens }
        })
    }

    pub fn parse_token_tree(&mut self) -> TokenTree {
        if self.token.kind.open_delim().is_some() {
            let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
            debug_assert_matches!(tree, TokenTree::Delimited(..));

            let target_depth = self.token_cursor.stack.len() - 1;

            if let Capturing::No = self.capture_state.capturing {
                self.token_cursor.curr.bump_to_end();
                self.bump();
                debug_assert_eq!(self.token_cursor.stack.len(), target_depth);
            } else {
                loop {
                    self.bump();
                    if self.token_cursor.stack.len() == target_depth {
                        break;
                    }
                }
            }
            debug_assert!(self.token.kind.close_delim().is_some());

            self.bump();
            tree
        } else {
            assert!(!self.token.kind.is_close_delim_or_eof());
            let prev_spacing = self.token_spacing;
            self.bump();
            TokenTree::Token(self.prev_token, prev_spacing)
        }
    }

    pub fn parse_tokens(&mut self) -> TokenStream {
        let mut result = Vec::new();
        loop {
            if self.token.kind.is_close_delim_or_eof() {
                break;
            } else {
                result.push(self.parse_token_tree());
            }
        }
        TokenStream::new(result)
    }

    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = self.restrictions;
        self.restrictions = res;
        let res = f(self);
        self.restrictions = old;
        res
    }

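    /// Parses a visibility qualifier: nothing (inherited), `pub`, `pub(crate)`,
    /// `pub(self)`, `pub(super)`, or `pub(in path)`. `fbt` indicates whether a
    /// type can follow, in which case a `(` after `pub` may belong to a tuple
    /// type rather than a visibility restriction.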
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        if let Some(vis) = self
            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
        {
            return Ok(vis);
        }

        if !self.eat_keyword(exp!(Pub)) {
            // Default (inherited) visibility.
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: Box::new(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseParen)
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: Box::new(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Recover from e.g. `pub(something_invalid)` only when no type
                // can follow the visibility.
                self.recover_incorrect_vis_restriction()?;
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }

    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
        self.bump();
        let path = self.parse_path(PathStyle::Mod)?;
        self.expect(exp!(CloseParen))?;

        let path_str = pprust::path_to_string(&path);
        self.dcx()
            .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });

        Ok(())
    }

    fn parse_extern(&mut self, case: Case) -> Extern {
        if self.eat_keyword_case(exp!(Extern), case) {
            let mut extern_span = self.prev_token.span;
            let abi = self.parse_abi();
            if let Some(abi) = abi {
                extern_span = extern_span.to(abi.span);
            }
            Extern::from_abi(abi, extern_span)
        } else {
            Extern::None
        }
    }

    fn parse_abi(&mut self) -> Option<StrLit> {
        match self.parse_str_lit() {
            Ok(str_lit) => Some(str_lit),
            Err(Some(lit)) => match lit.kind {
                ast::LitKind::Err(_) => None,
                _ => {
                    self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
                    None
                }
            },
            Err(None) => None,
        }
    }

    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
        })
    }

    fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
        if self.check(exp!(PathSep)) {
            if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
                debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
                self.look_ahead(2, looker)
            } else {
                self.look_ahead(1, looker)
            }
        } else {
            false
        }
    }

    fn is_import_coupler(&mut self) -> bool {
        self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star))
    }

    #[allow(unused)]
    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
        fmt::from_fn(move |f| {
            let mut dbg_fmt = f.debug_struct("Parser");
            dbg_fmt.field("prev_token", &self.prev_token);
            let mut tokens = vec![];
            for i in 0..lookahead {
                let tok = self.look_ahead(i, |tok| tok.kind);
                let is_eof = tok == TokenKind::Eof;
                tokens.push(tok);
                if is_eof {
                    break;
                }
            }
            dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
            dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);

            if let Some(subparser) = self.subparser_name {
                dbg_fmt.field("subparser_name", &subparser);
            }
            if let Recovery::Forbidden = self.recovery {
                dbg_fmt.field("recovery", &self.recovery);
            }

            dbg_fmt.finish_non_exhaustive()
        })
    }

    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }

    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }

    pub fn token_uninterpolated_span(&self) -> Span {
        match &self.token.kind {
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span),
            _ => self.token.span,
        }
    }

    pub fn prev_token_uninterpolated_span(&self) -> Span {
        match &self.prev_token.kind {
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span),
            _ => self.prev_token.span,
        }
    }
}

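/// The result produced when the parser reparses a macro metavariable: either a
/// raw token tree or an already-parsed AST fragment of the corresponding kind.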
#[derive(Clone, Debug)]
pub enum ParseNtResult {
    Tt(TokenTree),
    Ident(Ident, IdentIsRaw),
    Lifetime(Ident, IdentIsRaw),
    Item(Box<ast::Item>),
    Block(Box<ast::Block>),
    Stmt(Box<ast::Stmt>),
    Pat(Box<ast::Pat>, NtPatKind),
    Expr(Box<ast::Expr>, NtExprKind),
    Literal(Box<ast::Expr>),
    Ty(Box<ast::Ty>),
    Meta(Box<ast::AttrItem>),
    Path(Box<ast::Path>),
    Vis(Box<ast::Visibility>),
}