pub mod attr;
mod attr_wrapper;
mod diagnostics;
mod expr;
mod generics;
mod item;
mod nonterminal;
mod pat;
mod path;
mod stmt;
pub mod token_type;
mod ty;

pub mod asm;
pub mod cfg_select;

use std::assert_matches::debug_assert_matches;
use std::{fmt, mem, slice};

use attr_wrapper::{AttrWrapper, UsePreAttrPos};
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
pub(crate) use item::FnParseMode;
pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
use path::PathStyle;
use rustc_ast::ptr::P;
use rustc_ast::token::{
    self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
};
use rustc_ast::tokenstream::{
    ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor,
};
use rustc_ast::util::case::Case;
use rustc_ast::{
    self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
    DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
    Visibility, VisibilityKind,
};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
use rustc_index::interval::IntervalSet;
use rustc_session::parse::ParseSess;
use rustc_span::{Ident, Span, Symbol, kw, sym};
use thin_vec::ThinVec;
use token_type::TokenTypeSet;
pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
use tracing::debug;

use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
use crate::exp;

#[cfg(test)]
mod tests;

#[cfg(test)]
mod tokenstream {
    mod tests;
}

bitflags::bitflags! {
    #[derive(Clone, Copy, Debug)]
    struct Restrictions: u8 {
        const STMT_EXPR = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR = 1 << 2;
        const ALLOW_LET = 1 << 3;
        const IN_IF_GUARD = 1 << 4;
        const IS_PAT = 1 << 5;
    }
}
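
// Illustrative sketch (not part of the original source): these flags are typically
// set for the duration of a sub-parse via `with_res` (defined below), e.g.
//
//     self.with_res(Restrictions::STMT_EXPR | Restrictions::NO_STRUCT_LITERAL, |this| {
//         // parse the sub-expression with struct literals forbidden
//     })
//
// and are read back by the expression parser to decide what is allowed.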

#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}

#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}

#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            && let Some(mv_kind) = $self.token.is_metavar_seq()
            && let token::MetaVarKind::Ty { .. } = mv_kind
            && $self.check_noexpect_past_close_delim(&token::PathSep)
        {
            let ty = $self
                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
                .expect("metavar seq ty");

            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
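
// Illustrative sketch (not part of the original source): this macro is meant to be
// invoked at the top of a `parse_*` method that may see a `$ty` metavariable followed
// by `::`, e.g.
//
//     maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
//
// It early-returns from the enclosing method with the result of the qualified-path
// recovery when that situation is detected.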

#[derive(Clone, Copy, Debug)]
pub enum Recovery {
    Allowed,
    Forbidden,
}

#[derive(Clone)]
pub struct Parser<'a> {
    pub psess: &'a ParseSess,
    pub token: Token,
    token_spacing: Spacing,
    pub prev_token: Token,
    pub capture_cfg: bool,
    restrictions: Restrictions,
    expected_token_types: TokenTypeSet,
    token_cursor: TokenCursor,
    num_bump_calls: u32,
    break_last_token: u32,
    unmatched_angle_bracket_count: u16,
    angle_bracket_nesting: u16,

    last_unexpected_token_span: Option<Span>,
    subparser_name: Option<&'static str>,
    capture_state: CaptureState,
    current_closure: Option<ClosureSpans>,
    recovery: Recovery,
}

#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);

#[derive(Clone, Debug)]
struct ClosureSpans {
    whole_closure: Span,
    closing_pipe: Span,
    body: Span,
}

#[derive(Copy, Clone, Debug)]
enum Capturing {
    No,
    Yes,
}

#[derive(Clone, Debug)]
struct CaptureState {
    capturing: Capturing,
    parser_replacements: Vec<ParserReplacement>,
    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
    seen_attrs: IntervalSet<AttrId>,
}

#[derive(Debug)]
struct SeqSep<'a> {
    sep: Option<ExpTokenPair<'a>>,
    trailing_sep_allowed: bool,
}

impl<'a> SeqSep<'a> {
    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
    }

    fn none() -> SeqSep<'a> {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}

#[derive(Debug)]
pub enum FollowedByType {
    Yes,
    No,
}

#[derive(Copy, Clone, Debug)]
enum Trailing {
    No,
    Yes,
}

impl From<bool> for Trailing {
    fn from(b: bool) -> Trailing {
        if b { Trailing::Yes } else { Trailing::No }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,

    MetaVar(MetaVarKind),
}

impl TokenDescription {
    pub(super) fn from_token(token: &Token) -> Option<Self> {
        match token.kind {
            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
            token::DocComment(..) => Some(TokenDescription::DocComment),
            token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
                Some(TokenDescription::MetaVar(kind))
            }
            _ => None,
        }
    }
}

pub fn token_descr(token: &Token) -> String {
    let s = pprust::token_to_string(token).to_string();

    match (TokenDescription::from_token(token), &token.kind) {
        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
        (None, _) => format!("`{s}`"),
    }
}
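
// Illustrative sketch (not part of the original source): the strings built above feed
// "expected ..., found ..." style diagnostics. For instance, a token holding the `fn`
// keyword would be described as "keyword `fn`", while an ordinary identifier `foo`
// comes out as plain "`foo`".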

impl<'a> Parser<'a> {
    pub fn new(
        psess: &'a ParseSess,
        stream: TokenStream,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            psess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_token_types: TokenTypeSet::new(),
            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
            num_bump_calls: 0,
            break_last_token: 0,
            unmatched_angle_bracket_count: 0,
            angle_bracket_nesting: 0,
            last_unexpected_token_span: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                parser_replacements: Vec::new(),
                inner_attr_parser_ranges: Default::default(),
                seen_attrs: IntervalSet::new(u32::MAX as usize),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make the parser point at the first token.
        parser.bump();

        // `bump` incremented the counter to 1; reset it so reported token positions
        // start at 0.
        parser.num_bump_calls = 0;

        parser
    }
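
    // Illustrative sketch (not part of the original source): a caller holding a
    // `ParseSess` and a `TokenStream` builds a parser roughly like this, optionally
    // disabling recovery for speculative parses:
    //
    //     let parser = Parser::new(psess, stream, Some("attribute"))
    //         .recovery(Recovery::Forbidden);
    //
    // The subparser name, if any, is assumed here to feed into later diagnostics.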

    #[inline]
    pub fn recovery(mut self, recovery: Recovery) -> Self {
        self.recovery = recovery;
        self
    }

    #[inline]
    fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = mem::replace(&mut self.recovery, recovery);
        let res = f(self);
        self.recovery = old;
        res
    }

    #[inline]
    fn may_recover(&self) -> bool {
        matches!(self.recovery, Recovery::Allowed)
    }

    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            Ok(_) => FatalError.raise(),
        }
    }

    pub fn unexpected(&mut self) -> PResult<'a, ()> {
        self.unexpected_any()
    }

    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
        if self.expected_token_types.is_empty() {
            if self.token == *exp.tok {
                self.bump();
                Ok(Recovered::No)
            } else {
                self.unexpected_try_recover(exp.tok)
            }
        } else {
            self.expect_one_of(slice::from_ref(&exp), &[])
        }
    }

    fn expect_one_of(
        &mut self,
        edible: &[ExpTokenPair<'_>],
        inedible: &[ExpTokenPair<'_>],
    ) -> PResult<'a, Recovered> {
        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
            self.bump();
            Ok(Recovered::No)
        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
            Ok(Recovered::No)
        } else if self.token != token::Eof
            && self.last_unexpected_token_span == Some(self.token.span)
        {
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
        }
    }

    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }

    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
        let (ident, is_raw) = self.ident_or_err(recover)?;

        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
            let err = self.expected_ident_found_err();
            if recover {
                err.emit();
            } else {
                return Err(err);
            }
        }
        self.bump();
        Ok(ident)
    }

    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
        match self.token.ident() {
            Some(ident) => Ok(ident),
            None => self.expected_ident_found(recover),
        }
    }

    #[inline]
    fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
        let is_present = self.token == *exp.tok;
        if !is_present {
            self.expected_token_types.insert(exp.token_type);
        }
        is_present
    }

    #[inline]
    #[must_use]
    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }

    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
        tree_cursor.bump();
        matches!(
            tree_cursor.curr(),
            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
        )
    }

    #[inline]
    #[must_use]
    fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.check_noexpect(tok);
        if is_present {
            self.bump()
        }
        is_present
    }

    #[inline]
    #[must_use]
    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        let is_present = self.check(exp);
        if is_present {
            self.bump()
        }
        is_present
    }

    #[inline]
    #[must_use]
    fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
        let is_keyword = self.token.is_keyword(exp.kw);
        if !is_keyword {
            self.expected_token_types.insert(exp.token_type);
        }
        is_keyword
    }

    #[inline]
    #[must_use]
    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.check_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            true
        } else {
            false
        }
    }

    #[inline]
    #[must_use]
    pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
        let is_keyword = self.check_keyword(exp);
        if is_keyword {
            self.bump();
        }
        is_keyword
    }

    #[inline]
    #[must_use]
    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
        if self.eat_keyword(exp) {
            true
        } else if case == Case::Insensitive
            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
        {
            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
            self.bump();
            true
        } else {
            false
        }
    }
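
    // Illustrative sketch (not part of the original source): with `Case::Insensitive`,
    // `eat_keyword_case` also consumes a miscased keyword (e.g. `CONST`) after
    // reporting it via `errors::KwBadCase`:
    //
    //     if self.eat_keyword_case(exp!(Const), Case::Insensitive) {
    //         // `const` (possibly miscased, already diagnosed) has been eaten here.
    //     }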

    #[inline]
    #[must_use]
    pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
        let is_keyword = self.token.is_keyword(kw);
        if is_keyword {
            self.bump();
        }
        is_keyword
    }

    pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
        if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
    }

    fn eat_metavar_seq<T>(
        &mut self,
        mv_kind: MetaVarKind,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
    }

    fn eat_metavar_seq_with_matcher<T>(
        &mut self,
        match_mv_kind: impl Fn(MetaVarKind) -> bool,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> Option<T> {
        if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
            && match_mv_kind(mv_kind)
        {
            self.bump();

            let res = self.with_recovery(Recovery::Forbidden, |this| f(this));

            let res = match res {
                Ok(res) => res,
                Err(err) => {
                    err.delay_as_bug();
                    return None;
                }
            };

            if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
                && match_mv_kind(mv_kind)
            {
                self.bump();
                Some(res)
            } else {
                self.dcx().span_delayed_bug(
                    self.token.span,
                    format!("no close delim with reparsing {mv_kind:?}"),
                );
                None
            }
        } else {
            None
        }
    }
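
    // Illustrative sketch (not part of the original source): reparsing a pasted
    // metavariable consists of eating the invisible open delimiter, running the closure
    // with recovery forbidden, and then eating the matching invisible close delimiter.
    // A concrete use appears later in this file:
    //
    //     if let Some(vis) = self.eat_metavar_seq(MetaVarKind::Vis, |this| {
    //         this.parse_visibility(FollowedByType::Yes)
    //     }) {
    //         // `vis` was rebuilt from the tokens between the invisible delimiters.
    //     }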

    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_non_reserved_ident())
    }

    #[inline]
    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
        if !ok {
            self.expected_token_types.insert(token_type);
        }
        ok
    }

    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }

    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
                | token::OrOr
                | token::Or => true,
                _ => false,
            })
    }

    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::OpenBrace => true,
                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
                _ => false,
            })
    }

    #[inline]
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
    }

    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
        if self.token == *exp.tok {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op(1) {
            Some((first, second)) if first == *exp.tok => {
                let first_span = self.psess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                self.break_last_token += 1;
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_token_types.insert(exp.token_type);
                false
            }
        }
    }
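
    // Illustrative sketch (not part of the original source): `break_and_eat` lets a
    // compound token satisfy an expectation for its first half. With the current token
    // being `>>` (as in `Vec<Vec<u8>>`),
    //
    //     self.break_and_eat(exp!(Gt))
    //
    // consumes one `>`, leaves the remaining `>` as the current token, and records the
    // split in `break_last_token`.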

    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(exp!(Plus))
    }

    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
    }

    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
    }

    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(exp!(Lt));
        if ate {
            self.unmatched_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }

    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }

    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(exp!(Gt)) {
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }

    fn expect_any_with_type(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
    ) -> bool {
        closes_expected.iter().any(|&close| self.check(close))
            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
    }

    fn parse_seq_to_before_tokens<T>(
        &mut self,
        closes_expected: &[ExpTokenPair<'_>],
        closes_not_expected: &[&TokenKind],
        sep: SeqSep<'_>,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        let mut first = true;
        let mut recovered = Recovered::No;
        let mut trailing = Trailing::No;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
            if self.token.kind.is_close_delim_or_eof() {
                break;
            }
            if let Some(exp) = sep.sep {
                if first {
                    first = false;
                } else {
                    match self.expect(exp) {
                        Ok(Recovered::No) => {
                            self.current_closure.take();
                        }
                        Ok(Recovered::Yes(guar)) => {
                            self.current_closure.take();
                            recovered = Recovered::Yes(guar);
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(exp.tok);

                            match self.current_closure.take() {
                                Some(closure_spans) if self.token == TokenKind::Semi => {
                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
                                        self.bump();
                                    }
                                }
                            }

                            if self.prev_token.is_ident() && self.token == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of the rest of the array \
                                     pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .with_span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            self.last_unexpected_token_span = None;
                            match f(self) {
                                Ok(t) => {
                                    expect_err
                                        .with_span_suggestion_short(
                                            sp,
                                            format!("missing `{token_str}`"),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    for xx in &e.children {
                                        expect_err.children.push(xx.clone());
                                    }
                                    e.cancel();
                                    if self.token == token::Colon {
                                        return Err(expect_err);
                                    } else if let [exp] = closes_expected
                                        && exp.token_type == TokenType::CloseParen
                                    {
                                        return Err(expect_err);
                                    } else {
                                        expect_err.emit();
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            if sep.trailing_sep_allowed
                && self.expect_any_with_type(closes_expected, closes_not_expected)
            {
                trailing = Trailing::Yes;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }

    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: Diag<'_>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        while self.eat(exp!(Semi)) {
            let _ = self
                .parse_stmt_without_recovery(false, ForceCollect::No, false)
                .unwrap_or_else(|e| {
                    e.cancel();
                    None
                });
        }

        expect_err
            .primary_message("closure bodies that contain statements must be surrounded by braces");

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        expect_err.span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }

    fn parse_seq_to_before_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
    }

    fn parse_seq_to_end<T>(
        &mut self,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
        if matches!(recovered, Recovered::No) && !self.eat(close) {
            self.dcx().span_delayed_bug(
                self.token.span,
                "recovered but `parse_seq_to_before_end` did not give us the close token",
            );
        }
        Ok((val, trailing))
    }

    fn parse_unspanned_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        sep: SeqSep<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.expect(open)?;
        self.parse_seq_to_end(close, sep, f)
    }

    fn parse_delim_comma_seq<T>(
        &mut self,
        open: ExpTokenPair<'_>,
        close: ExpTokenPair<'_>,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
    }

    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
    }
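
    // Illustrative sketch (not part of the original source): the sequence helpers
    // compose so that a parenthesized, comma-separated list takes a single callback.
    // Assuming the caller wants a list of identifiers:
    //
    //     let (idents, _trailing) = self.parse_paren_comma_seq(|p| p.parse_ident())?;
    //
    // This expects `(` as the current token, permits a trailing comma, and stops at the
    // matching `)`.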

    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }

    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        self.expected_token_types.clear();
    }

    pub fn bump(&mut self) {
        let mut next = self.token_cursor.inlined_next();
        self.num_bump_calls += 1;
        self.break_last_token = 0;
        if next.0.span.is_dummy() {
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
        ));
        self.inlined_bump_with(next)
    }

    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        if dist == 1 {
            match self.token_cursor.curr.curr() {
                Some(tree) => {
                    match tree {
                        TokenTree::Token(token, _) => return looker(token),
                        &TokenTree::Delimited(dspan, _, delim, _) => {
                            if !delim.skip() {
                                return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
                            }
                        }
                    }
                }
                None => {
                    if let Some(last) = self.token_cursor.stack.last()
                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                        && !delim.skip()
                    {
                        return looker(&Token::new(delim.as_close_token_kind(), span.close));
                    }
                }
            }
        }

        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next().0;
            if matches!(
                token.kind,
                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
            ) {
                continue;
            }
            i += 1;
        }
        looker(&token)
    }
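
    // Illustrative sketch (not part of the original source): `look_ahead` inspects the
    // token `dist` positions ahead without consuming anything, which makes it the
    // building block for disambiguation checks such as
    //
    //     let starts_block = self.look_ahead(1, |t| *t == token::OpenBrace);
    //
    // `is_keyword_ahead` below is a thin wrapper over the same mechanism.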

    pub fn tree_look_ahead<R>(
        &self,
        dist: usize,
        looker: impl FnOnce(&TokenTree) -> R,
    ) -> Option<R> {
        assert_ne!(dist, 0);
        self.token_cursor.curr.look_ahead(dist - 1).map(looker)
    }

    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }

    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
        let span = self.token_uninterpolated_span();
        if self.eat_keyword_case(exp!(Async), case) {
            if self.token_uninterpolated_span().at_least_rust_2024()
                && self.eat_keyword_case(exp!(Gen), case)
            {
                let gen_span = self.prev_token_uninterpolated_span();
                Some(CoroutineKind::AsyncGen {
                    span: span.to(gen_span),
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            } else {
                Some(CoroutineKind::Async {
                    span,
                    closure_id: DUMMY_NODE_ID,
                    return_impl_trait_id: DUMMY_NODE_ID,
                })
            }
        } else if self.token_uninterpolated_span().at_least_rust_2024()
            && self.eat_keyword_case(exp!(Gen), case)
        {
            Some(CoroutineKind::Gen {
                span,
                closure_id: DUMMY_NODE_ID,
                return_impl_trait_id: DUMMY_NODE_ID,
            })
        } else {
            None
        }
    }

    fn parse_safety(&mut self, case: Case) -> Safety {
        if self.eat_keyword_case(exp!(Unsafe), case) {
            Safety::Unsafe(self.prev_token_uninterpolated_span())
        } else if self.eat_keyword_case(exp!(Safe), case) {
            Safety::Safe(self.prev_token_uninterpolated_span())
        } else {
            Safety::Default
        }
    }

    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }

    fn parse_closure_constness(&mut self) -> Const {
        let constness = self.parse_constness_(Case::Sensitive, true);
        if let Const::Yes(span) = constness {
            self.psess.gated_spans.gate(sym::const_closures, span);
        }
        constness
    }

    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        if (self.check_const_closure() == is_closure)
            && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
            && self.eat_keyword_case(exp!(Const), case)
        {
            Const::Yes(self.prev_token_uninterpolated_span())
        } else {
            Const::No
        }
    }

    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
        self.expect_keyword(exp!(Const))?;
        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
        };
        let blk_span = anon_const.value.span;
        let kind = if pat {
            let guar = self
                .dcx()
                .struct_span_err(blk_span, "const blocks cannot be used as patterns")
                .with_help(
                    "use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead",
                )
                .emit();
            ExprKind::Err(guar)
        } else {
            ExprKind::ConstBlock(anon_const)
        };
        Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs))
    }

    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
    }

    fn parse_byref(&mut self) -> ByRef {
        if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
    }

    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
        if self.eat_keyword(exp!(Mut)) {
            Some(Mutability::Mut)
        } else if self.eat_keyword(exp!(Const)) {
            Some(Mutability::Not)
        } else {
            None
        }
    }

    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            if let Some(suffix) = suffix {
                self.expect_no_tuple_index_suffix(self.token.span, suffix);
            }
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(true)
        }
    }

    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
        if let Some(args) = self.parse_delim_args_inner() {
            Ok(P(args))
        } else {
            self.unexpected_any()
        }
    }

    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
        Ok(if let Some(args) = self.parse_delim_args_inner() {
            AttrArgs::Delimited(args)
        } else if self.eat(exp!(Eq)) {
            let eq_span = self.prev_token.span;
            AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
        } else {
            AttrArgs::Empty
        })
    }

    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
        let delimited = self.check(exp!(OpenParen))
            || self.check(exp!(OpenBracket))
            || self.check(exp!(OpenBrace));

        delimited.then(|| {
            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
                unreachable!()
            };
            DelimArgs { dspan, delim, tokens }
        })
    }

    pub fn parse_token_tree(&mut self) -> TokenTree {
        if self.token.kind.open_delim().is_some() {
            let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
            debug_assert_matches!(tree, TokenTree::Delimited(..));

            let target_depth = self.token_cursor.stack.len() - 1;
            loop {
                self.bump();
                if self.token_cursor.stack.len() == target_depth {
                    debug_assert!(self.token.kind.close_delim().is_some());
                    break;
                }
            }

            self.bump();
            tree
        } else {
            assert!(!self.token.kind.is_close_delim_or_eof());
            let prev_spacing = self.token_spacing;
            self.bump();
            TokenTree::Token(self.prev_token, prev_spacing)
        }
    }
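
    // Illustrative sketch (not part of the original source): when positioned on the `(`
    // of `(a, b)`, `parse_token_tree` returns the whole delimited group and leaves the
    // parser just past the closing `)`:
    //
    //     let TokenTree::Delimited(_dspan, _spacing, _delim, inner) = self.parse_token_tree()
    //     else { unreachable!() };
    //     // `inner` is the `TokenStream` containing `a , b`.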

    pub fn parse_tokens(&mut self) -> TokenStream {
        let mut result = Vec::new();
        loop {
            if self.token.kind.is_close_delim_or_eof() {
                break;
            } else {
                result.push(self.parse_token_tree());
            }
        }
        TokenStream::new(result)
    }

    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = self.restrictions;
        self.restrictions = res;
        let res = f(self);
        self.restrictions = old;
        res
    }

    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        if let Some(vis) = self
            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
        {
            return Ok(vis);
        }

        if !self.eat_keyword(exp!(Pub)) {
            // No `pub` keyword: the visibility is inherited, with an empty span at the
            // current position.
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(exp!(OpenParen)) {
            if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseParen)
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Parse `pub(crate)`, `pub(super)`, or `pub(self)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                self.expect(exp!(CloseParen))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // A type cannot follow here, so `pub(something_else)` is worth
                // recovering from; the helper emits the error and we continue as `pub`.
                self.recover_incorrect_vis_restriction()?;
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
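
    // Illustrative sketch (not part of the original source): the forms accepted by
    // `parse_visibility` include
    //
    //     pub
    //     pub(crate)
    //     pub(super)
    //     pub(self)
    //     pub(in some::path)
    //
    // Anything else yields inherited visibility (no `pub` at all) or goes through the
    // `pub(...)` recovery path above.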

    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
        self.bump(); // `(`
        let path = self.parse_path(PathStyle::Mod)?;
        self.expect(exp!(CloseParen))?; // `)`

        let path_str = pprust::path_to_string(&path);
        self.dcx()
            .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });

        Ok(())
    }

    fn parse_extern(&mut self, case: Case) -> Extern {
        if self.eat_keyword_case(exp!(Extern), case) {
            let mut extern_span = self.prev_token.span;
            let abi = self.parse_abi();
            if let Some(abi) = abi {
                extern_span = extern_span.to(abi.span);
            }
            Extern::from_abi(abi, extern_span)
        } else {
            Extern::None
        }
    }

    fn parse_abi(&mut self) -> Option<StrLit> {
        match self.parse_str_lit() {
            Ok(str_lit) => Some(str_lit),
            Err(Some(lit)) => match lit.kind {
                ast::LitKind::Err(_) => None,
                _ => {
                    self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
                    None
                }
            },
            Err(None) => None,
        }
    }

    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
        })
    }

    fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
        if self.check(exp!(PathSep)) {
            if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
                debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
                self.look_ahead(2, looker)
            } else {
                self.look_ahead(1, looker)
            }
        } else {
            false
        }
    }

    fn is_import_coupler(&mut self) -> bool {
        self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star))
    }

    #[allow(unused)]
    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
        fmt::from_fn(move |f| {
            let mut dbg_fmt = f.debug_struct("Parser");
            dbg_fmt.field("prev_token", &self.prev_token);
            let mut tokens = vec![];
            for i in 0..lookahead {
                let tok = self.look_ahead(i, |tok| tok.kind);
                let is_eof = tok == TokenKind::Eof;
                tokens.push(tok);
                if is_eof {
                    break;
                }
            }
            dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
            dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);

            if let Some(subparser) = self.subparser_name {
                dbg_fmt.field("subparser_name", &subparser);
            }
            if let Recovery::Forbidden = self.recovery {
                dbg_fmt.field("recovery", &self.recovery);
            }

            dbg_fmt.finish_non_exhaustive()
        })
    }

    pub fn clear_expected_token_types(&mut self) {
        self.expected_token_types.clear();
    }

    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }

    pub fn token_uninterpolated_span(&self) -> Span {
        match &self.token.kind {
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span),
            _ => self.token.span,
        }
    }

    pub fn prev_token_uninterpolated_span(&self) -> Span {
        match &self.prev_token.kind {
            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span),
            _ => self.prev_token.span,
        }
    }
}

#[derive(Clone, Debug)]
pub enum ParseNtResult {
    Tt(TokenTree),
    Ident(Ident, IdentIsRaw),
    Lifetime(Ident, IdentIsRaw),
    Item(P<ast::Item>),
    Block(P<ast::Block>),
    Stmt(P<ast::Stmt>),
    Pat(P<ast::Pat>, NtPatKind),
    Expr(P<ast::Expr>, NtExprKind),
    Literal(P<ast::Expr>),
    Ty(P<ast::Ty>),
    Meta(P<ast::AttrItem>),
    Path(P<ast::Path>),
    Vis(P<ast::Visibility>),
}