1use std::borrow::Cow;
2use std::collections::hash_map::Entry;
3use std::sync::Arc;
4use std::{mem, slice};
5
6use ast::token::IdentIsRaw;
7use rustc_ast::token::NtPatKind::*;
8use rustc_ast::token::TokenKind::*;
9use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
10use rustc_ast::tokenstream::{self, DelimSpan, TokenStream};
11use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
12use rustc_ast_pretty::pprust;
13use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
14use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan};
15use rustc_feature::Features;
16use rustc_hir as hir;
17use rustc_hir::attrs::AttributeKind;
18use rustc_hir::def::MacroKinds;
19use rustc_hir::find_attr;
20use rustc_lint_defs::BuiltinLintDiag;
21use rustc_lint_defs::builtin::{
22 RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
23};
24use rustc_parse::exp;
25use rustc_parse::parser::{Parser, Recovery};
26use rustc_session::Session;
27use rustc_session::parse::{ParseSess, feature_err};
28use rustc_span::edition::Edition;
29use rustc_span::hygiene::Transparency;
30use rustc_span::{Ident, Span, kw, sym};
31use tracing::{debug, instrument, trace, trace_span};
32
33use super::diagnostics::failed_to_match_macro;
34use super::macro_parser::{NamedMatches, NamedParseResult};
35use super::{SequenceRepetition, diagnostics};
36use crate::base::{
37 AttrProcMacro, DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult,
38 SyntaxExtension, SyntaxExtensionKind, TTMacroExpander,
39};
40use crate::errors;
41use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
42use crate::mbe::macro_check::check_meta_variables;
43use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
44use crate::mbe::quoted::{RulePart, parse_one_tt};
45use crate::mbe::transcribe::transcribe;
46use crate::mbe::{self, KleeneOp};
47
/// A parser primed with the token stream produced by one successful
/// `macro_rules!` arm expansion, ready to be parsed into an AST fragment.
pub(crate) struct ParserAnyMacro<'a> {
    parser: Parser<'a>,

    /// Span of the macro invocation site.
    site_span: Span,
    /// Name of the macro being expanded, used in diagnostics and lints.
    macro_ident: Ident,
    /// Node id under which lints found while parsing the expansion are buffered.
    lint_node_id: NodeId,
    /// Forwarded into the `TrailingMacro` lint diagnostic; presumably marks a
    /// trailing macro call in statement position — confirm at the lint def.
    is_trailing_mac: bool,
    /// Span of the rule's right-hand side that produced the tokens.
    arm_span: Span,
    /// Whether the macro is defined in the current crate; gates buffering the
    /// `SEMICOLON_IN_EXPRESSIONS_FROM_MACROS` lint.
    is_local: bool,
}
61
impl<'a> ParserAnyMacro<'a> {
    /// Parses the stored expansion tokens into an AST fragment of the
    /// requested `kind`, emitting diagnostics (and returning a dummy
    /// fragment) on failure.
    pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro {
            site_span,
            macro_ident,
            ref mut parser,
            lint_node_id,
            arm_span,
            is_trailing_mac,
            is_local,
        } = *self;
        // Snapshot the parser up front so a parse failure can be diagnosed
        // against the pre-parse state.
        let snapshot = &mut parser.create_snapshot_for_diagnostic();
        let fragment = match parse_ast_fragment(parser, kind) {
            Ok(f) => f,
            Err(err) => {
                let guar = diagnostics::emit_frag_parse_err(
                    err, parser, snapshot, site_span, arm_span, kind,
                );
                return kind.dummy(site_span, guar);
            }
        };

        // A trailing semicolon after an expression fragment is consumed, but
        // for locally-defined macros it is also linted against.
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            if is_local {
                parser.psess.buffer_lint(
                    SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
                    parser.token.span,
                    lint_node_id,
                    BuiltinLintDiag::TrailingMacro(is_trailing_mac, macro_ident),
                );
            }
            parser.bump();
        }

        // Report any tokens the fragment parse left unconsumed, attributed to
        // the macro call path at the invocation site.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }

    /// Builds a `ParserAnyMacro` over freshly-expanded tokens, capturing
    /// lint and trailing-macro context from the current expansion in `cx`.
    #[instrument(skip(cx, tts))]
    pub(crate) fn from_tts<'cx>(
        cx: &'cx mut ExtCtxt<'a>,
        tts: TokenStream,
        site_span: Span,
        arm_span: Span,
        is_local: bool,
        macro_ident: Ident,
    ) -> Self {
        Self {
            parser: Parser::new(&cx.sess.psess, tts, None),

            site_span,
            macro_ident,
            lint_node_id: cx.current_expansion.lint_node_id,
            is_trailing_mac: cx.current_expansion.is_trailing_mac,
            arm_span,
            is_local,
        }
    }
}
129
/// A single compiled rule of a `macro_rules!` definition, with its matchers
/// pre-lowered to `MatcherLoc`s and its transcriber kept as an `mbe` tree.
pub(super) enum MacroRule {
    /// A function-like (`name!(...)`) rule: one matcher and one transcriber.
    Func { lhs: Vec<MatcherLoc>, lhs_span: Span, rhs: mbe::TokenTree },
    /// An attribute (`attr(...) pattern => { ... }`) rule: separate matchers
    /// for the attribute arguments and for the annotated item body.
    Attr {
        args: Vec<MatcherLoc>,
        args_span: Span,
        body: Vec<MatcherLoc>,
        body_span: Span,
        rhs: mbe::TokenTree,
    },
}
142
/// The expander for a compiled `macro_rules!` macro; holds the lowered rules
/// and implements both the bang (`TTMacroExpander`) and attribute
/// (`AttrProcMacro`) expansion interfaces.
pub struct MacroRulesMacroExpander {
    // Node id of the definition; compared against `DUMMY_NODE_ID` elsewhere —
    // presumably dummy for macros not defined in the current crate.
    node_id: NodeId,
    name: Ident,
    /// Span of the macro definition, used in match-failure diagnostics.
    span: Span,
    transparency: Transparency,
    /// Which invocation forms (bang and/or attr) the rules support.
    kinds: MacroKinds,
    rules: Vec<MacroRule>,
}
151
152impl MacroRulesMacroExpander {
153 pub fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, MultiSpan)> {
154 let (span, rhs) = match self.rules[rule_i] {
156 MacroRule::Func { lhs_span, ref rhs, .. } => (MultiSpan::from_span(lhs_span), rhs),
157 MacroRule::Attr { args_span, body_span, ref rhs, .. } => {
158 (MultiSpan::from_spans(vec![args_span, body_span]), rhs)
159 }
160 };
161 if has_compile_error_macro(rhs) { None } else { Some((&self.name, span)) }
162 }
163
164 pub fn kinds(&self) -> MacroKinds {
165 self.kinds
166 }
167}
168
impl TTMacroExpander for MacroRulesMacroExpander {
    /// Handles a function-like (`name!(...)`) invocation by delegating to
    /// `expand_macro` with this macro's definition data.
    fn expand<'cx>(
        &self,
        cx: &'cx mut ExtCtxt<'_>,
        sp: Span,
        input: TokenStream,
    ) -> MacroExpanderResult<'cx> {
        ExpandResult::Ready(expand_macro(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            input,
            &self.rules,
        ))
    }
}
188
impl AttrProcMacro for MacroRulesMacroExpander {
    /// Handles an attribute (`#[name(args)] body`) invocation by delegating
    /// to `expand_macro_attr` with this macro's definition data.
    fn expand(
        &self,
        cx: &mut ExtCtxt<'_>,
        sp: Span,
        args: TokenStream,
        body: TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        expand_macro_attr(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            args,
            body,
            &self.rules,
        )
    }
}
210
/// Fallback expander installed when macro compilation fails; always expands
/// to a dummy result carrying the already-emitted error.
struct DummyExpander(ErrorGuaranteed);
212
impl TTMacroExpander for DummyExpander {
    // Ignores the input entirely and produces a dummy result at the call
    // site, carrying the stored error guarantee.
    fn expand<'cx>(
        &self,
        _: &'cx mut ExtCtxt<'_>,
        span: Span,
        _: TokenStream,
    ) -> ExpandResult<Box<dyn MacResult + 'cx>, ()> {
        ExpandResult::Ready(DummyResult::any(span, self.0))
    }
}
223
224fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
225 let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
226 cx_expansions.entry(sp).or_default().push(message);
227}
228
/// Hooks for observing macro matching, so the same matching code can run both
/// silently (the fast path) and with diagnostics collection (the retry path).
pub(super) trait Tracker<'matcher> {
    /// Data produced when a matcher arm fails to match.
    type Failure;

    /// Builds a failure value from the token and position where matching
    /// stopped, plus a static message.
    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

    /// Called before each `MatcherLoc` is attempted.
    fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}

    /// Called after a whole arm was attempted; `in_body` distinguishes an
    /// attr rule's body matcher (`true`) from its args matcher (`false`).
    fn after_arm(&mut self, _in_body: bool, _result: &NamedParseResult<Self::Failure>) {}

    /// Short name of this tracker, used in tracing/instrumentation output.
    fn description() -> &'static str;

    /// Whether the parser may attempt recovery while matching.
    fn recovery() -> Recovery {
        Recovery::Forbidden
    }
}
252
/// A tracker that collects nothing; used for the initial fast-path match
/// attempt, whose failures are retried with diagnostics tracking.
pub(super) struct NoopTracker;
256
impl<'matcher> Tracker<'matcher> for NoopTracker {
    // Failures carry no data; they only signal "try the next arm".
    type Failure = ();

    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

    fn description() -> &'static str {
        "none"
    }
}
266
/// Expands a function-like (`name!(...)`) invocation of a `macro_rules!`
/// macro: matches `arg` against the macro's `Func` rules and transcribes the
/// matching rule's right-hand side.
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    arg: TokenStream,
    rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
    let psess = &cx.sess.psess;

    // With macro tracing enabled, record the invocation for later printing.
    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(&arg));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // First attempt: fast path with no diagnostics tracking.
    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

    match try_success_result {
        Ok((rule_index, rule, named_matches)) => {
            // `try_match_macro` only ever matches `Func` rules.
            let MacroRule::Func { rhs, .. } = rule else {
                panic!("try_match_macro returned non-func rule");
            };
            let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };
            let arm_span = rhs_span.entire();

            // Substitute the matched fragments into the rhs.
            let id = cx.current_expansion.id;
            let tts = match transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id) {
                Ok(tts) => tts,
                Err(err) => {
                    let guar = err.emit();
                    return DummyResult::any(arm_span, guar);
                }
            };

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            // Rule-usage tracking only applies to macros defined in this crate.
            let is_local = is_defined_in_current_crate(node_id);
            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, rule_index);
            }

            Box::new(ParserAnyMacro::from_tts(cx, tts, sp, arm_span, is_local, name))
        }
        Err(CanRetry::No(guar)) => {
            debug!("Will not retry matching as an error was emitted already");
            DummyResult::any(sp, guar)
        }
        Err(CanRetry::Yes) => {
            // Retry the match with diagnostics tracking to build the error.
            let (span, guar) =
                failed_to_match_macro(cx.psess(), sp, def_span, name, None, &arg, rules);
            cx.macro_error_and_trace_macros_diag();
            DummyResult::any(span, guar)
        }
    }
}
335
/// Expands an attribute invocation (`#[name(args)] body`) of a `macro_rules!`
/// macro: matches against the macro's `Attr` rules and transcribes the
/// matching rule's rhs into a new token stream for the annotated item.
#[instrument(skip(cx, transparency, args, body, rules))]
fn expand_macro_attr(
    cx: &mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    args: TokenStream,
    body: TokenStream,
    rules: &[MacroRule],
) -> Result<TokenStream, ErrorGuaranteed> {
    let psess = &cx.sess.psess;
    // NOTE(review): other paths use `is_defined_in_current_crate(node_id)` for
    // this check — presumably equivalent; consider unifying.
    let is_local = node_id != DUMMY_NODE_ID;

    if cx.trace_macros() {
        let msg = format!(
            "expanding `#[{name}({})] {}`",
            pprust::tts_to_string(&args),
            pprust::tts_to_string(&body),
        );
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // Fast-path match without diagnostics tracking; retried below on failure.
    match try_match_macro_attr(psess, name, &args, &body, rules, &mut NoopTracker) {
        Ok((i, rule, named_matches)) => {
            // `try_match_macro_attr` only ever matches `Attr` rules.
            let MacroRule::Attr { rhs, .. } = rule else {
                panic!("try_macro_match_attr returned non-attr rule");
            };
            let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };

            // Substitute fragments bound by both the args and body matchers.
            let id = cx.current_expansion.id;
            let tts = transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id)
                .map_err(|e| e.emit())?;

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, i);
            }

            Ok(tts)
        }
        Err(CanRetry::No(guar)) => Err(guar),
        Err(CanRetry::Yes) => {
            // Retry with diagnostics tracking to produce the failure error.
            let (_, guar) =
                failed_to_match_macro(cx.psess(), sp, def_span, name, Some(&args), &body, rules);
            cx.trace_macros_diag();
            Err(guar)
        }
    }
}
398
/// Whether a failed match attempt may be retried (with diagnostics tracking)
/// or has already produced a reported error.
pub(super) enum CanRetry {
    Yes,
    /// An error was already emitted; retrying would duplicate diagnostics.
    No(ErrorGuaranteed),
}
404
/// Tries to match the invocation tokens `arg` against each `Func` rule in
/// turn, returning the index of the first matching rule, the rule itself,
/// and its named matches.
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    arg: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    let parser = parser_from_cx(psess, arg.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        // Only function-like rules participate in bang-macro matching.
        let MacroRule::Func { lhs, .. } = rule else { continue };
        let _tracing_span = trace_span!("Matching arm", %i);

        // Snapshot the gated-feature spans: spans gathered while speculatively
        // matching an arm that ends up failing must not stick around.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);

        track.after_arm(true, &result);

        match result {
            Success(named_matches) => {
                debug!("Parsed arm successfully");
                // The arm matched: keep the spans gathered both before and
                // during this match.
                psess.gated_spans.merge(gated_spans_snapshot);

                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                trace!("Failed to match arm, trying the next one");
            }
            Error(_, _) => {
                debug!("Fatal error occurred during matching");
                // Retry with a tracking tracker to build a useful diagnostic.
                return Err(CanRetry::Yes);
            }
            ErrorReported(guarantee) => {
                debug!("Fatal error occurred and was reported during matching");
                return Err(CanRetry::No(guarantee));
            }
        }

        // The arm failed: restore the pre-arm gated spans, discarding anything
        // gathered during the failed speculative match.
        mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
    }

    Err(CanRetry::Yes)
}
484
/// Tries each `Attr` rule in turn, matching `attr_args` against the rule's
/// args matcher and `attr_body` against its body matcher; both must succeed
/// for the rule to be selected.
#[instrument(level = "debug", skip(psess, attr_args, attr_body, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro_attr<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    attr_args: &TokenStream,
    attr_body: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    let args_parser = parser_from_cx(psess, attr_args.clone(), T::recovery());
    let body_parser = parser_from_cx(psess, attr_body.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        // Only attribute rules participate here.
        let MacroRule::Attr { args, body, .. } = rule else { continue };

        // Snapshot gated-feature spans so a failed speculative match does not
        // leak spans it gathered (restored via `mem::swap` on failure).
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        // First match the attribute arguments.
        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&args_parser), args, track);
        track.after_arm(false, &result);

        let mut named_matches = match result {
            Success(named_matches) => named_matches,
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
                continue;
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        };

        // Then match the annotated item body with the same rule.
        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track);
        track.after_arm(true, &result);

        match result {
            Success(body_named_matches) => {
                // Keep spans from the successful match and combine bindings
                // from both matchers.
                psess.gated_spans.merge(gated_spans_snapshot);
                named_matches.extend(body_named_matches);
                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut())
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        }
    }

    Err(CanRetry::Yes)
}
538
/// Compiles the body of a declarative macro definition into a
/// `SyntaxExtension`, validating every rule along the way.
///
/// Returns the extension together with the number of rules (0 for macros not
/// defined in the current crate), used for unused-rule tracking. On any fatal
/// error a dummy bang extension carrying the error is returned instead.
pub fn compile_declarative_macro(
    sess: &Session,
    features: &Features,
    macro_def: &ast::MacroDef,
    ident: Ident,
    attrs: &[hir::Attribute],
    span: Span,
    node_id: NodeId,
    edition: Edition,
) -> (SyntaxExtension, usize) {
    let mk_syn_ext = |kind| {
        let is_local = is_defined_in_current_crate(node_id);
        SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local)
    };
    // Fallback for compilation failures: a bang macro that always expands to
    // a dummy result carrying the already-emitted error.
    let dummy_syn_ext =
        |guar| (mk_syn_ext(SyntaxExtensionKind::LegacyBang(Arc::new(DummyExpander(guar)))), 0);

    let macro_rules = macro_def.macro_rules;
    // `macro_rules!` separates rules with `;`; the other form uses `,`.
    let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };

    let body = macro_def.body.tokens.clone();
    let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);

    // Record the first error guarantee but keep checking later rules so all
    // diagnostics get emitted.
    let mut guar = None;
    let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

    let mut kinds = MacroKinds::empty();
    let mut rules = Vec::new();

    while p.token != token::Eof {
        // An `attr(...)` prefix marks an attribute rule (unstable feature).
        let args = if p.eat_keyword_noexpect(sym::attr) {
            kinds |= MacroKinds::ATTR;
            if !features.macro_attr() {
                feature_err(sess, sym::macro_attr, span, "`macro_rules!` attributes are unstable")
                    .emit();
            }
            if let Some(guar) = check_no_eof(sess, &p, "expected macro attr args") {
                return dummy_syn_ext(guar);
            }
            let args = p.parse_token_tree();
            check_args_parens(sess, &args);
            let args = parse_one_tt(args, RulePart::Pattern, sess, node_id, features, edition);
            check_emission(check_lhs(sess, node_id, &args));
            if let Some(guar) = check_no_eof(sess, &p, "expected macro attr body") {
                return dummy_syn_ext(guar);
            }
            Some(args)
        } else {
            kinds |= MacroKinds::BANG;
            None
        };
        // Parse and validate `lhs => rhs`.
        let lhs_tt = p.parse_token_tree();
        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
        check_emission(check_lhs(sess, node_id, &lhs_tt));
        if let Err(e) = p.expect(exp!(FatArrow)) {
            return dummy_syn_ext(e.emit());
        }
        if let Some(guar) = check_no_eof(sess, &p, "expected right-hand side of macro rule") {
            return dummy_syn_ext(guar);
        }
        let rhs_tt = p.parse_token_tree();
        let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
        check_emission(check_rhs(sess, &rhs_tt));
        check_emission(check_meta_variables(&sess.psess, node_id, args.as_ref(), &lhs_tt, &rhs_tt));
        let lhs_span = lhs_tt.span();
        // Lower the lhs to `MatcherLoc`s; a non-delimited lhs was already
        // reported by `check_lhs`, so `guar` must be set in the else branch.
        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
            mbe::macro_parser::compute_locs(&delimited.tts)
        } else {
            return dummy_syn_ext(guar.unwrap());
        };
        if let Some(args) = args {
            let args_span = args.span();
            let mbe::TokenTree::Delimited(.., delimited) = args else {
                return dummy_syn_ext(guar.unwrap());
            };
            let args = mbe::macro_parser::compute_locs(&delimited.tts);
            // For attr rules, the "lhs" parsed above is the item-body matcher.
            let body_span = lhs_span;
            rules.push(MacroRule::Attr { args, args_span, body: lhs, body_span, rhs: rhs_tt });
        } else {
            rules.push(MacroRule::Func { lhs, lhs_span, rhs: rhs_tt });
        }
        if p.token == token::Eof {
            break;
        }
        if let Err(e) = p.expect(exp_sep) {
            return dummy_syn_ext(e.emit());
        }
    }

    if rules.is_empty() {
        let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
        return dummy_syn_ext(guar);
    }
    // At least one rule was parsed, so at least one kind bit must be set.
    assert!(!kinds.is_empty());

    let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
        .unwrap_or(Transparency::fallback(macro_rules));

    if let Some(guar) = guar {
        // All rules were checked (for maximal diagnostics); bail out now with
        // the dummy extension.
        return dummy_syn_ext(guar);
    }

    // Rule-usage is only tracked for macros defined in the current crate.
    let nrules = if is_defined_in_current_crate(node_id) { rules.len() } else { 0 };

    let exp = MacroRulesMacroExpander { name: ident, kinds, span, node_id, transparency, rules };
    (mk_syn_ext(SyntaxExtensionKind::MacroRules(Arc::new(exp))), nrules)
}
654
655fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> {
656 if p.token == token::Eof {
657 let err_sp = p.token.span.shrink_to_hi();
658 let guar = sess
659 .dcx()
660 .struct_span_err(err_sp, "macro definition ended unexpectedly")
661 .with_span_label(err_sp, msg)
662 .emit();
663 return Some(guar);
664 }
665 None
666}
667
668fn check_args_parens(sess: &Session, args: &tokenstream::TokenTree) {
669 if let tokenstream::TokenTree::Delimited(dspan, _, delim, _) = args
671 && *delim != Delimiter::Parenthesis
672 {
673 sess.dcx().emit_err(errors::MacroArgsBadDelim {
674 span: dspan.entire(),
675 sugg: errors::MacroArgsBadDelimSugg { open: dspan.open, close: dspan.close },
676 });
677 }
678}
679
680fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
681 let e1 = check_lhs_nt_follows(sess, node_id, lhs);
682 let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
683 e1.and(e2)
684}
685
686fn check_lhs_nt_follows(
687 sess: &Session,
688 node_id: NodeId,
689 lhs: &mbe::TokenTree,
690) -> Result<(), ErrorGuaranteed> {
691 if let mbe::TokenTree::Delimited(.., delimited) = lhs {
694 check_matcher(sess, node_id, &delimited.tts)
695 } else {
696 let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
697 Err(sess.dcx().span_err(lhs.span(), msg))
698 }
699}
700
/// Returns whether a sequence repetition could match against an empty token
/// stream, in which case the matcher would never make progress.
fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
    if seq.separator.is_some() {
        // A separator means at least one token sits between repetitions, so
        // the repetition cannot match nothing.
        false
    } else {
        let mut is_empty = true;
        let mut iter = seq.tts.iter().peekable();
        while let Some(tt) = iter.next() {
            match tt {
                // `vis` can match the empty fragment, so it does not
                // disqualify emptiness.
                mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
                mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
                    // Doc comments are ignored in matcher position; coalesce a
                    // run of consecutive ones into a single span for the note.
                    let mut now = t;
                    while let Some(&mbe::TokenTree::Token(
                        next @ Token { kind: DocComment(..), .. },
                    )) = iter.peek()
                    {
                        now = next;
                        iter.next();
                    }
                    let span = t.span.to(now.span);
                    sess.dcx().span_note(span, "doc comments are ignored in matcher position");
                }
                // A nested `*` or `?` repetition may itself match nothing.
                mbe::TokenTree::Sequence(_, sub_seq)
                    if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne) => {}
                // Anything else requires at least one token.
                _ => is_empty = false,
            }
        }
        is_empty
    }
}
731
732fn check_redundant_vis_repetition(
737 err: &mut Diag<'_>,
738 sess: &Session,
739 seq: &SequenceRepetition,
740 span: &DelimSpan,
741) {
742 let is_zero_or_one: bool = seq.kleene.op == KleeneOp::ZeroOrOne;
743 let is_vis = seq.tts.first().map_or(false, |tt| {
744 matches!(tt, mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
745 });
746
747 if is_vis && is_zero_or_one {
748 err.note("a `vis` fragment can already be empty");
749 err.multipart_suggestion(
750 "remove the `$(` and `)?`",
751 vec![
752 (
753 sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
754 "".to_string(),
755 ),
756 (span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
757 ],
758 Applicability::MaybeIncorrect,
759 );
760 }
761}
762
763fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
766 use mbe::TokenTree;
767 for tt in tts {
768 match tt {
769 TokenTree::Token(..)
770 | TokenTree::MetaVar(..)
771 | TokenTree::MetaVarDecl { .. }
772 | TokenTree::MetaVarExpr(..) => (),
773 TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
774 TokenTree::Sequence(span, seq) => {
775 if is_empty_token_tree(sess, seq) {
776 let sp = span.entire();
777 let mut err =
778 sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
779 check_redundant_vis_repetition(&mut err, sess, seq, span);
780 return Err(err.emit());
781 }
782 check_lhs_no_empty_seq(sess, &seq.tts)?
783 }
784 }
785 }
786
787 Ok(())
788}
789
790fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
791 match *rhs {
792 mbe::TokenTree::Delimited(..) => Ok(()),
793 _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
794 }
795}
796
797fn check_matcher(
798 sess: &Session,
799 node_id: NodeId,
800 matcher: &[mbe::TokenTree],
801) -> Result<(), ErrorGuaranteed> {
802 let first_sets = FirstSets::new(matcher);
803 let empty_suffix = TokenSet::empty();
804 check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
805 Ok(())
806}
807
808fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
809 match rhs {
810 mbe::TokenTree::Delimited(.., d) => {
811 let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
812 if let mbe::TokenTree::Token(ident) = ident
813 && let TokenKind::Ident(ident, _) = ident.kind
814 && ident == sym::compile_error
815 && let mbe::TokenTree::Token(bang) = bang
816 && let TokenKind::Bang = bang.kind
817 && let mbe::TokenTree::Delimited(.., del) = args
818 && !del.delim.skip()
819 {
820 true
821 } else {
822 false
823 }
824 });
825 if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) }
826 }
827 _ => false,
828 }
829}
830
/// Precomputed FIRST sets for the sequences inside a matcher.
///
/// Maps each sequence's span to the set of tokens that can begin a match of
/// that sequence. The value is `None` when two sequences share a span, making
/// a single cached set ambiguous; `first` recomputes those on demand.
struct FirstSets<'tt> {
    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
}
852
impl<'tt> FirstSets<'tt> {
    /// Builds the FIRST table for `tts` via a right-to-left pass, recursing
    /// into delimited groups and recording each sequence's FIRST set.
    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        return sets;

        // Walks `tts` in reverse, maintaining the FIRST set of the suffix
        // seen so far; returns the FIRST set of the whole slice.
        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match tt {
                    TokenTree::Token(..)
                    | TokenTree::MetaVar(..)
                    | TokenTree::MetaVarDecl { .. }
                    | TokenTree::MetaVarExpr(..) => {
                        // A leaf token is the unique first token of the
                        // suffix that starts here.
                        first.replace_with(TtHandle::TtRef(tt));
                    }
                    TokenTree::Delimited(span, _, delimited) => {
                        // Recurse to fill the table for inner sequences; the
                        // group itself starts with its opening delimiter.
                        build_recur(sets, &delimited.tts);
                        first.replace_with(TtHandle::from_token_kind(
                            delimited.delim.as_open_token_kind(),
                            span.open,
                        ));
                    }
                    TokenTree::Sequence(sp, seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                // Two sequences with the same span: poison the
                                // cache entry so `first` recomputes on demand.
                                occ.insert(None);
                            }
                        }

                        // If the sequence body may be empty and there is a
                        // separator, the separator can appear first.
                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TtHandle::from_token(*sep));
                        }

                        // A `*`/`?` sequence (or one with an empty body) may
                        // be skipped, so tokens after it can also be first;
                        // otherwise it fully determines the FIRST set here.
                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    /// Returns the FIRST set of `tts`: the tokens that can begin a match,
    /// with `maybe_empty` set when the whole slice can match zero tokens.
    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            // Invariant: scanning continues only while the prefix seen so
            // far may match the empty fragment.
            assert!(first.maybe_empty);
            match tt {
                TokenTree::Token(..)
                | TokenTree::MetaVar(..)
                | TokenTree::MetaVarDecl { .. }
                | TokenTree::MetaVarExpr(..) => {
                    first.add_one(TtHandle::TtRef(tt));
                    return first;
                }
                TokenTree::Delimited(span, _, delimited) => {
                    first.add_one(TtHandle::from_token_kind(
                        delimited.delim.as_open_token_kind(),
                        span.open,
                    ));
                    return first;
                }
                TokenTree::Sequence(sp, seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            // Poisoned entry (duplicate span): recompute.
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    // A separator may be first when the body can be empty.
                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TtHandle::from_token(*sep));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        // The sequence may be skipped entirely: keep scanning
                        // the tokens that follow it.
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        // Reached the end without a mandatory token: all of `tts` may match
        // the empty fragment.
        assert!(first.maybe_empty);
        first
    }
}
993
/// A token tree that is either borrowed from the matcher or synthesized
/// (e.g. a delimiter token built from a span), so FIRST/FOLLOW sets can hold
/// both without cloning matcher subtrees.
#[derive(Debug)]
enum TtHandle<'tt> {
    /// A token tree borrowed from the matcher itself.
    TtRef(&'tt mbe::TokenTree),

    /// An owned, synthesized token tree; only ever the `Token` variant (the
    /// `Clone` impl below relies on this).
    Token(mbe::TokenTree),
}
1009
impl<'tt> TtHandle<'tt> {
    /// Wraps an owned token in a handle.
    fn from_token(tok: Token) -> Self {
        TtHandle::Token(mbe::TokenTree::Token(tok))
    }

    /// Builds a handle for a synthesized token from its kind and span.
    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
        TtHandle::from_token(Token::new(kind, span))
    }

    /// Borrows the underlying token tree, whether borrowed or owned.
    fn get(&'tt self) -> &'tt mbe::TokenTree {
        match self {
            TtHandle::TtRef(tt) => tt,
            TtHandle::Token(token_tt) => token_tt,
        }
    }
}
1027
impl<'tt> PartialEq for TtHandle<'tt> {
    // Equality compares the underlying token trees, regardless of whether
    // they are borrowed or owned.
    fn eq(&self, other: &TtHandle<'tt>) -> bool {
        self.get() == other.get()
    }
}
1033
impl<'tt> Clone for TtHandle<'tt> {
    fn clone(&self) -> Self {
        match self {
            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),

            // An owned handle is only ever constructed around the `Token`
            // variant (see `from_token`), so the token itself can be copied.
            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
                TtHandle::Token(mbe::TokenTree::Token(*tok))
            }

            // Unreachable by the invariant documented on `TtHandle::Token`.
            _ => unreachable!(),
        }
    }
}
1049
/// A set of token trees used in FIRST/FOLLOW computations.
///
/// `maybe_empty` records whether the corresponding fragment of the matcher
/// can match zero tokens, in which case the listed tokens alone are not
/// exhaustive.
#[derive(Clone, Debug)]
struct TokenSet<'tt> {
    tokens: Vec<TtHandle<'tt>>,
    maybe_empty: bool,
}
1065
1066impl<'tt> TokenSet<'tt> {
1067 fn empty() -> Self {
1069 TokenSet { tokens: Vec::new(), maybe_empty: true }
1070 }
1071
1072 fn singleton(tt: TtHandle<'tt>) -> Self {
1075 TokenSet { tokens: vec![tt], maybe_empty: false }
1076 }
1077
1078 fn replace_with(&mut self, tt: TtHandle<'tt>) {
1081 self.tokens.clear();
1082 self.tokens.push(tt);
1083 self.maybe_empty = false;
1084 }
1085
1086 fn replace_with_irrelevant(&mut self) {
1090 self.tokens.clear();
1091 self.maybe_empty = false;
1092 }
1093
1094 fn add_one(&mut self, tt: TtHandle<'tt>) {
1096 if !self.tokens.contains(&tt) {
1097 self.tokens.push(tt);
1098 }
1099 self.maybe_empty = false;
1100 }
1101
1102 fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
1104 if !self.tokens.contains(&tt) {
1105 self.tokens.push(tt);
1106 }
1107 }
1108
1109 fn add_all(&mut self, other: &Self) {
1117 for tt in &other.tokens {
1118 if !self.tokens.contains(tt) {
1119 self.tokens.push(tt.clone());
1120 }
1121 }
1122 if !other.maybe_empty {
1123 self.maybe_empty = false;
1124 }
1125 }
1126}
1127
/// Checks `matcher` against the fragment follow-set rules, where `follow` is
/// the set of tokens that may legally come after the matcher as a whole.
///
/// Returns the set of tokens that `matcher` may end with. An error is emitted
/// (and collected in `errored`) for every fragment specifier followed by a
/// token not in its FOLLOW set; checking continues so that all problems are
/// reported.
fn check_matcher_core<'tt>(
    sess: &Session,
    node_id: NodeId,
    first_sets: &FirstSets<'tt>,
    matcher: &'tt [mbe::TokenTree],
    follow: &TokenSet<'tt>,
) -> Result<TokenSet<'tt>, ErrorGuaranteed> {
    use mbe::TokenTree;

    // The set of tokens the previously-scanned prefix of `matcher` may end
    // with.
    let mut last = TokenSet::empty();

    // Defer returning the first error so later positions still get checked.
    let mut errored = Ok(());

    'each_token: for i in 0..matcher.len() {
        let token = &matcher[i];
        let suffix = &matcher[i + 1..];

        // FIRST(suffix), extended with `follow` when the suffix can be empty.
        let build_suffix_first = || {
            let mut s = first_sets.first(suffix);
            if s.maybe_empty {
                s.add_all(follow);
            }
            s
        };

        // Only computed on paths where `last` may hold a fragment specifier
        // whose followers must be validated (see the loop at the bottom).
        let suffix_first;

        match token {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => {
                if token_can_be_followed_by_any(token) {
                    // No follow restrictions: nothing to validate here.
                    last.replace_with_irrelevant();
                    continue 'each_token;
                } else {
                    last.replace_with(TtHandle::TtRef(token));
                    suffix_first = build_suffix_first();
                }
            }
            TokenTree::Delimited(span, _, d) => {
                // Inside the group, the only follower of the final token is
                // the closing delimiter itself.
                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                    d.delim.as_close_token_kind(),
                    span.close,
                ));
                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
                // A closed delimited group may be followed by anything.
                last.replace_with_irrelevant();

                continue 'each_token;
            }
            TokenTree::Sequence(_, seq_rep) => {
                suffix_first = build_suffix_first();
                // The sequence contents may be followed by the separator (if
                // any) in addition to whatever follows the sequence itself.
                let mut new;
                let my_suffix = if let Some(sep) = &seq_rep.separator {
                    new = suffix_first.clone();
                    new.add_one_maybe(TtHandle::from_token(*sep));
                    &new
                } else {
                    &suffix_first
                };

                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
                // If the sequence may match nothing, the previous LAST set is
                // still live; otherwise the sequence's LAST set replaces it.
                if next.maybe_empty {
                    last.add_all(&next);
                } else {
                    last = next;
                }

                continue 'each_token;
            }
        }

        // Validate every fragment specifier that may end the prefix against
        // every token that may start the suffix.
        for tt in &last.tokens {
            if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
                for next_token in &suffix_first.tokens {
                    let next_token = next_token.get();

                    // Lint (in the defining crate) `$_:pat` followed by `|`:
                    // 2021 or-patterns change what `pat` matches.
                    if is_defined_in_current_crate(node_id)
                        && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                        && matches!(
                            next_token,
                            TokenTree::Token(token) if *token == token::Or
                        )
                    {
                        // Suggest writing `pat_param` explicitly.
                        let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                            span,
                            name,
                            kind: NonterminalKind::Pat(PatParam { inferred: false }),
                        });
                        sess.psess.buffer_lint(
                            RUST_2021_INCOMPATIBLE_OR_PATTERNS,
                            span,
                            ast::CRATE_NODE_ID,
                            BuiltinLintDiag::OrPatternsBackCompat(span, suggestion),
                        );
                    }
                    match is_in_follow(next_token, kind) {
                        IsInFollow::Yes => {}
                        IsInFollow::No(possible) => {
                            // "is" when the pairing is unambiguous, "may be"
                            // when multiple last/first tokens are in play.
                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
                            {
                                "is"
                            } else {
                                "may be"
                            };

                            let sp = next_token.span();
                            let mut err = sess.dcx().struct_span_err(
                                sp,
                                format!(
                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
                                     is not allowed for `{frag}` fragments",
                                    name = name,
                                    frag = kind,
                                    next = quoted_tt_to_string(next_token),
                                    may_be = may_be
                                ),
                            );
                            err.span_label(sp, format!("not allowed after `{kind}` fragments"));

                            // On 2021+, `pat` followed by `|` gets a concrete
                            // suggestion: use `pat_param` instead.
                            if kind == NonterminalKind::Pat(PatWithOr)
                                && sess.psess.edition.at_least_rust_2021()
                                && next_token.is_token(&token::Or)
                            {
                                let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
                                    span,
                                    name,
                                    kind: NonterminalKind::Pat(PatParam { inferred: false }),
                                });
                                err.span_suggestion(
                                    span,
                                    "try a `pat_param` fragment specifier instead",
                                    suggestion,
                                    Applicability::MaybeIncorrect,
                                );
                            }

                            // List the followers that would have been allowed.
                            let msg = "allowed there are: ";
                            match possible {
                                &[] => {}
                                &[t] => {
                                    err.note(format!(
                                        "only {t} is allowed after `{kind}` fragments",
                                    ));
                                }
                                ts => {
                                    err.note(format!(
                                        "{}{} or {}",
                                        msg,
                                        ts[..ts.len() - 1].to_vec().join(", "),
                                        ts[ts.len() - 1],
                                    ));
                                }
                            }
                            errored = Err(err.emit());
                        }
                    }
                }
            }
        }
    }
    errored?;
    Ok(last)
}
1343
1344fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
1345 if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
1346 frag_can_be_followed_by_any(kind)
1347 } else {
1348 true
1350 }
1351}
1352
1353fn frag_can_be_followed_by_any(kind: NonterminalKind) -> bool {
1362 matches!(
1363 kind,
1364 NonterminalKind::Item | NonterminalKind::Block | NonterminalKind::Ident | NonterminalKind::Literal | NonterminalKind::Meta | NonterminalKind::Lifetime | NonterminalKind::TT )
1372}
1373
/// Result of checking whether one token may legally follow a given fragment
/// kind in a matcher.
enum IsInFollow {
    /// The token is in the fragment's FOLLOW set.
    Yes,
    /// The token is not allowed; the payload is the human-readable list of
    /// tokens that *are* allowed, used to build the diagnostic note.
    No(&'static [&'static str]),
}
1378
/// Checks whether the token `tok` may follow a metavariable of fragment kind
/// `kind` in a matcher, per the macro FOLLOW-set rules. Returns
/// `IsInFollow::No` with the human-readable list of allowed tokens when the
/// pairing would be ambiguous.
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
    use mbe::TokenTree;

    if let TokenTree::Token(Token { kind, .. }) = tok
        && kind.close_delim().is_some()
    {
        // A closing delimiter is safe after any fragment: it cannot start a
        // new token sequence, it only ends the enclosing delimited group.
        IsInFollow::Yes
    } else {
        match kind {
            NonterminalKind::Item => {
                // Items are unambiguously terminated, so anything may follow.
                IsInFollow::Yes
            }
            NonterminalKind::Block => {
                // A block is a single `{...}` token tree; anything may follow.
                IsInFollow::Yes
            }
            NonterminalKind::Stmt | NonterminalKind::Expr(_) => {
                // Expressions/statements may only be followed by a token that
                // clearly terminates them.
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatParam { .. }) => {
                // `pat_param` does not consume `|`, so `|` is in its follow set.
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatWithOr) => {
                // Unlike `pat_param` above, `|` is absent here: a 2021-edition
                // `pat` fragment consumes `|` as part of an or-pattern.
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Path | NonterminalKind::Ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        // NOTE: `Shr` (`>>`) is accepted alongside `Gt` since a
                        // type/path can sit directly before a doubled closing
                        // angle bracket in generics.
                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
                        | Semi | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    // A `block` fragment may also directly follow a type/path.
                    TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Ident | NonterminalKind::Lifetime => {
                // Single tokens — anything may follow.
                IsInFollow::Yes
            }
            NonterminalKind::Literal => {
                IsInFollow::Yes
            }
            NonterminalKind::Meta | NonterminalKind::TT => {
                // Both match exactly one token tree; anything may follow.
                IsInFollow::Yes
            }
            NonterminalKind::Vis => {
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        // Raw identifiers are always fine; plain identifiers
                        // are fine except `priv`.
                        Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
                        Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            // Anything that can start a type may also follow a
                            // visibility.
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    TokenTree::MetaVarDecl {
                        kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
                        ..
                    } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
        }
    }
}
1501
1502fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
1503 match tt {
1504 mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
1505 mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
1506 mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
1507 _ => panic!(
1508 "{}",
1509 "unexpected mbe::TokenTree::{Sequence or Delimited} \
1510 in follow set checker"
1511 ),
1512 }
1513}
1514
/// Returns `true` if the macro was defined in the crate currently being
/// compiled.
// NOTE(review): this relies on macros loaded from other crates carrying
// `DUMMY_NODE_ID` — confirm against the metadata decoder that assigns ids.
fn is_defined_in_current_crate(node_id: NodeId) -> bool {
    node_id != DUMMY_NODE_ID
}
1520
1521pub(super) fn parser_from_cx(
1522 psess: &ParseSess,
1523 mut tts: TokenStream,
1524 recovery: Recovery,
1525) -> Parser<'_> {
1526 tts.desugar_doc_comments();
1527 Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
1528}