//! The main parser interface.

#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(rustc::untranslatable_diagnostic)]
#![feature(assert_matches)]
#![feature(box_patterns)]
#![feature(debug_closure_helpers)]
#![feature(default_field_values)]
#![feature(if_let_guard)]
#![feature(iter_intersperse)]
#![recursion_limit = "256"]
use std::path::{Path, PathBuf};
use std::str::Utf8Error;
use std::sync::Arc;

use rustc_ast as ast;
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
use rustc_ast_pretty::pprust;
use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
use rustc_lexer::FrontmatterAllowed;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::SourceMap;
use rustc_span::{FileName, SourceFile, Span};
pub use unicode_normalization::UNICODE_VERSION as UNICODE_NORMALIZATION_VERSION;

pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");

#[macro_use]
pub mod parser;
use parser::Parser;
use rustc_ast::token::Delimiter;

pub mod lexer;

mod errors;

rustc_fluent_macro::fluent_messages! { "../messages.ftl" }

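/// Emits the buffered errors and raises a fatal error if the result is `Err`; otherwise returns
/// the contained value.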
pub fn unwrap_or_emit_fatal<T>(expr: Result<T, Vec<Diag<'_>>>) -> T {
    match expr {
        Ok(expr) => expr,
        Err(errs) => {
            for err in errs {
                err.emit();
            }
            FatalError.raise()
        }
    }
}

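/// Creates a new parser from a source string. On failure, the returned errors must be consumed
/// (e.g. via [`unwrap_or_emit_fatal`], `emit`, or `cancel`), otherwise they panic when dropped.
///
/// A minimal usage sketch, assuming an already-constructed `ParseSess` named `psess` is in
/// scope; building one requires full compiler/session setup, so this is not run as a doctest:
///
/// ```ignore (requires a compiler session)
/// use rustc_parse::{new_parser_from_source_str, unwrap_or_emit_fatal};
/// use rustc_span::FileName;
///
/// let src = "fn main() {}".to_owned();
/// let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str(
///     &psess,
///     FileName::anon_source_code(&src),
///     src,
/// ));
/// // Drive the parser, e.g. parse a whole crate (returns a `PResult`).
/// let krate = parser.parse_crate_mod();
/// ```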
pub fn new_parser_from_source_str(
    psess: &ParseSess,
    name: FileName,
    source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
    let source_file = psess.source_map().new_source_file(name, source);
    new_parser_from_source_file(psess, source_file)
}

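/// Creates a new parser from a file. On failure, the returned errors must be consumed
/// (e.g. via [`unwrap_or_emit_fatal`], `emit`, or `cancel`), otherwise they panic when dropped.
///
/// If a span is given, it is used on an error as the source of the problem.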
pub fn new_parser_from_file<'a>(
    psess: &'a ParseSess,
    path: &Path,
    sp: Option<Span>,
) -> Result<Parser<'a>, Vec<Diag<'a>>> {
    let sm = psess.source_map();
    let source_file = sm.load_file(path).unwrap_or_else(|e| {
        let msg = format!("couldn't read `{}`: {}", path.display(), e);
        let mut err = psess.dcx().struct_fatal(msg);
        // If the file exists but contains invalid UTF-8, point at the offending bytes.
        if let Ok(contents) = std::fs::read(path)
            && let Err(utf8err) = String::from_utf8(contents.clone())
        {
            utf8_error(
                sm,
                &path.display().to_string(),
                sp,
                &mut err,
                utf8err.utf8_error(),
                &contents,
            );
        }
        if let Some(sp) = sp {
            err.span(sp);
        }
        // Emitting a fatal diagnostic aborts, so this closure never returns normally.
        err.emit();
    });
    new_parser_from_source_file(psess, source_file)
}

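/// Attaches a note (or a span label) to `err` pointing at the first invalid UTF-8 sequence in
/// `contents`, for a file at `path` that exists but could not be read as UTF-8.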
pub fn utf8_error<E: EmissionGuarantee>(
    sm: &SourceMap,
    path: &str,
    sp: Option<Span>,
    err: &mut Diag<'_, E>,
    utf8err: Utf8Error,
    contents: &[u8],
) {
    // The file exists, but it wasn't valid UTF-8.
    let start = utf8err.valid_up_to();
    let note = format!("invalid utf-8 at byte `{start}`");
    let msg = if let Some(len) = utf8err.error_len() {
        format!(
            "byte{s} `{bytes}` {are} not valid utf-8",
            bytes = if len == 1 {
                format!("{:?}", contents[start])
            } else {
                format!("{:?}", &contents[start..start + len])
            },
            s = pluralize!(len),
            are = if len == 1 { "is" } else { "are" },
        )
    } else {
        note.clone()
    };
    let contents = String::from_utf8_lossy(contents).to_string();
    let source = sm.new_source_file(PathBuf::from(path).into(), contents);
    let span = Span::with_root_ctxt(
        source.normalized_byte_pos(start as u32),
        source.normalized_byte_pos(start as u32),
    );
    if span.is_dummy() {
        err.note(note);
    } else {
        if sp.is_some() {
            err.span_note(span, msg);
        } else {
            err.span(span);
            err.span_label(span, msg);
        }
    }
}

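/// Given a session and a `source_file`, returns a parser. Returns any buffered errors from
/// lexing the initial token stream.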
fn new_parser_from_source_file(
    psess: &ParseSess,
    source_file: Arc<SourceFile>,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
    let end_pos = source_file.end_position();
    let stream = source_file_to_stream(psess, source_file, None, FrontmatterAllowed::Yes)?;
    let mut parser = Parser::new(psess, stream, None);
    if parser.token == token::Eof {
        // Make the EOF token's span point at the end of the source file.
        parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
    }
    Ok(parser)
}

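/// Given a session and a string, adds the string to the source map and returns a token stream.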
pub fn source_str_to_stream(
    psess: &ParseSess,
    name: FileName,
    source: String,
    override_span: Option<Span>,
) -> Result<TokenStream, Vec<Diag<'_>>> {
    let source_file = psess.source_map().new_source_file(name, source);
    // Used mainly for re-lexing `proc_macro`-style sources rather than for parsing whole files,
    // so frontmatter is not treated specially here.
    source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No)
}

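/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
/// lexing the file's source.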
fn source_file_to_stream<'psess>(
    psess: &'psess ParseSess,
    source_file: Arc<SourceFile>,
    override_span: Option<Span>,
    frontmatter_allowed: FrontmatterAllowed,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
    let src = source_file.src.as_ref().unwrap_or_else(|| {
        psess.dcx().bug(format!(
            "cannot lex `source_file` without source: {}",
            psess.source_map().filename_for_diagnostics(&source_file.name)
        ));
    });

    lexer::lex_token_trees(
        psess,
        src.as_str(),
        source_file.start_pos,
        override_span,
        frontmatter_allowed,
    )
}

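/// Runs the given subparser `f` on the tokens `tts`, and errors if `f` does not consume the
/// entire stream.
///
/// A usage sketch mirroring the `cfg_attr` handling later in this module (it assumes `psess`
/// and `tokens` are in scope):
///
/// ```ignore (requires a compiler session)
/// let parsed = parse_in(psess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr());
/// ```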
pub fn parse_in<'a, T>(
    psess: &'a ParseSess,
    tts: TokenStream,
    name: &'static str,
    mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, T> {
    let mut parser = Parser::new(psess, tts, Some(name));
    let result = f(&mut parser)?;
    if parser.token != token::Eof {
        parser.unexpected()?;
    }
    Ok(result)
}

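/// Pretty-prints `item` to a string and lexes it back into a token stream, using `item.span` as
/// the span for the produced tokens.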
pub fn fake_token_stream_for_item(psess: &ParseSess, item: &ast::Item) -> TokenStream {
    let source = pprust::item_to_string(item);
    let filename = FileName::macro_expansion_source_code(&source);
    unwrap_or_emit_fatal(source_str_to_stream(psess, filename, source, Some(item.span)))
}

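/// Pretty-prints `krate` to a string and lexes it back into a token stream, using the crate's
/// inner span for the produced tokens.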
pub fn fake_token_stream_for_crate(psess: &ParseSess, krate: &ast::Crate) -> TokenStream {
    let source = pprust::crate_to_string_for_macros(krate);
    let filename = FileName::macro_expansion_source_code(&source);
    unwrap_or_emit_fatal(source_str_to_stream(
        psess,
        filename,
        source,
        Some(krate.spans.inner_span),
    ))
}

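/// Parses the arguments of a `#[cfg_attr(...)]` attribute into the `cfg` predicate and the list
/// of attributes (with their spans) it expands to; e.g. `#[cfg_attr(unix, path = "unix.rs")]`
/// yields the predicate `unix` and the single attribute `path = "unix.rs"`. Returns `None`
/// (after emitting an error) if the input is malformed.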
pub fn parse_cfg_attr(
    cfg_attr: &Attribute,
    psess: &ParseSess,
) -> Option<(MetaItemInner, Vec<(AttrItem, Span)>)> {
    const CFG_ATTR_GRAMMAR_HELP: &str = "#[cfg_attr(condition, attribute, other_attribute, ...)]";
    const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
        <https://doc.rust-lang.org/reference/conditional-compilation.html#the-cfg_attr-attribute>";

    match cfg_attr.get_normal_item().args {
        ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens })
            if !tokens.is_empty() =>
        {
            check_cfg_attr_bad_delim(psess, dspan, delim);
            match parse_in(psess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
                Ok(r) => return Some(r),
                Err(e) => {
                    e.with_help(format!("the valid syntax is `{CFG_ATTR_GRAMMAR_HELP}`"))
                        .with_note(CFG_ATTR_NOTE_REF)
                        .emit();
                }
            }
        }
        _ => {
            psess.dcx().emit_err(errors::MalformedCfgAttr {
                span: cfg_attr.span,
                sugg: CFG_ATTR_GRAMMAR_HELP,
            });
        }
    }
    None
}

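/// Emits an error if the `cfg_attr` arguments are delimited by anything other than parentheses.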
fn check_cfg_attr_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) {
    if let Delimiter::Parenthesis = delim {
        return;
    }
    psess.dcx().emit_err(errors::CfgAttrBadDelim {
        span: span.entire(),
        sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
    });
}