@@ -1,160 +1,63 @@
-use std::ops::Range;
-use std::sync::Arc;
-use std::{io, thread};
-
-use crate::doc::{NEEDLESS_DOCTEST_MAIN, TEST_ATTR_IN_DOCTEST};
+use super::Fragments;
+use crate::doc::NEEDLESS_DOCTEST_MAIN;
 use clippy_utils::diagnostics::span_lint;
-use rustc_ast::{CoroutineKind, Fn, FnRetTy, Item, ItemKind};
-use rustc_errors::emitter::HumanEmitter;
-use rustc_errors::{Diag, DiagCtxt};
+use clippy_utils::tokenize_with_text;
+use rustc_lexer::TokenKind;
 use rustc_lint::LateContext;
-use rustc_parse::lexer::StripTokens;
-use rustc_parse::new_parser_from_source_str;
-use rustc_parse::parser::ForceCollect;
-use rustc_session::parse::ParseSess;
-use rustc_span::edition::Edition;
-use rustc_span::source_map::{FilePathMapping, SourceMap};
-use rustc_span::{FileName, Ident, Pos, sym};
-
-use super::Fragments;
-
-fn get_test_spans(item: &Item, ident: Ident, test_attr_spans: &mut Vec<Range<usize>>) {
-    test_attr_spans.extend(
-        item.attrs
-            .iter()
-            .find(|attr| attr.has_name(sym::test))
-            .map(|attr| attr.span.lo().to_usize()..ident.span.hi().to_usize()),
-    );
-}
-
-pub fn check(
-    cx: &LateContext<'_>,
-    text: &str,
-    edition: Edition,
-    range: Range<usize>,
-    fragments: Fragments<'_>,
-    ignore: bool,
-) {
-    // return whether the code contains a needless `fn main` plus a vector of byte position ranges
-    // of all `#[test]` attributes in not ignored code examples
-    fn check_code_sample(code: String, edition: Edition, ignore: bool) -> (bool, Vec<Range<usize>>) {
-        rustc_driver::catch_fatal_errors(|| {
-            rustc_span::create_session_globals_then(edition, &[], None, || {
-                let mut test_attr_spans = vec![];
-                let filename = FileName::anon_source_code(&code);
-
-                let translator = rustc_driver::default_translator();
-                let emitter = HumanEmitter::new(Box::new(io::sink()), translator);
-                let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings();
-                #[expect(clippy::arc_with_non_send_sync)] // `Arc` is expected by with_dcx
-                let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
-                let psess = ParseSess::with_dcx(dcx, sm);
-
-                let mut parser =
-                    match new_parser_from_source_str(&psess, filename, code, StripTokens::ShebangAndFrontmatter) {
-                        Ok(p) => p,
-                        Err(errs) => {
-                            errs.into_iter().for_each(Diag::cancel);
-                            return (false, test_attr_spans);
-                        },
-                    };
-
-                let mut relevant_main_found = false;
-                let mut eligible = true;
-                loop {
-                    match parser.parse_item(ForceCollect::No) {
-                        Ok(Some(item)) => match &item.kind {
-                            ItemKind::Fn(box Fn {
-                                ident,
-                                sig,
-                                body: Some(block),
-                                ..
-                            }) if ident.name == sym::main => {
-                                if !ignore {
-                                    get_test_spans(&item, *ident, &mut test_attr_spans);
-                                }
-
-                                let is_async = matches!(sig.header.coroutine_kind, Some(CoroutineKind::Async { .. }));
-                                let returns_nothing = match &sig.decl.output {
-                                    FnRetTy::Default(..) => true,
-                                    FnRetTy::Ty(ty) if ty.kind.is_unit() => true,
-                                    FnRetTy::Ty(_) => false,
-                                };
-
-                                if returns_nothing && !is_async && !block.stmts.is_empty() {
-                                    // This main function should be linted, but only if there are no other functions
-                                    relevant_main_found = true;
-                                } else {
-                                    // This main function should not be linted, we're done
-                                    eligible = false;
-                                }
-                            },
-                            // Another function was found; this case is ignored for needless_doctest_main
-                            ItemKind::Fn(fn_) => {
-                                eligible = false;
-                                if ignore {
-                                    // If ignore is active invalidating one lint,
-                                    // and we already found another function thus
-                                    // invalidating the other one, we have no
-                                    // business continuing.
-                                    return (false, test_attr_spans);
-                                }
-                                get_test_spans(&item, fn_.ident, &mut test_attr_spans);
-                            },
-                            // Tests with one of these items are ignored
-                            ItemKind::Static(..)
-                            | ItemKind::Const(..)
-                            | ItemKind::ExternCrate(..)
-                            | ItemKind::ForeignMod(..) => {
-                                eligible = false;
-                            },
-                            _ => {},
-                        },
-                        Ok(None) => break,
-                        Err(e) => {
-                            // See issue #15041. When calling `.cancel()` on the `Diag`, Clippy will unexpectedly panic
-                            // when the `Diag` is unwinded. Meanwhile, we can just call `.emit()`, since the `DiagCtxt`
-                            // is just a sink, nothing will be printed.
-                            e.emit();
-                            return (false, test_attr_spans);
-                        },
-                    }
-                }
-
-                (relevant_main_found & eligible, test_attr_spans)
-            })
-        })
-        .ok()
-        .unwrap_or_default()
+use rustc_span::InnerSpan;
+
+fn returns_unit<'a>(mut tokens: impl Iterator<Item = (TokenKind, &'a str, InnerSpan)>) -> bool {
+    let mut next = || tokens.next().map_or(TokenKind::Whitespace, |(kind, ..)| kind);
+
+    match next() {
+        // {
+        TokenKind::OpenBrace => true,
+        // - > ( ) {
+        TokenKind::Minus => {
+            next() == TokenKind::Gt
+                && next() == TokenKind::OpenParen
+                && next() == TokenKind::CloseParen
+                && next() == TokenKind::OpenBrace
+        },
+        _ => false,
     }
+}
 
-    let trailing_whitespace = text.len() - text.trim_end().len();
-
-    // We currently only test for "fn main". Checking for the real
-    // entrypoint (with tcx.entry_fn(())) in each block would be unnecessarily
-    // expensive, as those are probably intended and relevant. Same goes for
-    // macros and other weird ways of declaring a main function.
-    //
-    // Also, as we only check for attribute names and don't do macro expansion,
-    // we can check only for #[test]
-
-    if !((text.contains("main") && text.contains("fn")) || text.contains("#[test]")) {
+pub fn check(cx: &LateContext<'_>, text: &str, offset: usize, fragments: Fragments<'_>) {
+    if !text.contains("main") {
         return;
     }
 
-    // Because of the global session, we need to create a new session in a different thread with
-    // the edition we need.
-    let text = text.to_owned();
-    let (has_main, test_attr_spans) = thread::spawn(move || check_code_sample(text, edition, ignore))
-        .join()
-        .expect("thread::spawn failed");
-    if has_main && let Some(span) = fragments.span(cx, range.start..range.end - trailing_whitespace) {
-        span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest");
-    }
-    for span in test_attr_spans {
-        let span = (range.start + span.start)..(range.start + span.end);
-        if let Some(span) = fragments.span(cx, span) {
-            span_lint(cx, TEST_ATTR_IN_DOCTEST, span, "unit tests in doctest are not executed");
+    let mut tokens = tokenize_with_text(text).filter(|&(kind, ..)| {
+        !matches!(
+            kind,
+            TokenKind::Whitespace | TokenKind::BlockComment { .. } | TokenKind::LineComment { .. }
+        )
+    });
+    if let Some((TokenKind::Ident, "fn", fn_span)) = tokens.next()
+        && let Some((TokenKind::Ident, "main", main_span)) = tokens.next()
+        && let Some((TokenKind::OpenParen, ..)) = tokens.next()
+        && let Some((TokenKind::CloseParen, ..)) = tokens.next()
+        && returns_unit(&mut tokens)
+    {
+        let mut depth = 1;
+        for (kind, ..) in &mut tokens {
+            match kind {
+                TokenKind::OpenBrace => depth += 1,
+                TokenKind::CloseBrace => {
+                    depth -= 1;
+                    if depth == 0 {
+                        break;
+                    }
+                },
+                _ => {},
+            }
+        }
+
+        if tokens.next().is_none()
+            && let Some(span) = fragments.span(cx, fn_span.start + offset..main_span.end + offset)
+        {
+            span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest");
         }
     }
 }
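
The diff replaces the parser-based implementation, which spawned a thread, built a ParseSess with a silent emitter, and walked parsed items, with a plain token scan over the doctest text: `tokenize_with_text` must yield `fn` `main` `(` `)` as the first tokens once whitespace and comments are filtered out, `returns_unit` must then see `{` or `-> ( ) {`, the braces must balance, and no token may follow the closing brace. `TEST_ATTR_IN_DOCTEST` is no longer emitted from this file. Below is a minimal sketch, not part of the commit, of doctest bodies and how the new scan classifies them; the strings stand in for the `text` argument of `check`.

fn main() {
    // Flagged: the first tokens are `fn main ( )`, the return type is unit,
    // the braces balance, and nothing follows the closing brace.
    let flagged = "fn main() { assert_eq!(1 + 1, 2); }";

    // Not flagged: `returns_unit` sees `-> Result<...>` rather than `{` or `-> ()`.
    let non_unit_return = "fn main() -> Result<(), ()> { Ok(()) }";

    // Not flagged: a second item follows `main`, so `tokens.next()` is still
    // `Some(..)` after the brace-matching loop.
    let extra_item = "fn main() { helper(); } fn helper() {}";

    // Nothing to run here; the strings only document the shapes.
    let _ = (flagged, non_unit_return, extra_item);
}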