Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
173 changes: 94 additions & 79 deletions src/librustdoc/doctest.rs
Original file line number Diff line number Diff line change
Expand Up @@ -211,14 +211,15 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, input: Input, options: RustdocOptions
Err(error) => return crate::wrap_return(dcx, Err(error)),
};
let args_path = temp_dir.path().join("rustdoc-cfgs");
let temp_dir_path = temp_dir.path().to_path_buf();
crate::wrap_return(dcx, generate_args_file(&args_path, &options));

let extract_doctests = options.output_format == OutputFormat::Doctest;
let save_temps = options.codegen_options.save_temps;
let result = interface::run_compiler(config, |compiler| {
let krate = rustc_interface::passes::parse(&compiler.sess);

let collector = rustc_interface::create_and_enter_global_ctxt(compiler, krate, |tcx| {
rustc_interface::create_and_enter_global_ctxt(compiler, krate, |tcx| {
let crate_name = tcx.crate_name(LOCAL_CRATE).to_string();
let opts = scrape_test_config(tcx, crate_name, args_path);

Expand All @@ -227,6 +228,8 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, input: Input, options: RustdocOptions
tcx,
);
let tests = hir_collector.collect_crate();
tcx.dcx().abort_if_errors();

if extract_doctests {
let mut collector = extracted::ExtractedDocTests::new();
tests.into_iter().for_each(|t| collector.add_test(t, &opts, &options));
Expand All @@ -235,93 +238,90 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, input: Input, options: RustdocOptions
let mut stdout = stdout.lock();
if let Err(error) = serde_json::ser::to_writer(&mut stdout, &collector) {
eprintln!();
Err(format!("Failed to generate JSON output for doctests: {error:?}"))
return Err(format!("Failed to generate JSON output for doctests: {error:?}"));
} else {
Ok(None)
return Ok(());
}
} else {
let mut collector = CreateRunnableDocTests::new(options, opts);
tests.into_iter().for_each(|t| collector.add_test(t, Some(compiler.sess.dcx())));

Ok(Some(collector))
}
});
compiler.sess.dcx().abort_if_errors();
let mut collector = CreateRunnableDocTests::new(options, opts);
tests.into_iter().for_each(|t| collector.add_test(t, Some(compiler.sess.dcx())));

collector
});
let CreateRunnableDocTests {
standalone_tests,
mergeable_tests,
rustdoc_options,
opts,
unused_extern_reports,
compiling_test_count,
..
} = collector;

let CreateRunnableDocTests {
standalone_tests,
mergeable_tests,
rustdoc_options,
opts,
unused_extern_reports,
compiling_test_count,
..
} = match result {
Ok(Some(collector)) => collector,
Ok(None) => return,
Err(error) => {
eprintln!("{error}");
// Since some files in the temporary folder are still owned and alive, we need
// to manually remove the folder.
if !save_temps {
let _ = std::fs::remove_dir_all(temp_dir.path());
run_tests(
compiler.sess.dcx(),
opts,
&rustdoc_options,
&unused_extern_reports,
standalone_tests,
mergeable_tests,
Some(temp_dir),
Some(tcx),
);

let compiling_test_count = compiling_test_count.load(Ordering::SeqCst);

// Collect and warn about unused externs, but only if we've gotten
// reports for each doctest
if json_unused_externs.is_enabled() {
let unused_extern_reports: Vec<_> =
std::mem::take(&mut unused_extern_reports.lock().unwrap());
if unused_extern_reports.len() == compiling_test_count {
let extern_names =
externs.iter().map(|(name, _)| name).collect::<FxIndexSet<&String>>();
let mut unused_extern_names = unused_extern_reports
.iter()
.map(|uexts| {
uexts.unused_extern_names.iter().collect::<FxIndexSet<&String>>()
})
.fold(extern_names, |uextsa, uextsb| {
uextsa.intersection(&uextsb).copied().collect::<FxIndexSet<&String>>()
})
.iter()
.map(|v| (*v).clone())
.collect::<Vec<String>>();
unused_extern_names.sort();
// Take the most severe lint level
let lint_level = unused_extern_reports
.iter()
.map(|uexts| uexts.lint_level.as_str())
.max_by_key(|v| match *v {
"warn" => 1,
"deny" => 2,
"forbid" => 3,
// The allow lint level is not expected,
// as if allow is specified, no message
// is to be emitted.
v => unreachable!("Invalid lint level '{v}'"),
})
.unwrap_or("warn")
.to_string();
let uext = UnusedExterns { lint_level, unused_extern_names };
let unused_extern_json = serde_json::to_string(&uext).unwrap();
eprintln!("{unused_extern_json}");
}
}
std::process::exit(1);
}
};

run_tests(
dcx,
opts,
&rustdoc_options,
&unused_extern_reports,
standalone_tests,
mergeable_tests,
Some(temp_dir),
);
Ok(())
})
});

let compiling_test_count = compiling_test_count.load(Ordering::SeqCst);

// Collect and warn about unused externs, but only if we've gotten
// reports for each doctest
if json_unused_externs.is_enabled() {
let unused_extern_reports: Vec<_> =
std::mem::take(&mut unused_extern_reports.lock().unwrap());
if unused_extern_reports.len() == compiling_test_count {
let extern_names =
externs.iter().map(|(name, _)| name).collect::<FxIndexSet<&String>>();
let mut unused_extern_names = unused_extern_reports
.iter()
.map(|uexts| uexts.unused_extern_names.iter().collect::<FxIndexSet<&String>>())
.fold(extern_names, |uextsa, uextsb| {
uextsa.intersection(&uextsb).copied().collect::<FxIndexSet<&String>>()
})
.iter()
.map(|v| (*v).clone())
.collect::<Vec<String>>();
unused_extern_names.sort();
// Take the most severe lint level
let lint_level = unused_extern_reports
.iter()
.map(|uexts| uexts.lint_level.as_str())
.max_by_key(|v| match *v {
"warn" => 1,
"deny" => 2,
"forbid" => 3,
// The allow lint level is not expected,
// as if allow is specified, no message
// is to be emitted.
v => unreachable!("Invalid lint level '{v}'"),
})
.unwrap_or("warn")
.to_string();
let uext = UnusedExterns { lint_level, unused_extern_names };
let unused_extern_json = serde_json::to_string(&uext).unwrap();
eprintln!("{unused_extern_json}");
if let Err(error) = result {
eprintln!("{error}");
// Since some files in the temporary folder are still owned and alive, we need
// to manually remove the folder.
if !save_temps {
let _ = std::fs::remove_dir_all(temp_dir_path);
}
std::process::exit(1);
}
}

Expand All @@ -334,6 +334,7 @@ pub(crate) fn run_tests(
mergeable_tests: FxIndexMap<MergeableTestKey, Vec<(DocTestBuilder, ScrapedDocTest)>>,
// We pass this argument so we can drop it manually before using `exit`.
mut temp_dir: Option<TempDir>,
tcx: Option<TyCtxt<'_>>,
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

(preliminary review) this is always Some(_); I'm probably missing something.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

When you run rustdoc on a markdown file, there is no TyCtxt ever. ;)

) {
let mut test_args = Vec::with_capacity(rustdoc_options.test_args.len() + 1);
test_args.insert(0, "rustdoctest".to_string());
Expand Down Expand Up @@ -386,6 +387,20 @@ pub(crate) fn run_tests(
diag.emit();
}

if let Some(tcx) = tcx {
tcx.node_span_lint(
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

[…] I'd really love to see if it's possible to emit lints from DiagCtxtHandle somehow,

That's not possible, it doesn't contain the necessary information. DiagCtxtHandle just stores low-level things in order to properly emit diagnostics. For lints on the other hand we need to be able to check if they were suppressed via lint control attributes or flags (allow, expect) which form a hierarchy (apart from that, it needs access to the query system (so TyCtxt) & some other data that's stored in the Session).

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I haven't taken a deeper look at the code yet, so I'm not fully up to speed re. all the constraints that are imposed here but I can say that if lack of access to TyCtxt is a problem (as you've alluded to in the PR description), consider "buffering" the lint instead of calling node_span_lint eagerly. That's what rustc also does / has to do for early lints, it just puts them into a vector and emits them at a later point where TyCtxt is available.

Hopefully this is relevant ^^' I've yet to take a proper look. Of course if you want to avoid keeping TyCtxt or Session or XYZ alive for a longer time, buffering won't help.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The problem here is that the TyCtxt dies before we normally run the doctests, so I moved the doctest run earlier. Storing the warnings won't change anything, sadly.

crate::lint::FAILED_MERGED_DOCTEST_COMPILATION,
CRATE_HIR_ID,
tcx.hir_span(CRATE_HIR_ID),
|lint| {
lint.primary_message(format!(
"failed to compile merged doctests for edition {edition}. \
Reverting to standalone doctests."
));
},
);
}

// We failed to compile all compatible tests as one so we push them into the
// `standalone_tests` doctests.
debug!("Failed to compile compatible doctests for edition {} all at once", edition);
Expand Down
1 change: 1 addition & 0 deletions src/librustdoc/doctest/markdown.rs
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,7 @@ pub(crate) fn test(input: &Input, options: Options, dcx: DiagCtxtHandle<'_>) ->
standalone_tests,
mergeable_tests,
None,
None,
);
Ok(())
}
12 changes: 12 additions & 0 deletions src/librustdoc/lint.rs
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,17 @@ declare_rustdoc_lint! {
"detects redundant explicit links in doc comments"
}

declare_rustdoc_lint! {
/// This lint is **warn-by-default**. It warns when merged doctests fail to compile
/// when running doctests. This is a `rustdoc` only lint, see the documentation in
/// the [rustdoc book].
///
/// [rustdoc book]: ../../../rustdoc/lints.html#failed_merged_doctest_compilation
FAILED_MERGED_DOCTEST_COMPILATION,
Warn,
"warns when merged doctest fail to compile when running doctests"
}

pub(crate) static RUSTDOC_LINTS: Lazy<Vec<&'static Lint>> = Lazy::new(|| {
vec![
BROKEN_INTRA_DOC_LINKS,
Expand All @@ -209,6 +220,7 @@ pub(crate) static RUSTDOC_LINTS: Lazy<Vec<&'static Lint>> = Lazy::new(|| {
MISSING_CRATE_LEVEL_DOCS,
UNESCAPED_BACKTICKS,
REDUNDANT_EXPLICIT_LINKS,
FAILED_MERGED_DOCTEST_COMPILATION,
]
});

Expand Down
1 change: 1 addition & 0 deletions tests/rustdoc-ui/doctest/dead-code-2024.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
//@ normalize-stdout: "compilation took \d+\.\d+s" -> "compilation took $$TIME"
//@ failure-status: 101

#![allow(rustdoc::failed_merged_doctest_compilation)]
#![doc(test(attr(allow(unused_variables), deny(warnings))))]

/// Example
Expand Down
10 changes: 5 additions & 5 deletions tests/rustdoc-ui/doctest/dead-code-2024.stdout
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@

running 1 test
test $DIR/dead-code-2024.rs - f (line 15) - compile ... FAILED
test $DIR/dead-code-2024.rs - f (line 16) - compile ... FAILED

failures:

---- $DIR/dead-code-2024.rs - f (line 15) stdout ----
---- $DIR/dead-code-2024.rs - f (line 16) stdout ----
error: trait `T` is never used
--> $DIR/dead-code-2024.rs:16:7
--> $DIR/dead-code-2024.rs:17:7
|
LL | trait T { fn f(); }
| ^
|
note: the lint level is defined here
--> $DIR/dead-code-2024.rs:14:9
--> $DIR/dead-code-2024.rs:15:9
|
LL | #![deny(warnings)]
| ^^^^^^^^
Expand All @@ -23,7 +23,7 @@ error: aborting due to 1 previous error
Couldn't compile the test.

failures:
$DIR/dead-code-2024.rs - f (line 15)
$DIR/dead-code-2024.rs - f (line 16)

test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME

Expand Down
1 change: 1 addition & 0 deletions tests/rustdoc-ui/doctest/dead-code-items.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
//@ failure-status: 101

#![doc(test(attr(deny(warnings))))]
#![allow(rustdoc::failed_merged_doctest_compilation)]

#[doc(test(attr(allow(dead_code))))]
/// Example
Expand Down
Loading
Loading