From 979704dacdbb0ba839de2c3f73acee891be17a63 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Sun, 26 Oct 2025 16:18:50 +0800 Subject: [PATCH 001/131] Fix not applicable on statement for convert_to_guarded_return Fix not applicable in statement when exist else block Example --- ```rust fn main() { some_statements(); if$0 let Ok(x) = Err(92) { foo(x); } else { return; } some_statements(); } ``` **Before this PR** Assist not applicable **After this PR** ```rust fn main() { some_statements(); let Ok(x) = Err(92) else { return; }; foo(x); some_statements(); } ``` --- .../src/handlers/convert_to_guarded_return.rs | 72 ++++++++++++++++++- 1 file changed, 71 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs index 08b114072fd9f..ea5c1637b7608 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs @@ -95,7 +95,9 @@ fn if_expr_to_guarded_return( let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?; - if parent_block.tail_expr()? != if_expr.clone().into() { + if parent_block.tail_expr() != Some(if_expr.clone().into()) + && !(else_block.is_some() && ast::ExprStmt::can_cast(if_expr.syntax().parent()?.kind())) + { return None; } @@ -502,6 +504,36 @@ fn main() { ); } + #[test] + fn convert_if_let_has_else_block_in_statement() { + check_assist( + convert_to_guarded_return, + r#" +fn main() { + some_statements(); + if$0 let Ok(x) = Err(92) { + foo(x); + } else { + // needless comment + return; + } + some_statements(); +} +"#, + r#" +fn main() { + some_statements(); + let Ok(x) = Err(92) else { + // needless comment + return; + }; + foo(x); + some_statements(); +} +"#, + ); + } + #[test] fn convert_if_let_result_inside_let() { check_assist( @@ -1136,6 +1168,44 @@ fn main() { ); } + #[test] + fn ignore_else_if() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" +fn main() { + some_statements(); + if cond { + () + } else if$0 let Ok(x) = Err(92) { + foo(x); + } else { + return; + } + some_statements(); +} +"#, + ); + } + + #[test] + fn ignore_if_inside_let() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" +fn main() { + some_statements(); + let _ = if$0 let Ok(x) = Err(92) { + foo(x); + } else { + return; + } + some_statements(); +} +"#, + ); + } + #[test] fn ignore_let_else_branch() { check_assist_not_applicable( From c0ecf1ad40a953f01d8ebf5c035eef268cc001f8 Mon Sep 17 00:00:00 2001 From: Till Adam Date: Fri, 2 Jan 2026 21:57:02 +0100 Subject: [PATCH 002/131] Implement Span::line() and Span::column() for proc-macro server Add proper line/column resolution for proc-macro spans via a callback mechanism. Previously these methods returned hardcoded 1 values. The implementation adds: - SubRequest::LineColumn and SubResponse::LineColumnResult to the bidirectional protocol - ProcMacroClientInterface::line_column() method - Callback handling in load-cargo using LineIndex - Server implementation in RaSpanServer that uses the callback - a test for Span::line() and Span::column() in proc-macro server Add fn_like_span_line_column test proc-macro that exercises the new line/column API, and a corresponding test with a mock callback. 
--- src/tools/rust-analyzer/Cargo.lock | 1 + .../crates/load-cargo/src/lib.rs | 42 +++++++++--- .../src/bidirectional_protocol/msg.rs | 18 ++++- .../proc-macro-srv-cli/src/main_loop.rs | 14 ++++ .../crates/proc-macro-srv/Cargo.toml | 1 + .../proc-macro-test/imp/src/lib.rs | 10 +++ .../crates/proc-macro-srv/src/lib.rs | 2 + .../src/server_impl/rust_analyzer_span.rs | 10 ++- .../crates/proc-macro-srv/src/tests/mod.rs | 15 +++++ .../crates/proc-macro-srv/src/tests/utils.rs | 66 ++++++++++++++++++- 10 files changed, 161 insertions(+), 18 deletions(-) diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 42eaeb01f1f2a..8188fbf960645 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -1864,6 +1864,7 @@ dependencies = [ "intern", "libc", "libloading", + "line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "memmap2", "object", "paths", diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index e8d98b1ce661d..33468a5003c38 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -554,14 +554,12 @@ impl ProcMacroExpander for Expander { Ok(SubResponse::LocalFilePathResult { name }) } SubRequest::SourceText { file_id, ast_id, start, end } => { - let ast_id = span::ErasedFileAstId::from_raw(ast_id); - let editioned_file_id = span::EditionedFileId::from_raw(file_id); - let span = Span { - range: TextRange::new(TextSize::from(start), TextSize::from(end)), - anchor: SpanAnchor { file_id: editioned_file_id, ast_id }, - ctx: SyntaxContext::root(editioned_file_id.edition()), - }; - let range = db.resolve_span(span); + let range = resolve_sub_span( + db, + file_id, + ast_id, + TextRange::new(TextSize::from(start), TextSize::from(end)), + ); let source = db.file_text(range.file_id.file_id(db)).text(db); let text = source .get(usize::from(range.range.start())..usize::from(range.range.end())) @@ -569,6 +567,18 @@ impl ProcMacroExpander for Expander { Ok(SubResponse::SourceTextResult { text }) } + SubRequest::LineColumn { file_id, ast_id, offset } => { + let range = + resolve_sub_span(db, file_id, ast_id, TextRange::empty(TextSize::from(offset))); + let source = db.file_text(range.file_id.file_id(db)).text(db); + let line_index = ide_db::line_index::LineIndex::new(source); + let (line, column) = line_index + .try_line_col(range.range.start()) + .map(|lc| (lc.line + 1, lc.col + 1)) + .unwrap_or((1, 1)); + // proc_macro::Span line/column are 1-based + Ok(SubResponse::LineColumnResult { line, column }) + } SubRequest::FilePath { file_id } => { let file_id = FileId::from_raw(file_id); let source_root_id = db.file_source_root(file_id).source_root_id(db); @@ -603,6 +613,22 @@ impl ProcMacroExpander for Expander { } } +fn resolve_sub_span( + db: &dyn ExpandDatabase, + file_id: u32, + ast_id: u32, + range: TextRange, +) -> hir_expand::FileRange { + let ast_id = span::ErasedFileAstId::from_raw(ast_id); + let editioned_file_id = span::EditionedFileId::from_raw(file_id); + let span = Span { + range, + anchor: SpanAnchor { file_id: editioned_file_id, ast_id }, + ctx: SyntaxContext::root(editioned_file_id.edition()), + }; + db.resolve_span(span) +} + #[cfg(test)] mod tests { use ide_db::base_db::RootQueryDb; diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs index 
e41f8a5d7da76..0e3b700dcc5aa 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs @@ -13,13 +13,25 @@ pub enum SubRequest { FilePath { file_id: u32 }, SourceText { file_id: u32, ast_id: u32, start: u32, end: u32 }, LocalFilePath { file_id: u32 }, + LineColumn { file_id: u32, ast_id: u32, offset: u32 }, } #[derive(Debug, Serialize, Deserialize)] pub enum SubResponse { - FilePathResult { name: String }, - SourceTextResult { text: Option }, - LocalFilePathResult { name: Option }, + FilePathResult { + name: String, + }, + SourceTextResult { + text: Option, + }, + LocalFilePathResult { + name: Option, + }, + /// Line and column are 1-based. + LineColumnResult { + line: u32, + column: u32, + }, } #[derive(Debug, Serialize, Deserialize)] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index b2f4b96bd2551..22536a4e52b12 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -220,6 +220,20 @@ impl proc_macro_srv::ProcMacroClientInterface for ProcMacroClientHandl _ => None, } } + + fn line_column(&mut self, span: proc_macro_srv::span::Span) -> Option<(u32, u32)> { + let proc_macro_srv::span::Span { range, anchor, ctx: _ } = span; + match self.roundtrip(bidirectional::SubRequest::LineColumn { + file_id: anchor.file_id.as_u32(), + ast_id: anchor.ast_id.into_raw(), + offset: range.start().into(), + }) { + Some(bidirectional::BidirectionalMessage::SubResponse( + bidirectional::SubResponse::LineColumnResult { line, column }, + )) => Some((line, column)), + _ => None, + } + } } fn handle_expand_ra( diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index 361017178409b..8e5617f8a20eb 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -31,6 +31,7 @@ libc.workspace = true [dev-dependencies] expect-test.workspace = true +line-index.workspace = true # used as proc macro test targets proc-macro-test.path = "./proc-macro-test" diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs index b4fac26d6e72c..06c76b6d03815 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs @@ -79,6 +79,16 @@ pub fn fn_like_span_ops(args: TokenStream) -> TokenStream { TokenStream::from_iter(vec![first, second, third]) } +/// Returns the line and column of the first token's span as two integer literals. 
+#[proc_macro] +pub fn fn_like_span_line_column(args: TokenStream) -> TokenStream { + let first = args.into_iter().next().unwrap(); + let span = first.span(); + let line = Literal::usize_unsuffixed(span.line()); + let column = Literal::usize_unsuffixed(span.column()); + TokenStream::from_iter(vec![TokenTree::Literal(line), TokenTree::Literal(column)]) +} + #[proc_macro_attribute] pub fn attr_noop(_args: TokenStream, item: TokenStream) -> TokenStream { item diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index f2d1dfbba4ccb..c1ef49a7176b0 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -98,6 +98,8 @@ pub trait ProcMacroClientInterface { fn file(&mut self, file_id: span::FileId) -> String; fn source_text(&mut self, span: Span) -> Option; fn local_file(&mut self, file_id: span::FileId) -> Option; + /// Line and column are 1-based. + fn line_column(&mut self, span: Span) -> Option<(u32, u32)>; } const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index 32725afc55272..3a25391b573bf 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -257,14 +257,12 @@ impl server::Span for RaSpanServer<'_> { Span { range: TextRange::empty(span.range.start()), ..span } } - fn line(&mut self, _span: Self::Span) -> usize { - // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL - 1 + fn line(&mut self, span: Self::Span) -> usize { + self.callback.as_mut().and_then(|cb| cb.line_column(span)).map_or(1, |(l, _)| l as usize) } - fn column(&mut self, _span: Self::Span) -> usize { - // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL - 1 + fn column(&mut self, span: Self::Span) -> usize { + self.callback.as_mut().and_then(|cb| cb.line_column(span)).map_or(1, |(_, c)| c as usize) } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs index 20507a6def54d..ebef9a9a519a7 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs @@ -703,6 +703,7 @@ fn list_test_macros() { fn_like_mk_idents [Bang] fn_like_span_join [Bang] fn_like_span_ops [Bang] + fn_like_span_line_column [Bang] attr_noop [Attr] attr_panic [Attr] attr_error [Attr] @@ -712,3 +713,17 @@ fn list_test_macros() { DeriveError [CustomDerive]"#]] .assert_eq(&res); } + +#[test] +fn test_fn_like_span_line_column() { + assert_expand_with_callback( + "fn_like_span_line_column", + // Input text with known position: "hello" starts at offset 1 (line 2, column 1 in 1-based) + " +hello", + expect![[r#" + LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 2 + LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 1 + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 61fcd810b1d9d..81ff1965d68b4 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -6,7 +6,8 @@ use span::{ }; use crate::{ - 
EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path, token_stream::TokenStream, + EnvSnapshot, ProcMacroClientInterface, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path, + token_stream::TokenStream, }; fn parse_string(call_site: SpanId, src: &str) -> TokenStream { @@ -109,3 +110,66 @@ pub(crate) fn list() -> Vec { let res = srv.list_macros(&dylib_path).unwrap(); res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect() } + +/// A mock callback for testing that computes line/column from the input text. +struct MockCallback<'a> { + text: &'a str, +} + +impl ProcMacroClientInterface for MockCallback<'_> { + fn source_text(&mut self, span: Span) -> Option { + self.text + .get(usize::from(span.range.start())..usize::from(span.range.end())) + .map(ToOwned::to_owned) + } + + fn file(&mut self, _file_id: FileId) -> String { + String::new() + } + + fn local_file(&mut self, _file_id: FileId) -> Option { + None + } + + fn line_column(&mut self, span: Span) -> Option<(u32, u32)> { + let line_index = line_index::LineIndex::new(self.text); + let line_col = line_index.try_line_col(span.range.start())?; + // proc_macro uses 1-based line/column + Some((line_col.line as u32 + 1, line_col.col as u32 + 1)) + } +} + +pub fn assert_expand_with_callback( + macro_name: &str, + #[rust_analyzer::rust_fixture] ra_fixture: &str, + expect_spanned: Expect, +) { + let path = proc_macro_test_dylib_path(); + let expander = dylib::Expander::new(&temp_dir::TempDir::new().unwrap(), &path).unwrap(); + + let def_site = Span { + range: TextRange::new(0.into(), 150.into()), + anchor: SpanAnchor { + file_id: EditionedFileId::current_edition(FileId::from_raw(41)), + ast_id: ROOT_ERASED_FILE_AST_ID, + }, + ctx: SyntaxContext::root(span::Edition::CURRENT), + }; + let call_site = Span { + range: TextRange::new(0.into(), 100.into()), + anchor: SpanAnchor { + file_id: EditionedFileId::current_edition(FileId::from_raw(42)), + ast_id: ROOT_ERASED_FILE_AST_ID, + }, + ctx: SyntaxContext::root(span::Edition::CURRENT), + }; + let mixed_site = call_site; + + let fixture = parse_string_spanned(call_site.anchor, call_site.ctx, ra_fixture); + + let mut callback = MockCallback { text: ra_fixture }; + let res = expander + .expand(macro_name, fixture, None, def_site, call_site, mixed_site, Some(&mut callback)) + .unwrap(); + expect_spanned.assert_eq(&format!("{res:?}")); +} From 4fbc52085fda746d8128a79b878a5008e94a81fe Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 5 Jan 2026 11:30:51 +0100 Subject: [PATCH 003/131] perf: Re-use scratch allocations for `try_evaluate_obligations` --- .../crates/hir-ty/src/next_solver/fulfill.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs index 0fe073297279a..a8bff44a02583 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs @@ -48,6 +48,7 @@ pub struct FulfillmentCtxt<'db> { /// use the context in exactly this snapshot. 
#[expect(unused)] usable_in_snapshot: usize, + try_evaluate_obligations_scratch: PendingObligations<'db>, } #[derive(Default, Debug, Clone)] @@ -115,6 +116,7 @@ impl<'db> FulfillmentCtxt<'db> { FulfillmentCtxt { obligations: Default::default(), usable_in_snapshot: infcx.num_open_snapshots(), + try_evaluate_obligations_scratch: Default::default(), } } } @@ -162,12 +164,12 @@ impl<'db> FulfillmentCtxt<'db> { // and select. They should use a different `ObligationCtxt` instead. Then we'll be also able // to not put the obligations queue in `InferenceTable`'s snapshots. // assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots()); + self.try_evaluate_obligations_scratch.clear(); let mut errors = Vec::new(); - let mut obligations = Vec::new(); loop { let mut any_changed = false; - obligations.extend(self.obligations.drain_pending(|_| true)); - for (mut obligation, stalled_on) in obligations.drain(..) { + self.try_evaluate_obligations_scratch.extend(self.obligations.drain_pending(|_| true)); + for (mut obligation, stalled_on) in self.try_evaluate_obligations_scratch.drain(..) { if obligation.recursion_depth >= infcx.interner.recursion_limit() { self.obligations.on_fulfillment_overflow(infcx); // Only return true errors that we have accumulated while processing. From 4699fdc2887bd9a28921ca9e73193aa38f94d0c4 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Tue, 6 Jan 2026 16:53:34 +0800 Subject: [PATCH 004/131] Fix loses exists guard for move_guard Example --- ```rust fn main() { let cond = true; match 92 { 3 => true, x if cond => if x $0> 10 { false } else if x > 5 { true } else if x > 4 || x < -2 { false } else { true }, } } ``` **Before this PR** ```rust fn main() { let cond = true; match 92 { 3 => true, x if x > 10 => false, x if x > 5 => true, x if x > 4 || x < -2 => false, x => true, } } ``` **After this PR** ```rust fn main() { let cond = true; match 92 { 3 => true, x if cond && x > 10 => false, x if cond && x > 5 => true, x if cond && (x > 4 || x < -2) => false, x if cond => true, } } ``` --- .../ide-assists/src/handlers/move_guard.rs | 65 +++++++++++++++++-- 1 file changed, 60 insertions(+), 5 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs index 1c0c6e43d53be..31baa63372ff0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs @@ -3,7 +3,7 @@ use syntax::{ SyntaxKind::WHITESPACE, ast::{ AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat, edit::AstNodeEdit, make, - syntax_factory::SyntaxFactory, + prec::ExprPrecedence, syntax_factory::SyntaxFactory, }, syntax_editor::Element, }; @@ -109,6 +109,7 @@ pub(crate) fn move_arm_cond_to_match_guard( let match_arm: MatchArm = ctx.find_node_at_offset::()?; let match_pat = match_arm.pat()?; let arm_body = match_arm.expr()?; + let arm_guard = match_arm.guard().and_then(|it| it.condition()); let mut replace_node = None; let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone()).or_else(|| { @@ -149,6 +150,25 @@ pub(crate) fn move_arm_cond_to_match_guard( 0 }; let indent_level = match_arm.indent_level(); + let make_guard = |cond: Option| { + let condition = match (arm_guard.clone(), cond) { + (None, None) => return None, + (None, Some(it)) | (Some(it), None) => it, + (Some(lhs), Some(rhs)) => { + let op_expr = |expr: Expr| { + if expr.precedence().needs_parentheses_in(ExprPrecedence::LAnd) { + 
make.expr_paren(expr).into() + } else { + expr + } + }; + let op = syntax::ast::BinaryOp::LogicOp(syntax::ast::LogicOp::And); + let expr_bin = make.expr_bin(op_expr(lhs), op, op_expr(rhs)); + expr_bin.into() + } + }; + Some(make.match_guard(condition)) + }; for (cond, block) in conds_blocks { let only_expr = block.statements().next().is_none(); @@ -156,8 +176,7 @@ pub(crate) fn move_arm_cond_to_match_guard( Some(then_expr) if only_expr => then_expr, _ => block.dedent(dedent.into()).into(), }; - let guard = make.match_guard(cond); - let new_arm = make.match_arm(match_pat.clone(), Some(guard), expr); + let new_arm = make.match_arm(match_pat.clone(), make_guard(Some(cond)), expr); replace_arms.push(new_arm); } if let Some(block) = tail { @@ -170,7 +189,7 @@ pub(crate) fn move_arm_cond_to_match_guard( } _ => block.dedent(dedent.into()).into(), }; - let new_arm = make.match_arm(match_pat, None, expr); + let new_arm = make.match_arm(match_pat, make_guard(None), expr); replace_arms.push(new_arm); } else { // There's no else branch. Add a pattern without guard, unless the following match @@ -185,7 +204,7 @@ pub(crate) fn move_arm_cond_to_match_guard( } _ => { let block_expr = make.expr_empty_block().into(); - replace_arms.push(make.match_arm(match_pat, None, block_expr)); + replace_arms.push(make.match_arm(match_pat, make_guard(None), block_expr)); } } } @@ -1081,6 +1100,42 @@ fn main() { x => {} } } +"#, + ) + } + + #[test] + fn move_arm_cond_to_match_guard_elseif_exist_guard() { + check_assist( + move_arm_cond_to_match_guard, + r#" +fn main() { + let cond = true; + match 92 { + 3 => true, + x if cond => if x $0> 10 { + false + } else if x > 5 { + true + } else if x > 4 || x < -2 { + false + } else { + true + }, + } +} +"#, + r#" +fn main() { + let cond = true; + match 92 { + 3 => true, + x if cond && x > 10 => false, + x if cond && x > 5 => true, + x if cond && (x > 4 || x < -2) => false, + x if cond => true, + } +} "#, ) } From a978fdcdace3c9bee4d11e03ee3ea2be5792025c Mon Sep 17 00:00:00 2001 From: Hendrik Lind Date: Tue, 6 Jan 2026 20:49:14 +0100 Subject: [PATCH 005/131] fix: use crates where ADT was defined in deref_chain of trait_applicable_items --- .../ide-completion/src/tests/flyimport.rs | 48 +++++++++++++++++++ .../ide-db/src/imports/import_assets.rs | 14 +++++- 2 files changed, 61 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index 797df3f163dab..d7db896679dfd 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -1976,3 +1976,51 @@ fn main() { "#]], ); } + +#[test] +fn trait_method_import_across_multiple_crates() { + let fixture = r#" + //- /lib.rs crate:test-trait + pub trait TestTrait { + fn test_function(&self) -> u32; + } + + //- /lib.rs crate:test-implementation deps:test-trait + pub struct TestStruct(pub usize); + + impl test_trait::TestTrait for TestStruct { + fn test_function(&self) -> u32 { + 1 + } + } + + //- /main.rs crate:main deps:test-implementation,test-trait + use test_implementation::TestStruct; + + fn main() { + let test = TestStruct(42); + test.test_f$0 + } + "#; + + check( + fixture, + expect![[r#" + me test_function() (use test_trait::TestTrait) fn(&self) -> u32 + "#]], + ); + + check_edit( + "test_function", + fixture, + r#" +use test_implementation::TestStruct; +use test_trait::TestTrait; + +fn main() { + let test = 
TestStruct(42); + test.test_function()$0 +} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 90e3bb61f44d3..35579eb2590dc 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -600,7 +600,19 @@ fn trait_applicable_items<'db>( } deref_chain .into_iter() - .filter_map(|ty| Some((ty.krate(db).into(), ty.fingerprint_for_trait_impl()?))) + .flat_map(|ty| { + let fingerprint = ty.fingerprint_for_trait_impl()?; + let mut crates = vec![]; + + if let Some(adt) = ty.as_adt() { + // Push crate where ADT was defined + crates.push((adt.krate(db).into(), fingerprint)); + } + // Always include environment crate + crates.push((ty.krate(db).into(), fingerprint)); + Some(crates) + }) + .flatten() .unique() .collect::>() }; From 6cc9e5ccd3728842f23502dc9d1f9de08056a340 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 7 Jan 2026 08:15:27 +0100 Subject: [PATCH 006/131] Document `Query` --- .../crates/ide-db/src/symbol_index.rs | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index eb0529d6b5e75..06e1f6bb4560e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -40,15 +40,61 @@ use salsa::Update; use crate::RootDatabase; +/// A query for searching symbols in the workspace or dependencies. +/// +/// This struct configures how symbol search is performed, including the search text, +/// matching strategy, and filtering options. It is used by [`world_symbols`] to find +/// symbols across the codebase. +/// +/// # Example +/// ```ignore +/// let mut query = Query::new("MyStruct".to_string()); +/// query.only_types(); // Only search for type definitions +/// query.libs(); // Include library dependencies +/// query.exact(); // Use exact matching instead of fuzzy +/// ``` #[derive(Debug, Clone)] pub struct Query { + /// The original search query string as provided by the user. + /// Used for the final matching check via [`SearchMode::check`]. query: String, + /// Lowercase version of [`Self::query`], pre-computed for efficiency. + /// Used to build FST automata for case-insensitive index lookups. lowercased: String, + /// The search strategy to use when matching symbols. + /// - [`SearchMode::Exact`]: Symbol name must exactly match the query. + /// - [`SearchMode::Fuzzy`]: Symbol name must contain all query characters in order (subsequence match). + /// - [`SearchMode::Prefix`]: Symbol name must start with the query string. + /// + /// Defaults to [`SearchMode::Fuzzy`]. mode: SearchMode, + /// Controls filtering of trait-associated items (methods, constants, types). + /// - [`AssocSearchMode::Include`]: Include both associated and non-associated items. + /// - [`AssocSearchMode::Exclude`]: Exclude trait-associated items from results. + /// - [`AssocSearchMode::AssocItemsOnly`]: Only return trait-associated items. + /// + /// Defaults to [`AssocSearchMode::Include`]. assoc_mode: AssocSearchMode, + /// Whether the final symbol name comparison should be case-sensitive. + /// When `false`, matching is case-insensitive (e.g., "foo" matches "Foo"). + /// + /// Defaults to `false`. 
case_sensitive: bool, + /// When `true`, only return type definitions: structs, enums, unions, + /// type aliases, built-in types, and traits. Functions, constants, statics, + /// and modules are excluded. + /// + /// Defaults to `false`. only_types: bool, + /// When `true`, search library dependency roots instead of local workspace crates. + /// This enables finding symbols in external dependencies including the standard library. + /// + /// Defaults to `false` (search local workspace only). libs: bool, + /// When `true`, exclude re-exported/imported symbols from results, + /// showing only the original definitions. + /// + /// Defaults to `false`. exclude_imports: bool, } From 1af7813baa02ec2f05acbaae5238b37bc0ebe5d7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 7 Jan 2026 09:12:00 +0100 Subject: [PATCH 007/131] Document `WithFixture` --- .../crates/test-fixture/src/lib.rs | 109 ++++++++++++++++++ 1 file changed, 109 insertions(+) diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index d81f27d7c3b1d..ca68edd88c059 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -37,7 +37,110 @@ use triomphe::Arc; pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0); +/// A trait for setting up test databases from fixture strings. +/// +/// Fixtures are strings containing Rust source code with optional metadata that describe +/// a project setup. This is the primary way to write tests for rust-analyzer without +/// having to depend on the entire sysroot. +/// +/// # Fixture Syntax +/// +/// ## Basic Structure +/// +/// A fixture without metadata is parsed into a single source file (`/main.rs`). +/// Metadata is added after a `//-` comment prefix. +/// +/// ```text +/// //- /main.rs +/// fn main() { +/// println!("Hello"); +/// } +/// ``` +/// +/// Note that the fixture syntax is optional and can be omitted if the test only requires +/// a simple single file. +/// +/// ## File Metadata +/// +/// Each file can have the following metadata after `//-`: +/// +/// - **Path** (required): Must start with `/`, e.g., `/main.rs`, `/lib.rs`, `/foo/bar.rs` +/// - **`crate:`**: Defines a new crate with this file as its root +/// - Optional version: `crate:foo@0.1.0,https://example.com/repo.git` +/// - **`deps:,`**: Dependencies (requires `crate:`) +/// - **`extern-prelude:,`**: Limits extern prelude to specified crates +/// - **`edition:`**: Rust edition (2015, 2018, 2021, 2024). Defaults to current. 
+/// - **`cfg:=,`**: Configuration options, e.g., `cfg:test,feature="foo"` +/// - **`env:=`**: Environment variables +/// - **`crate-attr:`**: Crate-level attributes, e.g., `crate-attr:no_std` +/// - **`new_source_root:local|library`**: Starts a new source root +/// - **`library`**: Marks crate as external library (not workspace member) +/// +/// ## Global Meta (must appear at the top, in order) +/// +/// - **`//- toolchain: nightly|stable`**: Sets the Rust toolchain (default: stable) +/// - **`//- target_data_layout: `**: LLVM data layout string +/// - **`//- target_arch: `**: Target architecture (default: x86_64) +/// - **`//- proc_macros: ,`**: Enables predefined test proc macros +/// - **`//- minicore: , `**: Includes subset of libcore +/// +/// ## Cursor Markers +/// +/// Use `$0` to mark cursor position(s) in the fixture: +/// - Single `$0`: marks a position (use with [`with_position`](Self::with_position)) +/// - Two `$0` markers: marks a range (use with [`with_range`](Self::with_range)) +/// - Escape as `\$0` if you need a literal `$0` +/// +/// # Examples +/// +/// ## Single file with cursor position +/// ```text +/// r#" +/// fn main() { +/// let x$0 = 42; +/// } +/// "# +/// ``` +/// +/// ## Multiple crates with dependencies +/// ```text +/// r#" +/// //- /main.rs crate:main deps:helper +/// use helper::greet; +/// fn main() { greet(); } +/// +/// //- /lib.rs crate:helper +/// pub fn greet() {} +/// "# +/// ``` +/// +/// ## Using minicore for lang items +/// ```text +/// r#" +/// //- minicore: option, result, iterator +/// //- /main.rs +/// fn foo() -> Option { Some(42) } +/// "# +/// ``` +/// +/// The available minicore flags are listed at the top of crates\test-utils\src\minicore.rs. +/// +/// ## Using test proc macros +/// ```text +/// r#" +/// //- proc_macros: identity, mirror +/// //- /main.rs crate:main deps:proc_macros +/// use proc_macros::identity; +/// +/// #[identity] +/// fn foo() {} +/// "# +/// ``` +/// +/// Available proc macros: `identity` (attr), `DeriveIdentity` (derive), `input_replace` (attr), +/// `mirror` (bang), `shorten` (bang) pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static { + /// See the trait documentation for more information on fixtures. #[track_caller] fn with_single_file( #[rust_analyzer::rust_fixture] ra_fixture: &str, @@ -50,6 +153,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static { (db, file) } + /// See the trait documentation for more information on fixtures. #[track_caller] fn with_many_files( #[rust_analyzer::rust_fixture] ra_fixture: &str, @@ -66,6 +170,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static { (db, files) } + /// See the trait documentation for more information on fixtures. #[track_caller] fn with_files(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Self { let mut db = Self::default(); @@ -75,6 +180,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static { db } + /// See the trait documentation for more information on fixtures. #[track_caller] fn with_files_extra_proc_macros( #[rust_analyzer::rust_fixture] ra_fixture: &str, @@ -88,6 +194,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static { db } + /// See the trait documentation for more information on fixtures. 
#[track_caller] fn with_position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Self, FilePosition) { let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture); @@ -95,6 +202,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static { (db, FilePosition { file_id, offset }) } + /// See the trait documentation for more information on fixtures. #[track_caller] fn with_range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Self, FileRange) { let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture); @@ -102,6 +210,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static { (db, FileRange { file_id, range }) } + /// See the trait documentation for more information on fixtures. #[track_caller] fn with_range_or_offset( #[rust_analyzer::rust_fixture] ra_fixture: &str, From ad020937855df0af1dc7416be0b12d0b8c2a89a1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 7 Jan 2026 08:46:47 +0100 Subject: [PATCH 008/131] feat: Allow rust paths in symbol search --- .../rust-analyzer/crates/hir/src/symbols.rs | 72 +- .../crates/ide-db/src/symbol_index.rs | 664 +++++++++++++++++- 2 files changed, 724 insertions(+), 12 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index 073142670d2af..544c759ed3a77 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -9,6 +9,7 @@ use hir_def::{ ModuleDefId, ModuleId, TraitId, db::DefDatabase, item_scope::{ImportId, ImportOrExternCrate, ImportOrGlob}, + nameres::crate_def_map, per_ns::Item, src::{HasChildSource, HasSource}, visibility::{Visibility, VisibilityExplicitness}, @@ -20,9 +21,12 @@ use hir_ty::{ }; use intern::Symbol; use rustc_hash::FxHashMap; -use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName}; +use syntax::{ + AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ToSmolStr, + ast::{HasModuleItem, HasName}, +}; -use crate::{HasCrate, Module, ModuleDef, Semantics}; +use crate::{Crate, HasCrate, Module, ModuleDef, Semantics}; /// The actual data that is stored in the index. It should be as compact as /// possible. @@ -57,6 +61,70 @@ impl DeclarationLocation { } } +impl<'db> FileSymbol<'db> { + /// Create a `FileSymbol` representing a crate's root module. + /// This is used for crate search queries like `::` or `::foo`. + pub fn for_crate_root(db: &'db dyn HirDatabase, krate: Crate) -> Option> { + let display_name = krate.display_name(db)?; + let crate_name = display_name.crate_name(); + let root_module = krate.root_module(db); + let def_map = crate_def_map(db, krate.into()); + let module_data = &def_map[root_module.into()]; + + // Get the definition source (the source file for crate roots) + let definition = module_data.origin.definition_source(db); + let hir_file_id = definition.file_id; + + // For a crate root, the "declaration" is the source file itself + // We use the entire file's syntax node as the location + let syntax_node = definition.value.node(); + let ptr = SyntaxNodePtr::new(&syntax_node); + + // For the name, we need to create a synthetic name pointer. + // We'll use the first token of the file as a placeholder since crate roots + // don't have an explicit name in the source. + // We create a name_ptr pointing to the start of the file. 
+ let name_ptr = match &definition.value { + crate::ModuleSource::SourceFile(sf) => { + // Try to find the first item with a name as a reasonable location for focus + // This is a bit of a hack but works for navigation purposes + let first_item: Option = sf.items().next(); + if let Some(item) = first_item { + if let Some(name) = item.syntax().children().find_map(syntax::ast::Name::cast) { + AstPtr::new(&name).wrap_left() + } else { + // No name found, try to use a NameRef instead + if let Some(name_ref) = + item.syntax().descendants().find_map(syntax::ast::NameRef::cast) + { + AstPtr::new(&name_ref).wrap_right() + } else { + return None; + } + } + } else { + return None; + } + } + _ => return None, + }; + + let loc = DeclarationLocation { hir_file_id, ptr, name_ptr }; + + Some(FileSymbol { + name: Symbol::intern(crate_name.as_str()), + def: ModuleDef::Module(root_module), + loc, + container_name: None, + is_alias: false, + is_assoc: false, + is_import: false, + do_not_complete: Complete::Yes, + _marker: PhantomData, + }) + } +} + /// Represents an outstanding module that the symbol collector must collect symbols from. #[derive(Debug)] struct SymbolCollectorWork { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index 06e1f6bb4560e..ca0d5ec1e5e62 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -55,12 +55,17 @@ use crate::RootDatabase; /// ``` #[derive(Debug, Clone)] pub struct Query { - /// The original search query string as provided by the user. - /// Used for the final matching check via [`SearchMode::check`]. + /// The item name to search for (last segment of the path, or full query if no path). + /// When empty with a non-empty `path_filter`, returns all items in that module. query: String, /// Lowercase version of [`Self::query`], pre-computed for efficiency. /// Used to build FST automata for case-insensitive index lookups. lowercased: String, + /// Path segments to filter by (all segments except the last). + /// Empty if no `::` in the original query. + path_filter: Vec, + /// If true, the first path segment must be a crate name (query started with `::`). + anchor_to_crate: bool, /// The search strategy to use when matching symbols. /// - [`SearchMode::Exact`]: Symbol name must exactly match the query. /// - [`SearchMode::Fuzzy`]: Symbol name must contain all query characters in order (subsequence match). @@ -100,10 +105,13 @@ pub struct Query { impl Query { pub fn new(query: String) -> Query { - let lowercased = query.to_lowercase(); + let (path_filter, item_query, anchor_to_crate) = Self::parse_path_query(&query); + let lowercased = item_query.to_lowercase(); Query { - query, + query: item_query, lowercased, + path_filter, + anchor_to_crate, only_types: false, libs: false, mode: SearchMode::Fuzzy, @@ -113,6 +121,74 @@ impl Query { } } + /// Parse a query string that may contain path segments. + /// + /// Returns (path_filter, item_query, anchor_to_crate) where: + /// - `path_filter`: Path segments to match (all but the last segment) + /// - `item_query`: The item name to search for (last segment) + /// - `anchor_to_crate`: Whether the first segment must be a crate name + fn parse_path_query(query: &str) -> (Vec, String, bool) { + // Check for leading :: (absolute path / crate search) + let anchor_to_crate = query.starts_with("::"); + let query = if anchor_to_crate { &query[2..] 
} else { query }; + + // Handle sole "::" - return all crates + if query.is_empty() && anchor_to_crate { + return (vec![], String::new(), true); + } + + // Check for trailing :: (module browsing - returns all items in module) + let return_all_in_module = query.ends_with("::"); + let query = if return_all_in_module { query.trim_end_matches("::") } else { query }; + + if !query.contains("::") { + // No path separator - single segment + if anchor_to_crate && !return_all_in_module { + // "::foo" - fuzzy search crate names only + return (vec![], query.to_string(), true); + } + if return_all_in_module { + // "foo::" - browse all items in module "foo" + // path_filter = ["foo"], query = "", anchor_to_crate = false/true + return (vec![query.to_string()], String::new(), anchor_to_crate); + } + // Plain "foo" - normal fuzzy search + return (vec![], query.to_string(), false); + } + + // Filter out empty segments (e.g., "foo::::bar" -> "foo::bar") + let segments: Vec<&str> = query.split("::").filter(|s| !s.is_empty()).collect(); + + if segments.is_empty() { + return (vec![], String::new(), anchor_to_crate); + } + + let path: Vec = + segments[..segments.len() - 1].iter().map(|s| s.to_string()).collect(); + let item = if return_all_in_module { + // All segments go to path, item is empty + let mut path = path; + path.push(segments.last().unwrap().to_string()); + return (path, String::new(), anchor_to_crate); + } else { + segments.last().unwrap_or(&"").to_string() + }; + + (path, item, anchor_to_crate) + } + + /// Returns true if this query should return all items in a module + /// (i.e., the original query ended with `::`) + fn is_module_browsing(&self) -> bool { + self.query.is_empty() && !self.path_filter.is_empty() + } + + /// Returns true if this query is searching for crates + /// (i.e., the query was "::" alone or "::foo" for fuzzy crate search) + fn is_crate_search(&self) -> bool { + self.anchor_to_crate && self.path_filter.is_empty() + } + pub fn only_types(&mut self) { self.only_types = true; } @@ -181,7 +257,28 @@ pub fn crate_symbols(db: &dyn HirDatabase, krate: Crate) -> Box<[&SymbolIndex<'_ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec> { let _p = tracing::info_span!("world_symbols", query = ?query.query).entered(); - let indices: Vec<_> = if query.libs { + // Handle special case: "::" alone or "::foo" for crate search + if query.is_crate_search() { + return search_crates(db, &query); + } + + // If we have a path filter, resolve it to target modules first + let indices: Vec<_> = if !query.path_filter.is_empty() { + let target_modules = resolve_path_to_modules( + db, + &query.path_filter, + query.anchor_to_crate, + query.case_sensitive, + ); + + if target_modules.is_empty() { + return vec![]; // Path doesn't resolve to any module + } + + // Get symbol indices only for the resolved modules + target_modules.iter().map(|&module| SymbolIndex::module_symbols(db, module)).collect() + } else if query.libs { + // Original behavior for non-path queries searching libs LibraryRoots::get(db) .roots(db) .par_iter() @@ -192,6 +289,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec> { .map(|&root| SymbolIndex::library_symbols(db, root)) .collect() } else { + // Original behavior for non-path queries searching local crates let mut crates = Vec::new(); for &root in LocalRoots::get(db).roots(db).iter() { @@ -204,13 +302,131 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec> { }; let mut res = vec![]; - query.search::<()>(&indices, |f| { - res.push(f.clone()); - 
ControlFlow::Continue(()) - }); + + // For module browsing (empty query, non-empty path_filter), return all symbols + if query.is_module_browsing() { + for index in &indices { + for symbol in index.symbols.iter() { + // Apply existing filters (only_types, assoc_mode, exclude_imports, etc.) + if query.matches_symbol_filters(symbol) { + res.push(symbol.clone()); + } + } + } + } else { + // Normal search: use FST to match item name + query.search::<()>(&indices, |f| { + res.push(f.clone()); + ControlFlow::Continue(()) + }); + } + res } +/// Search for crates by name (handles "::" and "::foo" queries) +fn search_crates<'db>(db: &'db RootDatabase, query: &Query) -> Vec> { + let mut res = vec![]; + + for krate in Crate::all(db) { + let Some(display_name) = krate.display_name(db) else { continue }; + let crate_name = display_name.crate_name().as_str(); + + // If query is empty (sole "::"), return all crates + // Otherwise, fuzzy match the crate name + let matches = if query.query.is_empty() { + true + } else { + query.mode.check(&query.query, query.case_sensitive, crate_name) + }; + + if matches { + // Create a FileSymbol for the crate's root module + if let Some(symbol) = hir::symbols::FileSymbol::for_crate_root(db, krate) { + res.push(symbol); + } + } + } + + res +} + +/// Resolve a path filter to the target module(s) it points to. +/// Returns the modules whose symbol indices should be searched. +/// +/// The path_filter contains segments like ["std", "vec"] for a query like "std::vec::Vec". +/// We resolve this by: +/// 1. Finding crates matching the first segment +/// 2. Walking down the module tree following subsequent segments +fn resolve_path_to_modules( + db: &dyn HirDatabase, + path_filter: &[String], + anchor_to_crate: bool, + case_sensitive: bool, +) -> Vec { + let [first_segment, rest_segments @ ..] 
= path_filter else { + return vec![]; + }; + + // Helper for name comparison + let names_match = |actual: &str, expected: &str| -> bool { + if case_sensitive { actual == expected } else { actual.eq_ignore_ascii_case(expected) } + }; + + // Find crates matching the first segment + let matching_crates: Vec = Crate::all(db) + .into_iter() + .filter(|krate| { + krate + .display_name(db) + .is_some_and(|name| names_match(name.crate_name().as_str(), first_segment)) + }) + .collect(); + + // If anchor_to_crate is true, first segment MUST be a crate name + // If anchor_to_crate is false, first segment could be a crate OR a module in local crates + let mut candidate_modules: Vec = vec![]; + + // Add crate root modules for matching crates + for krate in matching_crates { + candidate_modules.push(krate.root_module(db)); + } + + // If not anchored to crate, also search for modules matching first segment in local crates + if !anchor_to_crate { + for &root in LocalRoots::get(db).roots(db).iter() { + for &krate in db.source_root_crates(root).iter() { + let root_module = Crate::from(krate).root_module(db); + for child in root_module.children(db) { + if let Some(name) = child.name(db) { + if names_match(name.as_str(), first_segment) { + candidate_modules.push(child); + } + } + } + } + } + } + + // Walk down the module tree for remaining path segments + for segment in rest_segments { + candidate_modules = candidate_modules + .into_iter() + .flat_map(|module| { + module.children(db).filter(|child| { + child.name(db).is_some_and(|name| names_match(name.as_str(), segment)) + }) + }) + .collect(); + + if candidate_modules.is_empty() { + break; + } + } + + candidate_modules +} + #[derive(Default)] pub struct SymbolIndex<'db> { symbols: Box<[FileSymbol<'db>]>, @@ -382,12 +598,14 @@ impl<'db> SymbolIndex<'db> { } impl Query { + /// Search symbols in the given indices. pub(crate) fn search<'db, T>( - self, + &self, indices: &[&'db SymbolIndex<'db>], cb: impl FnMut(&'db FileSymbol<'db>) -> ControlFlow, ) -> Option { let _p = tracing::info_span!("symbol_index::Query::search").entered(); + let mut op = fst::map::OpBuilder::new(); match self.mode { SearchMode::Exact => { @@ -466,6 +684,41 @@ impl Query { (true, AssocSearchMode::Exclude) | (false, AssocSearchMode::AssocItemsOnly) ) } + + /// Check if a symbol passes all filters except name matching. + /// Used for module browsing where we want all items in a module. + fn matches_symbol_filters(&self, symbol: &FileSymbol<'_>) -> bool { + // Check only_types filter + if self.only_types + && !matches!( + symbol.def, + hir::ModuleDef::Adt(..) + | hir::ModuleDef::TypeAlias(..) + | hir::ModuleDef::BuiltinType(..) + | hir::ModuleDef::Trait(..) 
+ ) + { + return false; + } + + // Check assoc_mode filter + if !self.matches_assoc_mode(symbol.is_assoc) { + return false; + } + + // Check exclude_imports filter + if self.exclude_imports && symbol.is_import { + return false; + } + + // Check underscore prefix + let ignore_underscore_prefixed = !self.query.starts_with("__"); + if ignore_underscore_prefixed && symbol.name.as_str().starts_with("__") { + return false; + } + + true + } } #[cfg(test)] @@ -622,4 +875,395 @@ pub struct Foo; let symbols = world_symbols(&db, query); expect_file!["./test_data/test_symbols_exclude_imports.txt"].assert_debug_eq(&symbols); } + + #[test] + fn test_parse_path_query() { + // Plain query - no path + let (path, item, anchor) = Query::parse_path_query("Item"); + assert_eq!(path, Vec::::new()); + assert_eq!(item, "Item"); + assert!(!anchor); + + // Path with item + let (path, item, anchor) = Query::parse_path_query("foo::Item"); + assert_eq!(path, vec!["foo"]); + assert_eq!(item, "Item"); + assert!(!anchor); + + // Multi-segment path + let (path, item, anchor) = Query::parse_path_query("foo::bar::Item"); + assert_eq!(path, vec!["foo", "bar"]); + assert_eq!(item, "Item"); + assert!(!anchor); + + // Leading :: (anchor to crate) + let (path, item, anchor) = Query::parse_path_query("::std::vec::Vec"); + assert_eq!(path, vec!["std", "vec"]); + assert_eq!(item, "Vec"); + assert!(anchor); + + // Just "::" - return all crates + let (path, item, anchor) = Query::parse_path_query("::"); + assert_eq!(path, Vec::::new()); + assert_eq!(item, ""); + assert!(anchor); + + // "::foo" - fuzzy search crate names + let (path, item, anchor) = Query::parse_path_query("::foo"); + assert_eq!(path, Vec::::new()); + assert_eq!(item, "foo"); + assert!(anchor); + + // Trailing :: (module browsing) + let (path, item, anchor) = Query::parse_path_query("foo::"); + assert_eq!(path, vec!["foo"]); + assert_eq!(item, ""); + assert!(!anchor); + + // Full path with trailing :: + let (path, item, anchor) = Query::parse_path_query("foo::bar::"); + assert_eq!(path, vec!["foo", "bar"]); + assert_eq!(item, ""); + assert!(!anchor); + + // Absolute path with trailing :: + let (path, item, anchor) = Query::parse_path_query("::std::vec::"); + assert_eq!(path, vec!["std", "vec"]); + assert_eq!(item, ""); + assert!(anchor); + + // Empty segments should be filtered + let (path, item, anchor) = Query::parse_path_query("foo::::bar"); + assert_eq!(path, vec!["foo"]); + assert_eq!(item, "bar"); + assert!(!anchor); + } + + #[test] + fn test_query_modes() { + // Test is_module_browsing + let query = Query::new("foo::".to_owned()); + assert!(query.is_module_browsing()); + assert!(!query.is_crate_search()); + + // Test is_crate_search with sole :: + let query = Query::new("::".to_owned()); + assert!(!query.is_module_browsing()); + assert!(query.is_crate_search()); + + // Test is_crate_search with ::foo + let query = Query::new("::foo".to_owned()); + assert!(!query.is_module_browsing()); + assert!(query.is_crate_search()); + + // Normal query should be neither + let query = Query::new("foo".to_owned()); + assert!(!query.is_module_browsing()); + assert!(!query.is_crate_search()); + + // Path query should be neither + let query = Query::new("foo::bar".to_owned()); + assert!(!query.is_module_browsing()); + assert!(!query.is_crate_search()); + } + + #[test] + fn test_path_search() { + let (mut db, _) = RootDatabase::with_many_files( + r#" +//- /lib.rs crate:main +mod inner; +pub struct RootStruct; + +//- /inner.rs +pub struct InnerStruct; +pub mod nested { + pub 
struct NestedStruct; +} +"#, + ); + + let mut local_roots = FxHashSet::default(); + local_roots.insert(WORKSPACE); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); + + // Search for item in specific module + let query = Query::new("inner::InnerStruct".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert!(names.contains(&"InnerStruct"), "Expected InnerStruct in {:?}", names); + + // Search for item in nested module + let query = Query::new("inner::nested::NestedStruct".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert!(names.contains(&"NestedStruct"), "Expected NestedStruct in {:?}", names); + + // Search with crate prefix + let query = Query::new("main::inner::InnerStruct".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert!(names.contains(&"InnerStruct"), "Expected InnerStruct in {:?}", names); + + // Wrong path should return empty + let query = Query::new("wrong::InnerStruct".to_owned()); + let symbols = world_symbols(&db, query); + assert!(symbols.is_empty(), "Expected empty results for wrong path"); + } + + #[test] + fn test_module_browsing() { + let (mut db, _) = RootDatabase::with_many_files( + r#" +//- /lib.rs crate:main +mod mymod; + +//- /mymod.rs +pub struct MyStruct; +pub fn my_func() {} +pub const MY_CONST: u32 = 1; +"#, + ); + + let mut local_roots = FxHashSet::default(); + local_roots.insert(WORKSPACE); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); + + // Browse all items in module + let query = Query::new("main::mymod::".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + + assert!(names.contains(&"MyStruct"), "Expected MyStruct in {:?}", names); + assert!(names.contains(&"my_func"), "Expected my_func in {:?}", names); + assert!(names.contains(&"MY_CONST"), "Expected MY_CONST in {:?}", names); + } + + #[test] + fn test_fuzzy_item_with_path() { + let (mut db, _) = RootDatabase::with_many_files( + r#" +//- /lib.rs crate:main +mod mymod; + +//- /mymod.rs +pub struct MyLongStructName; +"#, + ); + + let mut local_roots = FxHashSet::default(); + local_roots.insert(WORKSPACE); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); + + // Fuzzy match on item name with exact path + let query = Query::new("main::mymod::MyLong".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert!( + names.contains(&"MyLongStructName"), + "Expected fuzzy match for MyLongStructName in {:?}", + names + ); + } + + #[test] + fn test_case_insensitive_path() { + let (mut db, _) = RootDatabase::with_many_files( + r#" +//- /lib.rs crate:main +mod MyMod; + +//- /MyMod.rs +pub struct MyStruct; +"#, + ); + + let mut local_roots = FxHashSet::default(); + local_roots.insert(WORKSPACE); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); + + // Case insensitive path matching (default) + let query = Query::new("main::mymod::MyStruct".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert!(names.contains(&"MyStruct"), "Expected case-insensitive match in {:?}", names); + } + + #[test] + fn test_absolute_path_search() { + let (mut db, _) = RootDatabase::with_many_files( + r#" +//- /lib.rs 
crate:mycrate +mod inner; +pub struct CrateRoot; + +//- /inner.rs +pub struct InnerItem; +"#, + ); + + let mut local_roots = FxHashSet::default(); + local_roots.insert(WORKSPACE); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); + + // Absolute path with leading :: + let query = Query::new("::mycrate::inner::InnerItem".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert!( + names.contains(&"InnerItem"), + "Expected InnerItem with absolute path in {:?}", + names + ); + + // Absolute path should NOT match if crate name is wrong + let query = Query::new("::wrongcrate::inner::InnerItem".to_owned()); + let symbols = world_symbols(&db, query); + assert!(symbols.is_empty(), "Expected empty results for wrong crate name"); + } + + #[test] + fn test_wrong_path_returns_empty() { + let (mut db, _) = RootDatabase::with_many_files( + r#" +//- /lib.rs crate:main +mod existing; + +//- /existing.rs +pub struct MyStruct; +"#, + ); + + let mut local_roots = FxHashSet::default(); + local_roots.insert(WORKSPACE); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); + + // Non-existent module path + let query = Query::new("nonexistent::MyStruct".to_owned()); + let symbols = world_symbols(&db, query); + assert!(symbols.is_empty(), "Expected empty results for non-existent path"); + + // Correct item, wrong module + let query = Query::new("wrongmod::MyStruct".to_owned()); + let symbols = world_symbols(&db, query); + assert!(symbols.is_empty(), "Expected empty results for wrong module"); + } + + #[test] + fn test_root_module_items() { + let (mut db, _) = RootDatabase::with_many_files( + r#" +//- /lib.rs crate:mylib +pub struct RootItem; +pub fn root_fn() {} +"#, + ); + + let mut local_roots = FxHashSet::default(); + local_roots.insert(WORKSPACE); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); + + // Items at crate root - path is just the crate name + let query = Query::new("mylib::RootItem".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert!(names.contains(&"RootItem"), "Expected RootItem at crate root in {:?}", names); + + // Browse crate root + let query = Query::new("mylib::".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert!( + names.contains(&"RootItem"), + "Expected RootItem when browsing crate root in {:?}", + names + ); + assert!( + names.contains(&"root_fn"), + "Expected root_fn when browsing crate root in {:?}", + names + ); + } + + #[test] + fn test_crate_search_all() { + // Test that sole "::" returns all crates + let (mut db, _) = RootDatabase::with_many_files( + r#" +//- /lib.rs crate:alpha +pub struct AlphaStruct; + +//- /beta.rs crate:beta +pub struct BetaStruct; + +//- /gamma.rs crate:gamma +pub struct GammaStruct; +"#, + ); + + let mut local_roots = FxHashSet::default(); + local_roots.insert(WORKSPACE); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); + + // Sole "::" should return all crates (as module symbols) + let query = Query::new("::".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + + assert!(names.contains(&"alpha"), "Expected alpha crate in {:?}", names); + assert!(names.contains(&"beta"), "Expected beta crate in {:?}", names); + assert!(names.contains(&"gamma"), "Expected gamma crate in {:?}", names); + 
assert_eq!(symbols.len(), 3, "Expected exactly 3 crates, got {:?}", names); + } + + #[test] + fn test_crate_search_fuzzy() { + // Test that "::foo" fuzzy-matches crate names + let (mut db, _) = RootDatabase::with_many_files( + r#" +//- /lib.rs crate:my_awesome_lib +pub struct AwesomeStruct; + +//- /other.rs crate:another_lib +pub struct OtherStruct; + +//- /foo.rs crate:foobar +pub struct FooStruct; +"#, + ); + + let mut local_roots = FxHashSet::default(); + local_roots.insert(WORKSPACE); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); + + // "::foo" should fuzzy-match crate names containing "foo" + let query = Query::new("::foo".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + + assert!(names.contains(&"foobar"), "Expected foobar crate in {:?}", names); + assert_eq!(symbols.len(), 1, "Expected only foobar crate, got {:?}", names); + + // "::awesome" should match my_awesome_lib + let query = Query::new("::awesome".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + + assert!(names.contains(&"my_awesome_lib"), "Expected my_awesome_lib crate in {:?}", names); + assert_eq!(symbols.len(), 1, "Expected only my_awesome_lib crate, got {:?}", names); + + // "::lib" should match multiple crates + let query = Query::new("::lib".to_owned()); + let symbols = world_symbols(&db, query); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + + assert!(names.contains(&"my_awesome_lib"), "Expected my_awesome_lib in {:?}", names); + assert!(names.contains(&"another_lib"), "Expected another_lib in {:?}", names); + assert_eq!(symbols.len(), 2, "Expected 2 crates matching 'lib', got {:?}", names); + + // "::nonexistent" should return empty + let query = Query::new("::nonexistent".to_owned()); + let symbols = world_symbols(&db, query); + assert!(symbols.is_empty(), "Expected empty results for non-matching crate pattern"); + } } From dc64aef1ed2ea86e42fe83f10e00c2f362dec114 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Sun, 4 Jan 2026 19:10:10 +0900 Subject: [PATCH 009/131] fix: Properly lower `SelfOnly` predicates --- .../rust-analyzer/crates/hir-ty/src/lower.rs | 369 +++++++++++++----- .../crates/hir-ty/src/lower/path.rs | 34 +- .../crates/hir-ty/src/next_solver/interner.rs | 7 +- .../crates/hir-ty/src/next_solver/util.rs | 15 + .../hir-ty/src/tests/regression/new_solver.rs | 60 +++ 5 files changed, 373 insertions(+), 112 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 46ec554e0a65f..9da32464c86fe 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -77,6 +77,7 @@ pub struct ImplTraits { #[derive(PartialEq, Eq, Debug, Hash)] pub struct ImplTrait { pub(crate) predicates: StoredClauses, + pub(crate) assoc_ty_bounds_start: u32, } pub type ImplTraitIdx = Idx; @@ -166,6 +167,12 @@ impl<'db> LifetimeElisionKind<'db> { } } +#[derive(Clone, Copy, PartialEq, Debug)] +pub(crate) enum GenericPredicateSource { + SelfOnly, + AssocTyBound, +} + #[derive(Debug)] pub struct TyLoweringContext<'db, 'a> { pub db: &'db dyn HirDatabase, @@ -465,10 +472,10 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { // this dance is to make sure the data is in the right // place even if we encounter more opaque types while // lowering the bounds - let idx = self - .impl_trait_mode - 
.opaque_type_data - .alloc(ImplTrait { predicates: Clauses::empty(interner).store() }); + let idx = self.impl_trait_mode.opaque_type_data.alloc(ImplTrait { + predicates: Clauses::empty(interner).store(), + assoc_ty_bounds_start: 0, + }); let impl_trait_id = origin.either( |f| ImplTraitId::ReturnTypeImplTrait(f, idx), @@ -608,7 +615,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { ignore_bindings: bool, generics: &Generics, predicate_filter: PredicateFilter, - ) -> impl Iterator> + use<'a, 'b, 'db> { + ) -> impl Iterator, GenericPredicateSource)> + use<'a, 'b, 'db> { match where_predicate { WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound } => { @@ -634,8 +641,8 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { let self_ty = self.lower_ty(*target); Either::Left(Either::Right(self.lower_type_bound(bound, self_ty, ignore_bindings))) } - &WherePredicate::Lifetime { bound, target } => { - Either::Right(iter::once(Clause(Predicate::new( + &WherePredicate::Lifetime { bound, target } => Either::Right(iter::once(( + Clause(Predicate::new( self.interner, Binder::dummy(rustc_type_ir::PredicateKind::Clause( rustc_type_ir::ClauseKind::RegionOutlives(OutlivesPredicate( @@ -643,8 +650,9 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { self.lower_lifetime(target), )), )), - )))) - } + )), + GenericPredicateSource::SelfOnly, + ))), } .into_iter() } @@ -654,7 +662,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { bound: &'b TypeBound, self_ty: Ty<'db>, ignore_bindings: bool, - ) -> impl Iterator> + use<'b, 'a, 'db> { + ) -> impl Iterator, GenericPredicateSource)> + use<'b, 'a, 'db> { let interner = self.interner; let meta_sized = self.lang_items.MetaSized; let pointee_sized = self.lang_items.PointeeSized; @@ -712,7 +720,10 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { } TypeBound::Use(_) | TypeBound::Error => {} } - clause.into_iter().chain(assoc_bounds.into_iter().flatten()) + clause + .into_iter() + .map(|pred| (pred, GenericPredicateSource::SelfOnly)) + .chain(assoc_bounds.into_iter().flatten()) } fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> { @@ -732,7 +743,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { for b in bounds { let db = ctx.db; - ctx.lower_type_bound(b, dummy_self_ty, false).for_each(|b| { + ctx.lower_type_bound(b, dummy_self_ty, false).for_each(|(b, _)| { match b.kind().skip_binder() { rustc_type_ir::ClauseKind::Trait(t) => { let id = t.def_id(); @@ -990,35 +1001,49 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { rustc_type_ir::AliasTyKind::Opaque, AliasTy::new_from_args(interner, def_id, args), ); - let predicates = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| { - let mut predicates = Vec::new(); - for b in bounds { - predicates.extend(ctx.lower_type_bound(b, self_ty, false)); - } + let (predicates, assoc_ty_bounds_start) = + self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| { + let mut predicates = Vec::new(); + let mut assoc_ty_bounds = Vec::new(); + for b in bounds { + for (pred, source) in ctx.lower_type_bound(b, self_ty, false) { + match source { + GenericPredicateSource::SelfOnly => predicates.push(pred), + GenericPredicateSource::AssocTyBound => assoc_ty_bounds.push(pred), + } + } + } - if !ctx.unsized_types.contains(&self_ty) { - let sized_trait = self.lang_items.Sized; - let sized_clause = sized_trait.map(|trait_id| { - let trait_ref = TraitRef::new_from_args( - interner, - trait_id.into(), - GenericArgs::new_from_slice(&[self_ty.into()]), - ); - Clause(Predicate::new( - interner, - 
Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - )), - )) - }); - predicates.extend(sized_clause); - } - predicates - }); - ImplTrait { predicates: Clauses::new_from_slice(&predicates).store() } + if !ctx.unsized_types.contains(&self_ty) { + let sized_trait = self.lang_items.Sized; + let sized_clause = sized_trait.map(|trait_id| { + let trait_ref = TraitRef::new_from_args( + interner, + trait_id.into(), + GenericArgs::new_from_slice(&[self_ty.into()]), + ); + Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + )) + }); + predicates.extend(sized_clause); + } + + let assoc_ty_bounds_start = predicates.len() as u32; + predicates.extend(assoc_ty_bounds); + (predicates, assoc_ty_bounds_start) + }); + + ImplTrait { + predicates: Clauses::new_from_slice(&predicates).store(), + assoc_ty_bounds_start, + } } pub(crate) fn lower_lifetime(&mut self, lifetime: LifetimeRefId) -> Region<'db> { @@ -1139,6 +1164,31 @@ impl ImplTraitId { .expect("owner should have opaque type") .get_with(|it| it.impl_traits[idx].predicates.as_ref().as_slice()) } + + #[inline] + pub fn self_predicates<'db>( + self, + db: &'db dyn HirDatabase, + ) -> EarlyBinder<'db, &'db [Clause<'db>]> { + let (impl_traits, idx) = match self { + ImplTraitId::ReturnTypeImplTrait(owner, idx) => { + (ImplTraits::return_type_impl_traits(db, owner), idx) + } + ImplTraitId::TypeAliasImplTrait(owner, idx) => { + (ImplTraits::type_alias_impl_traits(db, owner), idx) + } + }; + let predicates = + impl_traits.as_deref().expect("owner should have opaque type").get_with(|it| { + let impl_trait = &it.impl_traits[idx]; + ( + impl_trait.predicates.as_ref().as_slice(), + impl_trait.assoc_ty_bounds_start as usize, + ) + }); + + predicates.map_bound(|(preds, len)| &preds[..len]) + } } impl InternedOpaqueTyId { @@ -1146,6 +1196,14 @@ impl InternedOpaqueTyId { pub fn predicates<'db>(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> { self.loc(db).predicates(db) } + + #[inline] + pub fn self_predicates<'db>( + self, + db: &'db dyn HirDatabase, + ) -> EarlyBinder<'db, &'db [Clause<'db>]> { + self.loc(db).self_predicates(db) + } } #[salsa::tracked] @@ -1655,12 +1713,15 @@ pub(crate) fn generic_predicates_for_param<'db>( ctx.store = maybe_parent_generics.store(); for pred in maybe_parent_generics.where_predicates() { if predicate(pred, &mut ctx) { - predicates.extend(ctx.lower_where_predicate( - pred, - true, - maybe_parent_generics, - PredicateFilter::All, - )); + predicates.extend( + ctx.lower_where_predicate( + pred, + true, + maybe_parent_generics, + PredicateFilter::All, + ) + .map(|(pred, _)| pred), + ); } } } @@ -1696,21 +1757,44 @@ pub(crate) fn type_alias_bounds<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, ) -> EarlyBinder<'db, &'db [Clause<'db>]> { - type_alias_bounds_with_diagnostics(db, type_alias).0.map_bound(|it| it.as_slice()) + type_alias_bounds_with_diagnostics(db, type_alias).0.predicates.map_bound(|it| it.as_slice()) +} + +#[inline] +pub(crate) fn type_alias_self_bounds<'db>( + db: &'db dyn HirDatabase, + type_alias: TypeAliasId, +) -> EarlyBinder<'db, &'db [Clause<'db>]> { + let (TypeAliasBounds { predicates, assoc_ty_bounds_start }, _) = + type_alias_bounds_with_diagnostics(db, type_alias); + 
predicates.map_bound(|it| &it.as_slice()[..assoc_ty_bounds_start as usize]) +} + +#[derive(PartialEq, Eq, Debug, Hash)] +struct TypeAliasBounds { + predicates: T, + assoc_ty_bounds_start: u32, } -pub(crate) fn type_alias_bounds_with_diagnostics<'db>( +fn type_alias_bounds_with_diagnostics<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, -) -> (EarlyBinder<'db, Clauses<'db>>, Diagnostics) { - let (bounds, diags) = type_alias_bounds_with_diagnostics_query(db, type_alias); - return (bounds.get(), diags.clone()); +) -> (TypeAliasBounds>>, Diagnostics) { + let (TypeAliasBounds { predicates, assoc_ty_bounds_start }, diags) = + type_alias_bounds_with_diagnostics_query(db, type_alias); + return ( + TypeAliasBounds { + predicates: predicates.get(), + assoc_ty_bounds_start: *assoc_ty_bounds_start, + }, + diags.clone(), + ); #[salsa::tracked(returns(ref))] pub fn type_alias_bounds_with_diagnostics_query<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, - ) -> (StoredEarlyBinder, Diagnostics) { + ) -> (TypeAliasBounds>, Diagnostics) { let type_alias_data = db.type_alias_signature(type_alias); let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); let mut ctx = TyLoweringContext::new( @@ -1727,10 +1811,18 @@ pub(crate) fn type_alias_bounds_with_diagnostics<'db>( let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args); let mut bounds = Vec::new(); + let mut assoc_ty_bounds = Vec::new(); for bound in &type_alias_data.bounds { - ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| { - bounds.push(pred); - }); + ctx.lower_type_bound(bound, interner_ty, false).for_each( + |(pred, source)| match source { + GenericPredicateSource::SelfOnly => { + bounds.push(pred); + } + GenericPredicateSource::AssocTyBound => { + assoc_ty_bounds.push(pred); + } + }, + ); } if !ctx.unsized_types.contains(&interner_ty) { @@ -1745,8 +1837,14 @@ pub(crate) fn type_alias_bounds_with_diagnostics<'db>( }; } + let assoc_ty_bounds_start = bounds.len() as u32; + bounds.extend(assoc_ty_bounds); + ( - StoredEarlyBinder::bind(Clauses::new_from_slice(&bounds).store()), + TypeAliasBounds { + predicates: StoredEarlyBinder::bind(Clauses::new_from_slice(&bounds).store()), + assoc_ty_bounds_start, + }, create_diagnostics(ctx.diagnostics), ) } @@ -1754,11 +1852,15 @@ pub(crate) fn type_alias_bounds_with_diagnostics<'db>( #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct GenericPredicates { - // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait predicate for the - // parent. Then come the explicit predicates for the parent, then the explicit trait predicate for the child, + // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait + // predicate for the parent. Then come the bounds of the associated types of the parents, + // then the explicit, self-only predicates for the parent, then the explicit, self-only trait + // predicate for the child, then the bounds of the associated types of the child, // then the implicit trait predicate for the child, if `is_trait` is `true`. 
predicates: StoredEarlyBinder, + parent_explicit_self_predicates_start: u32, own_predicates_start: u32, + own_assoc_ty_bounds_start: u32, is_trait: bool, parent_is_trait: bool, } @@ -1782,7 +1884,15 @@ impl GenericPredicates { pub(crate) fn from_explicit_own_predicates( predicates: StoredEarlyBinder, ) -> Self { - Self { predicates, own_predicates_start: 0, is_trait: false, parent_is_trait: false } + let len = predicates.get().skip_binder().len() as u32; + Self { + predicates, + parent_explicit_self_predicates_start: 0, + own_predicates_start: 0, + own_assoc_ty_bounds_start: len, + is_trait: false, + parent_is_trait: false, + } } #[inline] @@ -1814,6 +1924,14 @@ impl GenericPredicates { Self::query(db, def).explicit_predicates() } + #[inline] + pub fn query_explicit_implied<'db>( + db: &'db dyn HirDatabase, + def: GenericDefId, + ) -> EarlyBinder<'db, &'db [Clause<'db>]> { + Self::query(db, def).explicit_implied_predicates() + } + #[inline] pub fn all_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> { self.predicates.get().map_bound(|it| it.as_slice()) @@ -1824,9 +1942,18 @@ impl GenericPredicates { self.predicates.get().map_bound(|it| &it.as_slice()[self.own_predicates_start as usize..]) } - /// Returns the predicates, minus the implicit `Self: Trait` predicate for a trait. + /// Returns the predicates, minus the implicit `Self: Trait` predicate and bounds of the + /// associated types for a trait. #[inline] pub fn explicit_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> { + self.predicates.get().map_bound(|it| { + &it.as_slice()[self.parent_explicit_self_predicates_start as usize + ..self.own_assoc_ty_bounds_start as usize] + }) + } + + #[inline] + pub fn explicit_implied_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> { self.predicates.get().map_bound(|it| { &it.as_slice()[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)] }) @@ -1902,26 +2029,22 @@ where ); let sized_trait = ctx.lang_items.Sized; - let mut predicates = Vec::new(); + // We need to lower parents and self separately - see the comment below lowering of implicit + // `Sized` predicates for why. + let mut own_predicates = Vec::new(); + let mut parent_predicates = Vec::new(); + let mut own_assoc_ty_bounds = Vec::new(); + let mut parent_assoc_ty_bounds = Vec::new(); let all_generics = std::iter::successors(Some(&generics), |generics| generics.parent_generics()) .collect::>(); - let mut is_trait = false; - let mut parent_is_trait = false; - if all_generics.len() > 1 { - add_implicit_trait_predicate( - interner, - all_generics.last().unwrap().def(), - predicate_filter, - &mut predicates, - &mut parent_is_trait, - ); - } - // We need to lower parent predicates first - see the comment below lowering of implicit `Sized` predicates - // for why. - let mut own_predicates_start = 0; + let own_implicit_trait_predicate = implicit_trait_predicate(interner, def, predicate_filter); + let parent_implicit_trait_predicate = if all_generics.len() > 1 { + implicit_trait_predicate(interner, all_generics.last().unwrap().def(), predicate_filter) + } else { + None + }; for &maybe_parent_generics in all_generics.iter().rev() { - let current_def_predicates_start = predicates.len(); // Collect only diagnostics from the child, not including parents. 
ctx.diagnostics.clear(); @@ -1929,15 +2052,37 @@ where ctx.store = maybe_parent_generics.store(); for pred in maybe_parent_generics.where_predicates() { tracing::debug!(?pred); - predicates.extend(ctx.lower_where_predicate( - pred, - false, - maybe_parent_generics, - predicate_filter, - )); + for (pred, source) in + ctx.lower_where_predicate(pred, false, maybe_parent_generics, predicate_filter) + { + match source { + GenericPredicateSource::SelfOnly => { + if maybe_parent_generics.def() == def { + own_predicates.push(pred); + } else { + parent_predicates.push(pred); + } + } + GenericPredicateSource::AssocTyBound => { + if maybe_parent_generics.def() == def { + own_assoc_ty_bounds.push(pred); + } else { + parent_assoc_ty_bounds.push(pred); + } + } + } + } } - push_const_arg_has_type_predicates(db, &mut predicates, maybe_parent_generics); + if maybe_parent_generics.def() == def { + push_const_arg_has_type_predicates(db, &mut own_predicates, maybe_parent_generics); + } else { + push_const_arg_has_type_predicates( + db, + &mut parent_predicates, + maybe_parent_generics, + ); + } if let Some(sized_trait) = sized_trait { let mut add_sized_clause = |param_idx, param_id, param_data| { @@ -1971,7 +2116,11 @@ where }), )), )); - predicates.push(clause); + if maybe_parent_generics.def() == def { + own_predicates.push(clause); + } else { + parent_predicates.push(clause); + } }; let parent_params_len = maybe_parent_generics.len_parent(); maybe_parent_generics.iter_self().enumerate().for_each( @@ -1990,30 +2139,55 @@ where // predicates before lowering the child, as a child cannot define a `?Sized` predicate for its parent. // But we do have to lower the parent first. } - - if maybe_parent_generics.def() == def { - own_predicates_start = current_def_predicates_start as u32; - } } - add_implicit_trait_predicate(interner, def, predicate_filter, &mut predicates, &mut is_trait); - let diagnostics = create_diagnostics(ctx.diagnostics); + + // The order is: + // + // 1. parent implicit trait pred + // 2. parent assoc bounds + // 3. parent self only preds + // 4. own self only preds + // 5. own assoc ty bounds + // 6. 
own implicit trait pred + // + // The purpose of this is to index the slice of the followings, without making extra `Vec`s or + // iterators: + // - explicit self only predicates, of own or own + self + // - explicit predicates, of own or own + self + let predicates = parent_implicit_trait_predicate + .iter() + .chain(parent_assoc_ty_bounds.iter()) + .chain(parent_predicates.iter()) + .chain(own_predicates.iter()) + .chain(own_assoc_ty_bounds.iter()) + .chain(own_implicit_trait_predicate.iter()) + .copied() + .collect::>(); + let parent_is_trait = parent_implicit_trait_predicate.is_some(); + let is_trait = own_implicit_trait_predicate.is_some(); + let parent_explicit_self_predicates_start = + parent_is_trait as u32 + parent_assoc_ty_bounds.len() as u32; + let own_predicates_start = + parent_explicit_self_predicates_start + parent_predicates.len() as u32; + let own_assoc_ty_bounds_start = own_predicates_start + own_predicates.len() as u32; + let predicates = GenericPredicates { + parent_explicit_self_predicates_start, own_predicates_start, + own_assoc_ty_bounds_start, is_trait, parent_is_trait, predicates: StoredEarlyBinder::bind(Clauses::new_from_slice(&predicates).store()), }; return (predicates, diagnostics); - fn add_implicit_trait_predicate<'db>( + fn implicit_trait_predicate<'db>( interner: DbInterner<'db>, def: GenericDefId, predicate_filter: PredicateFilter, - predicates: &mut Vec>, - set_is_trait: &mut bool, - ) { + ) -> Option> { // For traits, add `Self: Trait` predicate. This is // not part of the predicates that a user writes, but it // is something that one must prove in order to invoke a @@ -2029,8 +2203,9 @@ where if let GenericDefId::TraitId(def_id) = def && predicate_filter == PredicateFilter::All { - *set_is_trait = true; - predicates.push(TraitRef::identity(interner, def_id.into()).upcast(interner)); + Some(TraitRef::identity(interner, def_id.into()).upcast(interner)) + } else { + None } } } @@ -2327,7 +2502,7 @@ pub(crate) fn associated_ty_item_bounds<'db>( let mut bounds = Vec::new(); for bound in &type_alias_data.bounds { - ctx.lower_type_bound(bound, self_ty, false).for_each(|pred| { + ctx.lower_type_bound(bound, self_ty, false).for_each(|(pred, _)| { if let Some(bound) = pred .kind() .map_bound(|c| match c { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index a79f547c2a44f..b77aeab62d157 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -32,7 +32,8 @@ use crate::{ db::HirDatabase, generics::{Generics, generics}, lower::{ - LifetimeElisionKind, PathDiagnosticCallbackData, named_associated_type_shorthand_candidates, + GenericPredicateSource, LifetimeElisionKind, PathDiagnosticCallbackData, + named_associated_type_shorthand_candidates, }, next_solver::{ Binder, Clause, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Predicate, @@ -853,7 +854,8 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { pub(super) fn assoc_type_bindings_from_type_bound<'c>( mut self, trait_ref: TraitRef<'db>, - ) -> Option> + use<'a, 'b, 'c, 'db>> { + ) -> Option, GenericPredicateSource)> + use<'a, 'b, 'c, 'db>> + { let interner = self.ctx.interner; self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| { args_and_bindings.bindings.iter().enumerate().flat_map(move |(binding_idx, binding)| { @@ -921,21 +923,29 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { ), )), )); - predicates.push(pred); 
+ predicates.push((pred, GenericPredicateSource::SelfOnly)); } } }) } for bound in binding.bounds.iter() { - predicates.extend(self.ctx.lower_type_bound( - bound, - Ty::new_alias( - self.ctx.interner, - AliasTyKind::Projection, - AliasTy::new_from_args(self.ctx.interner, associated_ty.into(), args), - ), - false, - )); + predicates.extend( + self.ctx + .lower_type_bound( + bound, + Ty::new_alias( + self.ctx.interner, + AliasTyKind::Projection, + AliasTy::new_from_args( + self.ctx.interner, + associated_ty.into(), + args, + ), + ), + false, + ) + .map(|(pred, _)| (pred, GenericPredicateSource::AssocTyBound)), + ); } predicates }) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs index 2a3df1d32a30d..e17bdac68cdd0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs @@ -41,7 +41,8 @@ use crate::{ AdtIdWrapper, AnyImplId, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper, CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, GeneralConstIdWrapper, OpaqueTypeKey, RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds, TraitIdWrapper, - TypeAliasIdWrapper, UnevaluatedConst, util::explicit_item_bounds, + TypeAliasIdWrapper, UnevaluatedConst, + util::{explicit_item_bounds, explicit_item_self_bounds}, }, }; @@ -1421,7 +1422,7 @@ impl<'db> Interner for DbInterner<'db> { self, def_id: Self::DefId, ) -> EarlyBinder> { - explicit_item_bounds(self, def_id) + explicit_item_self_bounds(self, def_id) .map_bound(|bounds| elaborate(self, bounds).filter_only_self()) } @@ -1500,7 +1501,7 @@ impl<'db> Interner for DbInterner<'db> { } } - predicates_of(self.db, def_id).explicit_predicates().map_bound(|predicates| { + predicates_of(self.db, def_id).explicit_implied_predicates().map_bound(|predicates| { predicates .iter() .copied() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs index 34ecfed08f29d..9a1b476976e34 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs @@ -455,6 +455,21 @@ pub fn explicit_item_bounds<'db>( clauses.map_bound(|clauses| clauses.iter().copied()) } +pub fn explicit_item_self_bounds<'db>( + interner: DbInterner<'db>, + def_id: SolverDefId, +) -> EarlyBinder<'db, impl DoubleEndedIterator> + ExactSizeIterator> { + let db = interner.db(); + let clauses = match def_id { + SolverDefId::TypeAliasId(type_alias) => { + crate::lower::type_alias_self_bounds(db, type_alias) + } + SolverDefId::InternedOpaqueTyId(id) => id.self_predicates(db), + _ => panic!("Unexpected GenericDefId"), + }; + clauses.map_bound(|clauses| clauses.iter().copied()) +} + pub struct ContainsTypeErrors; impl<'db> TypeVisitor> for ContainsTypeErrors { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs index a4554673cdd5e..be6ab23ad761e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs @@ -750,3 +750,63 @@ fn main() { "#]], ); } + +#[test] +fn regression_19339() { + check_infer( + r#" +trait Bar { + type Baz; + + fn baz(&self) -> Self::Baz; +} + +trait Foo { + type Bar; + + fn bar(&self) -> Self::Bar; +} + +trait FooFactory 
{ + type Output: Foo>; + + fn foo(&self) -> Self::Output; + + fn foo_rpit(&self) -> impl Foo>; +} + +fn test1(foo: impl Foo>) { + let baz = foo.bar().baz(); +} + +fn test2(factory: T) { + let baz = factory.foo().bar().baz(); + let baz = factory.foo_rpit().bar().baz(); +} +"#, + expect![[r#" + 39..43 'self': &'? Self + 101..105 'self': &'? Self + 198..202 'self': &'? Self + 239..243 'self': &'? Self + 290..293 'foo': impl Foo + ?Sized + 325..359 '{ ...z(); }': () + 335..338 'baz': u8 + 341..344 'foo': impl Foo + ?Sized + 341..350 'foo.bar()': impl Bar + 341..356 'foo.bar().baz()': u8 + 385..392 'factory': T + 397..487 '{ ...z(); }': () + 407..410 'baz': u8 + 413..420 'factory': T + 413..426 'factory.foo()': ::Output + 413..432 'factor....bar()': <::Output as Foo>::Bar + 413..438 'factor....baz()': u8 + 448..451 'baz': u8 + 454..461 'factory': T + 454..472 'factor...rpit()': impl Foo + Bar + ?Sized + 454..478 'factor....bar()': + ?Sized as Foo>::Bar + 454..484 'factor....baz()': u8 + "#]], + ); +} From e2b507233c3224187dd5ef53a9044a430e4f9129 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Tue, 2 Dec 2025 13:05:47 +1100 Subject: [PATCH 010/131] Add ProjectJsonTargetSpec.project_root Needed to support flychecking in a later diff --- src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs | 1 + src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 9beab3c0e45c1..81d60179cc946 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -833,6 +833,7 @@ impl GlobalStateSnapshot { label: build.label, target_kind: build.target_kind, shell_runnables: project.runnables().to_owned(), + project_root: project.project_root().to_owned(), })); } ProjectWorkspaceKind::DetachedFile { .. } => {} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs index e0f95a7830ea8..8452b6493e87b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs @@ -68,6 +68,7 @@ pub(crate) struct ProjectJsonTargetSpec { pub(crate) label: String, pub(crate) target_kind: TargetKind, pub(crate) shell_runnables: Vec, + pub(crate) project_root: AbsPathBuf, } impl ProjectJsonTargetSpec { From ff94498ef2a831c47d9237262f612a3bb530bd69 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Tue, 2 Dec 2025 14:02:07 +1100 Subject: [PATCH 011/131] project-model: Helpers for traversing dep graph in ProjectJson Needed for all_workspace_dependencies_for_package implementation. --- .../crates/project-model/src/project_json.rs | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index b3478d2cfe033..adc9b1a49fd40 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -78,6 +78,13 @@ pub struct ProjectJson { runnables: Vec, } +impl std::ops::Index for ProjectJson { + type Output = Crate; + fn index(&self, index: CrateArrayIdx) -> &Self::Output { + &self.crates[index.0] + } +} + impl ProjectJson { /// Create a new ProjectJson instance. 
/// @@ -218,6 +225,14 @@ impl ProjectJson { .find(|build| build.build_file.as_std_path() == path) } + pub fn crate_by_label(&self, label: &str) -> Option<&Crate> { + // this is fast enough for now, but it's unfortunate that this is O(crates). + self.crates + .iter() + .filter(|krate| krate.is_workspace_member) + .find(|krate| krate.build.as_ref().is_some_and(|build| build.label == label)) + } + /// Returns the path to the project's manifest or root folder, if no manifest exists. pub fn manifest_or_root(&self) -> &AbsPath { self.manifest.as_ref().map_or(&self.project_root, |manifest| manifest.as_ref()) @@ -258,6 +273,12 @@ pub struct Crate { pub build: Option, } +impl Crate { + pub fn iter_deps(&self) -> impl ExactSizeIterator { + self.deps.iter().map(|dep| dep.krate) + } +} + /// Additional, build-specific data about a crate. #[derive(Clone, Debug, Eq, PartialEq)] pub struct Build { From 3083bde73d2e7a1e2227ef77f1f248598005ba08 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Tue, 2 Dec 2025 14:02:25 +1100 Subject: [PATCH 012/131] project-model: Don't do O(n) clones as well as O(n) search --- .../rust-analyzer/crates/project-model/src/project_json.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index adc9b1a49fd40..8fe7885983a74 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -221,8 +221,9 @@ impl ProjectJson { self.crates .iter() .filter(|krate| krate.is_workspace_member) - .filter_map(|krate| krate.build.clone()) + .filter_map(|krate| krate.build.as_ref()) .find(|build| build.build_file.as_std_path() == path) + .cloned() } pub fn crate_by_label(&self, label: &str) -> Option<&Crate> { From ac641771a83d23f965b3dcb180bb854de21002fb Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 4 Sep 2024 13:52:59 +1000 Subject: [PATCH 013/131] project-model: Return crate by reference --- .../rust-analyzer/crates/project-model/src/project_json.rs | 3 +-- .../rust-analyzer/crates/rust-analyzer/src/global_state.rs | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index 8fe7885983a74..a7fba69362445 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -202,12 +202,11 @@ impl ProjectJson { &self.project_root } - pub fn crate_by_root(&self, root: &AbsPath) -> Option { + pub fn crate_by_root(&self, root: &AbsPath) -> Option<&Crate> { self.crates .iter() .filter(|krate| krate.is_workspace_member) .find(|krate| krate.root_module == root) - .cloned() } /// Returns the path to the project's manifest, if it exists. 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 81d60179cc946..99dc8bce062e3 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -825,7 +825,7 @@ impl GlobalStateSnapshot { let Some(krate) = project.crate_by_root(path) else { continue; }; - let Some(build) = krate.build else { + let Some(build) = krate.build.clone() else { continue; }; From 3b97d38702700a274b22dcd9cc21d836d4dc1ce0 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Tue, 2 Dec 2025 14:11:52 +1100 Subject: [PATCH 014/131] Fix misuse of ? This exited the whole loop instead of having continue semantics and continuing to find workspaces. So wrap in find_map. --- .../crates/rust-analyzer/src/global_state.rs | 25 ++++++++----------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 99dc8bce062e3..68d65cdee6f30 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -847,21 +847,18 @@ impl GlobalStateSnapshot { &self, package: &Arc, ) -> Option>> { - for workspace in self.workspaces.iter() { - match &workspace.kind { - ProjectWorkspaceKind::Cargo { cargo, .. } - | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, _)), .. } => { - let package = cargo.packages().find(|p| cargo[*p].id == *package)?; - - return cargo[package] - .all_member_deps - .as_ref() - .map(|deps| deps.iter().map(|dep| cargo[*dep].id.clone()).collect()); - } - _ => {} + self.workspaces.iter().find_map(|workspace| match &workspace.kind { + ProjectWorkspaceKind::Cargo { cargo, .. } + | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, _)), .. } => { + let package = cargo.packages().find(|p| cargo[*p].id == *package)?; + + return cargo[package] + .all_member_deps + .as_ref() + .map(|deps| deps.iter().map(|dep| cargo[*dep].id.clone()).collect()); } - } - None + _ => None, + }) } pub(crate) fn file_exists(&self, file_id: FileId) -> bool { From 327ea186f07053f70b4bf6af394e6f8ec80f303e Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 4 Sep 2024 13:52:59 +1000 Subject: [PATCH 015/131] flycheck: Make the flycheckable unit a flycheck::PackageSpecifier enum You should be able to flycheck a ProjectJson crate based on its build label. This paves the way for that. Context: I don't think this has been working for some time. It used to be that we would use cargo to build ProjectJson crates. Support for ProjectJson seems to have been somewhat steamrolled in PR 18845 (e4bf6e1bc36e4cbc8a36d7911788176eb9fac76e). 
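
To make the new unit concrete, here is a minimal, self-contained sketch of the
two kinds of flycheck targets (not part of the diff below; `String` stands in
for the real `Arc<cargo_metadata::PackageId>`, and the label value is purely
illustrative):

```rust
// Sketch only: the real definition is added to crates/rust-analyzer/src/flycheck.rs below.
#[derive(Debug, Hash, PartialEq, Eq, Clone)]
enum PackageSpecifier {
    /// A Cargo package id, checkable with `cargo check -p <id>`.
    Cargo { package_id: String },
    /// The `label` of a crate's `build` section in rust-project.json.
    BuildInfo { label: String },
}

impl PackageSpecifier {
    fn as_str(&self) -> &str {
        match self {
            PackageSpecifier::Cargo { package_id } => package_id,
            PackageSpecifier::BuildInfo { label } => label,
        }
    }
}

fn main() {
    // Diagnostics and flycheck restarts are keyed on either kind of specifier.
    let from_cargo = PackageSpecifier::Cargo { package_id: "foo 0.1.0".to_owned() };
    let from_json = PackageSpecifier::BuildInfo { label: "//crates/foo:foo".to_owned() };
    println!("{} / {}", from_cargo.as_str(), from_json.as_str());
}
```

Keying the per-package diagnostics bookkeeping on this enum rather than on a
cargo PackageId directly is what lets the same machinery serve both Cargo
workspaces and rust-project.json projects.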
--- .../crates/rust-analyzer/src/diagnostics.rs | 16 +++-- .../crates/rust-analyzer/src/flycheck.rs | 69 +++++++++++++++---- .../crates/rust-analyzer/src/global_state.rs | 52 ++++++++++---- .../src/handlers/notification.rs | 42 +++++++---- 4 files changed, 131 insertions(+), 48 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs index 4a247800af9d5..712960f13d7e9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs @@ -3,7 +3,6 @@ pub(crate) mod flycheck_to_proto; use std::mem; -use cargo_metadata::PackageId; use ide::FileId; use ide_db::{FxHashMap, base_db::DbPanicContext}; use itertools::Itertools; @@ -12,10 +11,13 @@ use smallvec::SmallVec; use stdx::iter_eq_by; use triomphe::Arc; -use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext, main_loop::DiagnosticsTaskKind}; +use crate::{ + flycheck::PackageSpecifier, global_state::GlobalStateSnapshot, lsp, lsp_ext, + main_loop::DiagnosticsTaskKind, +}; pub(crate) type CheckFixes = - Arc>, FxHashMap>>>>; + Arc, FxHashMap>>>>; #[derive(Debug, Default, Clone)] pub struct DiagnosticsMapConfig { @@ -29,7 +31,7 @@ pub(crate) type DiagnosticsGeneration = usize; #[derive(Debug, Clone, Default)] pub(crate) struct WorkspaceFlycheckDiagnostic { - pub(crate) per_package: FxHashMap>, PackageFlycheckDiagnostic>, + pub(crate) per_package: FxHashMap, PackageFlycheckDiagnostic>, } #[derive(Debug, Clone)] @@ -85,7 +87,7 @@ impl DiagnosticCollection { pub(crate) fn clear_check_for_package( &mut self, flycheck_id: usize, - package_id: Arc, + package_id: PackageSpecifier, ) { let Some(check) = self.check.get_mut(flycheck_id) else { return; @@ -124,7 +126,7 @@ impl DiagnosticCollection { pub(crate) fn clear_check_older_than_for_package( &mut self, flycheck_id: usize, - package_id: Arc, + package_id: PackageSpecifier, generation: DiagnosticsGeneration, ) { let Some(check) = self.check.get_mut(flycheck_id) else { @@ -154,7 +156,7 @@ impl DiagnosticCollection { &mut self, flycheck_id: usize, generation: DiagnosticsGeneration, - package_id: &Option>, + package_id: &Option, file_id: FileId, diagnostic: lsp_types::Diagnostic, fix: Option>, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index b062641691885..2819ae98daafa 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -195,9 +195,9 @@ impl FlycheckHandle { /// Schedule a re-start of the cargo check worker to do a package wide check. pub(crate) fn restart_for_package( &self, - package: Arc, + package: PackageSpecifier, target: Option, - workspace_deps: Option>>, + workspace_deps: Option>, ) { let generation = self.generation.fetch_add(1, Ordering::Relaxed) + 1; self.sender @@ -233,7 +233,7 @@ pub(crate) enum ClearDiagnosticsKind { #[derive(Debug)] pub(crate) enum ClearScope { Workspace, - Package(Arc), + Package(PackageSpecifier), } pub(crate) enum FlycheckMessage { @@ -243,7 +243,7 @@ pub(crate) enum FlycheckMessage { generation: DiagnosticsGeneration, workspace_root: Arc, diagnostic: Diagnostic, - package_id: Option>, + package_id: Option, }, /// Request clearing all outdated diagnostics. 
@@ -295,7 +295,32 @@ pub(crate) enum Progress { enum FlycheckScope { Workspace, - Package { package: Arc, workspace_deps: Option>> }, + Package { + // Either a cargo package or a $label in rust-project.check.overrideCommand + package: PackageSpecifier, + workspace_deps: Option>, + }, +} + +#[derive(Debug, Hash, PartialEq, Eq, Clone)] +pub(crate) enum PackageSpecifier { + Cargo { + /// The one in Cargo.toml, assumed to work with `cargo check -p {}` etc + package_id: Arc, + }, + BuildInfo { + /// If a `build` field is present in rust-project.json, its label field + label: String, + }, +} + +impl PackageSpecifier { + pub(crate) fn as_str(&self) -> &str { + match self { + Self::Cargo { package_id } => &package_id.repr, + Self::BuildInfo { label } => label, + } + } } enum StateChange { @@ -331,7 +356,7 @@ struct FlycheckActor { command_handle: Option>, /// The receiver side of the channel mentioned above. command_receiver: Option>, - diagnostics_cleared_for: FxHashSet>, + diagnostics_cleared_for: FxHashSet, diagnostics_received: DiagnosticsReceived, } @@ -564,7 +589,10 @@ impl FlycheckActor { msg.target.kind.iter().format_with(", ", |kind, f| f(&kind)), ))); let package_id = Arc::new(msg.package_id); - if self.diagnostics_cleared_for.insert(package_id.clone()) { + if self + .diagnostics_cleared_for + .insert(PackageSpecifier::Cargo { package_id: package_id.clone() }) + { tracing::trace!( flycheck_id = self.id, package_id = package_id.repr, @@ -572,7 +600,9 @@ impl FlycheckActor { ); self.send(FlycheckMessage::ClearDiagnostics { id: self.id, - kind: ClearDiagnosticsKind::All(ClearScope::Package(package_id)), + kind: ClearDiagnosticsKind::All(ClearScope::Package( + PackageSpecifier::Cargo { package_id }, + )), }); } } @@ -580,7 +610,7 @@ impl FlycheckActor { tracing::trace!( flycheck_id = self.id, message = diagnostic.message, - package_id = package_id.as_ref().map(|it| &it.repr), + package_id = package_id.as_ref().map(|it| it.as_str()), "diagnostic received" ); if self.diagnostics_received == DiagnosticsReceived::No { @@ -590,7 +620,7 @@ impl FlycheckActor { if self.diagnostics_cleared_for.insert(package_id.clone()) { tracing::trace!( flycheck_id = self.id, - package_id = package_id.repr, + package_id = package_id.as_str(), "clearing diagnostics" ); self.send(FlycheckMessage::ClearDiagnostics { @@ -666,7 +696,18 @@ impl FlycheckActor { match scope { FlycheckScope::Workspace => cmd.arg("--workspace"), - FlycheckScope::Package { package, .. } => cmd.arg("-p").arg(&package.repr), + FlycheckScope::Package { + package: PackageSpecifier::Cargo { package_id }, + .. + } => cmd.arg("-p").arg(&package_id.repr), + FlycheckScope::Package { + package: PackageSpecifier::BuildInfo { .. }, .. + } => { + // No way to flycheck this single package. All we have is a build label. + // There's no way to really say whether this build label happens to be + // a cargo canonical name, so we won't try. 
+ return None; + } }; if let Some(tgt) = target { @@ -748,7 +789,7 @@ impl FlycheckActor { #[allow(clippy::large_enum_variant)] enum CargoCheckMessage { CompilerArtifact(cargo_metadata::Artifact), - Diagnostic { diagnostic: Diagnostic, package_id: Option> }, + Diagnostic { diagnostic: Diagnostic, package_id: Option }, } struct CargoCheckParser; @@ -767,7 +808,9 @@ impl JsonLinesParser for CargoCheckParser { cargo_metadata::Message::CompilerMessage(msg) => { Some(CargoCheckMessage::Diagnostic { diagnostic: msg.message, - package_id: Some(Arc::new(msg.package_id)), + package_id: Some(PackageSpecifier::Cargo { + package_id: Arc::new(msg.package_id), + }), }) } _ => None, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 68d65cdee6f30..0cfd0a141baeb 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -9,7 +9,6 @@ use std::{ time::{Duration, Instant}, }; -use cargo_metadata::PackageId; use crossbeam_channel::{Receiver, Sender, unbounded}; use hir::ChangeWithProcMacros; use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId}; @@ -36,7 +35,7 @@ use crate::{ config::{Config, ConfigChange, ConfigErrors, RatomlFileKind}, diagnostics::{CheckFixes, DiagnosticCollection}, discover, - flycheck::{FlycheckHandle, FlycheckMessage}, + flycheck::{FlycheckHandle, FlycheckMessage, PackageSpecifier}, line_index::{LineEndings, LineIndex}, lsp::{from_proto, to_proto::url_from_abs_path}, lsp_ext, @@ -845,20 +844,43 @@ impl GlobalStateSnapshot { pub(crate) fn all_workspace_dependencies_for_package( &self, - package: &Arc, - ) -> Option>> { - self.workspaces.iter().find_map(|workspace| match &workspace.kind { - ProjectWorkspaceKind::Cargo { cargo, .. } - | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, _)), .. } => { - let package = cargo.packages().find(|p| cargo[*p].id == *package)?; - - return cargo[package] - .all_member_deps - .as_ref() - .map(|deps| deps.iter().map(|dep| cargo[*dep].id.clone()).collect()); + package: &PackageSpecifier, + ) -> Option> { + match package { + PackageSpecifier::Cargo { package_id } => { + self.workspaces.iter().find_map(|workspace| match &workspace.kind { + ProjectWorkspaceKind::Cargo { cargo, .. } + | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, _)), .. 
} => { + let package = cargo.packages().find(|p| cargo[*p].id == *package_id)?; + + cargo[package].all_member_deps.as_ref().map(|deps| { + deps.iter() + .map(|dep| cargo[*dep].id.clone()) + .map(|p| PackageSpecifier::Cargo { package_id: p }) + .collect() + }) + } + _ => None, + }) } - _ => None, - }) + PackageSpecifier::BuildInfo { label } => { + self.workspaces.iter().find_map(|workspace| match &workspace.kind { + ProjectWorkspaceKind::Json(p) => { + let krate = p.crate_by_label(label)?; + Some( + krate + .iter_deps() + .filter_map(|dep| p[dep].build.as_ref()) + .map(|build| PackageSpecifier::BuildInfo { + label: build.label.clone(), + }) + .collect(), + ) + } + _ => None, + }) + } + } } pub(crate) fn file_exists(&self, file_id: FileId) -> bool { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 4a6544508ff4e..57adbbfe72a7b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -18,7 +18,7 @@ use vfs::{AbsPathBuf, ChangeKind, VfsPath}; use crate::{ config::{Config, ConfigChange}, - flycheck::{InvocationStrategy, Target}, + flycheck::{InvocationStrategy, PackageSpecifier, Target}, global_state::{FetchWorkspaceRequest, GlobalState}, lsp::{from_proto, utils::apply_document_changes}, lsp_ext::{self, RunFlycheckParams}, @@ -328,22 +328,32 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { } InvocationStrategy::PerWorkspace => { Box::new(move || { - let target = TargetSpec::for_file(&world, file_id)?.and_then(|it| { + let target = TargetSpec::for_file(&world, file_id)?.map(|it| { let tgt_kind = it.target_kind(); let (tgt_name, root, package) = match it { - TargetSpec::Cargo(c) => (c.target, c.workspace_root, c.package_id), - _ => return None, + TargetSpec::Cargo(c) => ( + Some(c.target), + c.workspace_root, + PackageSpecifier::Cargo { package_id: c.package_id }, + ), + TargetSpec::ProjectJson(p) => ( + None, + p.project_root, + PackageSpecifier::BuildInfo { label: p.label.clone() }, + ), }; - let tgt = match tgt_kind { - project_model::TargetKind::Bin => Target::Bin(tgt_name), - project_model::TargetKind::Example => Target::Example(tgt_name), - project_model::TargetKind::Test => Target::Test(tgt_name), - project_model::TargetKind::Bench => Target::Benchmark(tgt_name), - _ => return Some((None, root, package)), - }; + let tgt = tgt_name.and_then(|tgt_name| { + Some(match tgt_kind { + project_model::TargetKind::Bin => Target::Bin(tgt_name), + project_model::TargetKind::Example => Target::Example(tgt_name), + project_model::TargetKind::Test => Target::Test(tgt_name), + project_model::TargetKind::Bench => Target::Benchmark(tgt_name), + _ => return None, + }) + }); - Some((Some(tgt), root, package)) + (tgt, root, package) }); tracing::debug!(?target, "flycheck target"); // we have a specific non-library target, attempt to only check that target, nothing @@ -365,7 +375,13 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { cargo: Some((cargo, _, _)), .. } => *cargo.workspace_root() == root, - _ => false, + project_model::ProjectWorkspaceKind::Json(p) => { + *p.project_root() == root + } + project_model::ProjectWorkspaceKind::DetachedFile { + cargo: None, + .. 
+ } => false, }); if let Some(idx) = package_workspace_idx { let workspace_deps = From 95a07dbfa06ad7c30d4e9fa2f85de2b991d610f3 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 4 Sep 2024 13:52:59 +1000 Subject: [PATCH 016/131] project-model: Introduce RunnableKind::Flycheck We need to distinguish from RunnableKind::Check, which is human-readable. --- .../crates/project-model/src/project_json.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index a7fba69362445..536f170e11926 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -246,6 +246,10 @@ impl ProjectJson { pub fn runnables(&self) -> &[Runnable] { &self.runnables } + + pub fn runnable_template(&self, kind: RunnableKind) -> Option<&Runnable> { + self.runnables().iter().find(|r| r.kind == kind) + } } /// A crate points to the root module of a crate and lists the dependencies of the crate. This is @@ -349,6 +353,7 @@ pub struct Runnable { /// The kind of runnable. #[derive(Debug, Clone, PartialEq, Eq)] pub enum RunnableKind { + /// `cargo check`, basically, with human-readable output. Check, /// Can run a binary. @@ -356,6 +361,10 @@ pub enum RunnableKind { /// Run a single test. TestOne, + + /// Template for checking a target, emitting rustc JSON diagnostics. + /// May include {label} which will get the label from the `build` section of a crate. + Flycheck, } #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] @@ -462,6 +471,7 @@ pub struct RunnableData { #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub enum RunnableKindData { + Flycheck, Check, Run, TestOne, @@ -532,6 +542,7 @@ impl From for RunnableKind { RunnableKindData::Check => RunnableKind::Check, RunnableKindData::Run => RunnableKind::Run, RunnableKindData::TestOne => RunnableKind::TestOne, + RunnableKindData::Flycheck => RunnableKind::Flycheck, } } } From 2a899bb119bd9e33eadce3b7179cb1b8bc49fd78 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 4 Sep 2024 13:52:59 +1000 Subject: [PATCH 017/131] flycheck: Use RunnableKind::Flycheck from ProjectJson to flycheck This adds a substitution helper to get the right behaviour re {label} and $saved_file. --- .../crates/rust-analyzer/src/flycheck.rs | 226 +++++++++++++++++- .../crates/rust-analyzer/src/reload.rs | 19 +- 2 files changed, 238 insertions(+), 7 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 2819ae98daafa..1b1e3344e25f9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -14,6 +14,7 @@ use ide_db::FxHashSet; use itertools::Itertools; use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf}; use project_model::TargetDirectoryConfig; +use project_model::project_json; use rustc_hash::FxHashMap; use serde::Deserialize as _; use serde_derive::Deserialize; @@ -89,6 +90,24 @@ impl CargoOptions { } } +/// The flycheck config from a rust-project.json file or discoverConfig JSON output. 
+#[derive(Debug, Default)] +pub(crate) struct FlycheckConfigJson { + /// The template with [project_json::RunnableKind::Flycheck] + pub single_template: Option, +} + +impl FlycheckConfigJson { + pub(crate) fn any_configured(&self) -> bool { + // self.workspace_template.is_some() || + self.single_template.is_some() + } +} + +/// The flycheck config from rust-analyzer's own configuration. +/// +/// We rely on this when rust-project.json does not specify a flycheck runnable +/// #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum FlycheckConfig { CargoCommand { @@ -128,7 +147,7 @@ impl fmt::Display for FlycheckConfig { // in the IDE (e.g. in the VS Code status bar). let display_args = args .iter() - .map(|arg| if arg == SAVED_FILE_PLACEHOLDER { "..." } else { arg }) + .map(|arg| if arg == SAVED_FILE_PLACEHOLDER_DOLLAR { "..." } else { arg }) .collect::>(); write!(f, "{command} {}", display_args.join(" ")) @@ -156,6 +175,7 @@ impl FlycheckHandle { generation: Arc, sender: Sender, config: FlycheckConfig, + config_json: FlycheckConfigJson, sysroot_root: Option, workspace_root: AbsPathBuf, manifest_path: Option, @@ -166,6 +186,7 @@ impl FlycheckHandle { generation.load(Ordering::Relaxed), sender, config, + config_json, sysroot_root, workspace_root, manifest_path, @@ -341,6 +362,8 @@ struct FlycheckActor { generation: DiagnosticsGeneration, sender: Sender, config: FlycheckConfig, + config_json: FlycheckConfigJson, + manifest_path: Option, ws_target_dir: Option, /// Either the workspace root of the workspace we are flychecking, @@ -373,7 +396,66 @@ enum Event { CheckEvent(Option), } -pub(crate) const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; +/// This is stable behaviour. Don't change. +const SAVED_FILE_PLACEHOLDER_DOLLAR: &str = "$saved_file"; +const LABEL_INLINE: &str = "{label}"; +const SAVED_FILE_INLINE: &str = "{saved_file}"; + +struct Substitutions<'a> { + label: Option<&'a str>, + saved_file: Option<&'a str>, +} + +impl<'a> Substitutions<'a> { + /// If you have a runnable, and it has {label} in it somewhere, treat it as a template that + /// may be unsatisfied if you do not provide a label to substitute into it. Returns None in + /// that situation. Otherwise performs the requested substitutions. + /// + /// Same for {saved_file}. 
+ /// + #[allow(clippy::disallowed_types)] /* generic parameter allows for FxHashMap */ + fn substitute( + self, + template: &project_json::Runnable, + extra_env: &std::collections::HashMap, H>, + ) -> Option { + let mut cmd = toolchain::command(&template.program, &template.cwd, extra_env); + for arg in &template.args { + if let Some(ix) = arg.find(LABEL_INLINE) { + if let Some(label) = self.label { + let mut arg = arg.to_string(); + arg.replace_range(ix..ix + LABEL_INLINE.len(), label); + cmd.arg(arg); + continue; + } else { + return None; + } + } + if let Some(ix) = arg.find(SAVED_FILE_INLINE) { + if let Some(saved_file) = self.saved_file { + let mut arg = arg.to_string(); + arg.replace_range(ix..ix + SAVED_FILE_INLINE.len(), saved_file); + cmd.arg(arg); + continue; + } else { + return None; + } + } + // Legacy syntax: full argument match + if arg == SAVED_FILE_PLACEHOLDER_DOLLAR { + if let Some(saved_file) = self.saved_file { + cmd.arg(saved_file); + continue; + } else { + return None; + } + } + cmd.arg(arg); + } + cmd.current_dir(&template.cwd); + Some(cmd) + } +} impl FlycheckActor { fn new( @@ -381,6 +463,7 @@ impl FlycheckActor { generation: DiagnosticsGeneration, sender: Sender, config: FlycheckConfig, + config_json: FlycheckConfigJson, sysroot_root: Option, workspace_root: AbsPathBuf, manifest_path: Option, @@ -392,6 +475,7 @@ impl FlycheckActor { generation, sender, config, + config_json, sysroot_root, root: Arc::new(workspace_root), scope: FlycheckScope::Workspace, @@ -672,6 +756,29 @@ impl FlycheckActor { self.diagnostics_received = DiagnosticsReceived::No; } + fn explicit_check_command( + &self, + scope: &FlycheckScope, + saved_file: Option<&AbsPath>, + ) -> Option { + let label = match scope { + // We could add a runnable like "RunnableKind::FlycheckWorkspace". But generally + // if you're not running cargo, it's because your workspace is too big to check + // all at once. You can always use `check_overrideCommand` with no {label}. + FlycheckScope::Workspace => return None, + FlycheckScope::Package { package: PackageSpecifier::BuildInfo { label }, .. } => { + label.as_str() + } + FlycheckScope::Package { + package: PackageSpecifier::Cargo { package_id: label }, + .. + } => &label.repr, + }; + let template = self.config_json.single_template.as_ref()?; + let subs = Substitutions { label: Some(label), saved_file: saved_file.map(|x| x.as_str()) }; + subs.substitute(template, &FxHashMap::default()) + } + /// Construct a `Command` object for checking the user's code. If the user /// has specified a custom command with placeholders that we cannot fill, /// return None. @@ -683,6 +790,20 @@ impl FlycheckActor { ) -> Option { match &self.config { FlycheckConfig::CargoCommand { command, options, ansi_color_output } => { + // Only use the rust-project.json's flycheck config when no check_overrideCommand + // is configured. In the FlycheckConcig::CustomCommand branch we will still do + // label substitution, but on the overrideCommand instead. + // + // There needs to be SOME way to override what your discoverConfig tool says, + // because to change the flycheck runnable there you may have to literally + // recompile the tool. + if self.config_json.any_configured() { + // Completely handle according to rust-project.json. + // We don't consider this to be "using cargo" so we will not apply any of the + // CargoOptions to the command. 
+ return self.explicit_check_command(scope, saved_file); + } + let mut cmd = toolchain::command(Tool::Cargo.path(), &*self.root, &options.extra_env); if let Some(sysroot_root) = &self.sysroot_root @@ -757,7 +878,7 @@ impl FlycheckActor { // we're saving a file, replace the placeholder in the arguments. if let Some(saved_file) = saved_file { for arg in args { - if arg == SAVED_FILE_PLACEHOLDER { + if arg == SAVED_FILE_PLACEHOLDER_DOLLAR { cmd.arg(saved_file); } else { cmd.arg(arg); @@ -765,7 +886,7 @@ impl FlycheckActor { } } else { for arg in args { - if arg == SAVED_FILE_PLACEHOLDER { + if arg == SAVED_FILE_PLACEHOLDER_DOLLAR { // The custom command has a $saved_file placeholder, // but we had an IDE event that wasn't a file save. Do nothing. return None; @@ -837,3 +958,100 @@ enum JsonMessage { Cargo(cargo_metadata::Message), Rustc(Diagnostic), } + +#[cfg(test)] +mod tests { + use ide_db::FxHashMap; + use itertools::Itertools; + use paths::Utf8Path; + use project_model::project_json; + + use crate::flycheck::Substitutions; + + #[test] + fn test_substitutions() { + let label = ":label"; + let saved_file = "file.rs"; + + // Runnable says it needs both; you need both. + assert_eq!(test_substitute(None, None, "{label} {saved_file}").as_deref(), None); + assert_eq!(test_substitute(Some(label), None, "{label} {saved_file}").as_deref(), None); + assert_eq!( + test_substitute(None, Some(saved_file), "{label} {saved_file}").as_deref(), + None + ); + assert_eq!( + test_substitute(Some(label), Some(saved_file), "{label} {saved_file}").as_deref(), + Some("build :label file.rs") + ); + + // Only need label? only need label. + assert_eq!(test_substitute(None, None, "{label}").as_deref(), None); + assert_eq!(test_substitute(Some(label), None, "{label}").as_deref(), Some("build :label"),); + assert_eq!(test_substitute(None, Some(saved_file), "{label}").as_deref(), None,); + assert_eq!( + test_substitute(Some(label), Some(saved_file), "{label}").as_deref(), + Some("build :label"), + ); + + // Only need saved_file + assert_eq!(test_substitute(None, None, "{saved_file}").as_deref(), None); + assert_eq!(test_substitute(Some(label), None, "{saved_file}").as_deref(), None); + assert_eq!( + test_substitute(None, Some(saved_file), "{saved_file}").as_deref(), + Some("build file.rs") + ); + assert_eq!( + test_substitute(Some(label), Some(saved_file), "{saved_file}").as_deref(), + Some("build file.rs") + ); + + // Need neither + assert_eq!(test_substitute(None, None, "xxx").as_deref(), Some("build xxx")); + assert_eq!(test_substitute(Some(label), None, "xxx").as_deref(), Some("build xxx")); + assert_eq!(test_substitute(None, Some(saved_file), "xxx").as_deref(), Some("build xxx")); + assert_eq!( + test_substitute(Some(label), Some(saved_file), "xxx").as_deref(), + Some("build xxx") + ); + + // {label} mid-argument substitution + assert_eq!( + test_substitute(Some(label), None, "--label={label}").as_deref(), + Some("build --label=:label") + ); + + // {saved_file} mid-argument substitution + assert_eq!( + test_substitute(None, Some(saved_file), "--saved={saved_file}").as_deref(), + Some("build --saved=file.rs") + ); + + // $saved_file legacy support (no mid-argument substitution, we never supported that) + assert_eq!( + test_substitute(None, Some(saved_file), "$saved_file").as_deref(), + Some("build file.rs") + ); + + fn test_substitute( + label: Option<&str>, + saved_file: Option<&str>, + args: &str, + ) -> Option { + Substitutions { label, saved_file } + .substitute( + &project_json::Runnable { + program: 
"build".to_owned(), + args: Vec::from_iter(args.split_whitespace().map(ToOwned::to_owned)), + cwd: Utf8Path::new("/path").to_owned(), + kind: project_json::RunnableKind::Flycheck, + }, + &FxHashMap::default(), + ) + .map(|command| { + command.get_args().map(|x| x.to_string_lossy()).collect_vec().join(" ") + }) + .map(|args| format!("build {}", args)) + } + } +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index e3a5ee221973b..0a16b7a5614c5 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -25,7 +25,9 @@ use load_cargo::{ProjectFolders, load_proc_macro}; use lsp_types::FileSystemWatcher; use paths::Utf8Path; use proc_macro_api::ProcMacroClient; -use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts}; +use project_model::{ + ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts, project_json, +}; use stdx::{format_to, thread::ThreadIntent}; use triomphe::Arc; use vfs::{AbsPath, AbsPathBuf, ChangeKind}; @@ -875,6 +877,7 @@ impl GlobalState { generation.clone(), sender.clone(), config, + crate::flycheck::FlycheckConfigJson::default(), None, self.config.root_path().clone(), None, @@ -894,16 +897,25 @@ impl GlobalState { cargo: Some((cargo, _, _)), .. } => ( + crate::flycheck::FlycheckConfigJson::default(), cargo.workspace_root(), Some(cargo.manifest_path()), Some(cargo.target_directory()), ), ProjectWorkspaceKind::Json(project) => { + let config_json = crate::flycheck::FlycheckConfigJson { + single_template: project + .runnable_template(project_json::RunnableKind::Flycheck) + .cloned(), + }; // Enable flychecks for json projects if a custom flycheck command was supplied // in the workspace configuration. match config { + _ if config_json.any_configured() => { + (config_json, project.path(), None, None) + } FlycheckConfig::CustomCommand { .. } => { - (project.path(), None, None) + (config_json, project.path(), None, None) } _ => return None, } @@ -913,12 +925,13 @@ impl GlobalState { ws.sysroot.root().map(ToOwned::to_owned), )) }) - .map(|(id, (root, manifest_path, target_dir), sysroot_root)| { + .map(|(id, (config_json, root, manifest_path, target_dir), sysroot_root)| { FlycheckHandle::spawn( id, generation.clone(), sender.clone(), config.clone(), + config_json, sysroot_root, root.to_path_buf(), manifest_path.map(|it| it.to_path_buf()), From 7f608da06af36c0c21b6119959e47d9480ccb18a Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 4 Sep 2024 13:52:59 +1000 Subject: [PATCH 018/131] flycheck: Support {label} in check_overrideCommand as well as $saved_file --- .../crates/rust-analyzer/src/flycheck.rs | 38 ++++++++----------- 1 file changed, 16 insertions(+), 22 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 1b1e3344e25f9..cf4ab29b8649e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -872,29 +872,23 @@ impl FlycheckActor { &*self.root } }; - let mut cmd = toolchain::command(command, root, extra_env); - - // If the custom command has a $saved_file placeholder, and - // we're saving a file, replace the placeholder in the arguments. 
- if let Some(saved_file) = saved_file { - for arg in args { - if arg == SAVED_FILE_PLACEHOLDER_DOLLAR { - cmd.arg(saved_file); - } else { - cmd.arg(arg); - } - } - } else { - for arg in args { - if arg == SAVED_FILE_PLACEHOLDER_DOLLAR { - // The custom command has a $saved_file placeholder, - // but we had an IDE event that wasn't a file save. Do nothing. - return None; - } + let runnable = project_json::Runnable { + program: command.clone(), + cwd: Utf8Path::to_owned(root.as_ref()), + args: args.clone(), + kind: project_json::RunnableKind::Flycheck, + }; - cmd.arg(arg); - } - } + let label = match scope { + FlycheckScope::Workspace => None, + // We support substituting both build labels (e.g. buck, bazel) and cargo package ids. + // With cargo package ids, you get `cargo check -p path+file:///path/to/rust-analyzer/crates/hir#0.0.0`. + // That does work! + FlycheckScope::Package { package, .. } => Some(package.as_str()), + }; + + let subs = Substitutions { label, saved_file: saved_file.map(|x| x.as_str()) }; + let cmd = subs.substitute(&runnable, extra_env)?; Some(cmd) } From 4e61c6052124c2e71403973cc3adb34c4ee5454d Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 4 Sep 2024 13:52:59 +1000 Subject: [PATCH 019/131] flycheck: Always flycheck single crate if there is a build label from rust-project.json This requires us to add $saved_file / {saved_file} interpolation back to restart_for_package. Otherwise we break existing users of $saved_file. No grand reason why we can't delete saved_file later, although I would just leave it because sometimes a build system might really know better which target(s) to build, including multiple targets. --- .../rust-analyzer/crates/rust-analyzer/src/flycheck.rs | 3 ++- .../crates/rust-analyzer/src/handlers/notification.rs | 9 ++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index cf4ab29b8649e..57ad774b1850e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -219,13 +219,14 @@ impl FlycheckHandle { package: PackageSpecifier, target: Option, workspace_deps: Option>, + saved_file: Option, ) { let generation = self.generation.fetch_add(1, Ordering::Relaxed) + 1; self.sender .send(StateChange::Restart { generation, scope: FlycheckScope::Package { package, workspace_deps }, - saved_file: None, + saved_file, target, }) .unwrap(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 57adbbfe72a7b..d956010433301 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -328,6 +328,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { } InvocationStrategy::PerWorkspace => { Box::new(move || { + let saved_file = vfs_path.as_path().map(ToOwned::to_owned); let target = TargetSpec::for_file(&world, file_id)?.map(|it| { let tgt_kind = it.target_kind(); let (tgt_name, root, package) = match it { @@ -362,8 +363,10 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { if let Some((target, root, package)) = target { // trigger a package check if we have a non-library target as that can't affect // anything else in the workspace OR if we're not allowed to check the 
workspace as - // the user opted into package checks then - let package_check_allowed = target.is_some() || !may_flycheck_workspace; + // the user opted into package checks then OR if this is not cargo. + let package_check_allowed = target.is_some() + || !may_flycheck_workspace + || matches!(package, PackageSpecifier::BuildInfo { .. }); if package_check_allowed { package_workspace_idx = world.workspaces.iter().position(|ws| match &ws.kind { @@ -390,6 +393,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { package, target, workspace_deps, + saved_file.clone(), ); } } @@ -460,7 +464,6 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { ws_contains_file && !is_pkg_ws }); - let saved_file = vfs_path.as_path().map(ToOwned::to_owned); let mut workspace_check_triggered = false; // Find and trigger corresponding flychecks 'flychecks: for flycheck in world.flycheck.iter() { From 778de45547f9a584894fad295c86539e7c57aa9d Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 4 Sep 2024 13:52:59 +1000 Subject: [PATCH 020/131] flycheck: Add display_command to pretty-print flycheck command being run in a notification --- .../crates/rust-analyzer/src/flycheck.rs | 82 +++++++++++++++++++ .../rust-analyzer/crates/toolchain/src/lib.rs | 3 + 2 files changed, 85 insertions(+) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 57ad774b1850e..7f814121e909b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -22,6 +22,7 @@ use serde_derive::Deserialize; pub(crate) use cargo_metadata::diagnostic::{ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, }; +use toolchain::DISPLAY_COMMAND_IGNORE_ENVS; use toolchain::Tool; use triomphe::Arc; @@ -954,6 +955,54 @@ enum JsonMessage { Rustc(Diagnostic), } +/// Not good enough to execute in a shell, but good enough to show the user without all the noisy +/// quotes +/// +/// Pass implicit_cwd if there is one regarded as the obvious by the user, so we can skip showing it. +/// Compactness is the aim of the game, the output typically gets truncated quite a lot. 
+fn display_command(c: &Command, implicit_cwd: Option<&std::path::Path>) -> String { + let mut o = String::new(); + use std::fmt::Write; + let lossy = std::ffi::OsStr::to_string_lossy; + if let Some(dir) = c.get_current_dir() { + if Some(dir) == implicit_cwd.map(std::path::Path::new) { + // pass + } else if dir.to_string_lossy().contains(" ") { + write!(o, "cd {:?} && ", dir).unwrap(); + } else { + write!(o, "cd {} && ", dir.display()).unwrap(); + } + } + for (env, val) in c.get_envs() { + let (env, val) = (lossy(env), val.map(lossy).unwrap_or(std::borrow::Cow::Borrowed(""))); + if DISPLAY_COMMAND_IGNORE_ENVS.contains(&env.as_ref()) { + continue; + } + if env.contains(" ") { + write!(o, "\"{}={}\" ", env, val).unwrap(); + } else if val.contains(" ") { + write!(o, "{}=\"{}\" ", env, val).unwrap(); + } else { + write!(o, "{}={} ", env, val).unwrap(); + } + } + let prog = lossy(c.get_program()); + if prog.contains(" ") { + write!(o, "{:?}", prog).unwrap(); + } else { + write!(o, "{}", prog).unwrap(); + } + for arg in c.get_args() { + let arg = lossy(arg); + if arg.contains(" ") { + write!(o, " \"{}\"", arg).unwrap(); + } else { + write!(o, " {}", arg).unwrap(); + } + } + o +} + #[cfg(test)] mod tests { use ide_db::FxHashMap; @@ -962,6 +1011,7 @@ mod tests { use project_model::project_json; use crate::flycheck::Substitutions; + use crate::flycheck::display_command; #[test] fn test_substitutions() { @@ -1049,4 +1099,36 @@ mod tests { .map(|args| format!("build {}", args)) } } + + #[test] + fn test_display_command() { + use std::path::Path; + let workdir = Path::new("workdir"); + let mut cmd = toolchain::command("command", workdir, &FxHashMap::default()); + assert_eq!(display_command(cmd.arg("--arg"), Some(workdir)), "command --arg"); + assert_eq!( + display_command(cmd.arg("spaced arg"), Some(workdir)), + "command --arg \"spaced arg\"" + ); + assert_eq!( + display_command(cmd.env("ENVIRON", "yeah"), Some(workdir)), + "ENVIRON=yeah command --arg \"spaced arg\"" + ); + assert_eq!( + display_command(cmd.env("OTHER", "spaced env"), Some(workdir)), + "ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\"" + ); + assert_eq!( + display_command(cmd.current_dir("/tmp"), Some(workdir)), + "cd /tmp && ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\"" + ); + assert_eq!( + display_command(cmd.current_dir("/tmp and/thing"), Some(workdir)), + "cd \"/tmp and/thing\" && ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\"" + ); + assert_eq!( + display_command(cmd.current_dir("/tmp and/thing"), Some(Path::new("/tmp and/thing"))), + "ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\"" + ); + } } diff --git a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs index 39319886cfe4a..1a17269838708 100644 --- a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs +++ b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs @@ -74,6 +74,9 @@ impl Tool { // Prevent rustup from automatically installing toolchains, see https://github.com/rust-lang/rust-analyzer/issues/20719. pub const NO_RUSTUP_AUTO_INSTALL_ENV: (&str, &str) = ("RUSTUP_AUTO_INSTALL", "0"); +// These get ignored when displaying what command is running in LSP status messages. 
+pub const DISPLAY_COMMAND_IGNORE_ENVS: &[&str] = &[NO_RUSTUP_AUTO_INSTALL_ENV.0]; + #[allow(clippy::disallowed_types)] /* generic parameter allows for FxHashMap */ pub fn command( cmd: impl AsRef, From 53a371c505a00be220aca18dfba091ee2b2d8f31 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 4 Sep 2024 13:52:59 +1000 Subject: [PATCH 021/131] flycheck: notifications show full command when configured in a rust-project.json runnable For JSON / override users, pretty-print the custom flycheck command with fewer quote characters Better debug logging in flycheck --- .../crates/rust-analyzer/src/flycheck.rs | 52 ++++++++++++++----- .../crates/rust-analyzer/src/global_state.rs | 2 + .../crates/rust-analyzer/src/main_loop.rs | 27 +++++++--- .../crates/rust-analyzer/src/reload.rs | 1 + 4 files changed, 62 insertions(+), 20 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 7f814121e909b..6dcae76c93544 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -309,13 +309,18 @@ impl fmt::Debug for FlycheckMessage { #[derive(Debug)] pub(crate) enum Progress { - DidStart, + DidStart { + /// The user sees this in VSCode, etc. May be a shortened version of the command we actually + /// executed, otherwise it is way too long. + user_facing_command: String, + }, DidCheckCrate(String), DidFinish(io::Result<()>), DidCancel, DidFailToRestart(String), } +#[derive(Debug, Clone)] enum FlycheckScope { Workspace, Package { @@ -346,6 +351,16 @@ impl PackageSpecifier { } } +#[derive(Debug)] +enum FlycheckCommandOrigin { + /// Regular cargo invocation + Cargo, + /// Configured via check_overrideCommand + CheckOverrideCommand, + /// From a runnable with [project_json::RunnableKind::Flycheck] + ProjectJsonRunnable, +} + enum StateChange { Restart { generation: DiagnosticsGeneration, @@ -529,16 +544,28 @@ impl FlycheckActor { } let command = self.check_command(&scope, saved_file.as_deref(), target); - self.scope = scope; + self.scope = scope.clone(); self.generation = generation; - let Some(command) = command else { + let Some((command, origin)) = command else { + tracing::debug!(?scope, "failed to build flycheck command"); continue; }; - let formatted_command = format!("{command:?}"); + let debug_command = format!("{command:?}"); + let user_facing_command = match origin { + // Don't show all the --format=json-with-blah-blah args, just the simple + // version + FlycheckCommandOrigin::Cargo => self.config.to_string(), + // show them the full command but pretty printed. 
advanced user + FlycheckCommandOrigin::ProjectJsonRunnable + | FlycheckCommandOrigin::CheckOverrideCommand => display_command( + &command, + Some(std::path::Path::new(self.root.as_path())), + ), + }; - tracing::debug!(?command, "will restart flycheck"); + tracing::debug!(?origin, ?command, "will restart flycheck"); let (sender, receiver) = unbounded(); match CommandHandle::spawn( command, @@ -575,14 +602,14 @@ impl FlycheckActor { }, ) { Ok(command_handle) => { - tracing::debug!(command = formatted_command, "did restart flycheck"); + tracing::debug!(?origin, command = %debug_command, "did restart flycheck"); self.command_handle = Some(command_handle); self.command_receiver = Some(receiver); - self.report_progress(Progress::DidStart); + self.report_progress(Progress::DidStart { user_facing_command }); } Err(error) => { self.report_progress(Progress::DidFailToRestart(format!( - "Failed to run the following command: {formatted_command} error={error}" + "Failed to run the following command: {debug_command} origin={origin:?} error={error}" ))); } } @@ -789,7 +816,7 @@ impl FlycheckActor { scope: &FlycheckScope, saved_file: Option<&AbsPath>, target: Option, - ) -> Option { + ) -> Option<(Command, FlycheckCommandOrigin)> { match &self.config { FlycheckConfig::CargoCommand { command, options, ansi_color_output } => { // Only use the rust-project.json's flycheck config when no check_overrideCommand @@ -803,7 +830,8 @@ impl FlycheckActor { // Completely handle according to rust-project.json. // We don't consider this to be "using cargo" so we will not apply any of the // CargoOptions to the command. - return self.explicit_check_command(scope, saved_file); + let cmd = self.explicit_check_command(scope, saved_file)?; + return Some((cmd, FlycheckCommandOrigin::ProjectJsonRunnable)); } let mut cmd = @@ -864,7 +892,7 @@ impl FlycheckActor { self.ws_target_dir.as_ref().map(Utf8PathBuf::as_path), ); cmd.args(&options.extra_args); - Some(cmd) + Some((cmd, FlycheckCommandOrigin::Cargo)) } FlycheckConfig::CustomCommand { command, args, extra_env, invocation_strategy } => { let root = match invocation_strategy { @@ -892,7 +920,7 @@ impl FlycheckActor { let subs = Substitutions { label, saved_file: saved_file.map(|x| x.as_str()) }; let cmd = subs.substitute(&runnable, extra_env)?; - Some(cmd) + Some((cmd, FlycheckCommandOrigin::CheckOverrideCommand)) } } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 0cfd0a141baeb..39b4aaa64738d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -112,6 +112,7 @@ pub(crate) struct GlobalState { pub(crate) flycheck_sender: Sender, pub(crate) flycheck_receiver: Receiver, pub(crate) last_flycheck_error: Option, + pub(crate) flycheck_formatted_commands: Vec, // Test explorer pub(crate) test_run_session: Option>, @@ -288,6 +289,7 @@ impl GlobalState { flycheck_sender, flycheck_receiver, last_flycheck_error: None, + flycheck_formatted_commands: vec![], test_run_session: None, test_run_sender, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index dd0813c14454c..62a3b3a17bdfc 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -1179,8 +1179,24 @@ impl GlobalState { kind: 
ClearDiagnosticsKind::OlderThan(generation, ClearScope::Package(package_id)), } => self.diagnostics.clear_check_older_than_for_package(id, package_id, generation), FlycheckMessage::Progress { id, progress } => { + let format_with_id = |user_facing_command: String| { + if self.flycheck.len() == 1 { + user_facing_command + } else { + format!("{user_facing_command} (#{})", id + 1) + } + }; + + self.flycheck_formatted_commands + .resize_with(self.flycheck.len().max(id + 1), || { + format_with_id(self.config.flycheck(None).to_string()) + }); + let (state, message) = match progress { - flycheck::Progress::DidStart => (Progress::Begin, None), + flycheck::Progress::DidStart { user_facing_command } => { + self.flycheck_formatted_commands[id] = format_with_id(user_facing_command); + (Progress::Begin, None) + } flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)), flycheck::Progress::DidCancel => { self.last_flycheck_error = None; @@ -1200,13 +1216,8 @@ impl GlobalState { } }; - // When we're running multiple flychecks, we have to include a disambiguator in - // the title, or the editor complains. Note that this is a user-facing string. - let title = if self.flycheck.len() == 1 { - format!("{}", self.config.flycheck(None)) - } else { - format!("{} (#{})", self.config.flycheck(None), id + 1) - }; + // Clone because we &mut self for report_progress + let title = self.flycheck_formatted_commands[id].clone(); self.report_progress( &title, state, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 0a16b7a5614c5..ccafbd7b30b9d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -942,6 +942,7 @@ impl GlobalState { } } .into(); + self.flycheck_formatted_commands = vec![]; } } From 3fdb78cba695824cba5be894a032779c9b4a4ac3 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 3 Dec 2025 11:34:58 +1100 Subject: [PATCH 022/131] flycheck: Rename FlycheckConfig::CargoCommand to Automatic Because (1) it is what we use when there is no relevant config (2) we automatically use either rust-project.json's flycheck, or cargo This also puts check_command config into CargoOptions. It's a cargo option, after all. 
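For reviewers, a minimal sketch of the precedence the new name is meant to convey (the names `CheckSource` and `choose_source` are illustrative only; the real decision lives in `FlycheckActor::check_command`):

```rust
// Sketch only: check_overrideCommand beats a rust-project.json `flycheck`
// runnable, which in turn beats the plain `cargo <subcommand>` fallback.
#[derive(Debug, PartialEq)]
enum CheckSource {
    CheckOverrideCommand,
    ProjectJsonRunnable,
    Cargo,
}

fn choose_source(has_override_command: bool, has_json_flycheck_runnable: bool) -> CheckSource {
    if has_override_command {
        CheckSource::CheckOverrideCommand
    } else if has_json_flycheck_runnable {
        CheckSource::ProjectJsonRunnable
    } else {
        CheckSource::Cargo
    }
}

fn main() {
    // `Automatic` covers the last two cases: json runnable if present, else cargo.
    assert_eq!(choose_source(true, true), CheckSource::CheckOverrideCommand);
    assert_eq!(choose_source(false, true), CheckSource::ProjectJsonRunnable);
    assert_eq!(choose_source(false, false), CheckSource::Cargo);
}
```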
--- .../crates/rust-analyzer/src/config.rs | 20 ++++++----- .../crates/rust-analyzer/src/flycheck.rs | 36 ++++++++++++------- .../crates/rust-analyzer/src/test_runner.rs | 2 +- 3 files changed, 35 insertions(+), 23 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index e39569e108de4..c2f7ada8c8cae 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -2431,6 +2431,8 @@ impl Config { pub(crate) fn cargo_test_options(&self, source_root: Option) -> CargoOptions { CargoOptions { + // Might be nice to allow users to specify test_command = "nextest" + subcommand: "test".into(), target_tuples: self.cargo_target(source_root).clone().into_iter().collect(), all_targets: false, no_default_features: *self.cargo_noDefaultFeatures(source_root), @@ -2464,9 +2466,9 @@ impl Config { }, } } - Some(_) | None => FlycheckConfig::CargoCommand { - command: self.check_command(source_root).clone(), - options: CargoOptions { + Some(_) | None => FlycheckConfig::Automatic { + cargo_options: CargoOptions { + subcommand: self.check_command(source_root).clone(), target_tuples: self .check_targets(source_root) .clone() @@ -4171,8 +4173,8 @@ mod tests { assert_eq!(config.cargo_targetDir(None), &None); assert!(matches!( config.flycheck(None), - FlycheckConfig::CargoCommand { - options: CargoOptions { target_dir_config: TargetDirectoryConfig::None, .. }, + FlycheckConfig::Automatic { + cargo_options: CargoOptions { target_dir_config: TargetDirectoryConfig::None, .. }, .. } )); @@ -4195,8 +4197,8 @@ mod tests { Utf8PathBuf::from(std::env::var("CARGO_TARGET_DIR").unwrap_or("target".to_owned())); assert!(matches!( config.flycheck(None), - FlycheckConfig::CargoCommand { - options: CargoOptions { target_dir_config, .. }, + FlycheckConfig::Automatic { + cargo_options: CargoOptions { target_dir_config, .. }, .. } if target_dir_config.target_dir(Some(&ws_target_dir)).map(Cow::into_owned) == Some(ws_target_dir.join("rust-analyzer")) @@ -4221,8 +4223,8 @@ mod tests { ); assert!(matches!( config.flycheck(None), - FlycheckConfig::CargoCommand { - options: CargoOptions { target_dir_config, .. }, + FlycheckConfig::Automatic { + cargo_options: CargoOptions { target_dir_config, .. }, .. } if target_dir_config.target_dir(None).map(Cow::into_owned) == Some(Utf8PathBuf::from("other_folder")) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 6dcae76c93544..512c231990cb6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -38,8 +38,11 @@ pub(crate) enum InvocationStrategy { PerWorkspace, } +/// Data needed to construct a `cargo` command invocation, e.g. for flycheck or running a test. #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct CargoOptions { + /// The cargo subcommand to run, e.g. 
"check" or "clippy" + pub(crate) subcommand: String, pub(crate) target_tuples: Vec, pub(crate) all_targets: bool, pub(crate) set_test: bool, @@ -111,11 +114,16 @@ impl FlycheckConfigJson { /// #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum FlycheckConfig { - CargoCommand { - command: String, - options: CargoOptions, + /// Automatically use rust-project.json's flycheck runnable or just use cargo (the common case) + /// + /// We can't have a variant for ProjectJson because that is configured on the fly during + /// discoverConfig. We only know what we can read at config time. + Automatic { + /// If we do use cargo, how to build the check command + cargo_options: CargoOptions, ansi_color_output: bool, }, + /// check_overrideCommand. This overrides both cargo and rust-project.json's flycheck runnable. CustomCommand { command: String, args: Vec, @@ -127,7 +135,7 @@ pub(crate) enum FlycheckConfig { impl FlycheckConfig { pub(crate) fn invocation_strategy(&self) -> InvocationStrategy { match self { - FlycheckConfig::CargoCommand { .. } => InvocationStrategy::PerWorkspace, + FlycheckConfig::Automatic { .. } => InvocationStrategy::PerWorkspace, FlycheckConfig::CustomCommand { invocation_strategy, .. } => { invocation_strategy.clone() } @@ -138,7 +146,9 @@ impl FlycheckConfig { impl fmt::Display for FlycheckConfig { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {command}"), + FlycheckConfig::Automatic { cargo_options, .. } => { + write!(f, "cargo {}", cargo_options.subcommand) + } FlycheckConfig::CustomCommand { command, args, .. } => { // Don't show `my_custom_check --foo $saved_file` literally to the user, as it // looks like we've forgotten to substitute $saved_file. @@ -572,11 +582,11 @@ impl FlycheckActor { CargoCheckParser, sender, match &self.config { - FlycheckConfig::CargoCommand { options, .. } => { + FlycheckConfig::Automatic { cargo_options, .. } => { let ws_target_dir = self.ws_target_dir.as_ref().map(Utf8PathBuf::as_path); let target_dir = - options.target_dir_config.target_dir(ws_target_dir); + cargo_options.target_dir_config.target_dir(ws_target_dir); // If `"rust-analyzer.cargo.targetDir": null`, we should use // workspace's target dir instead of hard-coded fallback. @@ -818,7 +828,7 @@ impl FlycheckActor { target: Option, ) -> Option<(Command, FlycheckCommandOrigin)> { match &self.config { - FlycheckConfig::CargoCommand { command, options, ansi_color_output } => { + FlycheckConfig::Automatic { cargo_options, ansi_color_output } => { // Only use the rust-project.json's flycheck config when no check_overrideCommand // is configured. In the FlycheckConcig::CustomCommand branch we will still do // label substitution, but on the overrideCommand instead. 
@@ -835,15 +845,15 @@ impl FlycheckActor { } let mut cmd = - toolchain::command(Tool::Cargo.path(), &*self.root, &options.extra_env); + toolchain::command(Tool::Cargo.path(), &*self.root, &cargo_options.extra_env); if let Some(sysroot_root) = &self.sysroot_root - && !options.extra_env.contains_key("RUSTUP_TOOLCHAIN") + && !cargo_options.extra_env.contains_key("RUSTUP_TOOLCHAIN") && std::env::var_os("RUSTUP_TOOLCHAIN").is_none() { cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(sysroot_root)); } cmd.env("CARGO_LOG", "cargo::core::compiler::fingerprint=info"); - cmd.arg(command); + cmd.arg(&cargo_options.subcommand); match scope { FlycheckScope::Workspace => cmd.arg("--workspace"), @@ -887,11 +897,11 @@ impl FlycheckActor { cmd.arg("--keep-going"); - options.apply_on_command( + cargo_options.apply_on_command( &mut cmd, self.ws_target_dir.as_ref().map(Utf8PathBuf::as_path), ); - cmd.args(&options.extra_args); + cmd.args(&cargo_options.extra_args); Some((cmd, FlycheckCommandOrigin::Cargo)) } FlycheckConfig::CustomCommand { command, args, extra_env, invocation_strategy } => { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs index 7111a15d02467..f0020f9088e3f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs @@ -105,7 +105,7 @@ impl CargoTestHandle { let mut cmd = toolchain::command(Tool::Cargo.path(), root, &options.extra_env); cmd.env("RUSTC_BOOTSTRAP", "1"); cmd.arg("--color=always"); - cmd.arg("test"); + cmd.arg(&options.subcommand); // test, usually cmd.arg("--package"); cmd.arg(&test_target.package); From 2d581773fed793f9d62b190e56374065d37291d7 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 3 Dec 2025 12:03:05 +1100 Subject: [PATCH 023/131] Fix RunnableKind::Run label interpolation It was pretty useless without this. Previously: Parsing target pattern `{label}` Caused by: Invalid target name `{label}`. (...) Build ID: 6dab5942-d81c-4430-83b0-5ba523999050 Network: Up: 0B Down: 0B Command: run. Time elapsed: 0.3s BUILD FAILED * The terminal process "buck2 'run', '{label}'" terminated with exit code: 3. 
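In case it helps review, a small self-contained sketch of the substitution now applied to `Run` runnable args (the label value below is a made-up buck-style target, for illustration only; the actual code clones the runnable in `target_spec.rs` and replaces `{label}` with `self.label`):

```rust
// Sketch: replace the `{label}` placeholder in each argument of a runnable.
fn substitute_label(args: &[String], label: &str) -> Vec<String> {
    args.iter().map(|arg| arg.replace("{label}", label)).collect()
}

fn main() {
    let args = vec!["run".to_owned(), "{label}".to_owned()];
    // Hypothetical buck2 target label, not taken from any real project.
    let substituted = substitute_label(&args, "//crates/example:bin");
    assert_eq!(substituted, ["run", "//crates/example:bin"]);
}
```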
--- .../crates/rust-analyzer/src/target_spec.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs index 8452b6493e87b..b8d9acc02a328 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs @@ -77,7 +77,16 @@ impl ProjectJsonTargetSpec { RunnableKind::Bin => { for runnable in &self.shell_runnables { if matches!(runnable.kind, project_model::project_json::RunnableKind::Run) { - return Some(runnable.clone()); + let mut runnable = runnable.clone(); + + let replaced_args: Vec<_> = runnable + .args + .iter() + .map(|arg| arg.replace("{label}", &self.label)) + .collect(); + runnable.args = replaced_args; + + return Some(runnable); } } From 71e2ded9fb196bc9b4a1736759f9465f5d4d9619 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 3 Dec 2025 13:32:03 +1100 Subject: [PATCH 024/131] doc: Update docs for runnables to include run/flycheck --- .../crates/project-model/src/project_json.rs | 3 +++ .../docs/book/src/non_cargo_based_projects.md | 19 ++++++++++++++----- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index 536f170e11926..6938010cbd708 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -357,9 +357,12 @@ pub enum RunnableKind { Check, /// Can run a binary. + /// May include {label} which will get the label from the `build` section of a crate. Run, /// Run a single test. + /// May include {label} which will get the label from the `build` section of a crate. + /// May include {test_id} which will get the test clicked on by the user. TestOne, /// Template for checking a target, emitting rustc JSON diagnostics. diff --git a/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md b/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md index e7df4a5d76685..d8be9a82d0c9c 100644 --- a/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md +++ b/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md @@ -204,16 +204,25 @@ interface Runnable { args: string[]; /// The current working directory of the runnable. cwd: string; - /// Used to decide what code lens to offer. + /// Maps a runnable to a piece of rust-analyzer functionality. /// - /// `testOne`: This runnable will be used when the user clicks the 'Run Test' - /// CodeLens above a test. + /// - `testOne`: This runnable will be used when the user clicks the 'Run Test' + /// CodeLens above a test. + /// - `run`: This runnable will be used when the user clicks the 'Run' CodeLens + /// above a main function or triggers a run command. + /// - `flycheck`: This is run to provide check-on-save diagnostics when the user + /// saves a file. It must emit rustc JSON diagnostics that rust-analyzer can + /// parse. If this runnable is not specified, we may try to use `cargo check -p`. + /// This is only run for a single crate that the user saved a file in. The + /// {label} syntax is replaced with `BuildInfo::label`. + /// Alternatively, you may use `{saved_file}` and figure out which crate + /// to produce diagnostics for based on that. 
/// /// The args for testOne can contain two template strings: /// `{label}` and `{test_id}`. `{label}` will be replaced - /// with the `Build::label` and `{test_id}` will be replaced + /// with the `BuildInfo::label` and `{test_id}` will be replaced /// with the test name. - kind: 'testOne' | string; + kind: 'testOne' | 'run' | 'flycheck' | string; } ``` From 422597f76395060d968fcb5c9e7de311bf1dc9a4 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 3 Dec 2025 14:44:40 +1100 Subject: [PATCH 025/131] doc: make example for workspace.discoverConfig actually work rust-project requires {arg} these days. No good giving people bad information even if it's not crucial to documenting this. --- .../rust-analyzer/crates/rust-analyzer/src/config.rs | 9 +++++---- .../docs/book/src/configuration_generated.md | 5 +++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index c2f7ada8c8cae..2b7ade6c26ef1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -480,8 +480,8 @@ config_data! { /// Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`]. /// - /// [`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`. - /// `progress_label` is used for the title in progress indicators, whereas `files_to_watch` + /// [`DiscoverWorkspaceConfig`] also requires setting `progressLabel` and `filesToWatch`. + /// `progressLabel` is used for the title in progress indicators, whereas `filesToWatch` /// is used to determine which build system-specific files should be watched in order to /// reload rust-analyzer. /// @@ -490,9 +490,10 @@ config_data! { /// "rust-analyzer.workspace.discoverConfig": { /// "command": [ /// "rust-project", - /// "develop-json" + /// "develop-json", + /// "{arg}" /// ], - /// "progressLabel": "rust-analyzer", + /// "progressLabel": "buck2/rust-project", /// "filesToWatch": [ /// "BUCK" /// ] diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md index 58b6363345279..a0738ca0e1790 100644 --- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md +++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md @@ -1623,9 +1623,10 @@ Below is an example of a valid configuration: "rust-analyzer.workspace.discoverConfig": { "command": [ "rust-project", - "develop-json" + "develop-json", + "{arg}" ], - "progressLabel": "rust-analyzer", + "progressLabel": "buck2/rust-project", "filesToWatch": [ "BUCK" ] From f06a6b9fdcb9db492a364b48a08f70afa98da182 Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Wed, 3 Dec 2025 17:11:16 +1100 Subject: [PATCH 026/131] doc: overhaul non-cargo build system docs --- .../crates/rust-analyzer/src/config.rs | 18 ++- .../docs/book/src/configuration_generated.md | 22 ++-- .../docs/book/src/non_cargo_based_projects.md | 109 +++++++++++++++--- .../rust-analyzer/editors/code/package.json | 4 +- 4 files changed, 122 insertions(+), 31 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 2b7ade6c26ef1..28ac94e4deb61 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -500,7 +500,7 @@ config_data! 
{ /// } /// ``` /// - /// ## On `DiscoverWorkspaceConfig::command` + /// ## Workspace Discovery Protocol /// /// **Warning**: This format is provisional and subject to change. /// @@ -871,10 +871,18 @@ config_data! { /// (i.e., the folder containing the `Cargo.toml`). This can be overwritten /// by changing `#rust-analyzer.check.invocationStrategy#`. /// - /// If `$saved_file` is part of the command, rust-analyzer will pass - /// the absolute path of the saved file to the provided command. This is - /// intended to be used with non-Cargo build systems. - /// Note that `$saved_file` is experimental and may be removed in the future. + /// It supports two interpolation syntaxes, both mainly intended to be used with + /// [non-Cargo build systems](./non_cargo_based_projects.md): + /// + /// - If `{saved_file}` is part of the command, rust-analyzer will pass + /// the absolute path of the saved file to the provided command. + /// (A previous version, `$saved_file`, also works.) + /// - If `{label}` is part of the command, rust-analyzer will pass the + /// Cargo package ID, which can be used with `cargo check -p`, or a build label from + /// `rust-project.json`. If `{label}` is included, rust-analyzer behaves much like + /// [`"rust-analyzer.check.workspace": false`](#check.workspace). + /// + /// /// /// An example command would be: /// diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md index a0738ca0e1790..c4124aaae0753 100644 --- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md +++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md @@ -323,10 +323,18 @@ each of them, with the working directory being the workspace root (i.e., the folder containing the `Cargo.toml`). This can be overwritten by changing `#rust-analyzer.check.invocationStrategy#`. -If `$saved_file` is part of the command, rust-analyzer will pass -the absolute path of the saved file to the provided command. This is -intended to be used with non-Cargo build systems. -Note that `$saved_file` is experimental and may be removed in the future. +It supports two interpolation syntaxes, both mainly intended to be used with +[non-Cargo build systems](./non_cargo_based_projects.md): + +- If `{saved_file}` is part of the command, rust-analyzer will pass + the absolute path of the saved file to the provided command. + (A previous version, `$saved_file`, also works.) +- If `{label}` is part of the command, rust-analyzer will pass the + Cargo package ID, which can be used with `cargo check -p`, or a build label from + `rust-project.json`. If `{label}` is included, rust-analyzer behaves much like + [`"rust-analyzer.check.workspace": false`](#check.workspace). + + An example command would be: @@ -1613,8 +1621,8 @@ Default: `null` Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`]. -[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`. -`progress_label` is used for the title in progress indicators, whereas `files_to_watch` +[`DiscoverWorkspaceConfig`] also requires setting `progressLabel` and `filesToWatch`. +`progressLabel` is used for the title in progress indicators, whereas `filesToWatch` is used to determine which build system-specific files should be watched in order to reload rust-analyzer. 
@@ -1633,7 +1641,7 @@ Below is an example of a valid configuration: } ``` -## On `DiscoverWorkspaceConfig::command` +## Workspace Discovery Protocol **Warning**: This format is provisional and subject to change. diff --git a/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md b/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md index d8be9a82d0c9c..a48b025c7b3a6 100644 --- a/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md +++ b/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md @@ -229,7 +229,15 @@ interface Runnable { This format is provisional and subject to change. Specifically, the `roots` setup will be different eventually. -There are three ways to feed `rust-project.json` to rust-analyzer: +### Providing a JSON project to rust-analyzer + +There are four ways to feed `rust-project.json` to rust-analyzer: + +- Use + [`"rust-analyzer.workspace.discoverConfig": … }`](./configuration.md#workspace.discoverConfig) + to specify a workspace discovery command to generate project descriptions + on-the-fly. Please note that the command output is message-oriented and must + follow [the discovery protocol](./configuration.md#workspace-discovery-protocol). - Place `rust-project.json` file at the root of the project, and rust-analyzer will discover it. @@ -249,19 +257,86 @@ location or (for inline JSON) relative to `rootUri`. You can set the `RA_LOG` environment variable to `rust_analyzer=info` to inspect how rust-analyzer handles config and project loading. -Note that calls to `cargo check` are disabled when using -`rust-project.json` by default, so compilation errors and warnings will -no longer be sent to your LSP client. To enable these compilation errors -you will need to specify explicitly what command rust-analyzer should -run to perform the checks using the -`rust-analyzer.check.overrideCommand` configuration. As an example, the -following configuration explicitly sets `cargo check` as the `check` -command. - - { "rust-analyzer.check.overrideCommand": ["cargo", "check", "--message-format=json"] } - -`check.overrideCommand` requires the command specified to output json -error messages for rust-analyzer to consume. The `--message-format=json` -flag does this for `cargo check` so whichever command you use must also -output errors in this format. See the [Configuration](#_configuration) -section for more information. +### Flycheck support + +Rust-analyzer has functionality to run an actual build of a crate when the user saves a file, to +fill in diagnostics it does not implement natively. This is known as "flycheck". + +**Flycheck is disabled when using `rust-project.json` unless explicitly configured**, so compilation +errors and warnings will no longer be sent to your LSP client by default. To enable these +compilation errors you will need to specify explicitly what command rust-analyzer should run to +perform the checks. There are two ways to do this: + +- `rust-project.json` may contain a `runnables` field. The `flycheck` runnable may be used to + configure a check command. See above for documentation. + +- Using the [`rust-analyzer.check.overrideCommand`](./configuration.md#check.overrideCommand) + configuration. This will also override anything in `rust-project.json`. As an example, the + following configuration explicitly sets `cargo check` as the `check` command. + + ```json + { "rust-analyzer.check.overrideCommand": ["cargo", "check", "--message-format=json"] } + ``` + + Note also that this works with cargo projects. 
+ +Either option requires the command specified to output JSON error messages for rust-analyzer to +consume. The `--message-format=json` flag does this for `cargo check` so whichever command you use +must also output errors in this format. + +Either option also supports two syntaxes within each argument: + +- `{label}` will be replaced with the `BuildInfo::label` of the crate + containing a saved file, if `BuildInfo` is provided. In the case of `check.overrideCommand` being + used in a Cargo project, this will be the cargo package ID, which can be used with `cargo check -p`. +- `{saved_file}` will be replaced with an absolute path to the saved file. This can be queried against a + build system to find targets that include the file. + +For example: + +```json +{ "rust-analyzer.check.overrideCommand": ["custom_crate_checker", "{label}"] } +``` + +If you do use `{label}` or `{saved_file}`, the command will not be run unless the relevant value can +be substituted. + + +#### Flycheck considerations + +##### Diagnostic output on error + +A flycheck command using a complex build orchestrator like `"bazel", "build", "{label}"`, even with +a tweak to return JSON messages, is often insufficient. Such a command will typically succeed if +there are warnings, but if there are errors, it might "fail to compile" the diagnostics and not +produce any output. You must build a package in such a way that the build succeeds even if `rustc` +exits with an error, and prints the JSON build messages in every case. + +##### Diagnostics for upstream crates + +`cargo check -p` re-prints any errors and warnings in crates higher up in the dependency graph +than the one requested. We do clear all diagnostics when flychecking, so if you manage to +replicate this behaviour, diagnostics for crates other than the one being checked will show up in +the editor. If you do not, then users may be confused that diagnostics are "stuck" or disappear +entirely when there is a build error in an upstream crate. + +##### Compiler options + +`cargo check` invokes rustc differently from `cargo build`. It turns off codegen (with `rustc +--emit=metadata`), which results in lower latency to get to diagnostics. If your build system can +configure this, it is recommended. + +If your build tool can configure rustc for incremental compiles, this is also recommended. + +##### Locking and pre-emption + +In any good build system, including Cargo, build commands sometimes block each other. Running a +flycheck will (by default) frequently block you from running other build commands. Generally this is +undesirable. Users will have to (unintuitively) press save again in the editor to cancel a +flycheck, so that some other command may proceed. + +If your build system has the ability to isolate any rust-analyzer-driven flychecks and prevent lock +contention, for example a separate build output directory and/or daemon instance, this is +recommended. Alternatively, consider using a feature if available that can set the priority of +various build invocations and automatically cancel lower-priority ones when needed. Flychecks should +be set to a lower priority than general direct build invocations. 
diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 2157cbd486535..0d91378706a40 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -1213,7 +1213,7 @@ "title": "Check", "properties": { "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIt supports two interpolation syntaxes, both mainly intended to be used with\n[non-Cargo build systems](./non_cargo_based_projects.md):\n\n- If `{saved_file}` is part of the command, rust-analyzer will pass\n the absolute path of the saved file to the provided command.\n (A previous version, `$saved_file`, also works.)\n- If `{label}` is part of the command, rust-analyzer will pass the\n Cargo package ID, which can be used with `cargo check -p`, or a build label from\n `rust-project.json`. 
If `{label}` is included, rust-analyzer behaves much like\n [`\"rust-analyzer.check.workspace\": false`](#check.workspace).\n\n\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.", "default": null, "type": [ "null", @@ -3135,7 +3135,7 @@ "title": "Workspace", "properties": { "rust-analyzer.workspace.discoverConfig": { - "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n \"command\": [\n \"rust-project\",\n \"develop-json\"\n ],\n \"progressLabel\": \"rust-analyzer\",\n \"filesToWatch\": [\n \"BUCK\"\n ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to\n`DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n Error { error: String, source: Option },\n Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n // the internally-tagged representation of the enum.\n \"kind\": \"finished\",\n // the file used by a non-Cargo build system to define\n // a package or target.\n \"buildfile\": \"rust-analyzer/BUILD\",\n // the contents of a rust-project.json, elided for brevity\n \"project\": {\n \"sysroot\": \"foo\",\n \"crates\": []\n }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on `DiscoverProjectData`\nto provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be\nsubstituted with the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n Path(AbsPathBuf),\n Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and\ntherefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an\nexisting workspace. 
As a reference for implementors, buck2's `rust-project` will likely\nbe useful: .", + "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progressLabel` and `filesToWatch`.\n`progressLabel` is used for the title in progress indicators, whereas `filesToWatch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n \"command\": [\n \"rust-project\",\n \"develop-json\",\n \"{arg}\"\n ],\n \"progressLabel\": \"buck2/rust-project\",\n \"filesToWatch\": [\n \"BUCK\"\n ]\n}\n```\n\n## Workspace Discovery Protocol\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to\n`DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n Error { error: String, source: Option },\n Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n // the internally-tagged representation of the enum.\n \"kind\": \"finished\",\n // the file used by a non-Cargo build system to define\n // a package or target.\n \"buildfile\": \"rust-analyzer/BUILD\",\n // the contents of a rust-project.json, elided for brevity\n \"project\": {\n \"sysroot\": \"foo\",\n \"crates\": []\n }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on `DiscoverProjectData`\nto provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be\nsubstituted with the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n Path(AbsPathBuf),\n Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and\ntherefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an\nexisting workspace. As a reference for implementors, buck2's `rust-project` will likely\nbe useful: .", "default": null, "anyOf": [ { From b02e9756f2d0b0367cdedcba16016fc5e867999c Mon Sep 17 00:00:00 2001 From: Cormac Relf Date: Thu, 8 Jan 2026 09:53:00 +1100 Subject: [PATCH 027/131] Fix hir-ty clippy issue I am not familiar with this code at allso just doing what I can to unblock. 
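For context, this looks like the usual `clippy::new_without_default` pattern (an assumption on my part, not stated by the lint output); a minimal stand-in illustration with a dummy type, not the real `ObligationCause`:

```rust
// A no-argument `new()` without a matching `Default` impl is what clippy flags;
// forwarding `default()` to `new()` is the standard fix.
struct Cause;

impl Cause {
    fn new() -> Self {
        Cause
    }
}

impl Default for Cause {
    fn default() -> Self {
        Self::new()
    }
}

fn main() {
    let _ = Cause::default();
}
```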
--- .../crates/hir-ty/src/next_solver/infer/traits.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs index 14df42dc2aebe..dde623483642f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs @@ -55,6 +55,13 @@ impl ObligationCause { } } +impl Default for ObligationCause { + #[inline] + fn default() -> Self { + Self::new() + } +} + /// An `Obligation` represents some trait reference (e.g., `i32: Eq`) for /// which the "impl_source" must be found. The process of finding an "impl_source" is /// called "resolving" the `Obligation`. This process consists of From 6a9de224c463f54dc66dec6e3dbe3e244d2d7014 Mon Sep 17 00:00:00 2001 From: The rustc-josh-sync Cronjob Bot Date: Thu, 8 Jan 2026 04:20:55 +0000 Subject: [PATCH 028/131] Prepare for merging from rust-lang/rust This updates the rust-version file to 548e586795f6b6fe089d8329aa5edbf0f5202646. --- src/tools/rust-analyzer/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index 5ffe95a0b54fa..4b08b0884ca81 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -e7d44143a12a526488e4f0c0d7ea8e62a4fe9354 +548e586795f6b6fe089d8329aa5edbf0f5202646 From 5d8a7daf2ab5d13330030880af021cf1cf418a7e Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 8 Jan 2026 09:25:28 +0200 Subject: [PATCH 029/131] Fixes for builtin derive expansions - Do not store the `MacroCallId` of the "real" expansion anywhere, so that the IDE layer could not expand it by mistake - Fix a stupid bug where we used the directive of the `derive` itself instead of of the macro, leading us to re-expand it again and again. --- .../crates/hir-def/src/builtin_derive.rs | 22 ++++- .../crates/hir-def/src/dyn_map.rs | 10 ++- .../crates/hir-def/src/item_scope.rs | 19 ++-- .../crates/hir-def/src/lang_item.rs | 54 ++++++++++-- .../crates/hir-def/src/nameres.rs | 10 ++- .../crates/hir-def/src/nameres/collector.rs | 88 ++++++++++++------- .../rust-analyzer/crates/hir/src/semantics.rs | 31 +++++-- .../crates/hir/src/semantics/source_to_def.rs | 17 ++-- .../crates/ide/src/expand_macro.rs | 60 +++++-------- .../crates/intern/src/symbol/symbols.rs | 1 + 10 files changed, 211 insertions(+), 101 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/builtin_derive.rs b/src/tools/rust-analyzer/crates/hir-def/src/builtin_derive.rs index 32385516ab583..946f08ec3682e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/builtin_derive.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/builtin_derive.rs @@ -8,7 +8,8 @@ use intern::{Symbol, sym}; use tt::TextRange; use crate::{ - AdtId, BuiltinDeriveImplId, BuiltinDeriveImplLoc, FunctionId, HasModule, db::DefDatabase, + AdtId, BuiltinDeriveImplId, BuiltinDeriveImplLoc, FunctionId, HasModule, MacroId, + db::DefDatabase, lang_item::LangItems, }; macro_rules! 
declare_enum { @@ -86,6 +87,25 @@ declare_enum!( DispatchFromDyn => [], ); +impl BuiltinDeriveImplTrait { + pub fn derive_macro(self, lang_items: &LangItems) -> Option { + match self { + BuiltinDeriveImplTrait::Copy => lang_items.CopyDerive, + BuiltinDeriveImplTrait::Clone => lang_items.CloneDerive, + BuiltinDeriveImplTrait::Default => lang_items.DefaultDerive, + BuiltinDeriveImplTrait::Debug => lang_items.DebugDerive, + BuiltinDeriveImplTrait::Hash => lang_items.HashDerive, + BuiltinDeriveImplTrait::Ord => lang_items.OrdDerive, + BuiltinDeriveImplTrait::PartialOrd => lang_items.PartialOrdDerive, + BuiltinDeriveImplTrait::Eq => lang_items.EqDerive, + BuiltinDeriveImplTrait::PartialEq => lang_items.PartialEqDerive, + BuiltinDeriveImplTrait::CoerceUnsized | BuiltinDeriveImplTrait::DispatchFromDyn => { + lang_items.CoercePointeeDerive + } + } + } +} + impl BuiltinDeriveImplMethod { pub fn trait_method( self, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs index 7d3a94b038330..4308d0ef1c296 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs @@ -27,14 +27,15 @@ pub mod keys { use std::marker::PhantomData; + use either::Either; use hir_expand::{MacroCallId, attrs::AttrId}; use rustc_hash::FxHashMap; use syntax::{AstNode, AstPtr, ast}; use crate::{ - BlockId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId, - ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitId, - TypeAliasId, TypeOrConstParamId, UnionId, UseId, + BlockId, BuiltinDeriveImplId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, + FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, + StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId, dyn_map::{DynMap, Policy}, }; @@ -71,7 +72,8 @@ pub mod keys { ( AttrId, /* derive() */ MacroCallId, - /* actual derive macros */ Box<[Option]>, + /* actual derive macros */ + Box<[Option>]>, ), > = Key::new(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index a3278dd76c868..9e1efb9777869 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -4,6 +4,7 @@ use std::{fmt, sync::LazyLock}; use base_db::Crate; +use either::Either; use hir_expand::{AstId, MacroCallId, attrs::AttrId, name::Name}; use indexmap::map::Entry; use itertools::Itertools; @@ -199,7 +200,7 @@ struct DeriveMacroInvocation { attr_id: AttrId, /// The `#[derive]` call attr_call_id: MacroCallId, - derive_call_ids: SmallVec<[Option; 4]>, + derive_call_ids: SmallVec<[Option>; 4]>, } pub(crate) static BUILTIN_SCOPE: LazyLock> = LazyLock::new(|| { @@ -345,7 +346,9 @@ impl ItemScope { pub fn all_macro_calls(&self) -> impl Iterator + '_ { self.macro_invocations.values().copied().chain(self.attr_macros.values().copied()).chain( self.derive_macros.values().flat_map(|it| { - it.iter().flat_map(|it| it.derive_call_ids.iter().copied().flatten()) + it.iter().flat_map(|it| { + it.derive_call_ids.iter().copied().flatten().flat_map(|it| it.left()) + }) }), ) } @@ -379,6 +382,10 @@ impl ItemScope { self.types.get(name).map(|item| (item.def, item.vis)) } + pub(crate) fn makro(&self, name: &Name) -> Option { + self.macros.get(name).map(|item| item.def) + } + /// XXX: this is O(N) rather than O(1), try to not 
introduce new usages. pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility, /*declared*/ bool)> { match item { @@ -519,7 +526,7 @@ impl ItemScope { pub(crate) fn set_derive_macro_invoc( &mut self, adt: AstId, - call: MacroCallId, + call: Either, id: AttrId, idx: usize, ) { @@ -539,7 +546,7 @@ impl ItemScope { adt: AstId, attr_id: AttrId, attr_call_id: MacroCallId, - mut derive_call_ids: SmallVec<[Option; 4]>, + mut derive_call_ids: SmallVec<[Option>; 4]>, ) { derive_call_ids.shrink_to_fit(); self.derive_macros.entry(adt).or_default().push(DeriveMacroInvocation { @@ -554,7 +561,9 @@ impl ItemScope { ) -> impl Iterator< Item = ( AstId, - impl Iterator])>, + impl Iterator< + Item = (AttrId, MacroCallId, &[Option>]), + >, ), > + '_ { self.derive_macros.iter().map(|(k, v)| { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index eba4d87ec9f8a..092ff6e486717 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -7,8 +7,8 @@ use intern::{Symbol, sym}; use stdx::impl_from; use crate::{ - AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId, - StaticId, StructId, TraitId, TypeAliasId, UnionId, + AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, MacroId, + ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId, attrs::AttrFlags, db::DefDatabase, nameres::{DefMap, assoc::TraitItems, crate_def_map, crate_local_def_map}, @@ -99,7 +99,7 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option Option { + let mut current = &core_def_map[core_def_map.root]; + for module in modules { + let Some((ModuleDefId::ModuleId(cur), _)) = + current.scope.type_(&Name::new_symbol_root(module.clone())) + else { + return None; + }; + if cur.krate(db) != core_def_map.krate() || cur.block(db) != core_def_map.block_id() { + return None; + } + current = &core_def_map[cur]; + } + current.scope.makro(&Name::new_symbol_root(name)) +} + #[salsa::tracked(returns(as_deref))] pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option> { let mut traits = Vec::new(); @@ -195,7 +216,11 @@ macro_rules! language_item_table { @non_lang_core_traits: - $( core::$($non_lang_module:ident)::*, $non_lang_trait:ident; )* + $( core::$($non_lang_trait_module:ident)::*, $non_lang_trait:ident; )* + + @non_lang_core_macros: + + $( core::$($non_lang_macro_module:ident)::*, $non_lang_macro:ident, $non_lang_macro_field:ident; )* ) => { #[allow(non_snake_case)] // FIXME: Should we remove this? #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] @@ -207,6 +232,9 @@ macro_rules! language_item_table { $( pub $non_lang_trait: Option, )* + $( + pub $non_lang_macro_field: Option, + )* } impl LangItems { @@ -218,6 +246,7 @@ macro_rules! language_item_table { fn merge_prefer_self(&mut self, other: &Self) { $( self.$lang_item = self.$lang_item.or(other.$lang_item); )* $( self.$non_lang_trait = self.$non_lang_trait.or(other.$non_lang_trait); )* + $( self.$non_lang_macro_field = self.$non_lang_macro_field.or(other.$non_lang_macro_field); )* } fn assign_lang_item(&mut self, name: Symbol, target: LangItemTarget) { @@ -233,8 +262,9 @@ macro_rules! 
language_item_table { } } - fn fill_non_lang_core_traits(&mut self, db: &dyn DefDatabase, core_def_map: &DefMap) { - $( self.$non_lang_trait = resolve_core_trait(db, core_def_map, &[ $(sym::$non_lang_module),* ], sym::$non_lang_trait); )* + fn fill_non_lang_core_items(&mut self, db: &dyn DefDatabase, core_def_map: &DefMap) { + $( self.$non_lang_trait = resolve_core_trait(db, core_def_map, &[ $(sym::$non_lang_trait_module),* ], sym::$non_lang_trait); )* + $( self.$non_lang_macro_field = resolve_core_macro(db, core_def_map, &[ $(sym::$non_lang_macro_module),* ], sym::$non_lang_macro); )* } } @@ -479,4 +509,16 @@ language_item_table! { LangItems => core::hash, Hash; core::cmp, Ord; core::cmp, Eq; + + @non_lang_core_macros: + core::default, Default, DefaultDerive; + core::fmt, Debug, DebugDerive; + core::hash, Hash, HashDerive; + core::cmp, PartialOrd, PartialOrdDerive; + core::cmp, Ord, OrdDerive; + core::cmp, PartialEq, PartialEqDerive; + core::cmp, Eq, EqDerive; + core::marker, CoercePointee, CoercePointeeDerive; + core::marker, Copy, CopyDerive; + core::clone, Clone, CloneDerive; } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 5f05cdb1e2ba2..150372f1a0d97 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -61,6 +61,7 @@ mod tests; use std::ops::{Deref, DerefMut, Index, IndexMut}; use base_db::Crate; +use either::Either; use hir_expand::{ EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, mod_path::ModPath, name::Name, proc_macro::ProcMacroKind, @@ -75,8 +76,8 @@ use triomphe::Arc; use tt::TextRange; use crate::{ - AstId, BlockId, BlockLoc, ExternCrateId, FunctionId, FxIndexMap, Lookup, MacroCallStyles, - MacroExpander, MacroId, ModuleId, ModuleIdLt, ProcMacroId, UseId, + AstId, BlockId, BlockLoc, BuiltinDeriveImplId, ExternCrateId, FunctionId, FxIndexMap, Lookup, + MacroCallStyles, MacroExpander, MacroId, ModuleId, ModuleIdLt, ProcMacroId, UseId, db::DefDatabase, item_scope::{BuiltinShadowMode, ItemScope}, item_tree::TreeId, @@ -192,7 +193,8 @@ pub struct DefMap { /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper /// attributes. // FIXME: Figure out a better way for the IDE layer to resolve these? - derive_helpers_in_scope: FxHashMap, Vec<(Name, MacroId, MacroCallId)>>, + derive_helpers_in_scope: + FxHashMap, Vec<(Name, MacroId, Either)>>, /// A mapping from [`hir_expand::MacroDefId`] to [`crate::MacroId`]. pub macro_def_to_macro_id: FxHashMap, @@ -540,7 +542,7 @@ impl DefMap { pub fn derive_helpers_in_scope( &self, id: AstId, - ) -> Option<&[(Name, MacroId, MacroCallId)]> { + ) -> Option<&[(Name, MacroId, Either)]> { self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 87ade0651762f..323060f61d155 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -3,7 +3,7 @@ //! `DefCollector::collect` contains the fixed-point iteration loop which //! resolves imports and expands macros. 
-use std::{iter, mem}; +use std::{iter, mem, ops::Range}; use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin}; use cfg::{CfgAtom, CfgExpr, CfgOptions}; @@ -226,6 +226,7 @@ struct DeferredBuiltinDerive { container: ItemContainerId, derive_attr_id: AttrId, derive_index: u32, + helpers_range: Range, } /// Walks the tree of module recursively @@ -1354,7 +1355,7 @@ impl<'db> DefCollector<'db> { if let Ok((macro_id, def_id, call_id)) = id { self.def_map.modules[directive.module_id].scope.set_derive_macro_invoc( ast_id.ast_id, - call_id, + Either::Left(call_id), *derive_attr, *derive_pos, ); @@ -1369,7 +1370,7 @@ impl<'db> DefCollector<'db> { .extend(izip!( helpers.iter().cloned(), iter::repeat(macro_id), - iter::repeat(call_id), + iter::repeat(Either::Left(call_id)), )); } } @@ -1492,6 +1493,8 @@ impl<'db> DefCollector<'db> { Interned::new(path), ); + derive_call_ids.push(None); + // Try to resolve the derive immediately. If we succeed, we can also use the fast path // for builtin derives. If not, we cannot use it, as it can cause the ADT to become // interned while the derive is still unresolved, which will cause it to get forgotten. @@ -1506,23 +1509,42 @@ impl<'db> DefCollector<'db> { call_id, ); + let ast_id_without_path = ast_id.ast_id; + let directive = MacroDirective { + module_id: directive.module_id, + depth: directive.depth + 1, + kind: MacroDirectiveKind::Derive { + ast_id, + derive_attr: *attr_id, + derive_pos: idx, + ctxt: call_site.ctx, + derive_macro_id: call_id, + }, + container: directive.container, + }; + if let Ok((macro_id, def_id, call_id)) = id { - derive_call_ids.push(Some(call_id)); + let (mut helpers_start, mut helpers_end) = (0, 0); // Record its helper attributes. if def_id.krate != self.def_map.krate { let def_map = crate_def_map(self.db, def_id.krate); if let Some(helpers) = def_map.data.exported_derives.get(¯o_id) { - self.def_map + let derive_helpers = self + .def_map .derive_helpers_in_scope - .entry(ast_id.ast_id.map(|it| it.upcast())) - .or_default() - .extend(izip!( - helpers.iter().cloned(), - iter::repeat(macro_id), - iter::repeat(call_id), - )); + .entry( + ast_id_without_path.map(|it| it.upcast()), + ) + .or_default(); + helpers_start = derive_helpers.len(); + derive_helpers.extend(izip!( + helpers.iter().cloned(), + iter::repeat(macro_id), + iter::repeat(Either::Left(call_id)), + )); + helpers_end = derive_helpers.len(); } } @@ -1531,7 +1553,7 @@ impl<'db> DefCollector<'db> { def_id.kind { self.deferred_builtin_derives - .entry(ast_id.ast_id.upcast()) + .entry(ast_id_without_path.upcast()) .or_default() .push(DeferredBuiltinDerive { call_id, @@ -1541,24 +1563,15 @@ impl<'db> DefCollector<'db> { depth: directive.depth, derive_attr_id: *attr_id, derive_index: idx as u32, + helpers_range: helpers_start..helpers_end, }); } else { - push_resolved(&mut resolved, directive, call_id); + push_resolved(&mut resolved, &directive, call_id); + *derive_call_ids.last_mut().unwrap() = + Some(Either::Left(call_id)); } } else { - derive_call_ids.push(None); - self.unresolved_macros.push(MacroDirective { - module_id: directive.module_id, - depth: directive.depth + 1, - kind: MacroDirectiveKind::Derive { - ast_id, - derive_attr: *attr_id, - derive_pos: idx, - ctxt: call_site.ctx, - derive_macro_id: call_id, - }, - container: directive.container, - }); + self.unresolved_macros.push(directive); } } @@ -1858,9 +1871,8 @@ impl ModCollector<'_, '_> { ast_id: FileAstId, id: AdtId, def_map: &mut DefMap| { - let Some(deferred_derives) = - 
deferred_derives.remove(&InFile::new(file_id, ast_id.upcast())) - else { + let ast_id = InFile::new(file_id, ast_id.upcast()); + let Some(deferred_derives) = deferred_derives.remove(&ast_id.upcast()) else { return; }; let module = &mut def_map.modules[module_id]; @@ -1876,6 +1888,22 @@ impl ModCollector<'_, '_> { }, ); module.scope.define_builtin_derive_impl(impl_id); + module.scope.set_derive_macro_invoc( + ast_id, + Either::Right(impl_id), + deferred_derive.derive_attr_id, + deferred_derive.derive_index as usize, + ); + // Change its helper attributes to the new id. + if let Some(derive_helpers) = + def_map.derive_helpers_in_scope.get_mut(&ast_id.map(|it| it.upcast())) + { + for (_, _, call_id) in + &mut derive_helpers[deferred_derive.helpers_range.clone()] + { + *call_id = Either::Right(impl_id); + } + } }); } }; diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index f4c42537de939..e55b693ef0186 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -13,7 +13,7 @@ use std::{ use base_db::FxIndexSet; use either::Either; use hir_def::{ - DefWithBodyId, MacroId, StructId, TraitId, VariantId, + BuiltinDeriveImplId, DefWithBodyId, HasModule, MacroId, StructId, TraitId, VariantId, attrs::parse_extra_crate_attrs, expr_store::{Body, ExprOrPatSource, HygieneId, path::Path}, hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat}, @@ -622,7 +622,20 @@ impl<'db> SemanticsImpl<'db> { Some( calls .into_iter() - .map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id })) + .map(|call| { + let call = call?; + match call { + Either::Left(call) => { + macro_call_to_macro_id(ctx, call).map(|id| Macro { id }) + } + Either::Right(call) => { + let call = call.loc(self.db); + let krate = call.krate(self.db); + let lang_items = hir_def::lang_item::lang_items(self.db, krate); + call.trait_.derive_macro(lang_items).map(|id| Macro { id }) + } + } + }) .collect(), ) }) @@ -633,7 +646,7 @@ impl<'db> SemanticsImpl<'db> { .derive_macro_calls(attr)? .into_iter() .flat_map(|call| { - let file_id = call?; + let file_id = call?.left()?; let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id); let root_node = value.0.syntax_node(); self.cache(root_node.clone(), file_id.into()); @@ -643,7 +656,10 @@ impl<'db> SemanticsImpl<'db> { Some(res) } - fn derive_macro_calls(&self, attr: &ast::Attr) -> Option>> { + fn derive_macro_calls( + &self, + attr: &ast::Attr, + ) -> Option>>> { let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; let file_id = self.find_file(adt.syntax()).file_id; let adt = InFile::new(file_id, &adt); @@ -690,8 +706,9 @@ impl<'db> SemanticsImpl<'db> { .derive_helpers_in_scope(InFile::new(sa.file_id, id))? .iter() .filter(|&(name, _, _)| *name == attr_name) - .map(|&(_, macro_, call)| (macro_.into(), call)) + .filter_map(|&(_, macro_, call)| Some((macro_.into(), call.left()?))) .collect(); + // FIXME: We filter our builtin derive "fake" expansions, is this correct? Should we still expose them somehow? res.is_empty().not().then_some(res) } @@ -1338,6 +1355,7 @@ impl<'db> SemanticsImpl<'db> { // FIXME: We need to call `f` for all of them as well though! 
process_expansion_for_token(ctx, &mut stack, derive_attr); for derive in derives.into_iter().flatten() { + let Either::Left(derive) = derive else { continue }; process_expansion_for_token(ctx, &mut stack, derive); } } @@ -1467,11 +1485,12 @@ impl<'db> SemanticsImpl<'db> { for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { + let Either::Left(derive) = *derive else { continue }; // as there may be multiple derives registering the same helper // name, we gotta make sure to call this for all of them! // FIXME: We need to call `f` for all of them as well though! res = res - .or(process_expansion_for_token(ctx, &mut stack, *derive)); + .or(process_expansion_for_token(ctx, &mut stack, derive)); } res }) diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index 2574059927313..d222c3dc7ed1a 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -87,10 +87,10 @@ use either::Either; use hir_def::{ - AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, - ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, - Lookup, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, - UseId, VariantId, + AdtId, BlockId, BuiltinDeriveImplId, ConstId, ConstParamId, DefWithBodyId, EnumId, + EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, + ImplId, LifetimeParamId, Lookup, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, + TypeParamId, UnionId, UseId, VariantId, dyn_map::{ DynMap, keys::{self, Key}, @@ -394,7 +394,7 @@ impl SourceToDefCtx<'_, '_> { &mut self, item: InFile<&ast::Adt>, src: InFile, - ) -> Option<(AttrId, MacroCallId, &[Option])> { + ) -> Option<(AttrId, MacroCallId, &[Option>])> { let map = self.dyn_map(item)?; map[keys::DERIVE_MACRO_CALL] .get(&AstPtr::new(&src.value)) @@ -409,8 +409,11 @@ impl SourceToDefCtx<'_, '_> { pub(super) fn derive_macro_calls<'slf>( &'slf mut self, adt: InFile<&ast::Adt>, - ) -> Option])> + use<'slf>> - { + ) -> Option< + impl Iterator< + Item = (AttrId, MacroCallId, &'slf [Option>]), + > + use<'slf>, + > { self.dyn_map(adt).as_ref().map(|&map| { let dyn_map = &map[keys::DERIVE_MACRO_CALL]; adt.value diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs index 7d02b8091890a..ba8b3aa9cafea 100644 --- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs +++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs @@ -583,26 +583,16 @@ fn main() { fn macro_expand_derive() { check( r#" -//- proc_macros: identity -//- minicore: clone, derive +//- proc_macros: identity, derive_identity +//- minicore: derive #[proc_macros::identity] -#[derive(C$0lone)] +#[derive(proc_macros::DeriveIde$0ntity)] struct Foo {} "#, expect![[r#" - Clone - impl <>core::clone::Clone for Foo< >where { - fn clone(&self) -> Self { - match self { - Foo{} - => Foo{} - , - - } - } - - }"#]], + proc_macros::DeriveIdentity + struct Foo{}"#]], ); } @@ -610,15 +600,17 @@ struct Foo {} fn macro_expand_derive2() { check( r#" -//- minicore: copy, clone, derive +//- proc_macros: derive_identity +//- minicore: derive -#[derive(Cop$0y)] -#[derive(Clone)] +#[derive(proc_macros::$0DeriveIdentity)] +#[derive(proc_macros::DeriveIdentity)] struct Foo {} "#, expect![[r#" - 
Copy - impl <>core::marker::Copy for Foo< >where{}"#]], + proc_macros::DeriveIdentity + #[derive(proc_macros::DeriveIdentity)] + struct Foo{}"#]], ); } @@ -626,35 +618,27 @@ struct Foo {} fn macro_expand_derive_multi() { check( r#" -//- minicore: copy, clone, derive +//- proc_macros: derive_identity +//- minicore: derive -#[derive(Cop$0y, Clone)] +#[derive(proc_macros::DeriveIdent$0ity, proc_macros::DeriveIdentity)] struct Foo {} "#, expect![[r#" - Copy - impl <>core::marker::Copy for Foo< >where{}"#]], + proc_macros::DeriveIdentity + struct Foo{}"#]], ); check( r#" -//- minicore: copy, clone, derive +//- proc_macros: derive_identity +//- minicore: derive -#[derive(Copy, Cl$0one)] +#[derive(proc_macros::DeriveIdentity, proc_macros::De$0riveIdentity)] struct Foo {} "#, expect![[r#" - Clone - impl <>core::clone::Clone for Foo< >where { - fn clone(&self) -> Self { - match self { - Foo{} - => Foo{} - , - - } - } - - }"#]], + proc_macros::DeriveIdentity + struct Foo{}"#]], ); } diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs index b6efc599f181c..3fadca29d118b 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -532,4 +532,5 @@ define_symbols! { CoerceUnsized, DispatchFromDyn, define_opaque, + marker, } From 459d77e863f0607f4f0fdc25be19db6f49fdc9c0 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 8 Jan 2026 22:23:16 +0200 Subject: [PATCH 030/131] Publish smol_str v0.3.5 --- src/tools/rust-analyzer/Cargo.lock | 2 +- src/tools/rust-analyzer/lib/smol_str/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 8188fbf960645..5bdde7c7c3e62 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -2629,7 +2629,7 @@ dependencies = [ [[package]] name = "smol_str" -version = "0.3.4" +version = "0.3.5" dependencies = [ "arbitrary", "borsh", diff --git a/src/tools/rust-analyzer/lib/smol_str/Cargo.toml b/src/tools/rust-analyzer/lib/smol_str/Cargo.toml index 118b25993ffe5..4e7844b49e195 100644 --- a/src/tools/rust-analyzer/lib/smol_str/Cargo.toml +++ b/src/tools/rust-analyzer/lib/smol_str/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "smol_str" -version = "0.3.4" +version = "0.3.5" description = "small-string optimized string type with O(1) clone" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/smol_str" From 26be33ae18b2aaa376eba9e755b54bc11e104a7b Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Thu, 8 Jan 2026 15:07:24 +0800 Subject: [PATCH 031/131] Fix not disable string escape highlights Example --- with config `strings: false` ```rust fn main() { format_args!("foo\nbar\invalid"); } ``` **Before this PR** ```rust fn main() { format_args!("foo\nbar\invalid"); // ^^ EscapeSequence // ^^ InvalidEscapeSequence } ``` **After this PR** ```rust fn main() { format_args!("foo\nbar\invalid"); } ``` --- .../crates/ide/src/syntax_highlighting.rs | 16 ++++--- .../ide/src/syntax_highlighting/escape.rs | 43 ++++++++++++----- .../ide/src/syntax_highlighting/highlights.rs | 8 +++- .../test_data/highlight_strings_disabled.html | 47 +++++++++++++++++++ .../ide/src/syntax_highlighting/tests.rs | 17 +++++++ 5 files changed, 113 insertions(+), 18 deletions(-) create mode 100644 
src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings_disabled.html diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index e7c5f95a250ee..e64fd6488f2a6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -513,21 +513,21 @@ fn string_injections( ); if !string.is_raw() { - highlight_escape_string(hl, &string); + highlight_escape_string(hl, config, &string); } } } else if let Some(byte_string) = ast::ByteString::cast(token.clone()) { if !byte_string.is_raw() { - highlight_escape_string(hl, &byte_string); + highlight_escape_string(hl, config, &byte_string); } } else if let Some(c_string) = ast::CString::cast(token.clone()) { if !c_string.is_raw() { - highlight_escape_string(hl, &c_string); + highlight_escape_string(hl, config, &c_string); } } else if let Some(char) = ast::Char::cast(token.clone()) { - highlight_escape_char(hl, &char) + highlight_escape_char(hl, config, &char) } else if let Some(byte) = ast::Byte::cast(token) { - highlight_escape_byte(hl, &byte) + highlight_escape_byte(hl, config, &byte) } ControlFlow::Continue(()) } @@ -586,7 +586,11 @@ fn descend_token( fn filter_by_config(highlight: &mut Highlight, config: &HighlightConfig<'_>) -> bool { match &mut highlight.tag { - HlTag::StringLiteral if !config.strings => return false, + HlTag::StringLiteral | HlTag::EscapeSequence | HlTag::InvalidEscapeSequence + if !config.strings => + { + return false; + } HlTag::Comment if !config.comments => return false, // If punctuation is disabled, make the macro bang part of the macro call again. tag @ HlTag::Punctuation(HlPunct::MacroBang) => { diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs index 094f88f3a8641..4da69cc43d9ec 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs @@ -1,10 +1,14 @@ //! Syntax highlighting for escape sequences use crate::syntax_highlighting::highlights::Highlights; -use crate::{HlRange, HlTag}; +use crate::{HighlightConfig, HlRange, HlTag}; use syntax::ast::{Byte, Char, IsString}; use syntax::{AstToken, TextRange, TextSize}; -pub(super) fn highlight_escape_string(stack: &mut Highlights, string: &T) { +pub(super) fn highlight_escape_string( + stack: &mut Highlights, + config: &HighlightConfig<'_>, + string: &T, +) { let text = string.text(); let start = string.syntax().text_range().start(); string.escaped_char_ranges(&mut |piece_range, char| { @@ -13,16 +17,23 @@ pub(super) fn highlight_escape_string(stack: &mut Highlights, strin Ok(_) => HlTag::EscapeSequence, Err(_) => HlTag::InvalidEscapeSequence, }; - stack.add(HlRange { - range: piece_range + start, - highlight: highlight.into(), - binding_hash: None, - }); + stack.add_with( + config, + HlRange { + range: piece_range + start, + highlight: highlight.into(), + binding_hash: None, + }, + ); } }); } -pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char) { +pub(super) fn highlight_escape_char( + stack: &mut Highlights, + config: &HighlightConfig<'_>, + char: &Char, +) { if char.value().is_err() { // We do not emit invalid escapes highlighting here. 
The lexer would likely be in a bad // state and this token contains junk, since `'` is not a reliable delimiter (consider @@ -43,10 +54,17 @@ pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char) { char.syntax().text_range().start() + TextSize::from(1), TextSize::from(text.len() as u32), ); - stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None }) + stack.add_with( + config, + HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None }, + ) } -pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte) { +pub(super) fn highlight_escape_byte( + stack: &mut Highlights, + config: &HighlightConfig<'_>, + byte: &Byte, +) { if byte.value().is_err() { // See `highlight_escape_char` for why no error highlighting here. return; @@ -65,5 +83,8 @@ pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte) { byte.syntax().text_range().start() + TextSize::from(2), TextSize::from(text.len() as u32), ); - stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None }) + stack.add_with( + config, + HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None }, + ) } diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs index 340290eafedbe..6fe4d08443389 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs @@ -4,7 +4,7 @@ use std::iter; use stdx::equal_range_by; use syntax::TextRange; -use crate::{HlRange, HlTag}; +use crate::{HighlightConfig, HlRange, HlTag}; pub(super) struct Highlights { root: Node, @@ -26,6 +26,12 @@ impl Highlights { self.root.add(hl_range); } + pub(super) fn add_with(&mut self, config: &HighlightConfig<'_>, mut hl_range: HlRange) { + if super::filter_by_config(&mut hl_range.highlight, config) { + self.root.add(hl_range); + } + } + pub(super) fn to_vec(&self) -> Vec { let mut res = Vec::new(); self.root.flatten(&mut res); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings_disabled.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings_disabled.html new file mode 100644 index 0000000000000..344d0c2ff03b5 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings_disabled.html @@ -0,0 +1,47 @@ + + +
fn main() {
+    format_args!("foo\nbar");
+    format_args!("foo\invalid");
+}
\ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs index 89a5e434f90cc..8b529cf10f7f9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs @@ -1498,6 +1498,23 @@ fn main() { ); } +#[test] +fn test_strings_highlighting_disabled() { + // Test that string escape sequences are not highlighted when string highlighting is disabled + check_highlighting_with_config( + r#" +//- minicore: fmt +fn main() { + format_args!("foo\nbar"); + format_args!("foo\invalid"); +} +"#, + HighlightConfig { strings: false, ..HL_CONFIG }, + expect_file!["./test_data/highlight_strings_disabled.html"], + false, + ); +} + #[test] fn regression_20952() { check_highlighting( From e80fbd4bca604211f810fc207f33089730a3e9e1 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Fri, 9 Jan 2026 13:47:13 +0200 Subject: [PATCH 032/131] Fix lifetimes len diagnostics for fn pointers --- .../rust-analyzer/crates/hir-ty/src/lower.rs | 51 ++++++++++--------- .../crates/hir-ty/src/lower/path.rs | 8 +-- .../src/handlers/missing_lifetime.rs | 15 ++++++ 3 files changed, 47 insertions(+), 27 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index a97d7687162ea..9befca11b3e5c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -27,8 +27,8 @@ use hir_def::{ resolver::{HasResolver, LifetimeNs, Resolver, TypeNs, ValueNs}, signatures::{FunctionSignature, TraitFlags, TypeAliasFlags}, type_ref::{ - ConstRef, LifetimeRefId, PathId, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, - TypeRef, TypeRefId, + ConstRef, FnType, LifetimeRefId, PathId, TraitBoundModifier, TraitRef as HirTraitRef, + TypeBound, TypeRef, TypeRefId, }, }; use hir_expand::name::Name; @@ -98,7 +98,7 @@ impl ImplTraitLoweringState { } } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Copy)] pub enum LifetimeElisionKind<'db> { /// Create a new anonymous lifetime parameter and reference it.
/// @@ -437,26 +437,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { Ty::new_ref(interner, lifetime, inner_ty, lower_mutability(ref_.mutability)) } TypeRef::Placeholder => Ty::new_error(interner, ErrorGuaranteed), - TypeRef::Fn(fn_) => { - let substs = self.with_shifted_in( - DebruijnIndex::from_u32(1), - |ctx: &mut TyLoweringContext<'_, '_>| { - Tys::new_from_iter( - interner, - fn_.params.iter().map(|&(_, tr)| ctx.lower_ty(tr)), - ) - }, - ); - Ty::new_fn_ptr( - interner, - Binder::dummy(FnSig { - abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), - safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe }, - c_variadic: fn_.is_varargs, - inputs_and_output: substs, - }), - ) - } + TypeRef::Fn(fn_) => self.lower_fn_ptr(fn_), TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds), TypeRef::ImplTrait(bounds) => { match self.impl_trait_mode.mode { @@ -517,6 +498,30 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { (ty, res) } + fn lower_fn_ptr(&mut self, fn_: &FnType) -> Ty<'db> { + let interner = self.interner; + let (params, ret_ty) = fn_.split_params_and_ret(); + let old_lifetime_elision = self.lifetime_elision; + let mut args = Vec::with_capacity(fn_.params.len()); + self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx: &mut TyLoweringContext<'_, '_>| { + ctx.lifetime_elision = + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }; + args.extend(params.iter().map(|&(_, tr)| ctx.lower_ty(tr))); + ctx.lifetime_elision = LifetimeElisionKind::for_fn_ret(interner); + args.push(ctx.lower_ty(ret_ty)); + }); + self.lifetime_elision = old_lifetime_elision; + Ty::new_fn_ptr( + interner, + Binder::dummy(FnSig { + abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), + safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe }, + c_variadic: fn_.is_varargs, + inputs_and_output: Tys::new_from_slice(&args), + }), + ) + } + /// This is only for `generic_predicates_for_param`, where we can't just /// lower the self types of the predicates since that could lead to cycles. /// So we just check here if the `type_ref` resolves to a generic param, and which. diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index b77aeab62d157..f3d0de12275ed 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -599,7 +599,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { explicit_self_ty: Option>, lowering_assoc_type_generics: bool, ) -> GenericArgs<'db> { - let old_lifetime_elision = self.ctx.lifetime_elision.clone(); + let old_lifetime_elision = self.ctx.lifetime_elision; if let Some(args) = self.current_or_prev_segment.args_and_bindings && args.parenthesized != GenericArgsParentheses::No @@ -640,7 +640,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { explicit_self_ty, PathGenericsSource::Segment(self.current_segment_u32()), lowering_assoc_type_generics, - self.ctx.lifetime_elision.clone(), + self.ctx.lifetime_elision, ); self.ctx.lifetime_elision = old_lifetime_elision; result @@ -884,7 +884,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { assoc_type: binding_idx as u32, }, false, - this.ctx.lifetime_elision.clone(), + this.ctx.lifetime_elision, ) }); let args = GenericArgs::new_from_iter( @@ -902,7 +902,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { // `Fn()`-style generics are elided like functions. This is `Output` (we lower to it in hir-def). 
LifetimeElisionKind::for_fn_ret(self.ctx.interner) } else { - self.ctx.lifetime_elision.clone() + self.ctx.lifetime_elision }; self.with_lifetime_elision(lifetime_elision, |this| { match (&this.ctx.store[type_ref], this.ctx.impl_trait_mode.mode) { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs index b07f9e68f6341..5cb710b66b5fa 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs @@ -100,4 +100,19 @@ fn foo WithLifetime>() {} "#, ); } + + #[test] + fn regression_21430() { + check_diagnostics( + r#" +struct S { + f: fn(A<()>), +} + +struct A<'a, T> { + a: &'a T, +} + "#, + ); + } } From e52695c3fca6f9b973c98103407d83d1c963cbdf Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Fri, 9 Jan 2026 11:58:16 +0000 Subject: [PATCH 033/131] internal: Include private definitions in generated rustdoc rust-analyzer has handy prebuilt `cargo doc` output at https://rust-lang.github.io/rust-analyzer/ide/ However, it doesn't include private definitions, which makes it less useful when trying to learn unfamiliar parts of the codebase. Instead, pass `--document-private-items` so the HTML includes information on private types and modules too. rustdoc renders these with a padlock icon, so it's still clear that they're private. This change also exposes some more rustdoc warnings, which I've fixed. --- src/tools/rust-analyzer/.github/workflows/rustdoc.yaml | 2 +- src/tools/rust-analyzer/crates/hir-def/src/nameres.rs | 2 +- .../crates/hir-expand/src/builtin/attr_macro.rs | 2 +- .../rust-analyzer/crates/hir-expand/src/cfg_process.rs | 2 +- .../rust-analyzer/crates/hir-ty/src/infer/closure.rs | 2 +- .../rust-analyzer/crates/hir-ty/src/method_resolution.rs | 4 ++-- .../crates/hir-ty/src/method_resolution/probe.rs | 2 +- .../crates/hir-ty/src/next_solver/infer/mod.rs | 2 +- src/tools/rust-analyzer/crates/hir/src/term_search.rs | 2 +- .../crates/ide-assists/src/handlers/inline_type_alias.rs | 6 +++++- .../rust-analyzer/crates/ide-completion/src/context.rs | 2 +- src/tools/rust-analyzer/crates/parser/src/grammar.rs | 2 +- .../crates/project-model/src/cargo_workspace.rs | 2 +- .../crates/rust-analyzer/src/config/patch_old_style.rs | 2 +- .../rust-analyzer/crates/rust-analyzer/src/discover.rs | 4 ++-- .../crates/rust-analyzer/src/global_state.rs | 2 +- .../crates/rust-analyzer/src/handlers/dispatch.rs | 6 +++--- .../rust-analyzer/crates/rust-analyzer/src/task_pool.rs | 2 +- src/tools/rust-analyzer/crates/span/src/hygiene.rs | 8 ++++---- .../crates/syntax/src/syntax_editor/mapping.rs | 2 +- src/tools/rust-analyzer/lib/line-index/src/lib.rs | 2 +- 21 files changed, 32 insertions(+), 28 deletions(-) diff --git a/src/tools/rust-analyzer/.github/workflows/rustdoc.yaml b/src/tools/rust-analyzer/.github/workflows/rustdoc.yaml index 9cc18fc69ede6..0cc7ce77ddb64 100644 --- a/src/tools/rust-analyzer/.github/workflows/rustdoc.yaml +++ b/src/tools/rust-analyzer/.github/workflows/rustdoc.yaml @@ -24,7 +24,7 @@ jobs: run: rustup update --no-self-update stable - name: Build Documentation - run: cargo doc --all --no-deps + run: cargo doc --all --no-deps --document-private-items - name: Deploy Docs uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0 diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs 
b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 150372f1a0d97..1e3ea50c5a0f3 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -216,7 +216,7 @@ struct DefMapCrateData { registered_tools: Vec, /// Unstable features of Rust enabled with `#![feature(A, B)]`. unstable_features: FxHashSet, - /// #[rustc_coherence_is_core] + /// `#[rustc_coherence_is_core]` rustc_coherence_is_core: bool, no_core: bool, no_std: bool, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs index 06b9b5418e372..c94663ca0cbcb 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs @@ -115,7 +115,7 @@ fn dummy_gate_test_expand( /// wasting a lot of memory, and it would also require some way to use a path in a way that makes it /// always resolve as a derive without nameres recollecting them. /// So this hacky approach is a lot more friendly for us, though it does require a bit of support in -/// [`hir::Semantics`] to make this work. +/// hir::Semantics to make this work. fn derive_expand( db: &dyn ExpandDatabase, id: MacroCallId, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs index a0de36548e9f0..ccef9168ac3a2 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs @@ -1,4 +1,4 @@ -//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro +//! Processes out `#[cfg]` and `#[cfg_attr]` attributes from the input for the derive macro use std::{cell::OnceCell, ops::ControlFlow}; use ::tt::TextRange; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index d1391ad24e4df..ce99016470c14 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -466,7 +466,7 @@ impl<'db> InferenceContext<'_, 'db> { } /// Given an `FnOnce::Output` or `AsyncFn::Output` projection, extract the args - /// and return type to infer a [`ty::PolyFnSig`] for the closure. + /// and return type to infer a `PolyFnSig` for the closure. fn extract_sig_from_projection( &self, projection: PolyProjectionPredicate<'db>, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index e4681b464fec3..ad4d79e68a9f5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -206,11 +206,11 @@ impl<'a, 'db> InferenceContext<'a, 'db> { } } -/// Used by [FnCtxt::lookup_method_for_operator] with `-Znext-solver`. +/// Used by `FnCtxt::lookup_method_for_operator` with `-Znext-solver`. /// /// With `AsRigid` we error on `impl Opaque: NotInItemBounds` while /// `AsInfer` just treats it as ambiguous and succeeds. This is necessary -/// as we want [FnCtxt::check_expr_call] to treat not-yet-defined opaque +/// as we want `FnCtxt::check_expr_call` to treat not-yet-defined opaque /// types as rigid to support `impl Deref` and /// `Box`. 
/// diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs index 4a7c7d93539e6..42a590e8b4cb3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs @@ -1740,7 +1740,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> { /// We want to only accept trait methods if they were hold even if the /// opaque types were rigid. To handle this, we both check that for trait /// candidates the goal were to hold even when treating opaques as rigid, - /// see [OpaqueTypesJank](rustc_trait_selection::solve::OpaqueTypesJank). + /// see `rustc_trait_selection::solve::OpaqueTypesJank`. /// /// We also check that all opaque types encountered as self types in the /// autoderef chain don't get constrained when applying the candidate. diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs index 7d291f7ddbedb..21baacb116938 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs @@ -140,7 +140,7 @@ pub struct InferCtxtInner<'db> { /// /// Before running `resolve_regions_and_report_errors`, the creator /// of the inference context is expected to invoke - /// [`InferCtxt::process_registered_region_obligations`] + /// `InferCtxt::process_registered_region_obligations` /// for each body-id in this map, which will process the /// obligations within. This is expected to be done 'late enough' /// that all type inference variables have been bound and so forth. diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search.rs b/src/tools/rust-analyzer/crates/hir/src/term_search.rs index e4089218305ce..f2dc1ce798ad9 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search.rs @@ -172,7 +172,7 @@ impl<'db> LookupTable<'db> { /// Insert new type trees for type /// /// Note that the types have to be the same, unification is not enough as unification is not - /// transitive. For example Vec and FxHashSet both unify with Iterator, + /// transitive. For example `Vec` and `FxHashSet` both unify with `Iterator`, /// but they clearly do not unify themselves. fn insert(&mut self, ty: Type<'db>, exprs: impl Iterator>) { match self.data.get_mut(&ty) { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs index ae8d130df23ca..c7a48f3261a9f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -290,19 +290,23 @@ impl ConstAndTypeMap { /// ^ alias generic params /// let a: A<100>; /// ^ instance generic args -/// ``` /// /// generic['a] = '_ due to omission /// generic[N] = 100 due to the instance arg /// generic[T] = u64 due to the default param +/// ``` /// /// 2. Copy the concrete type and substitute in each found mapping: /// +/// ```ignore /// &'_ [u64; 100] +/// ``` /// /// 3. 
Remove wildcard lifetimes entirely: /// +/// ```ignore /// &[u64; 100] +/// ``` fn create_replacement( lifetime_map: &LifetimeMap, const_and_type_map: &ConstAndTypeMap, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index d116f665adbdd..cab8bced88df1 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -628,7 +628,7 @@ impl CompletionContext<'_> { } /// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and - /// passes all doc-aliases along, to funnel it into [`Completions::add_path_resolution`]. + /// passes all doc-aliases along, to funnel it into `Completions::add_path_resolution`. pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef, Vec)) { let _p = tracing::info_span!("CompletionContext::process_all_names").entered(); self.scope.process_all_names(&mut |name, def| { diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs index bf8430294110c..e481bbe9bc4a0 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs @@ -6,7 +6,7 @@ //! each submodule starts with `use super::*` import and exports //! "public" productions via `pub(super)`. //! -//! See docs for [`Parser`](super::parser::Parser) to learn about API, +//! See docs for [`Parser`] to learn about API, //! available to the grammar, and see docs for [`Event`](super::event::Event) //! to learn how this actually manages to produce parse trees. //! diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 6e1a3f37ff1c1..483ab28450455 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -640,7 +640,7 @@ impl FetchMetadata { /// Builds a command to fetch metadata for the given `cargo_toml` manifest. /// /// Performs a lightweight pre-fetch using the `--no-deps` option, - /// available via [`FetchMetadata::no_deps_metadata`], to gather basic + /// available via `FetchMetadata::no_deps_metadata`, to gather basic /// information such as the `target-dir`. /// /// The provided sysroot is used to set the `RUSTUP_TOOLCHAIN` diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs index 389bb7848c01c..5dc463eccce46 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs @@ -3,7 +3,7 @@ use serde_json::{Value, json}; /// This function patches the json config to the new expected keys. /// That is we try to load old known config keys here and convert them to the new ones. -/// See https://github.com/rust-lang/rust-analyzer/pull/12010 +/// See /// /// We already have an alias system for simple cases, but if we make structural changes /// the alias infra fails down. 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs index 4aef5b0b7f3d5..f129f156a030a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs @@ -42,7 +42,7 @@ impl DiscoverCommand { Self { sender, command } } - /// Spawn the command inside [Discover] and report progress, if any. + /// Spawn the command inside `DiscoverCommand` and report progress, if any. pub(crate) fn spawn( &self, discover_arg: DiscoverArgument, @@ -73,7 +73,7 @@ impl DiscoverCommand { } } -/// A handle to a spawned [Discover]. +/// A handle to a spawned `DiscoverCommand`. #[derive(Debug)] pub(crate) struct DiscoverHandle { pub(crate) handle: CommandHandle, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 39b4aaa64738d..afd4162de6227 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -188,7 +188,7 @@ pub(crate) struct GlobalState { /// been called. pub(crate) deferred_task_queue: DeferredTaskQueue, - /// HACK: Workaround for https://github.com/rust-lang/rust-analyzer/issues/19709 + /// HACK: Workaround for /// This is marked true if we failed to load a crate root file at crate graph creation, /// which will usually end up causing a bunch of incorrect diagnostics on startup. pub(crate) incomplete_crate_graph: bool, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index 10bbb0bb31d99..90deae2d902e5 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -101,7 +101,7 @@ impl RequestDispatcher<'_> { } /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not - /// ready this will return a default constructed [`R::Result`]. + /// ready this will return a default constructed `R::Result`. pub(crate) fn on( &mut self, f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result, @@ -128,7 +128,7 @@ impl RequestDispatcher<'_> { } /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not - /// ready this will return a `default` constructed [`R::Result`]. + /// ready this will return a `default` constructed `R::Result`. pub(crate) fn on_with_vfs_default( &mut self, f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result, @@ -176,7 +176,7 @@ impl RequestDispatcher<'_> { } /// Dispatches a latency-sensitive request onto the thread pool. When the VFS is marked not - /// ready this will return a default constructed [`R::Result`]. + /// ready this will return a default constructed `R::Result`. pub(crate) fn on_latency_sensitive( &mut self, f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs index 8b8876b801cf8..104cd3d2eae9e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs @@ -52,7 +52,7 @@ impl TaskPool { /// `DeferredTaskQueue` holds deferred tasks. /// /// These are tasks that must be run after -/// [`GlobalState::process_changes`] has been called. 
+/// `GlobalState::process_changes` has been called. pub(crate) struct DeferredTaskQueue { pub(crate) sender: crossbeam_channel::Sender, pub(crate) receiver: crossbeam_channel::Receiver, diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs index ea4f4c5efb42f..92bf892ea5297 100644 --- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs @@ -8,9 +8,9 @@ //! //! # The Expansion Order Hierarchy //! -//! `ExpnData` in rustc, rust-analyzer's version is [`MacroCallLoc`]. Traversing the hierarchy -//! upwards can be achieved by walking up [`MacroCallLoc::kind`]'s contained file id, as -//! [`MacroFile`]s are interned [`MacroCallLoc`]s. +//! `ExpnData` in rustc, rust-analyzer's version is `MacroCallLoc`. Traversing the hierarchy +//! upwards can be achieved by walking up `MacroCallLoc::kind`'s contained file id, as +//! `MacroFile`s are interned `MacroCallLoc`s. //! //! # The Macro Definition Hierarchy //! @@ -18,7 +18,7 @@ //! //! # The Call-site Hierarchy //! -//! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer. +//! `ExpnData::call_site` in rustc, `MacroCallLoc::call_site` in rust-analyzer. use crate::Edition; use std::fmt; diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs index 1eaef03197c5d..6257bf4e572ec 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs @@ -1,6 +1,6 @@ //! Maps syntax elements through disjoint syntax nodes. //! -//! [`SyntaxMappingBuilder`] should be used to create mappings to add to a [`SyntaxEditor`] +//! 
[`SyntaxMappingBuilder`] should be used to create mappings to add to a `SyntaxEditor` use itertools::Itertools; use rustc_hash::FxHashMap; diff --git a/src/tools/rust-analyzer/lib/line-index/src/lib.rs b/src/tools/rust-analyzer/lib/line-index/src/lib.rs index 905da330e64b0..d5f0584d988f4 100644 --- a/src/tools/rust-analyzer/lib/line-index/src/lib.rs +++ b/src/tools/rust-analyzer/lib/line-index/src/lib.rs @@ -207,7 +207,7 @@ impl LineIndex { } } -/// This is adapted from the rustc_span crate, https://github.com/rust-lang/rust/blob/de59844c98f7925242a798a72c59dc3610dd0e2c/compiler/rustc_span/src/analyze_source_file.rs +/// This is adapted from the rustc_span crate, fn analyze_source_file(src: &str) -> (Vec, IntMap>) { assert!(src.len() < !0u32 as usize); let mut lines = vec![]; From 998a5ac623d9879cfba667d6220a8f7d228845e8 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Fri, 9 Jan 2026 15:55:45 +0200 Subject: [PATCH 034/131] Remove code made redundant by method resolution rewrite Its job is now done elsewhere, and it's also wrong (not accounting for autoderef) --- .../crates/hir-ty/src/infer/expr.rs | 12 ++-- .../crates/hir-ty/src/tests/regression.rs | 55 +++++++++++++--- .../crates/hir-ty/src/tests/traits.rs | 66 +++++++++++-------- 3 files changed, 88 insertions(+), 45 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 226e9f5cd6674..62339779a5625 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -1704,7 +1704,7 @@ impl<'db> InferenceContext<'_, 'db> { }); match resolved { Ok((func, _is_visible)) => { - self.check_method_call(tgt_expr, &[], func.sig, receiver_ty, expected) + self.check_method_call(tgt_expr, &[], func.sig, expected) } Err(_) => self.err_ty(), } @@ -1844,7 +1844,7 @@ impl<'db> InferenceContext<'_, 'db> { item: func.def_id.into(), }) } - self.check_method_call(tgt_expr, args, func.sig, receiver_ty, expected) + self.check_method_call(tgt_expr, args, func.sig, expected) } // Failed to resolve, report diagnostic and try to resolve as call to field access or // assoc function @@ -1934,16 +1934,14 @@ impl<'db> InferenceContext<'_, 'db> { tgt_expr: ExprId, args: &[ExprId], sig: FnSig<'db>, - receiver_ty: Ty<'db>, expected: &Expectation<'db>, ) -> Ty<'db> { - let (formal_receiver_ty, param_tys) = if !sig.inputs_and_output.inputs().is_empty() { - (sig.inputs_and_output.as_slice()[0], &sig.inputs_and_output.inputs()[1..]) + let param_tys = if !sig.inputs_and_output.inputs().is_empty() { + &sig.inputs_and_output.inputs()[1..] 
} else { - (self.types.types.error, &[] as _) + &[] }; let ret_ty = sig.output(); - self.table.unify(formal_receiver_ty, receiver_ty); self.check_call_arguments(tgt_expr, param_tys, ret_ty, expected, args, &[], sig.c_variadic); ret_ty diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index c805f030446cb..df49d7999feea 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -891,13 +891,14 @@ use core::ops::Deref; struct BufWriter {} -struct Mutex {} -struct MutexGuard<'a, T> {} +struct Mutex(T); +struct MutexGuard<'a, T>(&'a T); impl Mutex { fn lock(&self) -> MutexGuard<'_, T> {} } impl<'a, T: 'a> Deref for MutexGuard<'a, T> { type Target = T; + fn deref(&self) -> &Self::Target { loop {} } } fn flush(&self) { let w: &Mutex; @@ -905,14 +906,18 @@ fn flush(&self) { } "#, expect![[r#" - 123..127 'self': &'? Mutex - 150..152 '{}': MutexGuard<'?, T> - 234..238 'self': &'? {unknown} - 240..290 '{ ...()); }': () - 250..251 'w': &'? Mutex - 276..287 '*(w.lock())': BufWriter - 278..279 'w': &'? Mutex - 278..286 'w.lock()': MutexGuard<'?, BufWriter> + 129..133 'self': &'? Mutex + 156..158 '{}': MutexGuard<'?, T> + 242..246 'self': &'? MutexGuard<'a, T> + 265..276 '{ loop {} }': &'? T + 267..274 'loop {}': ! + 272..274 '{}': () + 289..293 'self': &'? {unknown} + 295..345 '{ ...()); }': () + 305..306 'w': &'? Mutex + 331..342 '*(w.lock())': BufWriter + 333..334 'w': &'? Mutex + 333..341 'w.lock()': MutexGuard<'?, BufWriter> "#]], ); } @@ -2563,3 +2568,33 @@ fn main() { "#, ); } + +#[test] +fn regression_21429() { + check_no_mismatches( + r#" +trait DatabaseLike { + type ForeignKey: ForeignKeyLike; +} + +trait ForeignKeyLike { + type DB: DatabaseLike; + + fn host_columns(&self, database: &Self::DB); +} + +trait ColumnLike { + type DB: DatabaseLike; + + fn foo() -> &&<::DB as DatabaseLike>::ForeignKey { + loop {} + } + + fn foreign_keys(&self, database: &Self::DB) { + let fk = Self::foo(); + fk.host_columns(database); + } +} + "#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index 38591f486e971..b825a0a8f0e5a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -429,7 +429,7 @@ fn associated_type_shorthand_from_method_bound() { trait Iterable { type Item; } -struct S; +struct S(T); impl S { fn foo(self) -> T::Item where T: Iterable { loop {} } } @@ -1103,40 +1103,50 @@ fn test() { fn argument_impl_trait_type_args_2() { check_infer_with_mismatches( r#" -//- minicore: sized +//- minicore: sized, phantom_data +use core::marker::PhantomData; + trait Trait {} struct S; impl Trait for S {} -struct F; +struct F(PhantomData); impl F { fn foo(self, x: impl Trait) -> (T, U) { loop {} } } fn test() { - F.foo(S); - F::.foo(S); - F::.foo::(S); - F::.foo::(S); // extraneous argument should be ignored + F(PhantomData).foo(S); + F::(PhantomData).foo(S); + F::(PhantomData).foo::(S); + F::(PhantomData).foo::(S); // extraneous argument should be ignored }"#, expect![[r#" - 87..91 'self': F - 93..94 'x': impl Trait - 118..129 '{ loop {} }': (T, U) - 120..127 'loop {}': ! 
- 125..127 '{}': () - 143..283 '{ ...ored }': () - 149..150 'F': F<{unknown}> - 149..157 'F.foo(S)': ({unknown}, {unknown}) - 155..156 'S': S - 163..171 'F::': F - 163..178 'F::.foo(S)': (u32, {unknown}) - 176..177 'S': S - 184..192 'F::': F - 184..206 'F::(S)': (u32, i32) - 204..205 'S': S - 212..220 'F::': F - 212..239 'F::(S)': (u32, i32) - 237..238 'S': S + 135..139 'self': F + 141..142 'x': impl Trait + 166..177 '{ loop {} }': (T, U) + 168..175 'loop {}': ! + 173..175 '{}': () + 191..383 '{ ...ored }': () + 197..198 'F': fn F<{unknown}>(PhantomData<{unknown}>) -> F<{unknown}> + 197..211 'F(PhantomData)': F<{unknown}> + 197..218 'F(Phan...foo(S)': ({unknown}, {unknown}) + 199..210 'PhantomData': PhantomData<{unknown}> + 216..217 'S': S + 224..232 'F::': fn F(PhantomData) -> F + 224..245 'F:: + 224..252 'F:: + 250..251 'S': S + 258..266 'F::': fn F(PhantomData) -> F + 258..279 'F:: + 258..293 'F::(S)': (u32, i32) + 267..278 'PhantomData': PhantomData + 291..292 'S': S + 299..307 'F::': fn F(PhantomData) -> F + 299..320 'F:: + 299..339 'F::(S)': (u32, i32) + 308..319 'PhantomData': PhantomData + 337..338 'S': S "#]], ); } @@ -4012,7 +4022,7 @@ fn f() { fn dyn_map() { check_types( r#" -pub struct Key {} +pub struct Key(K, V, P); pub trait Policy { type K; @@ -4024,7 +4034,7 @@ impl Policy for (K, V) { type V = V; } -pub struct KeyMap {} +pub struct KeyMap(KEY); impl KeyMap> { pub fn get(&self, key: &P::K) -> P::V { @@ -5023,7 +5033,7 @@ fn main() { 278..280 '{}': () 290..291 '_': Box + '?> 294..298 'iter': Box + 'static> - 294..310 'iter.i...iter()': Box + 'static> + 294..310 'iter.i...iter()': Box + '?> 152..156 'self': &'? mut Box 177..208 '{ ... }': Option<::Item> 191..198 'loop {}': ! From bcf059c81eb056c39cb03958feb6baea289690b4 Mon Sep 17 00:00:00 2001 From: cry-inc Date: Fri, 9 Jan 2026 18:23:37 +0100 Subject: [PATCH 035/131] Fix issue with ignore attribute for tests where the attribute has a value with the reason --- src/tools/rust-analyzer/crates/hir-def/src/attrs.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs index 83df11f2d2a45..0b8f65687218c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs @@ -135,6 +135,7 @@ fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow match name.text() { "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED), + "ignore" => attr_flags.insert(AttrFlags::IS_IGNORE), "lang" => attr_flags.insert(AttrFlags::LANG_ITEM), "path" => attr_flags.insert(AttrFlags::HAS_PATH), "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE), From c825a504ab7d284610adf43d496fbfe899ed90aa Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 10 Jan 2026 09:51:42 +0100 Subject: [PATCH 036/131] Cleanup --- .../rust-analyzer/crates/hir/src/symbols.rs | 145 +++--- .../crates/ide-db/src/symbol_index.rs | 183 ++------ .../ide-db/src/test_data/test_doc_alias.txt | 112 +++-- .../test_symbol_index_collection.txt | 422 +++++++++++------- .../test_symbols_exclude_imports.txt | 12 +- .../test_data/test_symbols_with_imports.txt | 24 +- .../crates/ide/src/navigation_target.rs | 4 +- .../crates/mbe/src/expander/matcher.rs | 5 +- .../rust-analyzer/tests/slow-tests/main.rs | 44 +- .../rust-analyzer/crates/syntax/src/ptr.rs | 2 +- 10 files changed, 505 insertions(+), 448 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs 
b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index 544c759ed3a77..f9002f31fd15f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -21,12 +21,9 @@ use hir_ty::{ }; use intern::Symbol; use rustc_hash::FxHashMap; -use syntax::{ - AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ToSmolStr, - ast::{HasModuleItem, HasName}, -}; +use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName}; -use crate::{Crate, HasCrate, Module, ModuleDef, Semantics}; +use crate::{HasCrate, Module, ModuleDef, Semantics}; /// The actual data that is stored in the index. It should be as compact as /// possible. @@ -44,14 +41,14 @@ pub struct FileSymbol<'db> { _marker: PhantomData<&'db ()>, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct DeclarationLocation { /// The file id for both the `ptr` and `name_ptr`. pub hir_file_id: HirFileId, /// This points to the whole syntax node of the declaration. pub ptr: SyntaxNodePtr, /// This points to the [`syntax::ast::Name`] identifier of the declaration. - pub name_ptr: AstPtr>, + pub name_ptr: Option>>, } impl DeclarationLocation { @@ -61,70 +58,6 @@ impl DeclarationLocation { } } -impl<'db> FileSymbol<'db> { - /// Create a `FileSymbol` representing a crate's root module. - /// This is used for crate search queries like `::` or `::foo`. - pub fn for_crate_root(db: &'db dyn HirDatabase, krate: Crate) -> Option> { - let display_name = krate.display_name(db)?; - let crate_name = display_name.crate_name(); - let root_module = krate.root_module(db); - let def_map = crate_def_map(db, krate.into()); - let module_data = &def_map[root_module.into()]; - - // Get the definition source (the source file for crate roots) - let definition = module_data.origin.definition_source(db); - let hir_file_id = definition.file_id; - - // For a crate root, the "declaration" is the source file itself - // We use the entire file's syntax node as the location - let syntax_node = definition.value.node(); - let ptr = SyntaxNodePtr::new(&syntax_node); - - // For the name, we need to create a synthetic name pointer. - // We'll use the first token of the file as a placeholder since crate roots - // don't have an explicit name in the source. - // We create a name_ptr pointing to the start of the file. - let name_ptr = match &definition.value { - crate::ModuleSource::SourceFile(sf) => { - // Try to find the first item with a name as a reasonable location for focus - // This is a bit of a hack but works for navigation purposes - let first_item: Option = sf.items().next(); - if let Some(item) = first_item { - if let Some(name) = item.syntax().children().find_map(syntax::ast::Name::cast) { - AstPtr::new(&name).wrap_left() - } else { - // No name found, try to use a NameRef instead - if let Some(name_ref) = - item.syntax().descendants().find_map(syntax::ast::NameRef::cast) - { - AstPtr::new(&name_ref).wrap_right() - } else { - return None; - } - } - } else { - return None; - } - } - _ => return None, - }; - - let loc = DeclarationLocation { hir_file_id, ptr, name_ptr }; - - Some(FileSymbol { - name: Symbol::intern(crate_name.as_str()), - def: ModuleDef::Module(root_module), - loc, - container_name: None, - is_alias: false, - is_assoc: false, - is_import: false, - do_not_complete: Complete::Yes, - _marker: PhantomData, - }) - } -} - /// Represents an outstanding module that the symbol collector must collect symbols from. 
#[derive(Debug)] struct SymbolCollectorWork { @@ -167,6 +100,11 @@ impl<'a> SymbolCollector<'a> { let _p = tracing::info_span!("SymbolCollector::collect", ?module).entered(); tracing::info!(?module, "SymbolCollector::collect"); + // If this is a crate root module, add a symbol for the crate itself + if module.is_crate_root(self.db) { + self.push_crate_root(module); + } + // The initial work is the root module we're collecting, additional work will // be populated as we traverse the module's definitions. self.work.push(SymbolCollectorWork { module_id: module.into(), parent: None }); @@ -176,6 +114,51 @@ impl<'a> SymbolCollector<'a> { } } + /// Push a symbol for a crate's root module. + /// This allows crate roots to appear in the symbol index for queries like `::` or `::foo`. + fn push_crate_root(&mut self, module: Module) { + let krate = module.krate(self.db); + let Some(display_name) = krate.display_name(self.db) else { return }; + let crate_name = display_name.crate_name(); + let canonical_name = display_name.canonical_name(); + + let def_map = crate_def_map(self.db, krate.into()); + let module_data = &def_map[def_map.crate_root(self.db)]; + + let definition = module_data.origin.definition_source(self.db); + let hir_file_id = definition.file_id; + let syntax_node = definition.value.node(); + let ptr = SyntaxNodePtr::new(&syntax_node); + + let loc = DeclarationLocation { hir_file_id, ptr, name_ptr: None }; + + self.symbols.insert(FileSymbol { + name: crate_name.symbol().clone(), + def: ModuleDef::Module(module), + loc, + container_name: None, + is_alias: false, + is_assoc: false, + is_import: false, + do_not_complete: Complete::Yes, + _marker: PhantomData, + }); + + if canonical_name != crate_name.symbol() { + self.symbols.insert(FileSymbol { + name: canonical_name.clone(), + def: ModuleDef::Module(module), + loc, + container_name: None, + is_alias: false, + is_assoc: false, + is_import: false, + do_not_complete: Complete::Yes, + _marker: PhantomData, + }); + } + } + pub fn finish(self) -> Box<[FileSymbol<'a>]> { self.symbols.into_iter().collect() } @@ -277,7 +260,7 @@ impl<'a> SymbolCollector<'a> { let dec_loc = DeclarationLocation { hir_file_id: source.file_id, ptr: SyntaxNodePtr::new(use_tree_src.syntax()), - name_ptr: AstPtr::new(&name_syntax), + name_ptr: Some(AstPtr::new(&name_syntax)), }; this.symbols.insert(FileSymbol { name: name.symbol().clone(), @@ -312,7 +295,7 @@ impl<'a> SymbolCollector<'a> { let dec_loc = DeclarationLocation { hir_file_id: source.file_id, ptr: SyntaxNodePtr::new(source.value.syntax()), - name_ptr: AstPtr::new(&name_syntax), + name_ptr: Some(AstPtr::new(&name_syntax)), }; this.symbols.insert(FileSymbol { name: name.symbol().clone(), @@ -477,10 +460,10 @@ impl<'a> SymbolCollector<'a> { let source = loc.source(self.db); let Some(name_node) = source.value.name() else { return Complete::Yes }; let def = ModuleDef::from(id.into()); - let dec_loc = DeclarationLocation { + let loc = DeclarationLocation { hir_file_id: source.file_id, ptr: SyntaxNodePtr::new(source.value.syntax()), - name_ptr: AstPtr::new(&name_node).wrap_left(), + name_ptr: Some(AstPtr::new(&name_node).wrap_left()), }; let mut do_not_complete = Complete::Yes; @@ -495,7 +478,7 @@ impl<'a> SymbolCollector<'a> { self.symbols.insert(FileSymbol { name: alias.clone(), def, - loc: dec_loc.clone(), + loc, container_name: self.current_container_name.clone(), is_alias: true, is_assoc, @@ -510,7 +493,7 @@ impl<'a> SymbolCollector<'a> { name: name.symbol().clone(), def, container_name: 
self.current_container_name.clone(), - loc: dec_loc, + loc, is_alias: false, is_assoc, is_import: false, @@ -527,10 +510,10 @@ impl<'a> SymbolCollector<'a> { let Some(declaration) = module_data.origin.declaration() else { return }; let module = declaration.to_node(self.db); let Some(name_node) = module.name() else { return }; - let dec_loc = DeclarationLocation { + let loc = DeclarationLocation { hir_file_id: declaration.file_id, ptr: SyntaxNodePtr::new(module.syntax()), - name_ptr: AstPtr::new(&name_node).wrap_left(), + name_ptr: Some(AstPtr::new(&name_node).wrap_left()), }; let def = ModuleDef::Module(module_id.into()); @@ -543,7 +526,7 @@ impl<'a> SymbolCollector<'a> { self.symbols.insert(FileSymbol { name: alias.clone(), def, - loc: dec_loc.clone(), + loc, container_name: self.current_container_name.clone(), is_alias: true, is_assoc: false, @@ -558,7 +541,7 @@ impl<'a> SymbolCollector<'a> { name: name.symbol().clone(), def: ModuleDef::Module(module_id.into()), container_name: self.current_container_name.clone(), - loc: dec_loc, + loc, is_alias: false, is_assoc: false, is_import: false, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index ca0d5ec1e5e62..05c3f360fa874 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -129,58 +129,19 @@ impl Query { /// - `anchor_to_crate`: Whether the first segment must be a crate name fn parse_path_query(query: &str) -> (Vec, String, bool) { // Check for leading :: (absolute path / crate search) - let anchor_to_crate = query.starts_with("::"); - let query = if anchor_to_crate { &query[2..] } else { query }; - - // Handle sole "::" - return all crates - if query.is_empty() && anchor_to_crate { - return (vec![], String::new(), true); - } - - // Check for trailing :: (module browsing - returns all items in module) - let return_all_in_module = query.ends_with("::"); - let query = if return_all_in_module { query.trim_end_matches("::") } else { query }; - - if !query.contains("::") { - // No path separator - single segment - if anchor_to_crate && !return_all_in_module { - // "::foo" - fuzzy search crate names only - return (vec![], query.to_string(), true); - } - if return_all_in_module { - // "foo::" - browse all items in module "foo" - // path_filter = ["foo"], query = "", anchor_to_crate = false/true - return (vec![query.to_string()], String::new(), anchor_to_crate); - } - // Plain "foo" - normal fuzzy search - return (vec![], query.to_string(), false); - } - - // Filter out empty segments (e.g., "foo::::bar" -> "foo::bar") - let segments: Vec<&str> = query.split("::").filter(|s| !s.is_empty()).collect(); - - if segments.is_empty() { - return (vec![], String::new(), anchor_to_crate); - } + let (query, anchor_to_crate) = match query.strip_prefix("::") { + Some(q) => (q, true), + None => (query, false), + }; - let path: Vec = - segments[..segments.len() - 1].iter().map(|s| s.to_string()).collect(); - let item = if return_all_in_module { - // All segments go to path, item is empty - let mut path = path; - path.push(segments.last().unwrap().to_string()); - return (path, String::new(), anchor_to_crate); - } else { - segments.last().unwrap_or(&"").to_string() + let Some((prefix, query)) = query.rsplit_once("::") else { + return (vec![], query.to_owned(), anchor_to_crate); }; - (path, item, anchor_to_crate) - } + let prefix: Vec<_> = + prefix.split("::").filter(|s| 
!s.is_empty()).map(ToOwned::to_owned).collect(); - /// Returns true if this query should return all items in a module - /// (i.e., the original query ended with `::`) - fn is_module_browsing(&self) -> bool { - self.query.is_empty() && !self.path_filter.is_empty() + (prefix, query.to_owned(), anchor_to_crate) } /// Returns true if this query is searching for crates @@ -245,11 +206,14 @@ pub fn crate_symbols(db: &dyn HirDatabase, krate: Crate) -> Box<[&SymbolIndex<'_ // That is, `#` switches from "types" to all symbols, `*` switches from the current // workspace to dependencies. // -// Note that filtering does not currently work in VSCode due to the editor never -// sending the special symbols to the language server. Instead, you can configure -// the filtering via the `rust-analyzer.workspace.symbol.search.scope` and -// `rust-analyzer.workspace.symbol.search.kind` settings. Symbols prefixed -// with `__` are hidden from the search results unless configured otherwise. +// This also supports general Rust path syntax with the usual rules. +// +// Note that paths do not currently work in VSCode due to the editor never +// sending the special symbols to the language server. Some other editors might not support the # or +// * search either, instead, you can configure the filtering via the +// `rust-analyzer.workspace.symbol.search.scope` and `rust-analyzer.workspace.symbol.search.kind` +// settings. Symbols prefixed with `__` are hidden from the search results unless configured +// otherwise. // // | Editor | Shortcut | // |---------|-----------| @@ -257,12 +221,11 @@ pub fn crate_symbols(db: &dyn HirDatabase, krate: Crate) -> Box<[&SymbolIndex<'_ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec> { let _p = tracing::info_span!("world_symbols", query = ?query.query).entered(); - // Handle special case: "::" alone or "::foo" for crate search if query.is_crate_search() { return search_crates(db, &query); } - // If we have a path filter, resolve it to target modules first + // If we have a path filter, resolve it to target modules let indices: Vec<_> = if !query.path_filter.is_empty() { let target_modules = resolve_path_to_modules( db, @@ -272,13 +235,11 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec> { ); if target_modules.is_empty() { - return vec![]; // Path doesn't resolve to any module + return vec![]; } - // Get symbol indices only for the resolved modules target_modules.iter().map(|&module| SymbolIndex::module_symbols(db, module)).collect() } else if query.libs { - // Original behavior for non-path queries searching libs LibraryRoots::get(db) .roots(db) .par_iter() @@ -289,7 +250,6 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec> { .map(|&root| SymbolIndex::library_symbols(db, root)) .collect() } else { - // Original behavior for non-path queries searching local crates let mut crates = Vec::new(); for &root in LocalRoots::get(db).roots(db).iter() { @@ -303,23 +263,11 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec> { let mut res = vec![]; - // For module browsing (empty query, non-empty path_filter), return all symbols - if query.is_module_browsing() { - for index in &indices { - for symbol in index.symbols.iter() { - // Apply existing filters (only_types, assoc_mode, exclude_imports, etc.) 
- if query.matches_symbol_filters(symbol) { - res.push(symbol.clone()); - } - } - } - } else { - // Normal search: use FST to match item name - query.search::<()>(&indices, |f| { - res.push(f.clone()); - ControlFlow::Continue(()) - }); - } + // Normal search: use FST to match item name + query.search::<()>(&indices, |f| { + res.push(f.clone()); + ControlFlow::Continue(()) + }); res } @@ -341,9 +289,15 @@ fn search_crates<'db>(db: &'db RootDatabase, query: &Query) -> Vec) -> bool { - // Check only_types filter - if self.only_types - && !matches!( - symbol.def, - hir::ModuleDef::Adt(..) - | hir::ModuleDef::TypeAlias(..) - | hir::ModuleDef::BuiltinType(..) - | hir::ModuleDef::Trait(..) - ) - { - return false; - } - - // Check assoc_mode filter - if !self.matches_assoc_mode(symbol.is_assoc) { - return false; - } - - // Check exclude_imports filter - if self.exclude_imports && symbol.is_import { - return false; - } - - // Check underscore prefix - let ignore_underscore_prefixed = !self.query.starts_with("__"); - if ignore_underscore_prefixed && symbol.name.as_str().starts_with("__") { - return false; - } - - true - } } #[cfg(test)] @@ -939,34 +858,6 @@ pub struct Foo; assert!(!anchor); } - #[test] - fn test_query_modes() { - // Test is_module_browsing - let query = Query::new("foo::".to_owned()); - assert!(query.is_module_browsing()); - assert!(!query.is_crate_search()); - - // Test is_crate_search with sole :: - let query = Query::new("::".to_owned()); - assert!(!query.is_module_browsing()); - assert!(query.is_crate_search()); - - // Test is_crate_search with ::foo - let query = Query::new("::foo".to_owned()); - assert!(!query.is_module_browsing()); - assert!(query.is_crate_search()); - - // Normal query should be neither - let query = Query::new("foo".to_owned()); - assert!(!query.is_module_browsing()); - assert!(!query.is_crate_search()); - - // Path query should be neither - let query = Query::new("foo::bar".to_owned()); - assert!(!query.is_module_browsing()); - assert!(!query.is_crate_search()); - } - #[test] fn test_path_search() { let (mut db, _) = RootDatabase::with_many_files( diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt index 5783d97564d03..71680699b7395 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt @@ -27,11 +27,13 @@ kind: STRUCT, range: 83..119, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 109..118, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 109..118, + }, + ), ), }, container_name: None, @@ -62,11 +64,13 @@ kind: STRUCT, range: 0..81, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), ), }, container_name: None, @@ -97,11 +101,13 @@ kind: STRUCT, range: 0..81, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), ), }, container_name: None, @@ -132,11 +138,13 @@ kind: STRUCT, range: 0..81, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), ), }, container_name: None, @@ -146,6 +154,34 @@ do_not_complete: Yes, _marker: PhantomData<&()>, }, + FileSymbol { + name: "ra_test_fixture", 
+ def: Module( + Module { + id: ModuleIdLt { + [salsa id]: Id(3800), + }, + }, + ), + loc: DeclarationLocation { + hir_file_id: FileId( + EditionedFileId( + Id(3000), + ), + ), + ptr: SyntaxNodePtr { + kind: SOURCE_FILE, + range: 0..128, + }, + name_ptr: None, + }, + container_name: None, + is_alias: false, + is_assoc: false, + is_import: false, + do_not_complete: Yes, + _marker: PhantomData<&()>, + }, FileSymbol { name: "s1", def: Adt( @@ -167,11 +203,13 @@ kind: STRUCT, range: 0..81, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), ), }, container_name: None, @@ -202,11 +240,13 @@ kind: STRUCT, range: 83..119, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 109..118, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 109..118, + }, + ), ), }, container_name: None, @@ -237,11 +277,13 @@ kind: STRUCT, range: 0..81, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), ), }, container_name: None, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 7692a7d61abf5..2d62a56fe22df 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -25,11 +25,13 @@ kind: VARIANT, range: 201..202, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 201..202, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 201..202, + }, + ), ), }, container_name: Some( @@ -60,11 +62,13 @@ kind: TYPE_ALIAS, range: 470..490, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 475..480, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 475..480, + }, + ), ), }, container_name: None, @@ -93,11 +97,13 @@ kind: VARIANT, range: 204..205, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 204..205, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 204..205, + }, + ), ), }, container_name: Some( @@ -128,11 +134,13 @@ kind: CONST, range: 413..434, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 419..424, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 419..424, + }, + ), ), }, container_name: None, @@ -161,11 +169,13 @@ kind: CONST, range: 593..665, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 599..615, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 599..615, + }, + ), ), }, container_name: None, @@ -196,11 +206,13 @@ kind: ENUM, range: 185..207, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 190..194, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 190..194, + }, + ), ), }, container_name: None, @@ -231,11 +243,13 @@ kind: USE_TREE, range: 727..749, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 736..749, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 736..749, + }, + ), ), }, container_name: None, @@ -266,11 +280,13 @@ kind: MACRO_DEF, range: 153..168, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 159..164, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 159..164, + }, + ), ), }, 
container_name: None, @@ -299,11 +315,13 @@ kind: STATIC, range: 435..469, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 442..448, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 442..448, + }, + ), ), }, container_name: None, @@ -334,11 +352,13 @@ kind: STRUCT, range: 170..184, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 177..183, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 177..183, + }, + ), ), }, container_name: None, @@ -369,11 +389,13 @@ kind: STRUCT, range: 0..22, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 6..21, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 6..21, + }, + ), ), }, container_name: None, @@ -404,11 +426,13 @@ kind: STRUCT, range: 391..409, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 398..408, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 398..408, + }, + ), ), }, container_name: Some( @@ -441,11 +465,13 @@ kind: STRUCT, range: 628..654, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 635..653, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 635..653, + }, + ), ), }, container_name: Some( @@ -478,11 +504,13 @@ kind: STRUCT, range: 552..580, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 559..579, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 559..579, + }, + ), ), }, container_name: None, @@ -513,11 +541,13 @@ kind: STRUCT, range: 261..279, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 268..275, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 268..275, + }, + ), ), }, container_name: None, @@ -546,11 +576,13 @@ kind: TRAIT, range: 334..373, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 340..345, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 340..345, + }, + ), ), }, container_name: None, @@ -581,11 +613,13 @@ kind: USE_TREE, range: 755..769, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 764..769, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 764..769, + }, + ), ), }, container_name: None, @@ -616,11 +650,13 @@ kind: UNION, range: 208..222, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 214..219, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 214..219, + }, + ), ), }, container_name: None, @@ -649,11 +685,13 @@ kind: MODULE, range: 492..530, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 496..501, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 496..501, + }, + ), ), }, container_name: None, @@ -682,11 +720,13 @@ kind: MODULE, range: 667..677, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 671..676, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 671..676, + }, + ), ), }, container_name: None, @@ -717,11 +757,13 @@ kind: MACRO_RULES, range: 51..131, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 64..77, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 64..77, + }, + ), ), }, container_name: None, @@ -750,11 +792,13 @@ kind: FN, range: 307..330, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 310..325, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 310..325, + }, + ), ), }, container_name: Some( @@ -785,11 +829,13 @@ kind: FN, range: 242..257, }, - 
name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 245..252, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 245..252, + }, + ), ), }, container_name: Some( @@ -822,11 +868,13 @@ kind: MACRO_RULES, range: 1..48, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 14..31, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 14..31, + }, + ), ), }, container_name: None, @@ -855,12 +903,42 @@ kind: FN, range: 375..411, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 378..382, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 378..382, + }, + ), + ), + }, + container_name: None, + is_alias: false, + is_assoc: false, + is_import: false, + do_not_complete: Yes, + _marker: PhantomData<&()>, + }, + FileSymbol { + name: "ra_test_fixture", + def: Module( + Module { + id: ModuleIdLt { + [salsa id]: Id(3800), }, + }, + ), + loc: DeclarationLocation { + hir_file_id: FileId( + EditionedFileId( + Id(3000), + ), ), + ptr: SyntaxNodePtr { + kind: SOURCE_FILE, + range: 0..793, + }, + name_ptr: None, }, container_name: None, is_alias: false, @@ -890,11 +968,13 @@ kind: USE_TREE, range: 684..721, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 701..721, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 701..721, + }, + ), ), }, container_name: None, @@ -923,11 +1003,13 @@ kind: FN, range: 352..371, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 355..363, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 355..363, + }, + ), ), }, container_name: Some( @@ -969,11 +1051,13 @@ kind: STRUCT, range: 508..528, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 515..527, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 515..527, + }, + ), ), }, container_name: None, @@ -1011,11 +1095,13 @@ kind: USE_TREE, range: 141..173, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 157..173, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 157..173, + }, + ), ), }, container_name: None, @@ -1046,11 +1132,13 @@ kind: USE_TREE, range: 141..173, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 157..173, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 157..173, + }, + ), ), }, container_name: None, @@ -1081,11 +1169,13 @@ kind: STRUCT, range: 0..20, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 7..19, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 7..19, + }, + ), ), }, container_name: None, @@ -1116,11 +1206,13 @@ kind: USE_TREE, range: 35..69, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 51..69, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 51..69, + }, + ), ), }, container_name: None, @@ -1151,11 +1243,13 @@ kind: USE_TREE, range: 85..125, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 115..125, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 115..125, + }, + ), ), }, container_name: None, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt index 6f5f8f889c7dd..87f0c7d9a8170 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt @@ -20,11 
+20,13 @@ kind: STRUCT, range: 0..15, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 11..14, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 11..14, + }, + ), ), }, container_name: None, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt index 5d3fe4d2658d6..e96aa889ba065 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt @@ -20,11 +20,13 @@ kind: STRUCT, range: 0..15, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME, - range: 11..14, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 11..14, + }, + ), ), }, container_name: None, @@ -55,11 +57,13 @@ kind: USE_TREE, range: 17..25, }, - name_ptr: AstPtr( - SyntaxNodePtr { - kind: NAME_REF, - range: 22..25, - }, + name_ptr: Some( + AstPtr( + SyntaxNodePtr { + kind: NAME_REF, + range: 22..25, + }, + ), ), }, container_name: None, diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index a271cac6fcd07..047df309eca6e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -19,7 +19,7 @@ use ide_db::{ }; use stdx::never; use syntax::{ - AstNode, SyntaxNode, TextRange, + AstNode, AstPtr, SyntaxNode, TextRange, ast::{self, HasName}, }; @@ -253,7 +253,7 @@ impl<'db> TryToNav for FileSymbol<'db> { db, self.loc.hir_file_id, self.loc.ptr.text_range(), - Some(self.loc.name_ptr.text_range()), + self.loc.name_ptr.map(AstPtr::text_range), ) .map(|(FileRange { file_id, range: full_range }, focus_range)| { NavigationTarget { diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs index 8f6627a60fe63..fe01fb1f10637 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs @@ -414,8 +414,9 @@ fn match_loop_inner<'t>( } // Check if we need a separator. 
- if item.sep.is_some() && !item.sep_matched { - let sep = item.sep.as_ref().unwrap(); + if let Some(sep) = &item.sep + && !item.sep_matched + { let mut fork = src.clone(); if expect_separator(&mut fork, sep) { // HACK: here we use `meta_result` to pass `TtIter` back to caller because diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index eb1b8c5dd0e6e..9f3c6742d651d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -1447,7 +1447,27 @@ foo = { path = "../foo" } .server() .wait_until_workspace_is_loaded(); - server.request::(Default::default(), json!([])); + server.request::( + Default::default(), + json!([ + { + "name": "bar", + "kind": 2, + "location": { + "uri": "file:///[..]bar/src/lib.rs", + "range": { + "start": { + "line": 0, + "character": 0 + }, + "end": { + "line": 0, + "character": 0 + } + } + } + }]), + ); let server = Project::with_fixture( r#" @@ -1486,7 +1506,27 @@ version = "0.0.0" .server() .wait_until_workspace_is_loaded(); - server.request::(Default::default(), json!([])); + server.request::( + Default::default(), + json!([ + { + "name": "baz", + "kind": 2, + "location": { + "uri": "file:///[..]baz/src/lib.rs", + "range": { + "start": { + "line": 0, + "character": 0 + }, + "end": { + "line": 0, + "character": 0 + } + } + } + }]), + ); } #[test] diff --git a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs index 34c07598d2001..c4979b8e3ae80 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs @@ -68,7 +68,7 @@ impl AstPtr { self.raw } - pub fn text_range(&self) -> TextRange { + pub fn text_range(self) -> TextRange { self.raw.text_range() } From 4e18f1dad2be32a53a43840a39cab9457262d1c1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 10 Jan 2026 14:53:22 +0100 Subject: [PATCH 037/131] Abstract proc-macro-srv input and output away --- .../proc-macro-api/src/legacy_protocol.rs | 4 +- .../proc-macro-api/src/legacy_protocol/msg.rs | 4 +- .../crates/proc-macro-api/src/lib.rs | 41 +++++- .../crates/proc-macro-api/src/process.rs | 138 +++++++++++++----- .../crates/proc-macro-srv-cli/src/main.rs | 12 +- .../proc-macro-srv-cli/src/main_loop.rs | 72 +++++---- 6 files changed, 193 insertions(+), 78 deletions(-) diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs index 22a7d9868e215..4524d1b66bfe2 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs @@ -162,11 +162,11 @@ fn send_request( req: Request, buf: &mut P::Buf, ) -> Result, ServerError> { - req.write::<_, P>(&mut writer).map_err(|err| ServerError { + req.write::
<P>
(&mut writer).map_err(|err| ServerError { message: "failed to write request".into(), io: Some(Arc::new(err)), })?; - let res = Response::read::<_, P>(&mut reader, buf).map_err(|err| ServerError { + let res = Response::read::
<P>
(&mut reader, buf).map_err(|err| ServerError { message: "failed to read response".into(), io: Some(Arc::new(err)), })?; diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs index 4146b619ec0c4..1b6590693354b 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs @@ -155,13 +155,13 @@ impl ExpnGlobals { } pub trait Message: serde::Serialize + DeserializeOwned { - fn read(inp: &mut R, buf: &mut C::Buf) -> io::Result> { + fn read(inp: &mut dyn BufRead, buf: &mut C::Buf) -> io::Result> { Ok(match C::read(inp, buf)? { None => None, Some(buf) => Some(C::decode(buf)?), }) } - fn write(self, out: &mut W) -> io::Result<()> { + fn write(self, out: &mut dyn Write) -> io::Result<()> { let value = C::encode(&self)?; C::write(out, &value) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index f5fcc99f14a3c..98ee6817c2d21 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -18,7 +18,7 @@ extern crate rustc_driver as _; pub mod bidirectional_protocol; pub mod legacy_protocol; -mod process; +pub mod process; pub mod transport; use paths::{AbsPath, AbsPathBuf}; @@ -44,6 +44,25 @@ pub mod version { pub const CURRENT_API_VERSION: u32 = HASHED_AST_ID; } +#[derive(Copy, Clone)] +pub enum ProtocolFormat { + JsonLegacy, + PostcardLegacy, + BidirectionalPostcardPrototype, +} + +impl fmt::Display for ProtocolFormat { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ProtocolFormat::JsonLegacy => write!(f, "json-legacy"), + ProtocolFormat::PostcardLegacy => write!(f, "postcard-legacy"), + ProtocolFormat::BidirectionalPostcardPrototype => { + write!(f, "bidirectional-postcard-prototype") + } + } + } +} + /// Represents different kinds of procedural macros that can be expanded by the external server. #[derive(Copy, Clone, Eq, PartialEq, Debug, serde_derive::Serialize, serde_derive::Deserialize)] pub enum ProcMacroKind { @@ -132,7 +151,25 @@ impl ProcMacroClient { > + Clone, version: Option<&Version>, ) -> io::Result { - let process = ProcMacroServerProcess::run(process_path, env, version)?; + let process = ProcMacroServerProcess::spawn(process_path, env, version)?; + Ok(ProcMacroClient { process: Arc::new(process), path: process_path.to_owned() }) + } + + /// Invokes `spawn` and returns a client connected to the resulting read and write handles. + /// + /// The `process_path` is used for `Self::server_path`. This function is mainly used for testing. 
+ pub fn with_io_channels( + process_path: &AbsPath, + spawn: impl Fn( + Option, + ) -> io::Result<( + Box, + Box, + Box, + )>, + version: Option<&Version>, + ) -> io::Result { + let process = ProcMacroServerProcess::run(spawn, version, || "".to_owned())?; Ok(ProcMacroClient { process: Arc::new(process), path: process_path.to_owned() }) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs index f6a656e3ce3a4..4f87621587908 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs @@ -13,14 +13,13 @@ use span::Span; use stdx::JodChild; use crate::{ - Codec, ProcMacro, ProcMacroKind, ServerError, + Codec, ProcMacro, ProcMacroKind, ProtocolFormat, ServerError, bidirectional_protocol::{self, SubCallback, msg::BidirectionalMessage, reject_subrequests}, legacy_protocol::{self, SpanMode}, version, }; /// Represents a process handling proc-macro communication. -#[derive(Debug)] pub(crate) struct ProcMacroServerProcess { /// The state of the proc-macro server process, the protocol is currently strictly sequential /// hence the lock on the state. @@ -31,6 +30,16 @@ pub(crate) struct ProcMacroServerProcess { exited: OnceLock>, } +impl std::fmt::Debug for ProcMacroServerProcess { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ProcMacroServerProcess") + .field("version", &self.version) + .field("protocol", &self.protocol) + .field("exited", &self.exited) + .finish() + } +} + #[derive(Debug, Clone)] pub(crate) enum Protocol { LegacyJson { mode: SpanMode }, @@ -38,22 +47,83 @@ pub(crate) enum Protocol { BidirectionalPostcardPrototype { mode: SpanMode }, } +pub trait ProcessExit: Send + Sync { + fn exit_err(&mut self) -> Option; +} + +impl ProcessExit for Process { + fn exit_err(&mut self) -> Option { + match self.child.try_wait() { + Ok(None) | Err(_) => None, + Ok(Some(status)) => { + let mut msg = String::new(); + if !status.success() + && let Some(stderr) = self.child.stderr.as_mut() + { + _ = stderr.read_to_string(&mut msg); + } + Some(ServerError { + message: format!( + "proc-macro server exited with {status}{}{msg}", + if msg.is_empty() { "" } else { ": " } + ), + io: None, + }) + } + } + } +} + /// Maintains the state of the proc-macro server process. -#[derive(Debug)] struct ProcessSrvState { - process: Process, - stdin: ChildStdin, - stdout: BufReader, + process: Box, + stdin: Box, + stdout: Box, } impl ProcMacroServerProcess { /// Starts the proc-macro server and performs a version check - pub(crate) fn run<'a>( + pub(crate) fn spawn<'a>( process_path: &AbsPath, env: impl IntoIterator< Item = (impl AsRef, &'a Option>), > + Clone, version: Option<&Version>, + ) -> io::Result { + Self::run( + |format| { + let mut process = Process::run( + process_path, + env.clone(), + format.map(|format| format.to_string()).as_deref(), + )?; + let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); + + Ok((Box::new(process), Box::new(stdin), Box::new(stdout))) + }, + version, + || { + #[expect(clippy::disallowed_methods)] + Command::new(process_path) + .arg("--version") + .output() + .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_owned()) + .unwrap_or_else(|_| "unknown version".to_owned()) + }, + ) + } + + /// Invokes `spawn` and performs a version check. 
+ pub(crate) fn run( + spawn: impl Fn( + Option, + ) -> io::Result<( + Box, + Box, + Box, + )>, + version: Option<&Version>, + binary_server_version: impl Fn() -> String, ) -> io::Result { const VERSION: Version = Version::new(1, 93, 0); // we do `>` for nightly as this started working in the middle of the 1.93 nightly release, so we dont want to break on half of the nightlies @@ -65,27 +135,33 @@ impl ProcMacroServerProcess { && has_working_format_flag { &[ - ( - Some("bidirectional-postcard-prototype"), - Protocol::BidirectionalPostcardPrototype { mode: SpanMode::Id }, - ), - (Some("postcard-legacy"), Protocol::LegacyPostcard { mode: SpanMode::Id }), - (Some("json-legacy"), Protocol::LegacyJson { mode: SpanMode::Id }), + Some(ProtocolFormat::BidirectionalPostcardPrototype), + Some(ProtocolFormat::PostcardLegacy), + Some(ProtocolFormat::JsonLegacy), ] } else { - &[(None, Protocol::LegacyJson { mode: SpanMode::Id })] + &[None] }; let mut err = None; - for &(format, ref protocol) in formats { + for &format in formats { let create_srv = || { - let mut process = Process::run(process_path, env.clone(), format)?; - let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); + let (process, stdin, stdout) = spawn(format)?; io::Result::Ok(ProcMacroServerProcess { state: Mutex::new(ProcessSrvState { process, stdin, stdout }), version: 0, - protocol: protocol.clone(), + protocol: match format { + Some(ProtocolFormat::BidirectionalPostcardPrototype) => { + Protocol::BidirectionalPostcardPrototype { mode: SpanMode::Id } + } + Some(ProtocolFormat::PostcardLegacy) => { + Protocol::LegacyPostcard { mode: SpanMode::Id } + } + Some(ProtocolFormat::JsonLegacy) | None => { + Protocol::LegacyJson { mode: SpanMode::Id } + } + }, exited: OnceLock::new(), }) }; @@ -93,12 +169,7 @@ impl ProcMacroServerProcess { tracing::info!("sending proc-macro server version check"); match srv.version_check(Some(&mut reject_subrequests)) { Ok(v) if v > version::CURRENT_API_VERSION => { - #[allow(clippy::disallowed_methods)] - let process_version = Command::new(process_path) - .arg("--version") - .output() - .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_owned()) - .unwrap_or_else(|_| "unknown version".to_owned()); + let process_version = binary_server_version(); err = Some(io::Error::other(format!( "Your installed proc-macro server is too new for your rust-analyzer. API version: {}, server version: {process_version}. \ This will prevent proc-macro expansion from working. 
Please consider updating your rust-analyzer to ensure compatibility with your current toolchain.", @@ -275,22 +346,9 @@ impl ProcMacroServerProcess { f(&mut state.stdin, &mut state.stdout, &mut buf).map_err(|e| { if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) { - match state.process.child.try_wait() { - Ok(None) | Err(_) => e, - Ok(Some(status)) => { - let mut msg = String::new(); - if !status.success() - && let Some(stderr) = state.process.child.stderr.as_mut() - { - _ = stderr.read_to_string(&mut msg); - } - let server_error = ServerError { - message: format!( - "proc-macro server exited with {status}{}{msg}", - if msg.is_empty() { "" } else { ": " } - ), - io: None, - }; + match state.process.exit_err() { + None => e, + Some(server_error) => { self.exited.get_or_init(|| AssertUnwindSafe(server_error)).0.clone() } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index bdfdb50002e12..189a1eea5c196 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -45,7 +45,11 @@ fn main() -> std::io::Result<()> { } let &format = matches.get_one::("format").expect("format value should always be present"); - run(format) + + let mut stdin = std::io::BufReader::new(std::io::stdin()); + let mut stdout = std::io::stdout(); + + run(&mut stdin, &mut stdout, format) } #[derive(Copy, Clone)] @@ -88,7 +92,11 @@ impl ValueEnum for ProtocolFormat { } #[cfg(not(feature = "sysroot-abi"))] -fn run(_: ProtocolFormat) -> std::io::Result<()> { +fn run( + _: &mut std::io::BufReader, + _: &mut std::io::Stdout, + _: ProtocolFormat, +) -> std::io::Result<()> { Err(std::io::Error::new( std::io::ErrorKind::Unsupported, "proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function" diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index 22536a4e52b12..0c651d22b41bc 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -6,7 +6,7 @@ use proc_macro_api::{ transport::codec::{json::JsonProtocol, postcard::PostcardProtocol}, version::CURRENT_API_VERSION, }; -use std::io; +use std::io::{self, BufRead, Write}; use legacy::Message; @@ -32,15 +32,24 @@ impl legacy::SpanTransformer for SpanTrans { } } -pub(crate) fn run(format: ProtocolFormat) -> io::Result<()> { +pub(crate) fn run( + stdin: &mut (dyn BufRead + Send + Sync), + stdout: &mut (dyn Write + Send + Sync), + format: ProtocolFormat, +) -> io::Result<()> { match format { - ProtocolFormat::JsonLegacy => run_::(), - ProtocolFormat::PostcardLegacy => run_::(), - ProtocolFormat::BidirectionalPostcardPrototype => run_new::(), + ProtocolFormat::JsonLegacy => run_old::(stdin, stdout), + ProtocolFormat::PostcardLegacy => run_old::(stdin, stdout), + ProtocolFormat::BidirectionalPostcardPrototype => { + run_new::(stdin, stdout) + } } } -fn run_new() -> io::Result<()> { +fn run_new( + stdin: &mut (dyn BufRead + Send + Sync), + stdout: &mut (dyn Write + Send + Sync), +) -> io::Result<()> { fn macro_kind_to_api(kind: proc_macro_srv::ProcMacroKind) -> proc_macro_api::ProcMacroKind { match kind { proc_macro_srv::ProcMacroKind::CustomDerive => { @@ -52,8 +61,6 @@ fn run_new() -> io::Result<()> { } let mut buf = C::Buf::default(); - let mut stdin = io::stdin(); - let mut 
stdout = io::stdout(); let env_snapshot = EnvSnapshot::default(); let srv = proc_macro_srv::ProcMacroSrv::new(&env_snapshot); @@ -61,8 +68,7 @@ fn run_new() -> io::Result<()> { let mut span_mode = legacy::SpanMode::Id; 'outer: loop { - let req_opt = - bidirectional::BidirectionalMessage::read::<_, C>(&mut stdin.lock(), &mut buf)?; + let req_opt = bidirectional::BidirectionalMessage::read::(stdin, &mut buf)?; let Some(req) = req_opt else { break 'outer; }; @@ -77,22 +83,22 @@ fn run_new() -> io::Result<()> { .collect() }); - send_response::(&stdout, bidirectional::Response::ListMacros(res))?; + send_response::(stdout, bidirectional::Response::ListMacros(res))?; } bidirectional::Request::ApiVersionCheck {} => { send_response::( - &stdout, + stdout, bidirectional::Response::ApiVersionCheck(CURRENT_API_VERSION), )?; } bidirectional::Request::SetConfig(config) => { span_mode = config.span_mode; - send_response::(&stdout, bidirectional::Response::SetConfig(config))?; + send_response::(stdout, bidirectional::Response::SetConfig(config))?; } bidirectional::Request::ExpandMacro(task) => { - handle_expand::(&srv, &mut stdin, &mut stdout, &mut buf, span_mode, *task)?; + handle_expand::(&srv, stdin, stdout, &mut buf, span_mode, *task)?; } }, _ => continue, @@ -104,8 +110,8 @@ fn run_new() -> io::Result<()> { fn handle_expand( srv: &proc_macro_srv::ProcMacroSrv<'_>, - stdin: &io::Stdin, - stdout: &io::Stdout, + stdin: &mut (dyn BufRead + Send + Sync), + stdout: &mut (dyn Write + Send + Sync), buf: &mut C::Buf, span_mode: legacy::SpanMode, task: bidirectional::ExpandMacro, @@ -118,7 +124,7 @@ fn handle_expand( fn handle_expand_id( srv: &proc_macro_srv::ProcMacroSrv<'_>, - stdout: &io::Stdout, + stdout: &mut dyn Write, task: bidirectional::ExpandMacro, ) -> io::Result<()> { let bidirectional::ExpandMacro { lib, env, current_dir, data } = task; @@ -157,12 +163,12 @@ fn handle_expand_id( }) .map_err(|e| legacy::PanicMessage(e.into_string().unwrap_or_default())); - send_response::(&stdout, bidirectional::Response::ExpandMacro(res)) + send_response::(stdout, bidirectional::Response::ExpandMacro(res)) } struct ProcMacroClientHandle<'a, C: Codec> { - stdin: &'a io::Stdin, - stdout: &'a io::Stdout, + stdin: &'a mut (dyn BufRead + Send + Sync), + stdout: &'a mut (dyn Write + Send + Sync), buf: &'a mut C::Buf, } @@ -173,11 +179,11 @@ impl<'a, C: Codec> ProcMacroClientHandle<'a, C> { ) -> Option { let msg = bidirectional::BidirectionalMessage::SubRequest(req); - if msg.write::<_, C>(&mut self.stdout.lock()).is_err() { + if msg.write::(&mut *self.stdout).is_err() { return None; } - match bidirectional::BidirectionalMessage::read::<_, C>(&mut self.stdin.lock(), self.buf) { + match bidirectional::BidirectionalMessage::read::(&mut *self.stdin, self.buf) { Ok(Some(msg)) => Some(msg), _ => None, } @@ -238,8 +244,8 @@ impl proc_macro_srv::ProcMacroClientInterface for ProcMacroClientHandl fn handle_expand_ra( srv: &proc_macro_srv::ProcMacroSrv<'_>, - stdin: &io::Stdin, - stdout: &io::Stdout, + stdin: &mut (dyn BufRead + Send + Sync), + stdout: &mut (dyn Write + Send + Sync), buf: &mut C::Buf, task: bidirectional::ExpandMacro, ) -> io::Result<()> { @@ -301,10 +307,13 @@ fn handle_expand_ra( .map(|(tree, span_data_table)| bidirectional::ExpandMacroExtended { tree, span_data_table }) .map_err(|e| legacy::PanicMessage(e.into_string().unwrap_or_default())); - send_response::(&stdout, bidirectional::Response::ExpandMacroExtended(res)) + send_response::(stdout, bidirectional::Response::ExpandMacroExtended(res)) } -fn run_() -> 
io::Result<()> { +fn run_old( + stdin: &mut (dyn BufRead + Send + Sync), + stdout: &mut (dyn Write + Send + Sync), +) -> io::Result<()> { fn macro_kind_to_api(kind: proc_macro_srv::ProcMacroKind) -> proc_macro_api::ProcMacroKind { match kind { proc_macro_srv::ProcMacroKind::CustomDerive => { @@ -316,8 +325,8 @@ fn run_() -> io::Result<()> { } let mut buf = C::Buf::default(); - let mut read_request = || legacy::Request::read::<_, C>(&mut io::stdin().lock(), &mut buf); - let write_response = |msg: legacy::Response| msg.write::<_, C>(&mut io::stdout().lock()); + let mut read_request = || legacy::Request::read::(stdin, &mut buf); + let mut write_response = |msg: legacy::Response| msg.write::(stdout); let env = EnvSnapshot::default(); let srv = proc_macro_srv::ProcMacroSrv::new(&env); @@ -446,7 +455,10 @@ fn run_() -> io::Result<()> { Ok(()) } -fn send_response(stdout: &io::Stdout, resp: bidirectional::Response) -> io::Result<()> { +fn send_response( + stdout: &mut dyn Write, + resp: bidirectional::Response, +) -> io::Result<()> { let resp = bidirectional::BidirectionalMessage::Response(resp); - resp.write::<_, C>(&mut stdout.lock()) + resp.write::(stdout) } From 27fef0ccbe56e9e4494d19780cdd7c41660d764d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 10 Jan 2026 16:01:09 +0100 Subject: [PATCH 038/131] internal: Landing integration test infra for proc-macro-srv-cli --- src/tools/rust-analyzer/Cargo.lock | 6 + .../crates/proc-macro-api/src/lib.rs | 6 +- .../crates/proc-macro-srv-cli/Cargo.toml | 14 ++ .../crates/proc-macro-srv-cli/src/lib.rs | 6 + .../crates/proc-macro-srv-cli/src/main.rs | 43 ++-- .../proc-macro-srv-cli/src/main_loop.rs | 5 +- .../proc-macro-srv-cli/tests/common/utils.rs | 213 +++++++++++++++++ .../proc-macro-srv-cli/tests/legacy_json.rs | 224 ++++++++++++++++++ 8 files changed, 494 insertions(+), 23 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/lib.rs create mode 100644 src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs create mode 100644 src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 5bdde7c7c3e62..d6c6250e13dc7 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -1879,9 +1879,15 @@ name = "proc-macro-srv-cli" version = "0.0.0" dependencies = [ "clap", + "expect-test", + "intern", + "paths", "postcard", "proc-macro-api", "proc-macro-srv", + "proc-macro-test", + "span", + "tt", ] [[package]] diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index 98ee6817c2d21..822809943a364 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -44,10 +44,14 @@ pub mod version { pub const CURRENT_API_VERSION: u32 = HASHED_AST_ID; } -#[derive(Copy, Clone)] +/// Protocol format for communication between client and server. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum ProtocolFormat { + /// JSON-based legacy protocol (newline-delimited JSON). JsonLegacy, + /// Postcard-based legacy protocol (COBS-encoded postcard). PostcardLegacy, + /// Bidirectional postcard protocol with sub-request support. 
BidirectionalPostcardPrototype, } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml index 6b2db0b269d57..a25e3b64ad42b 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml @@ -10,12 +10,26 @@ license.workspace = true rust-version.workspace = true publish = false +[lib] +doctest = false + [dependencies] proc-macro-srv.workspace = true proc-macro-api.workspace = true postcard.workspace = true clap = {version = "4.5.42", default-features = false, features = ["std"]} +[dev-dependencies] +expect-test.workspace = true +paths.workspace = true +# span = {workspace = true, default-features = false} does not work +span = { path = "../span", default-features = false} +tt.workspace = true +intern.workspace = true + +# used as proc macro test target +proc-macro-test.path = "../proc-macro-srv/proc-macro-test" + [features] default = [] # default = ["sysroot-abi"] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/lib.rs new file mode 100644 index 0000000000000..9e6f03bf46046 --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/lib.rs @@ -0,0 +1,6 @@ +//! Library interface for `proc-macro-srv-cli`. +//! +//! This module exposes the server main loop and protocol format for integration testing. + +#[cfg(feature = "sysroot-abi")] +pub mod main_loop; diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index 189a1eea5c196..a246d4d3f28f9 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -9,11 +9,11 @@ extern crate rustc_driver as _; mod version; -#[cfg(feature = "sysroot-abi")] -mod main_loop; use clap::{Command, ValueEnum}; +use proc_macro_api::ProtocolFormat; + #[cfg(feature = "sysroot-abi")] -use main_loop::run; +use proc_macro_srv_cli::main_loop::run; fn main() -> std::io::Result<()> { let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE"); @@ -32,7 +32,7 @@ fn main() -> std::io::Result<()> { .long("format") .action(clap::ArgAction::Set) .default_value("json-legacy") - .value_parser(clap::builder::EnumValueParser::::new()), + .value_parser(clap::builder::EnumValueParser::::new()), clap::Arg::new("version") .long("version") .action(clap::ArgAction::SetTrue) @@ -43,33 +43,37 @@ fn main() -> std::io::Result<()> { println!("rust-analyzer-proc-macro-srv {}", version::version()); return Ok(()); } - let &format = - matches.get_one::("format").expect("format value should always be present"); + let &format = matches + .get_one::("format") + .expect("format value should always be present"); let mut stdin = std::io::BufReader::new(std::io::stdin()); let mut stdout = std::io::stdout(); - run(&mut stdin, &mut stdout, format) + run(&mut stdin, &mut stdout, format.into()) } +/// Wrapper for CLI argument parsing that implements `ValueEnum`. 
#[derive(Copy, Clone)] -enum ProtocolFormat { - JsonLegacy, - PostcardLegacy, - BidirectionalPostcardPrototype, +struct ProtocolFormatArg(ProtocolFormat); + +impl From for ProtocolFormat { + fn from(arg: ProtocolFormatArg) -> Self { + arg.0 + } } -impl ValueEnum for ProtocolFormat { +impl ValueEnum for ProtocolFormatArg { fn value_variants<'a>() -> &'a [Self] { &[ - ProtocolFormat::JsonLegacy, - ProtocolFormat::PostcardLegacy, - ProtocolFormat::BidirectionalPostcardPrototype, + ProtocolFormatArg(ProtocolFormat::JsonLegacy), + ProtocolFormatArg(ProtocolFormat::PostcardLegacy), + ProtocolFormatArg(ProtocolFormat::BidirectionalPostcardPrototype), ] } fn to_possible_value(&self) -> Option { - match self { + match self.0 { ProtocolFormat::JsonLegacy => Some(clap::builder::PossibleValue::new("json-legacy")), ProtocolFormat::PostcardLegacy => { Some(clap::builder::PossibleValue::new("postcard-legacy")) @@ -79,12 +83,13 @@ impl ValueEnum for ProtocolFormat { } } } + fn from_str(input: &str, _ignore_case: bool) -> Result { match input { - "json-legacy" => Ok(ProtocolFormat::JsonLegacy), - "postcard-legacy" => Ok(ProtocolFormat::PostcardLegacy), + "json-legacy" => Ok(ProtocolFormatArg(ProtocolFormat::JsonLegacy)), + "postcard-legacy" => Ok(ProtocolFormatArg(ProtocolFormat::PostcardLegacy)), "bidirectional-postcard-prototype" => { - Ok(ProtocolFormat::BidirectionalPostcardPrototype) + Ok(ProtocolFormatArg(ProtocolFormat::BidirectionalPostcardPrototype)) } _ => Err(format!("unknown protocol format: {input}")), } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index 0c651d22b41bc..b927eea46b586 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -1,6 +1,6 @@ //! The main loop of the proc-macro server. use proc_macro_api::{ - Codec, + Codec, ProtocolFormat, bidirectional_protocol::msg as bidirectional, legacy_protocol::msg as legacy, transport::codec::{json::JsonProtocol, postcard::PostcardProtocol}, @@ -12,7 +12,6 @@ use legacy::Message; use proc_macro_srv::{EnvSnapshot, SpanId}; -use crate::ProtocolFormat; struct SpanTrans; impl legacy::SpanTransformer for SpanTrans { @@ -32,7 +31,7 @@ impl legacy::SpanTransformer for SpanTrans { } } -pub(crate) fn run( +pub fn run( stdin: &mut (dyn BufRead + Send + Sync), stdout: &mut (dyn Write + Send + Sync), format: ProtocolFormat, diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs new file mode 100644 index 0000000000000..722e92eec7e52 --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs @@ -0,0 +1,213 @@ +use std::{ + collections::VecDeque, + io::{self, BufRead, Read, Write}, + sync::{Arc, Condvar, Mutex}, + thread, +}; + +use paths::Utf8PathBuf; +use proc_macro_api::{ + legacy_protocol::msg::{FlatTree, Message, Request, Response, SpanDataIndexMap}, + transport::codec::json::JsonProtocol, +}; +use span::{Edition, EditionedFileId, FileId, Span, SpanAnchor, SyntaxContext, TextRange}; +use tt::{Delimiter, DelimiterKind, TopSubtreeBuilder}; + +/// Shared state for an in-memory byte channel. +#[derive(Default)] +struct ChannelState { + buffer: VecDeque, + closed: bool, +} + +type InMemoryChannel = Arc<(Mutex, Condvar)>; + +/// Writer end of an in-memory channel. 
+pub(crate) struct ChannelWriter { + state: InMemoryChannel, +} + +impl Write for ChannelWriter { + fn write(&mut self, buf: &[u8]) -> io::Result { + let (lock, cvar) = &*self.state; + let mut state = lock.lock().unwrap(); + if state.closed { + return Err(io::Error::new(io::ErrorKind::BrokenPipe, "channel closed")); + } + state.buffer.extend(buf); + cvar.notify_all(); + Ok(buf.len()) + } + + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +impl Drop for ChannelWriter { + fn drop(&mut self) { + let (lock, cvar) = &*self.state; + let mut state = lock.lock().unwrap(); + state.closed = true; + cvar.notify_all(); + } +} + +/// Reader end of an in-memory channel. +pub(crate) struct ChannelReader { + state: InMemoryChannel, + internal_buf: Vec, +} + +impl Read for ChannelReader { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let (lock, cvar) = &*self.state; + let mut state = lock.lock().unwrap(); + + while state.buffer.is_empty() && !state.closed { + state = cvar.wait(state).unwrap(); + } + + if state.buffer.is_empty() && state.closed { + return Ok(0); + } + + let to_read = buf.len().min(state.buffer.len()); + for (dst, src) in buf.iter_mut().zip(state.buffer.drain(..to_read)) { + *dst = src; + } + Ok(to_read) + } +} + +impl BufRead for ChannelReader { + fn fill_buf(&mut self) -> io::Result<&[u8]> { + let (lock, cvar) = &*self.state; + let mut state = lock.lock().unwrap(); + + while state.buffer.is_empty() && !state.closed { + state = cvar.wait(state).unwrap(); + } + + self.internal_buf.clear(); + self.internal_buf.extend(&state.buffer); + Ok(&self.internal_buf) + } + + fn consume(&mut self, amt: usize) { + let (lock, _) = &*self.state; + let mut state = lock.lock().unwrap(); + let to_drain = amt.min(state.buffer.len()); + drop(state.buffer.drain(..to_drain)); + } +} + +/// Creates a connected pair of channels for bidirectional communication. +fn create_channel_pair() -> (ChannelWriter, ChannelReader, ChannelWriter, ChannelReader) { + // Channel for client -> server communication + let client_to_server = Arc::new(( + Mutex::new(ChannelState { buffer: VecDeque::new(), closed: false }), + Condvar::new(), + )); + let client_writer = ChannelWriter { state: client_to_server.clone() }; + let server_reader = ChannelReader { state: client_to_server, internal_buf: Vec::new() }; + + // Channel for server -> client communication + let server_to_client = Arc::new(( + Mutex::new(ChannelState { buffer: VecDeque::new(), closed: false }), + Condvar::new(), + )); + + let server_writer = ChannelWriter { state: server_to_client.clone() }; + let client_reader = ChannelReader { state: server_to_client, internal_buf: Vec::new() }; + + (client_writer, client_reader, server_writer, server_reader) +} + +pub(crate) fn proc_macro_test_dylib_path() -> Utf8PathBuf { + let path = proc_macro_test::PROC_MACRO_TEST_LOCATION; + if path.is_empty() { + panic!("proc-macro-test dylib not available (requires nightly toolchain)"); + } + path.into() +} + +/// Runs a test with the server in a background thread. 
+pub(crate) fn with_server(test_fn: F) -> R +where + F: FnOnce(&mut dyn Write, &mut dyn BufRead) -> R, +{ + let (mut client_writer, mut client_reader, mut server_writer, mut server_reader) = + create_channel_pair(); + + let server_handle = thread::spawn(move || { + proc_macro_srv_cli::main_loop::run( + &mut server_reader, + &mut server_writer, + proc_macro_api::ProtocolFormat::JsonLegacy, + ) + }); + + let result = test_fn(&mut client_writer, &mut client_reader); + + // Close the client writer to signal the server to stop + drop(client_writer); + + // Wait for server to finish + match server_handle.join() { + Ok(Ok(())) => {} + Ok(Err(e)) => { + // IO error from server is expected when client disconnects + if matches!( + e.kind(), + io::ErrorKind::BrokenPipe + | io::ErrorKind::UnexpectedEof + | io::ErrorKind::InvalidData + ) { + panic!("Server error: {e}"); + } + } + Err(e) => std::panic::resume_unwind(e), + } + + result +} + +/// Sends a request and reads the response using JSON protocol. +pub(crate) fn request( + writer: &mut dyn Write, + reader: &mut dyn BufRead, + request: Request, +) -> Response { + request.write::(writer).expect("failed to write request"); + + let mut buf = String::new(); + Response::read::(reader, &mut buf) + .expect("failed to read response") + .expect("no response received") +} + +/// Creates a simple empty token tree suitable for testing. +pub(crate) fn create_empty_token_tree( + version: u32, + span_data_table: &mut SpanDataIndexMap, +) -> FlatTree { + let anchor = SpanAnchor { + file_id: EditionedFileId::new(FileId::from_raw(0), Edition::CURRENT), + ast_id: span::ROOT_ERASED_FILE_AST_ID, + }; + let span = Span { + range: TextRange::empty(0.into()), + anchor, + ctx: SyntaxContext::root(Edition::CURRENT), + }; + + let builder = TopSubtreeBuilder::new(Delimiter { + open: span, + close: span, + kind: DelimiterKind::Invisible, + }); + let tt = builder.build(); + + FlatTree::from_subtree(tt.view(), version, span_data_table) +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs new file mode 100644 index 0000000000000..1fa886219a8a4 --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs @@ -0,0 +1,224 @@ +//! Integration tests for the proc-macro-srv-cli main loop. +//! +//! These tests exercise the full client-server RPC procedure using in-memory +//! channels without needing to spawn the actual server and client processes. 
+ +#![cfg(feature = "sysroot-abi")] + +mod common { + pub(crate) mod utils; +} + +use common::utils::{create_empty_token_tree, proc_macro_test_dylib_path, request, with_server}; +use expect_test::expect; +use proc_macro_api::{ + legacy_protocol::msg::{ + ExpandMacro, ExpandMacroData, ExpnGlobals, PanicMessage, Request, Response, ServerConfig, + SpanDataIndexMap, SpanMode, + }, + version::CURRENT_API_VERSION, +}; + +#[test] +fn test_version_check() { + with_server(|writer, reader| { + let response = request(writer, reader, Request::ApiVersionCheck {}); + + match response { + Response::ApiVersionCheck(version) => { + assert_eq!(version, CURRENT_API_VERSION); + } + other => panic!("unexpected response: {other:?}"), + } + }); +} + +#[test] +fn test_list_macros() { + with_server(|writer, reader| { + let dylib_path = proc_macro_test_dylib_path(); + let response = request(writer, reader, Request::ListMacros { dylib_path }); + + let Response::ListMacros(Ok(macros)) = response else { + panic!("expected successful ListMacros response"); + }; + + let mut macro_list: Vec<_> = + macros.iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect(); + macro_list.sort(); + let macro_list_str = macro_list.join("\n"); + + expect![[r#" + DeriveEmpty [CustomDerive] + DeriveError [CustomDerive] + DerivePanic [CustomDerive] + DeriveReemit [CustomDerive] + attr_error [Attr] + attr_noop [Attr] + attr_panic [Attr] + fn_like_clone_tokens [Bang] + fn_like_error [Bang] + fn_like_mk_idents [Bang] + fn_like_mk_literals [Bang] + fn_like_noop [Bang] + fn_like_panic [Bang] + fn_like_span_join [Bang] + fn_like_span_line_column [Bang] + fn_like_span_ops [Bang]"#]] + .assert_eq(¯o_list_str); + }); +} + +#[test] +fn test_list_macros_invalid_path() { + with_server(|writer, reader| { + let response = request( + writer, + reader, + Request::ListMacros { dylib_path: "/nonexistent/path/to/dylib.so".into() }, + ); + + match response { + Response::ListMacros(Err(e)) => assert!( + e.starts_with("Cannot create expander for /nonexistent/path/to/dylib.so"), + "{e}" + ), + other => panic!("expected error response, got: {other:?}"), + } + }); +} + +#[test] +fn test_set_config() { + with_server(|writer, reader| { + let config = ServerConfig { span_mode: SpanMode::Id }; + let response = request(writer, reader, Request::SetConfig(config)); + + match response { + Response::SetConfig(returned_config) => { + assert_eq!(returned_config.span_mode, SpanMode::Id); + } + other => panic!("unexpected response: {other:?}"), + } + }); +} + +#[test] +fn test_set_config_rust_analyzer_mode() { + with_server(|writer, reader| { + let config = ServerConfig { span_mode: SpanMode::RustAnalyzer }; + let response = request(writer, reader, Request::SetConfig(config)); + + match response { + Response::SetConfig(returned_config) => { + assert_eq!(returned_config.span_mode, SpanMode::RustAnalyzer); + } + other => panic!("unexpected response: {other:?}"), + } + }); +} + +#[test] +fn test_expand_macro_panic() { + with_server(|writer, reader| { + let dylib_path = proc_macro_test_dylib_path(); + + let version_response = request(writer, reader, Request::ApiVersionCheck {}); + let Response::ApiVersionCheck(version) = version_response else { + panic!("expected version check response"); + }; + + let mut span_data_table = SpanDataIndexMap::default(); + let macro_body = create_empty_token_tree(version, &mut span_data_table); + + let expand_request = Request::ExpandMacro(Box::new(ExpandMacro { + lib: dylib_path, + env: vec![], + current_dir: None, + data: ExpandMacroData { 
+ macro_body, + macro_name: "fn_like_panic".to_owned(), + attributes: None, + has_global_spans: ExpnGlobals { + serialize: version >= 3, + def_site: 0, + call_site: 0, + mixed_site: 0, + }, + span_data_table: vec![], + }, + })); + + let response = request(writer, reader, expand_request); + + match response { + Response::ExpandMacro(Err(PanicMessage(msg))) => { + assert!(msg.contains("fn_like_panic"), "panic message should mention the macro"); + } + Response::ExpandMacro(Ok(_)) => { + panic!("expected panic, but macro succeeded"); + } + other => panic!("unexpected response: {other:?}"), + } + }); +} + +#[test] +fn test_basic_call_flow() { + with_server(|writer, reader| { + let dylib_path = proc_macro_test_dylib_path(); + + let response1 = request(writer, reader, Request::ApiVersionCheck {}); + assert!(matches!(response1, Response::ApiVersionCheck(_))); + + let response2 = + request(writer, reader, Request::SetConfig(ServerConfig { span_mode: SpanMode::Id })); + assert!(matches!(response2, Response::SetConfig(_))); + + let response3 = + request(writer, reader, Request::ListMacros { dylib_path: dylib_path.clone() }); + assert!(matches!(response3, Response::ListMacros(Ok(_)))); + }); +} + +#[test] +fn test_expand_nonexistent_macro() { + with_server(|writer, reader| { + let dylib_path = proc_macro_test_dylib_path(); + + let version_response = request(writer, reader, Request::ApiVersionCheck {}); + let Response::ApiVersionCheck(version) = version_response else { + panic!("expected version check response"); + }; + + let mut span_data_table = SpanDataIndexMap::default(); + let macro_body = create_empty_token_tree(version, &mut span_data_table); + + let expand_request = Request::ExpandMacro(Box::new(ExpandMacro { + lib: dylib_path, + env: vec![], + current_dir: None, + data: ExpandMacroData { + macro_body, + macro_name: "NonexistentMacro".to_owned(), + attributes: None, + has_global_spans: ExpnGlobals { + serialize: version >= 3, + def_site: 0, + call_site: 0, + mixed_site: 0, + }, + span_data_table: vec![], + }, + })); + + let response = request(writer, reader, expand_request); + + match response { + Response::ExpandMacro(Err(PanicMessage(msg))) => { + expect!["proc-macro `NonexistentMacro` is missing"].assert_eq(&msg) + } + other => panic!("expected error for nonexistent macro, got: {other:?}"), + } + }); +} From e40bd1cf6ece4bf9a2fb05474a20cd399694be93 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Sun, 11 Jan 2026 15:06:38 +0800 Subject: [PATCH 039/131] Add inherit attributes for extract_function assist Example --- ```rust #[cfg(test)] fn foo() { foo($01 + 1$0); } ``` **Before this PR** ```rust #[cfg(test)] fn foo() { foo(fun_name()); } fn $0fun_name() -> i32 { 1 + 1 } ``` **After this PR** ```rust #[cfg(test)] fn foo() { foo(fun_name()); } #[cfg(test)] fn $0fun_name() -> i32 { 1 + 1 } ``` --- .../src/handlers/extract_function.rs | 69 ++++++++++++++++++- 1 file changed, 67 insertions(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index 231df9b5b3e15..294e5f7da8b3c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -25,7 +25,7 @@ use syntax::{ SyntaxKind::{self, COMMENT}, SyntaxNode, SyntaxToken, T, TextRange, TextSize, TokenAtOffset, WalkEvent, ast::{ - self, AstNode, AstToken, HasGenericParams, HasName, edit::IndentLevel, + self, 
AstNode, AstToken, HasAttrs, HasGenericParams, HasName, edit::IndentLevel, edit_in_place::Indent, }, match_ast, ted, @@ -375,6 +375,7 @@ struct ContainerInfo<'db> { ret_type: Option>, generic_param_lists: Vec, where_clauses: Vec, + attrs: Vec, edition: Edition, } @@ -911,6 +912,7 @@ impl FunctionBody { let parents = generic_parents(&parent); let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect(); let where_clauses = parents.iter().filter_map(|it| it.where_clause()).collect(); + let attrs = parents.iter().flat_map(|it| it.attrs()).filter(is_inherit_attr).collect(); Some(( ContainerInfo { @@ -919,6 +921,7 @@ impl FunctionBody { ret_type: ty, generic_param_lists, where_clauses, + attrs, edition, }, contains_tail_expr, @@ -1103,6 +1106,14 @@ impl GenericParent { GenericParent::Trait(trait_) => trait_.where_clause(), } } + + fn attrs(&self) -> impl Iterator { + match self { + GenericParent::Fn(fn_) => fn_.attrs(), + GenericParent::Impl(impl_) => impl_.attrs(), + GenericParent::Trait(trait_) => trait_.attrs(), + } + } } /// Search `parent`'s ancestors for items with potentially applicable generic parameters @@ -1578,7 +1589,7 @@ fn format_function( let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun); make::fn_( - None, + fun.mods.attrs.clone(), None, fun_name, generic_params, @@ -1958,6 +1969,11 @@ fn format_type(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_owned()) } +fn is_inherit_attr(attr: &ast::Attr) -> bool { + let Some(name) = attr.simple_name() else { return false }; + matches!(name.as_str(), "track_caller" | "cfg") +} + fn make_ty(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type { let ty_str = format_type(ty, ctx, module); make::ty(&ty_str) @@ -6372,6 +6388,55 @@ fn foo() { fn $0fun_name(mut a: i32, mut b: i32) { (a, b) = (b, a); } +"#, + ); + } + + #[test] + fn with_cfg_attr() { + check_assist( + extract_function, + r#" +//- /main.rs crate:main cfg:test +#[cfg(test)] +fn foo() { + foo($01 + 1$0); +} +"#, + r#" +#[cfg(test)] +fn foo() { + foo(fun_name()); +} + +#[cfg(test)] +fn $0fun_name() -> i32 { + 1 + 1 +} +"#, + ); + } + + #[test] + fn with_track_caller() { + check_assist( + extract_function, + r#" +#[track_caller] +fn foo() { + foo($01 + 1$0); +} +"#, + r#" +#[track_caller] +fn foo() { + foo(fun_name()); +} + +#[track_caller] +fn $0fun_name() -> i32 { + 1 + 1 +} "#, ); } From 8150413bf5c1327c0a58b544662b50e5ef4bedc2 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Wed, 7 Jan 2026 17:19:33 +0530 Subject: [PATCH 040/131] add byte range subrequest/response --- .../crates/proc-macro-api/src/bidirectional_protocol/msg.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs index 0e3b700dcc5aa..57e7b1ee8f684 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs @@ -1,5 +1,7 @@ //! 
Bidirectional protocol messages +use std::ops::Range; + use paths::Utf8PathBuf; use serde::{Deserialize, Serialize}; @@ -14,6 +16,7 @@ pub enum SubRequest { SourceText { file_id: u32, ast_id: u32, start: u32, end: u32 }, LocalFilePath { file_id: u32 }, LineColumn { file_id: u32, ast_id: u32, offset: u32 }, + ByteRange { file_id: u32, ast_id: u32, start: u32, end: u32 }, } #[derive(Debug, Serialize, Deserialize)] @@ -32,6 +35,9 @@ pub enum SubResponse { line: u32, column: u32, }, + ByteRangeResult { + range: Range, + }, } #[derive(Debug, Serialize, Deserialize)] From e909b4b28286d914c1d67f12f419cf1ed88daa8d Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Wed, 7 Jan 2026 17:20:06 +0530 Subject: [PATCH 041/131] update proc-macro-srv to include byte-range --- src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs | 3 +++ .../proc-macro-srv/src/server_impl/rust_analyzer_span.rs | 5 ++++- .../rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs | 5 +++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index c1ef49a7176b0..ac9f89352c2bf 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -41,6 +41,7 @@ use std::{ env, ffi::OsString, fs, + ops::Range, path::{Path, PathBuf}, sync::{Arc, Mutex, PoisonError}, thread, @@ -100,6 +101,8 @@ pub trait ProcMacroClientInterface { fn local_file(&mut self, file_id: span::FileId) -> Option; /// Line and column are 1-based. fn line_column(&mut self, span: Span) -> Option<(u32, u32)>; + + fn byte_range(&mut self, span: Span) -> Range; } const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index 3a25391b573bf..9946608247c3e 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -162,7 +162,10 @@ impl server::Span for RaSpanServer<'_> { span } fn byte_range(&mut self, span: Self::Span) -> Range { - // FIXME requires db to resolve the ast id, THIS IS NOT INCREMENTAL + if let Some(cb) = self.callback.as_mut() { + return cb.byte_range(span); + } + Range { start: span.range.start().into(), end: span.range.end().into() } } fn join(&mut self, first: Self::Span, second: Self::Span) -> Option { diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 81ff1965d68b4..b7c5c4fdd21f0 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -4,6 +4,7 @@ use expect_test::Expect; use span::{ EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TextRange, }; +use std::ops::Range; use crate::{ EnvSnapshot, ProcMacroClientInterface, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path, @@ -137,6 +138,10 @@ impl ProcMacroClientInterface for MockCallback<'_> { // proc_macro uses 1-based line/column Some((line_col.line as u32 + 1, line_col.col as u32 + 1)) } + + fn byte_range(&mut self, span: Span) -> Range { + Range { start: span.range.start().into(), end: span.range.end().into() } + } } pub fn assert_expand_with_callback( From 
e68a654dca8b43e4f430485249fdef00ed9d85a7 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Wed, 7 Jan 2026 17:20:37 +0530 Subject: [PATCH 042/131] add byte range to main loop and direct the request via callback and define the callback on client side --- .../crates/load-cargo/src/lib.rs | 10 ++++++++++ .../proc-macro-srv-cli/src/main_loop.rs | 19 ++++++++++++++++++- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index 33468a5003c38..904f704221cfe 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -591,6 +591,16 @@ impl ProcMacroExpander for Expander { Ok(SubResponse::FilePathResult { name }) } + SubRequest::ByteRange { file_id, ast_id, start, end } => { + let range = resolve_sub_span( + db, + file_id, + ast_id, + TextRange::new(TextSize::from(start), TextSize::from(end)), + ); + + Ok(SubResponse::ByteRangeResult { range: range.range.into() }) + } }; match self.0.expand( subtree.view(), diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index 0c651d22b41bc..e35f832716dd7 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -6,7 +6,7 @@ use proc_macro_api::{ transport::codec::{json::JsonProtocol, postcard::PostcardProtocol}, version::CURRENT_API_VERSION, }; -use std::io::{self, BufRead, Write}; +use std::{io, ops::Range}; use legacy::Message; @@ -240,6 +240,23 @@ impl proc_macro_srv::ProcMacroClientInterface for ProcMacroClientHandl _ => None, } } + + fn byte_range( + &mut self, + proc_macro_srv::span::Span { range, anchor, ctx: _ }: proc_macro_srv::span::Span, + ) -> Range { + match self.roundtrip(bidirectional::SubRequest::ByteRange { + file_id: anchor.file_id.as_u32(), + ast_id: anchor.ast_id.into_raw(), + start: range.start().into(), + end: range.end().into(), + }) { + Some(bidirectional::BidirectionalMessage::SubResponse( + bidirectional::SubResponse::ByteRangeResult { range }, + )) => range, + _ => Range { start: range.start().into(), end: range.end().into() }, + } + } } fn handle_expand_ra( From 78d243c751e420c38c3a92f707aae78bafc8ad1e Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Thu, 8 Jan 2026 22:43:03 +0530 Subject: [PATCH 043/131] add comment on incrementality of subrequest --- src/tools/rust-analyzer/crates/load-cargo/src/lib.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index 904f704221cfe..8342492a33a4c 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -553,6 +553,7 @@ impl ProcMacroExpander for Expander { Ok(SubResponse::LocalFilePathResult { name }) } + // Not incremental: requires full file text. SubRequest::SourceText { file_id, ast_id, start, end } => { let range = resolve_sub_span( db, @@ -567,6 +568,7 @@ impl ProcMacroExpander for Expander { Ok(SubResponse::SourceTextResult { text }) } + // Not incremental: requires building line index. 
SubRequest::LineColumn { file_id, ast_id, offset } => { let range = resolve_sub_span(db, file_id, ast_id, TextRange::empty(TextSize::from(offset))); @@ -591,6 +593,7 @@ impl ProcMacroExpander for Expander { Ok(SubResponse::FilePathResult { name }) } + // Not incremental: requires global span resolution. SubRequest::ByteRange { file_id, ast_id, start, end } => { let range = resolve_sub_span( db, From d30f7c9f7c8c375c2b806b770dd11f3d481979e9 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Sun, 11 Jan 2026 15:51:33 +0530 Subject: [PATCH 044/131] add write read imports --- .../rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index e35f832716dd7..3beaeb0697ec9 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -6,7 +6,10 @@ use proc_macro_api::{ transport::codec::{json::JsonProtocol, postcard::PostcardProtocol}, version::CURRENT_API_VERSION, }; -use std::{io, ops::Range}; +use std::{ + io::{self, BufRead, Write}, + ops::Range, +}; use legacy::Message; From 33f0f80c1ac9dd1df22082cce77e7eedbb30b4f1 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Sun, 11 Jan 2026 21:33:57 +0530 Subject: [PATCH 045/131] add make corresponding constructor methods in SyntaxFactory --- .../src/ast/syntax_factory/constructors.rs | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs index 7cf9e2bf14f9f..5fe419ad4eb79 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs @@ -1578,6 +1578,44 @@ impl SyntaxFactory { pub fn ident(&self, text: &str) -> SyntaxToken { make::tokens::ident(text) } + + pub fn mut_self_param(&self) -> ast::SelfParam { + let ast = make::mut_self_param().clone_for_update(); + + if let Some(mut mapping) = self.mappings() { + let builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.finish(&mut mapping); + } + + ast + } + + pub fn ret_type(&self, ty: ast::Type) -> ast::RetType { + let ast = make::ret_type(ty.clone()).clone_for_update(); + + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_node(ty.syntax().clone(), ast.ty().unwrap().syntax().clone()); + builder.finish(&mut mapping); + } + ast + } + + pub fn ty_ref(&self, ty: ast::Type, is_mut: bool) -> ast::Type { + let ast = make::ty_ref(ty.clone(), is_mut).clone_for_update(); + + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + match &ast { + ast::Type::RefType(ref_ty) => { + builder.map_node(ty.syntax().clone(), ref_ty.ty().unwrap().syntax().clone()); + } + _ => unreachable!(), + } + builder.finish(&mut mapping); + } + ast + } } // `ext` constructors From 16d74e7b90fc5fe2aaa23377bf51093f0295910b Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Sun, 11 Jan 2026 21:34:44 +0530 Subject: [PATCH 046/131] migrate generate_mut_trait_impl to new SyntaxEditor --- .../src/handlers/generate_mut_trait_impl.rs | 191 +++++++++++------- 1 file changed, 121 insertions(+), 70 deletions(-) diff --git 
a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index ae1ae24d1ec1c..53f6f4883f4dc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -1,8 +1,8 @@ use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait}; use syntax::{ - AstNode, T, - ast::{self, edit_in_place::Indent, make}, - ted, + AstNode, SyntaxElement, SyntaxNode, T, + ast::{self, edit::AstNodeEdit, edit_in_place::Indent, syntax_factory::SyntaxFactory}, + syntax_editor::{Element, Position, SyntaxEditor}, }; use crate::{AssistContext, AssistId, Assists}; @@ -45,12 +45,13 @@ use crate::{AssistContext, AssistId, Assists}; // } // ``` pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let impl_def = ctx.find_node_at_offset::()?.clone_for_update(); - let indent = impl_def.indent_level(); + let impl_def = ctx.find_node_at_offset::()?; + let indent = Indent::indent_level(&impl_def); let ast::Type::PathType(path) = impl_def.trait_()? else { return None; }; + let trait_name = path.path()?.segment()?.name_ref()?; let scope = ctx.sema.scope(impl_def.trait_()?.syntax())?; @@ -59,75 +60,133 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?; let trait_new = get_trait_mut(&trait_, famous)?; - // Index -> IndexMut - ted::replace(trait_name.syntax(), make::name_ref(trait_new).clone_for_update().syntax()); - - // index -> index_mut - let (trait_method_name, new_trait_method_name) = impl_def - .syntax() - .descendants() - .filter_map(ast::Name::cast) - .find_map(process_method_name)?; - ted::replace( - trait_method_name.syntax(), - make::name(new_trait_method_name).clone_for_update().syntax(), - ); - - if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) { - ted::remove(type_alias.syntax()); - } - - // &self -> &mut self - let mut_self_param = make::mut_self_param(); - let self_param: ast::SelfParam = - impl_def.syntax().descendants().find_map(ast::SelfParam::cast)?; - ted::replace(self_param.syntax(), mut_self_param.clone_for_update().syntax()); - - // &Self::Output -> &mut Self::Output - let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?; - let new_ret_type = process_ret_type(&ret_type)?; - ted::replace(ret_type.syntax(), make::ret_type(new_ret_type).clone_for_update().syntax()); - - let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it { - ast::AssocItem::Fn(f) => Some(f), - _ => None, - })?; - let _ = process_ref_mut(&fn_); - - let assoc_list = make::assoc_item_list(None).clone_for_update(); - ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax()); - impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_)); - let target = impl_def.syntax().text_range(); + acc.add( AssistId::generate("generate_mut_trait_impl"), format!("Generate `{trait_new}` impl from this `{trait_name}` trait"), target, |edit| { - edit.insert( - target.start(), - if ctx.config.snippet_cap.is_some() { - format!("$0{impl_def}\n\n{indent}") - } else { - format!("{impl_def}\n\n{indent}") - }, + let impl_clone = impl_def.reset_indent().clone_subtree(); + let mut editor = SyntaxEditor::new(impl_clone.syntax().clone()); + let factory = 
SyntaxFactory::without_mappings(); + + apply_generate_mut_impl(&mut editor, &factory, &impl_clone, trait_new); + + let new_root = editor.finish(); + let new_root = new_root.new_root(); + + let new_impl = ast::Impl::cast(new_root.clone()).unwrap(); + + Indent::indent(&new_impl, indent); + + let mut editor = edit.make_editor(impl_def.syntax()); + editor.insert_all( + Position::before(impl_def.syntax()), + vec![ + new_impl.syntax().syntax_element(), + factory.whitespace(&format!("\n\n{indent}")).syntax_element(), + ], ); + + if let Some(cap) = ctx.config.snippet_cap { + let tabstop_before = edit.make_tabstop_before(cap); + editor.add_annotation(new_impl.syntax(), tabstop_before); + } + + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } -fn process_ref_mut(fn_: &ast::Fn) -> Option<()> { - let expr = fn_.body()?.tail_expr()?; - match &expr { - ast::Expr::RefExpr(ref_expr) if ref_expr.mut_token().is_none() => { - ted::insert_all_raw( - ted::Position::after(ref_expr.amp_token()?), - vec![make::token(T![mut]).into(), make::tokens::whitespace(" ").into()], - ); - } - _ => {} +fn delete_with_trivia(editor: &mut SyntaxEditor, node: &SyntaxNode) { + let mut end: SyntaxElement = node.clone().into(); + + if let Some(next) = node.next_sibling_or_token() + && let SyntaxElement::Token(tok) = &next + && tok.kind().is_trivia() + { + end = next.clone(); } - None + + editor.delete_all(node.clone().into()..=end); +} + +fn apply_generate_mut_impl( + editor: &mut SyntaxEditor, + factory: &SyntaxFactory, + impl_def: &ast::Impl, + trait_new: &str, +) -> Option<()> { + let path = + impl_def.trait_().and_then(|t| t.syntax().descendants().find_map(ast::Path::cast))?; + let seg = path.segment()?; + let name_ref = seg.name_ref()?; + + let new_name_ref = factory.name_ref(trait_new); + editor.replace(name_ref.syntax(), new_name_ref.syntax()); + + if let Some((name, new_name)) = + impl_def.syntax().descendants().filter_map(ast::Name::cast).find_map(process_method_name) + { + let new_name_node = factory.name(new_name); + editor.replace(name.syntax(), new_name_node.syntax()); + } + + if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) { + delete_with_trivia(editor, type_alias.syntax()); + } + + if let Some(self_param) = impl_def.syntax().descendants().find_map(ast::SelfParam::cast) { + let mut_self = factory.mut_self_param(); + editor.replace(self_param.syntax(), mut_self.syntax()); + } + + if let Some(ret_type) = impl_def.syntax().descendants().find_map(ast::RetType::cast) + && let Some(new_ty) = process_ret_type(factory, &ret_type) + { + let new_ret = factory.ret_type(new_ty); + editor.replace(ret_type.syntax(), new_ret.syntax()) + } + + if let Some(fn_) = impl_def.assoc_item_list().and_then(|l| { + l.assoc_items().find_map(|it| match it { + ast::AssocItem::Fn(f) => Some(f), + _ => None, + }) + }) { + process_ref_mut(editor, factory, &fn_); + } + + Some(()) +} + +fn process_ref_mut(editor: &mut SyntaxEditor, factory: &SyntaxFactory, fn_: &ast::Fn) { + let Some(expr) = fn_.body().and_then(|b| b.tail_expr()) else { return }; + + let ast::Expr::RefExpr(ref_expr) = expr else { return }; + + if ref_expr.mut_token().is_some() { + return; + } + + let Some(amp) = ref_expr.amp_token() else { return }; + + let mut_kw = factory.token(T![mut]); + let space = factory.whitespace(" "); + + editor.insert(Position::after(amp.clone()), space.syntax_element()); + editor.insert(Position::after(amp), mut_kw.syntax_element()); +} + +fn process_ret_type(factory: &SyntaxFactory, ref_ty: &ast::RetType) -> 
Option { + let ty = ref_ty.ty()?; + let ast::Type::RefType(ref_type) = ty else { + return None; + }; + + let inner = ref_type.ty()?; + Some(factory.ty_ref(inner, true)) } fn get_trait_mut(apply_trait: &hir::Trait, famous: FamousDefs<'_, '_>) -> Option<&'static str> { @@ -158,14 +217,6 @@ fn process_method_name(name: ast::Name) -> Option<(ast::Name, &'static str)> { Some((name, new_name)) } -fn process_ret_type(ref_ty: &ast::RetType) -> Option { - let ty = ref_ty.ty()?; - let ast::Type::RefType(ref_type) = ty else { - return None; - }; - Some(make::ty_ref(ref_type.ty()?, true)) -} - #[cfg(test)] mod tests { use crate::{ From 594ca4b1bc47588a851265d51a5db05ed2f080b2 Mon Sep 17 00:00:00 2001 From: The rustc-josh-sync Cronjob Bot Date: Mon, 12 Jan 2026 04:26:31 +0000 Subject: [PATCH 047/131] Prepare for merging from rust-lang/rust This updates the rust-version file to 44a5b55557c26353f388400d7da95527256fe260. --- src/tools/rust-analyzer/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index 4b08b0884ca81..b53a66c667517 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -548e586795f6b6fe089d8329aa5edbf0f5202646 +44a5b55557c26353f388400d7da95527256fe260 From 80acef153ffa6ecc4c9458b8a1169b10d7d750c7 Mon Sep 17 00:00:00 2001 From: The rustc-josh-sync Cronjob Bot Date: Mon, 12 Jan 2026 04:26:43 +0000 Subject: [PATCH 048/131] Format code --- src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs | 4 +++- src/tools/rust-analyzer/crates/span/src/hygiene.rs | 4 +--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs index d10e122a5deb2..1726412275991 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs @@ -101,7 +101,9 @@ impl DeclarativeMacroExpander { match &*value { "transparent" => ControlFlow::Break(Transparency::Transparent), // "semitransparent" is for old rustc versions. - "semiopaque" | "semitransparent" => ControlFlow::Break(Transparency::SemiOpaque), + "semiopaque" | "semitransparent" => { + ControlFlow::Break(Transparency::SemiOpaque) + } "opaque" => ControlFlow::Break(Transparency::Opaque), _ => ControlFlow::Continue(()), } diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs index 9904c562a7409..fe05ef9465181 100644 --- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs @@ -241,9 +241,7 @@ const _: () = { edition: zalsa_::interned::Lookup::into_owned(data.2), parent: zalsa_::interned::Lookup::into_owned(data.3), opaque: opaque(zalsa_::FromId::from_id(id)), - opaque_and_semiopaque: opaque_and_semiopaque( - zalsa_::FromId::from_id(id), - ), + opaque_and_semiopaque: opaque_and_semiopaque(zalsa_::FromId::from_id(id)), }, ) } From 294a0afd655f22f98201c5e34e9f3b57e42a537c Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Mon, 12 Jan 2026 08:51:42 +0200 Subject: [PATCH 049/131] Disable `unused_variables` and `unused_mut` warnings They suffer from an unacceptable amount of false positives after #21209. Another option to disable them is to include them in `rust-analyzer.diagnostics.disable` by default, but that will mean users could override that. 
--- .../src/handlers/mutability_errors.rs | 24 ++++++----- .../src/handlers/unused_variables.rs | 42 ++++++++++--------- 2 files changed, 37 insertions(+), 29 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 2887a32825db4..e3cfbdfb515f4 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -1,3 +1,5 @@ +#![expect(unused, reason = "diagnostics is temporarily disabled due to too many false positives")] + use hir::db::ExpandDatabase; use ide_db::source_change::SourceChange; use ide_db::text_edit::TextEdit; @@ -88,16 +90,17 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Op )]) })(); let ast = d.local.primary_source(ctx.sema.db).syntax_ptr(); - Some( - Diagnostic::new_with_syntax_node_ptr( - ctx, - DiagnosticCode::RustcLint("unused_mut"), - "variable does not need to be mutable", - ast, - ) - // Not supporting `#[allow(unused_mut)]` in proc macros leads to false positive, hence not stable. - .with_fixes(fixes), - ) + // Some( + // Diagnostic::new_with_syntax_node_ptr( + // ctx, + // DiagnosticCode::RustcLint("unused_mut"), + // "variable does not need to be mutable", + // ast, + // ) + // // Not supporting `#[allow(unused_mut)]` in proc macros leads to false positive, hence not stable. + // .with_fixes(fixes), + // ) + None } pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option { @@ -105,6 +108,7 @@ pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option Date: Mon, 12 Jan 2026 08:36:32 +0100 Subject: [PATCH 050/131] Create a new `SymbolKind::CrateRoot` --- .../src/completions/extern_crate.rs | 6 +++--- .../crates/ide-completion/src/item.rs | 1 + .../rust-analyzer/crates/ide-db/src/lib.rs | 8 ++++--- .../crates/ide/src/navigation_target.rs | 13 ++++-------- .../crates/ide/src/references.rs | 2 +- .../rust-analyzer/crates/ide/src/runnables.rs | 8 +++---- .../ide/src/syntax_highlighting/highlight.rs | 21 +++++++++---------- .../ide/src/syntax_highlighting/inject.rs | 12 +++++------ .../ide/src/syntax_highlighting/tags.rs | 1 + .../test_data/highlight_asm.html | 8 +++---- .../test_data/highlight_attributes.html | 2 +- .../test_data/highlight_crate_root.html | 16 +++++++------- .../test_data/highlight_default_library.html | 2 +- .../test_data/highlight_deprecated.html | 4 ++-- .../test_data/highlight_doctest.html | 6 +++--- .../test_data/highlight_extern_crate.html | 14 ++++++------- .../test_data/highlight_general.html | 12 +++++------ .../test_data/highlight_injection_2.html | 2 +- .../test_data/highlight_issue_18089.html | 2 +- .../test_data/highlight_keywords_2015.html | 8 +++---- .../test_data/highlight_keywords_2018.html | 8 +++---- .../test_data/highlight_keywords_2021.html | 8 +++---- .../test_data/highlight_keywords_2024.html | 8 +++---- .../test_data/highlight_keywords_macros.html | 4 ++-- .../test_data/highlight_macros.html | 2 +- .../test_data/highlight_strings.html | 2 +- .../test_data/highlight_unsafe.html | 4 ++-- .../crates/rust-analyzer/src/lsp/to_proto.rs | 8 ++++++- 28 files changed, 98 insertions(+), 94 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs index 71a3e4eb4ed6d..91202e8b32fcd 100644 --- 
a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs @@ -17,7 +17,7 @@ pub(crate) fn complete_extern_crate(acc: &mut Completions, ctx: &CompletionConte } let mut item = CompletionItem::new( - CompletionItemKind::SymbolKind(SymbolKind::Module), + CompletionItemKind::SymbolKind(SymbolKind::CrateRoot), ctx.source_range(), name.display_no_db(ctx.edition).to_smolstr(), ctx.edition, @@ -48,7 +48,7 @@ mod other_mod {} let completion_list = completion_list_no_kw(case); - assert_eq!("md other_crate_a\n".to_owned(), completion_list); + assert_eq!("cr other_crate_a\n".to_owned(), completion_list); } #[test] @@ -68,6 +68,6 @@ mod other_mod {} let completion_list = completion_list_no_kw(case); - assert_eq!("md other_crate_a\n".to_owned(), completion_list); + assert_eq!("cr other_crate_a\n".to_owned(), completion_list); } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index 71d32da74710d..1a9139d8553be 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -381,6 +381,7 @@ impl CompletionItemKind { SymbolKind::BuiltinAttr => "ba", SymbolKind::Const => "ct", SymbolKind::ConstParam => "cp", + SymbolKind::CrateRoot => "cr", SymbolKind::Derive => "de", SymbolKind::DeriveHelper => "dh", SymbolKind::Enum => "en", diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index 413b58bf7980a..023b32b361955 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -65,7 +65,7 @@ use base_db::{ }; use hir::{ FilePositionWrapper, FileRangeWrapper, - db::{DefDatabase, ExpandDatabase}, + db::{DefDatabase, ExpandDatabase, HirDatabase}, }; use triomphe::Arc; @@ -269,6 +269,7 @@ pub enum SymbolKind { BuiltinAttr, Const, ConstParam, + CrateRoot, Derive, DeriveHelper, Enum, @@ -307,14 +308,15 @@ impl From for SymbolKind { } } -impl From for SymbolKind { - fn from(it: hir::ModuleDef) -> Self { +impl SymbolKind { + pub fn from_module_def(db: &dyn HirDatabase, it: hir::ModuleDef) -> Self { match it { hir::ModuleDef::Const(..) => SymbolKind::Const, hir::ModuleDef::Variant(..) => SymbolKind::Variant, hir::ModuleDef::Function(..) => SymbolKind::Function, hir::ModuleDef::Macro(mac) if mac.is_proc_macro() => SymbolKind::ProcMacro, hir::ModuleDef::Macro(..) => SymbolKind::Macro, + hir::ModuleDef::Module(m) if m.is_crate_root(db) => SymbolKind::CrateRoot, hir::ModuleDef::Module(..) => SymbolKind::Module, hir::ModuleDef::Static(..) 
=> SymbolKind::Static, hir::ModuleDef::Adt(hir::Adt::Struct(..)) => SymbolKind::Struct, diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index 047df309eca6e..185df92e2d39d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -264,7 +264,7 @@ impl<'db> TryToNav for FileSymbol<'db> { .flatten() .map_or_else(|| self.name.clone(), |it| it.symbol().clone()), alias: self.is_alias.then(|| self.name.clone()), - kind: Some(self.def.into()), + kind: Some(SymbolKind::from_module_def(db, self.def)), full_range, focus_range, container_name: self.container_name.clone(), @@ -480,16 +480,11 @@ impl ToNav for hir::Module { ModuleSource::Module(node) => (node.syntax(), node.name()), ModuleSource::BlockExpr(node) => (node.syntax(), None), }; + let kind = if self.is_crate_root(db) { SymbolKind::CrateRoot } else { SymbolKind::Module }; orig_range_with_focus(db, file_id, syntax, focus).map( |(FileRange { file_id, range: full_range }, focus_range)| { - NavigationTarget::from_syntax( - file_id, - name.clone(), - focus_range, - full_range, - SymbolKind::Module, - ) + NavigationTarget::from_syntax(file_id, name.clone(), focus_range, full_range, kind) }, ) } @@ -549,7 +544,7 @@ impl TryToNav for hir::ExternCrateDecl { self.alias_or_name(db).unwrap_or_else(|| self.name(db)).symbol().clone(), focus_range, full_range, - SymbolKind::Module, + SymbolKind::CrateRoot, ); res.docs = self.docs(db).map(Documentation::into_owned); diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 4918fe4ff9a47..5443021988d45 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -1079,7 +1079,7 @@ use self$0; use self$0; "#, expect![[r#" - _ Module FileId(0) 0..10 + _ CrateRoot FileId(0) 0..10 FileId(0) 4..8 import "#]], diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 6cec912503515..42efa7142b50b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -815,7 +815,7 @@ mod not_a_root { "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..331, name: \"_\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..331, name: \"_\", kind: CrateRoot })", "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", "(Bin, NavigationTarget { file_id: FileId(0), full_range: 15..76, focus_range: 42..71, name: \"__cortex_m_rt_main_trampoline\", kind: Function })", "(Bin, NavigationTarget { file_id: FileId(0), full_range: 78..154, focus_range: 113..149, name: \"__cortex_m_rt_main_trampoline_unsafe\", kind: Function })", @@ -1136,7 +1136,7 @@ fn test_foo1() {} "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..51, name: \"_\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..51, name: \"_\", kind: CrateRoot })", "(Test, NavigationTarget { file_id: FileId(0), full_range: 1..50, focus_range: 36..45, name: \"test_foo1\", kind: Function }, Atom(KeyValue { key: \"feature\", value: \"foo\" }))", ] "#]], @@ -1155,7 +1155,7 @@ fn test_foo1() {} "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), 
full_range: 0..73, name: \"_\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..73, name: \"_\", kind: CrateRoot })", "(Test, NavigationTarget { file_id: FileId(0), full_range: 1..72, focus_range: 58..67, name: \"test_foo1\", kind: Function }, All([Atom(KeyValue { key: \"feature\", value: \"foo\" }), Atom(KeyValue { key: \"feature\", value: \"bar\" })]))", ] "#]], @@ -1234,7 +1234,7 @@ generate_main!(); "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"_\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"_\", kind: CrateRoot })", "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })", "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..307, name: \"foo_test\", kind: Function })", "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)", diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs index 044fd3f5ac9a8..3795d3d4146d6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs @@ -399,7 +399,7 @@ fn highlight_name_ref( highlight_def(sema, krate, field_ref.into(), edition, true) } NameRefClass::ExternCrateShorthand { decl, krate: resolved_krate } => { - let mut h = HlTag::Symbol(SymbolKind::Module).into(); + let mut h = HlTag::Symbol(SymbolKind::CrateRoot).into(); if krate.as_ref().is_some_and(|krate| resolved_krate != *krate) { h |= HlMod::Library; @@ -417,7 +417,6 @@ fn highlight_name_ref( if is_deprecated { h |= HlMod::Deprecated; } - h |= HlMod::CrateRoot; h } }; @@ -496,15 +495,15 @@ pub(super) fn highlight_def( } Definition::TupleField(_) => (Highlight::new(HlTag::Symbol(SymbolKind::Field)), None), Definition::Crate(krate) => ( - Highlight::new(HlTag::Symbol(SymbolKind::Module)) | HlMod::CrateRoot, + Highlight::new(HlTag::Symbol(SymbolKind::CrateRoot)).into(), Some(krate.attrs(sema.db)), ), Definition::Module(module) => { - let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Module)); - if module.is_crate_root(db) { - h |= HlMod::CrateRoot; - } - + let h = Highlight::new(HlTag::Symbol(if module.is_crate_root(db) { + SymbolKind::CrateRoot + } else { + SymbolKind::Module + })); (h, Some(module.attrs(sema.db))) } Definition::Function(func) => { @@ -662,8 +661,7 @@ pub(super) fn highlight_def( (h, None) } Definition::ExternCrateDecl(extern_crate) => { - let mut highlight = - Highlight::new(HlTag::Symbol(SymbolKind::Module)) | HlMod::CrateRoot; + let mut highlight = Highlight::new(HlTag::Symbol(SymbolKind::CrateRoot)).into(); if extern_crate.alias(db).is_none() { highlight |= HlMod::Library; } @@ -805,6 +803,7 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight { TYPE_PARAM => SymbolKind::TypeParam, RECORD_FIELD => SymbolKind::Field, MODULE => SymbolKind::Module, + EXTERN_CRATE => SymbolKind::CrateRoot, FN => SymbolKind::Function, CONST => SymbolKind::Const, STATIC => SymbolKind::Static, @@ -835,7 +834,7 @@ fn highlight_name_ref_by_syntax( }; match parent.kind() { - EXTERN_CRATE => HlTag::Symbol(SymbolKind::Module) | HlMod::CrateRoot, + EXTERN_CRATE => HlTag::Symbol(SymbolKind::CrateRoot).into(), METHOD_CALL_EXPR => 
ast::MethodCallExpr::cast(parent) .and_then(|it| highlight_method_call(sema, krate, &it, is_unsafe_node)) .unwrap_or_else(|| SymbolKind::Method.into()), diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs index 26d2bb5e02884..291333f09cf88 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs @@ -1,6 +1,6 @@ //! "Recursive" Syntax highlighting for code in doctests and fixtures. -use hir::{EditionedFileId, HirFileId, InFile, Semantics}; +use hir::{EditionedFileId, HirFileId, InFile, Semantics, db::HirDatabase}; use ide_db::{ SymbolKind, defs::Definition, documentation::Documentation, range_mapper::RangeMapper, rust_doc::is_rust_fence, @@ -109,7 +109,7 @@ pub(super) fn doc_comment( .for_each(|(range, def)| { hl.add(HlRange { range, - highlight: module_def_to_hl_tag(def) + highlight: module_def_to_hl_tag(sema.db, def) | HlMod::Documentation | HlMod::Injected | HlMod::IntraDocLink, @@ -200,11 +200,11 @@ pub(super) fn doc_comment( } } -fn module_def_to_hl_tag(def: Definition) -> HlTag { +fn module_def_to_hl_tag(db: &dyn HirDatabase, def: Definition) -> HlTag { let symbol = match def { - Definition::Module(_) | Definition::Crate(_) | Definition::ExternCrateDecl(_) => { - SymbolKind::Module - } + Definition::Crate(_) | Definition::ExternCrateDecl(_) => SymbolKind::CrateRoot, + Definition::Module(m) if m.is_crate_root(db) => SymbolKind::CrateRoot, + Definition::Module(_) => SymbolKind::Module, Definition::Function(_) => SymbolKind::Function, Definition::Adt(hir::Adt::Struct(_)) => SymbolKind::Struct, Definition::Adt(hir::Adt::Enum(_)) => SymbolKind::Enum, diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs index ca3c3e3aaace1..0c64d3de1012b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs @@ -144,6 +144,7 @@ impl HlTag { SymbolKind::BuiltinAttr => "builtin_attr", SymbolKind::Const => "constant", SymbolKind::ConstParam => "const_param", + SymbolKind::CrateRoot => "crate_root", SymbolKind::Derive => "derive", SymbolKind::DeriveHelper => "derive_helper", SymbolKind::Enum => "enum", diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html index 100fdd2155a40..1228849c5bfd2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html @@ -45,7 +45,7 @@ unsafe { let foo = 1; let mut o = 0; - core::arch::asm!( + core::arch::asm!( "%input = OpLoad _ {0}", concat!("%result = ", "bar", " _ %input"), "OpStore {1} %result", @@ -54,7 +54,7 @@ ); let thread_id: usize; - core::arch::asm!(" + core::arch::asm!(" mov {0}, gs:[0x30] mov {0}, [{0}+0x48] ", out(reg) thread_id, options(pure, readonly, nostack)); @@ -64,7 +64,7 @@ static VirtualFree: usize; const OffPtr: usize; const OffFn: usize; - core::arch::asm!(" + core::arch::asm!(" push {free_type} push {free_size} push {base} @@ -97,7 +97,7 @@ // Ensure thumb mode is set. 
let rv = (rv as u32) | 1; let msp = msp as u32; - core::arch::asm!( + core::arch::asm!( "mrs {tmp}, CONTROL", "bics {tmp}, {spsel}", "msr CONTROL, {tmp}", diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html index b151ff42fc390..fa7f7b1cbafbe 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html @@ -43,7 +43,7 @@

#[allow(dead_code)]
 #[rustfmt::skip]
-#[proc_macros::identity]
+#[proc_macros::identity]
 #[derive(Default)]
 /// This is a doc comment
 // This is a normal comment
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
index a6e6b16bead51..0b32cedca5d8b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
@@ -41,25 +41,25 @@
 .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 
-
extern crate foo;
-use core::iter;
+
extern crate foo;
+use core::iter;
 
 pub const NINETY_TWO: u8 = 92;
 
-use foo as foooo;
+use foo as foooo;
 
-pub(crate) fn main() {
+pub(crate) fn main() {
     let baz = iter::repeat(92);
 }
 
 mod bar {
-    pub(in super) const FORTY_TWO: u8 = 42;
+    pub(in super) const FORTY_TWO: u8 = 42;
 
     mod baz {
-        use super::super::NINETY_TWO;
-        use crate::foooo::Point;
+        use super::super::NINETY_TWO;
+        use crate::foooo::Point;
 
-        pub(in super::super) const TWENTY_NINE: u8 = 29;
+        pub(in super::super) const TWENTY_NINE: u8 = 29;
     }
 }
 
\ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html index 2f4a2004f1de6..29f78959a54f1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html @@ -41,7 +41,7 @@ .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } -
use core::iter;
+
use core::iter;
 
 fn main() {
     let foo = Some(92);
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html
index 41d3dff8ed9e8..5287affbfc5c1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html
@@ -42,8 +42,8 @@
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 
 
#![deprecated]
-use crate as _;
-extern crate bar;
+use crate as _;
+extern crate bar;
 #[deprecated]
 macro_rules! macro_ {
     () => {};
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index b5c3df6ee447f..ce9ec7431a975 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -48,9 +48,9 @@
 
 //! Syntactic name ref highlighting testing
 //! ```rust
-//! extern crate self;
-//! extern crate other as otter;
-//! extern crate core;
+//! extern crate self;
+//! extern crate other as otter;
+//! extern crate core;
 //! trait T { type Assoc; }
 //! fn f<Arg>() -> use<Arg> where (): T<Assoc = ()> {}
 //! ```
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
index 3a45182368834..8f7cbddd7ffba 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
@@ -41,12 +41,12 @@
 .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 
-
extern crate self as this;
-extern crate std;
-extern crate alloc as abc;
-extern crate unresolved as definitely_unresolved;
+
extern crate self as this;
+extern crate std;
+extern crate alloc as abc;
+extern crate unresolved as definitely_unresolved;
 extern crate unresolved as _;
-extern crate test as opt_in_crate;
-extern crate test as _;
-extern crate proc_macro;
+extern crate test as opt_in_crate;
+extern crate test as _;
+extern crate proc_macro;
 
\ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html index fd652f444ffd5..c6dbc435c0e81 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html @@ -72,7 +72,7 @@ } } -use self::FooCopy::{self as BarCopy}; +use self::FooCopy::{self as BarCopy}; #[derive(Copy)] struct FooCopy { @@ -110,7 +110,7 @@ FOO } -use core::ops::Fn; +use core::ops::Fn; fn baz<F: Fn() -> ()>(f: F) { f() } @@ -184,15 +184,15 @@ } fn use_foo_items() { - let bob = foo::Person { + let bob = foo::Person { name: "Bob", - age: foo::consts::NUMBER, + age: foo::consts::NUMBER, }; - let control_flow = foo::identity(foo::ControlFlow::Continue); + let control_flow = foo::identity(foo::ControlFlow::Continue); if control_flow.should_die() { - foo::die!(); + foo::die!(); } } diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html index 5a5d9bd1f909b..391a46f706c80 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html @@ -47,7 +47,7 @@ fixture(r#" @@- /main.rs crate:main deps:other_crate fn test() { - let x = other_crate::foo::S::thing(); + let x = other_crate::foo::S::thing(); x; } //^ i128 diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html index b28818e679ff7..fccf34083d7f2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html @@ -45,5 +45,5 @@ template!(template); } -#[proc_macros::issue_18089] +#[proc_macros::issue_18089] fn template() {}
\ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html index d2a53b2ff9e1f..6366cba1bd030 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html @@ -41,12 +41,12 @@ .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } -
extern crate self;
+
extern crate self;
 
-use crate;
-use self;
+use crate;
+use self;
 mod __ {
-    use super::*;
+    use super::*;
 }
 
 macro_rules! void {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html
index d309b4723238b..a89e8190832e8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html
@@ -41,12 +41,12 @@
 .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 
-
extern crate self;
+
extern crate self;
 
-use crate;
-use self;
+use crate;
+use self;
 mod __ {
-    use super::*;
+    use super::*;
 }
 
 macro_rules! void {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html
index d309b4723238b..a89e8190832e8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html
@@ -41,12 +41,12 @@
 .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 
-
extern crate self;
+
extern crate self;
 
-use crate;
-use self;
+use crate;
+use self;
 mod __ {
-    use super::*;
+    use super::*;
 }
 
 macro_rules! void {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html
index 575c9a6b0aca8..aa1500b8f85bb 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html
@@ -41,12 +41,12 @@
 .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 
-
extern crate self;
+
extern crate self;
 
-use crate;
-use self;
+use crate;
+use self;
 mod __ {
-    use super::*;
+    use super::*;
 }
 
 macro_rules! void {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html
index caf66ace7a689..484afd81ead2a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html
@@ -41,6 +41,6 @@
 .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 
-
lib2015::void_2015!(try async await gen);
-lib2024::void_2024!(try async await gen);
+
lib2015::void_2015!(try async await gen);
+lib2024::void_2024!(try async await gen);
 
\ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index b63d5cedc825b..59612634fda38 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -41,7 +41,7 @@ .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } -
use proc_macros::{mirror, identity, DeriveIdentity};
+
use proc_macros::{mirror, identity, DeriveIdentity};
 
 mirror! {
     {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index e178782c79c4e..4e3822c3d31f8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -165,7 +165,7 @@
     toho!("{}fmt", 0);
     let i: u64 = 3;
     let o: u64;
-    core::arch::asm!(
+    core::arch::asm!(
         "mov {0}, {1}",
         "add {0}, 5",
         out(reg) o,
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
index 93513f5b575d4..008987d409adc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
@@ -91,7 +91,7 @@
 
         // unsafe fn and method calls
         unsafe_fn();
-        self::unsafe_fn();
+        self::unsafe_fn();
         (unsafe_fn as unsafe fn())();
         Struct { field: 0 }.unsafe_method();
 
@@ -120,7 +120,7 @@
         &EXTERN_STATIC;
         &raw const EXTERN_STATIC;
 
-        core::arch::asm!(
+        core::arch::asm!(
             "push {base}",
             base = const 0
         );
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index 6f0f57725fc7a..e5b983dcbf85c 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -70,6 +70,7 @@ pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind {
         | SymbolKind::Attribute
         | SymbolKind::Derive
         | SymbolKind::DeriveHelper => lsp_types::SymbolKind::FUNCTION,
+        SymbolKind::CrateRoot => lsp_types::SymbolKind::PACKAGE,
         SymbolKind::Module | SymbolKind::ToolModule => lsp_types::SymbolKind::MODULE,
         SymbolKind::TypeAlias | SymbolKind::TypeParam | SymbolKind::SelfType => {
             lsp_types::SymbolKind::TYPE_PARAMETER
@@ -141,6 +142,7 @@ pub(crate) fn completion_item_kind(
             SymbolKind::Method => lsp_types::CompletionItemKind::METHOD,
             SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT,
             SymbolKind::ConstParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+            SymbolKind::CrateRoot => lsp_types::CompletionItemKind::MODULE,
             SymbolKind::Derive => lsp_types::CompletionItemKind::FUNCTION,
             SymbolKind::DeriveHelper => lsp_types::CompletionItemKind::FUNCTION,
             SymbolKind::Enum => lsp_types::CompletionItemKind::ENUM,
@@ -803,11 +805,16 @@ fn semantic_token_type_and_modifiers(
 ) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
     use semantic_tokens::{modifiers as mods, types};
 
+    let mut mods = semantic_tokens::ModifierSet::default();
     let ty = match highlight.tag {
         HlTag::Symbol(symbol) => match symbol {
             SymbolKind::Attribute => types::DECORATOR,
             SymbolKind::Derive => types::DERIVE,
             SymbolKind::DeriveHelper => types::DERIVE_HELPER,
+            SymbolKind::CrateRoot => {
+                mods |= mods::CRATE_ROOT;
+                types::NAMESPACE
+            }
             SymbolKind::Module => types::NAMESPACE,
             SymbolKind::Impl => types::TYPE_ALIAS,
             SymbolKind::Field => types::PROPERTY,
@@ -870,7 +877,6 @@ fn semantic_token_type_and_modifiers(
         },
     };
 
-    let mut mods = semantic_tokens::ModifierSet::default();
     for modifier in highlight.mods.iter() {
         let modifier = match modifier {
             HlMod::Associated => mods::ASSOCIATED,

From 5f6d3852ee494e167eaa8128b1d5f81a0f03234d Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Mon, 12 Jan 2026 08:46:20 +0100
Subject: [PATCH 051/131] Fix crate root search in world symbols duplicating
 root entries

---
 .../rust-analyzer/crates/hir/src/symbols.rs   |  15 +--
 .../crates/ide-db/src/items_locator.rs        |   4 +-
 .../crates/ide-db/src/symbol_index.rs         | 102 +++++++++---------
 .../ide-db/src/test_data/test_doc_alias.txt   |  28 -----
 .../test_symbol_index_collection.txt          |  28 -----
 .../ide/src/syntax_highlighting/highlight.rs  |   9 +-
 .../rust-analyzer/tests/slow-tests/main.rs    |   4 +-
 7 files changed, 67 insertions(+), 123 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index f9002f31fd15f..4461659f5c4e1 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -23,7 +23,7 @@ use intern::Symbol;
 use rustc_hash::FxHashMap;
 use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName};
 
-use crate::{HasCrate, Module, ModuleDef, Semantics};
+use crate::{Crate, HasCrate, Module, ModuleDef, Semantics};
 
 /// The actual data that is stored in the index. It should be as compact as
 /// possible.
@@ -100,11 +100,6 @@ impl<'a> SymbolCollector<'a> {
         let _p = tracing::info_span!("SymbolCollector::collect", ?module).entered();
         tracing::info!(?module, "SymbolCollector::collect");
 
-        // If this is a crate root module, add a symbol for the crate itself
-        if module.is_crate_root(self.db) {
-            self.push_crate_root(module);
-        }
-
         // The initial work is the root module we're collecting, additional work will
         // be populated as we traverse the module's definitions.
         self.work.push(SymbolCollectorWork { module_id: module.into(), parent: None });
@@ -116,8 +111,7 @@ impl<'a> SymbolCollector<'a> {
 
     /// Push a symbol for a crate's root module.
     /// This allows crate roots to appear in the symbol index for queries like `::` or `::foo`.
-    fn push_crate_root(&mut self, module: Module) {
-        let krate = module.krate(self.db);
+    pub fn push_crate_root(&mut self, krate: Crate) {
         let Some(display_name) = krate.display_name(self.db) else { return };
         let crate_name = display_name.crate_name();
         let canonical_name = display_name.canonical_name();
@@ -131,10 +125,11 @@ impl<'a> SymbolCollector<'a> {
         let ptr = SyntaxNodePtr::new(&syntax_node);
 
         let loc = DeclarationLocation { hir_file_id, ptr, name_ptr: None };
+        let root_module = krate.root_module(self.db);
 
         self.symbols.insert(FileSymbol {
             name: crate_name.symbol().clone(),
-            def: ModuleDef::Module(module),
+            def: ModuleDef::Module(root_module),
             loc,
             container_name: None,
             is_alias: false,
@@ -147,7 +142,7 @@ impl<'a> SymbolCollector<'a> {
         if canonical_name != crate_name.symbol() {
             self.symbols.insert(FileSymbol {
                 name: canonical_name.clone(),
-                def: ModuleDef::Module(module),
+                def: ModuleDef::Module(root_module),
                 loc,
                 container_name: None,
                 is_alias: false,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
index 0d305530d925e..af0c69c6856d7 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
@@ -110,7 +110,7 @@ pub fn items_with_name_in_module(
             local_query
         }
     };
-    local_query.search(&[SymbolIndex::module_symbols(db, module)], |local_candidate| {
+    local_query.search(db, &[SymbolIndex::module_symbols(db, module)], |local_candidate| {
         cb(match local_candidate.def {
             hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
             def => ItemInNs::from(def),
@@ -140,7 +140,7 @@ fn find_items(
 
     // Query the local crate using the symbol index.
     let mut local_results = Vec::new();
-    local_query.search(&symbol_index::crate_symbols(db, krate), |local_candidate| {
+    local_query.search(db, &symbol_index::crate_symbols(db, krate), |local_candidate| {
         let def = match local_candidate.def {
             hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
             def => ItemInNs::from(def),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
index 05c3f360fa874..c95b541748ecc 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -218,15 +218,18 @@ pub fn crate_symbols(db: &dyn HirDatabase, krate: Crate) -> Box<[&SymbolIndex<'_>]> {
 // | Editor  | Shortcut |
 // |---------|-----------|
 // | VS Code | <kbd>Ctrl+T</kbd>
-pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol<'_>> {
+pub fn world_symbols(db: &RootDatabase, mut query: Query) -> Vec<FileSymbol<'_>> {
     let _p = tracing::info_span!("world_symbols", query = ?query.query).entered();
 
-    if query.is_crate_search() {
-        return search_crates(db, &query);
-    }
-
-    // If we have a path filter, resolve it to target modules
-    let indices: Vec<_> = if !query.path_filter.is_empty() {
+    // Search for crates by name (handles "::" and "::foo" queries)
+    let indices: Vec<_> = if query.is_crate_search() {
+        query.only_types = false;
+        query.libs = true;
+        vec![SymbolIndex::extern_prelude_symbols(db)]
+        // If we have a path filter, resolve it to target modules
+    } else if !query.path_filter.is_empty() {
+        query.only_types = false;
+        query.libs = true;
         let target_modules = resolve_path_to_modules(
             db,
             &query.path_filter,
@@ -258,13 +261,17 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol<'_>> {
         crates
             .par_iter()
             .for_each_with(db.clone(), |snap, &krate| _ = crate_symbols(snap, krate.into()));
-        crates.into_iter().flat_map(|krate| Vec::from(crate_symbols(db, krate.into()))).collect()
+        crates
+            .into_iter()
+            .flat_map(|krate| Vec::from(crate_symbols(db, krate.into())))
+            .chain(std::iter::once(SymbolIndex::extern_prelude_symbols(db)))
+            .collect()
     };
 
     let mut res = vec![];
 
     // Normal search: use FST to match item name
-    query.search::<()>(&indices, |f| {
+    query.search::<()>(db, &indices, |f| {
         res.push(f.clone());
         ControlFlow::Continue(())
     });
@@ -272,39 +279,6 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol<'_>> {
     res
 }
 
-/// Search for crates by name (handles "::" and "::foo" queries)
-fn search_crates<'db>(db: &'db RootDatabase, query: &Query) -> Vec<FileSymbol<'db>> {
-    let mut res = vec![];
-
-    for krate in Crate::all(db) {
-        let Some(display_name) = krate.display_name(db) else { continue };
-        let crate_name = display_name.crate_name().as_str();
-
-        // If query is empty (sole "::"), return all crates
-        // Otherwise, fuzzy match the crate name
-        let matches = if query.query.is_empty() {
-            true
-        } else {
-            query.mode.check(&query.query, query.case_sensitive, crate_name)
-        };
-
-        if matches {
-            // Get the crate root module's symbol index and find the root module symbol
-            let root_module = krate.root_module(db);
-            let index = SymbolIndex::module_symbols(db, root_module);
-            // Find the module symbol itself (representing the crate)
-            for symbol in index.symbols.iter() {
-                if matches!(symbol.def, hir::ModuleDef::Module(m) if m == root_module) {
-                    res.push(symbol.clone());
-                    break;
-                }
-            }
-        }
-    }
-
-    res
-}
-
 /// Resolve a path filter to the target module(s) it points to.
 /// Returns the modules whose symbol indices should be searched.
 ///
@@ -452,6 +426,33 @@ impl<'db> SymbolIndex<'db> {
 
         module_symbols(db, InternedModuleId::new(db, hir::ModuleId::from(module)))
     }
+
+    /// The symbol index for all extern prelude crates.
+    pub fn extern_prelude_symbols(db: &dyn HirDatabase) -> &SymbolIndex<'_> {
+        #[salsa::tracked(returns(ref))]
+        fn extern_prelude_symbols<'db>(db: &'db dyn HirDatabase) -> SymbolIndex<'db> {
+            let _p = tracing::info_span!("extern_prelude_symbols").entered();
+
+            // We call this without attaching because this runs in parallel, so we need to attach here.
+            hir::attach_db(db, || {
+                let mut collector = SymbolCollector::new(db, false);
+
+                for krate in Crate::all(db) {
+                    if krate
+                        .display_name(db)
+                        .is_none_or(|name| name.canonical_name().as_str() == "build-script-build")
+                    {
+                        continue;
+                    }
+                    collector.push_crate_root(krate);
+                }
+
+                SymbolIndex::new(collector.finish())
+            })
+        }
+
+        extern_prelude_symbols(db)
+    }
 }
 
 impl fmt::Debug for SymbolIndex<'_> {
@@ -555,6 +556,7 @@ impl Query {
     /// Search symbols in the given indices.
     pub(crate) fn search<'db, T>(
         &self,
+        db: &'db RootDatabase,
         indices: &[&'db SymbolIndex<'db>],
         cb: impl FnMut(&'db FileSymbol<'db>) -> ControlFlow<T>,
     ) -> Option<T> {
@@ -568,7 +570,7 @@ impl Query {
                 for index in indices.iter() {
                     op = op.add(index.map.search(&automaton));
                 }
-                self.search_maps(indices, op.union(), cb)
+                self.search_maps(db, indices, op.union(), cb)
             }
             SearchMode::Fuzzy => {
                 let automaton = fst::automaton::Subsequence::new(&self.lowercased);
@@ -576,7 +578,7 @@ impl Query {
                 for index in indices.iter() {
                     op = op.add(index.map.search(&automaton));
                 }
-                self.search_maps(indices, op.union(), cb)
+                self.search_maps(db, indices, op.union(), cb)
             }
             SearchMode::Prefix => {
                 let automaton = fst::automaton::Str::new(&self.lowercased).starts_with();
@@ -584,13 +586,14 @@ impl Query {
                 for index in indices.iter() {
                     op = op.add(index.map.search(&automaton));
                 }
-                self.search_maps(indices, op.union(), cb)
+                self.search_maps(db, indices, op.union(), cb)
             }
         }
     }
 
     fn search_maps<'db, T>(
         &self,
+        db: &'db RootDatabase,
         indices: &[&'db SymbolIndex<'db>],
         mut stream: fst::map::Union<'_>,
         mut cb: impl FnMut(&'db FileSymbol<'db>) -> ControlFlow<T>,
@@ -598,18 +601,21 @@ impl Query {
         let ignore_underscore_prefixed = !self.query.starts_with("__");
         while let Some((_, indexed_values)) = stream.next() {
             for &IndexedValue { index, value } in indexed_values {
-                let symbol_index = &indices[index];
+                let symbol_index = indices[index];
                 let (start, end) = SymbolIndex::map_value_to_range(value);
 
                 for symbol in &symbol_index.symbols[start..end] {
                     let non_type_for_type_only_query = self.only_types
-                        && !matches!(
+                        && !(matches!(
                             symbol.def,
                             hir::ModuleDef::Adt(..)
                                 | hir::ModuleDef::TypeAlias(..)
                                 | hir::ModuleDef::BuiltinType(..)
                                 | hir::ModuleDef::Trait(..)
-                        );
+                        ) || matches!(
+                            symbol.def,
+                            hir::ModuleDef::Module(module) if module.is_crate_root(db)
+                        ));
                     if non_type_for_type_only_query || !self.matches_assoc_mode(symbol.is_assoc) {
                         continue;
                     }
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
index 71680699b7395..0c28c312f83bc 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -154,34 +154,6 @@
                 do_not_complete: Yes,
                 _marker: PhantomData<&()>,
             },
-            FileSymbol {
-                name: "ra_test_fixture",
-                def: Module(
-                    Module {
-                        id: ModuleIdLt {
-                            [salsa id]: Id(3800),
-                        },
-                    },
-                ),
-                loc: DeclarationLocation {
-                    hir_file_id: FileId(
-                        EditionedFileId(
-                            Id(3000),
-                        ),
-                    ),
-                    ptr: SyntaxNodePtr {
-                        kind: SOURCE_FILE,
-                        range: 0..128,
-                    },
-                    name_ptr: None,
-                },
-                container_name: None,
-                is_alias: false,
-                is_assoc: false,
-                is_import: false,
-                do_not_complete: Yes,
-                _marker: PhantomData<&()>,
-            },
             FileSymbol {
                 name: "s1",
                 def: Adt(
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 2d62a56fe22df..4b588572d328d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -919,34 +919,6 @@
                 do_not_complete: Yes,
                 _marker: PhantomData<&()>,
             },
-            FileSymbol {
-                name: "ra_test_fixture",
-                def: Module(
-                    Module {
-                        id: ModuleIdLt {
-                            [salsa id]: Id(3800),
-                        },
-                    },
-                ),
-                loc: DeclarationLocation {
-                    hir_file_id: FileId(
-                        EditionedFileId(
-                            Id(3000),
-                        ),
-                    ),
-                    ptr: SyntaxNodePtr {
-                        kind: SOURCE_FILE,
-                        range: 0..793,
-                    },
-                    name_ptr: None,
-                },
-                container_name: None,
-                is_alias: false,
-                is_assoc: false,
-                is_import: false,
-                do_not_complete: Yes,
-                _marker: PhantomData<&()>,
-            },
             FileSymbol {
                 name: "really_define_struct",
                 def: Macro(
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index 3795d3d4146d6..dcc9a8c0d5f70 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -494,10 +494,9 @@ pub(super) fn highlight_def(
             (Highlight::new(HlTag::Symbol(SymbolKind::Field)), Some(field.attrs(sema.db)))
         }
         Definition::TupleField(_) => (Highlight::new(HlTag::Symbol(SymbolKind::Field)), None),
-        Definition::Crate(krate) => (
-            Highlight::new(HlTag::Symbol(SymbolKind::CrateRoot)).into(),
-            Some(krate.attrs(sema.db)),
-        ),
+        Definition::Crate(krate) => {
+            (Highlight::new(HlTag::Symbol(SymbolKind::CrateRoot)), Some(krate.attrs(sema.db)))
+        }
         Definition::Module(module) => {
             let h = Highlight::new(HlTag::Symbol(if module.is_crate_root(db) {
                 SymbolKind::CrateRoot
@@ -661,7 +660,7 @@ pub(super) fn highlight_def(
             (h, None)
         }
         Definition::ExternCrateDecl(extern_crate) => {
-            let mut highlight = Highlight::new(HlTag::Symbol(SymbolKind::CrateRoot)).into();
+            let mut highlight = Highlight::new(HlTag::Symbol(SymbolKind::CrateRoot));
             if extern_crate.alias(db).is_none() {
                 highlight |= HlMod::Library;
             }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
index 9f3c6742d651d..b4a7b44d165ac 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -1452,7 +1452,7 @@ foo = { path = "../foo" }
         json!([
         {
           "name": "bar",
-          "kind": 2,
+          "kind": 4,
           "location": {
             "uri": "file:///[..]bar/src/lib.rs",
             "range": {
@@ -1511,7 +1511,7 @@ version = "0.0.0"
         json!([
         {
           "name": "baz",
-          "kind": 2,
+          "kind": 4,
           "location": {
             "uri": "file:///[..]baz/src/lib.rs",
             "range": {

From 55f6901e967dda86ac15cf836777f1b32855feb7 Mon Sep 17 00:00:00 2001
From: Wilfred Hughes 
Date: Mon, 29 Dec 2025 03:23:48 -0800
Subject: [PATCH 052/131] Fix lowering with supertrait predicates

Previously both valid and invalid Rust code could crash r-a due to a
cyclic query during lowering.
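
For context, the shape of code that triggered the cycle is a supertrait bound
that refers back to the trait's own associated type. A minimal sketch,
mirroring the `issue_21006_self_assoc_trait` regression test added below:

```rust
trait Baz {
    fn baz(&self);
}

trait Foo {
    type Assoc;
}

// The `Self::Assoc: Baz` where-clause used to send lowering into a cyclic
// `generic_predicates_for_param` query and crash rust-analyzer; it now
// lowers (and type-checks `v.baz()`) without panicking.
trait Bar: Foo
where
    Self::Assoc: Baz,
{
    fn bar(v: Self::Assoc) {
        v.baz();
    }
}
```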
---
 .../rust-analyzer/crates/hir-ty/src/lower.rs  | 38 +++++++++++----
 .../crates/hir-ty/src/tests/regression.rs     | 48 +++++++++++++++++++
 2 files changed, 78 insertions(+), 8 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 9befca11b3e5c..5789bf02a42e7 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -53,7 +53,7 @@ use tracing::debug;
 use triomphe::{Arc, ThinArc};
 
 use crate::{
-    FnAbi, ImplTraitId, TyLoweringDiagnostic, TyLoweringDiagnosticKind,
+    FnAbi, ImplTraitId, TyLoweringDiagnostic, TyLoweringDiagnosticKind, all_super_traits,
     consteval::intern_const_ref,
     db::{HirDatabase, InternedOpaqueTyId},
     generics::{Generics, generics, trait_self_param_idx},
@@ -1624,11 +1624,16 @@ pub(crate) fn field_types_with_diagnostics_query<'db>(
     (res, create_diagnostics(ctx.diagnostics))
 }
 
+/// Predicates for `param_id` of the form `P: SomeTrait`. If
+/// `assoc_name` is provided, only return predicates referencing traits
+/// that have an associated type of that name.
+///
 /// This query exists only to be used when resolving short-hand associated types
 /// like `T::Item`.
 ///
 /// See the analogous query in rustc and its comment:
 /// 
+///
 /// This is a query mostly to handle cycles somewhat gracefully; e.g. the
 /// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
 /// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
@@ -1652,7 +1657,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
     );
 
     // we have to filter out all other predicates *first*, before attempting to lower them
-    let predicate = |pred: &_, ctx: &mut TyLoweringContext<'_, '_>| match pred {
+    let has_relevant_bound = |pred: &_, ctx: &mut TyLoweringContext<'_, '_>| match pred {
         WherePredicate::ForLifetime { target, bound, .. }
         | WherePredicate::TypeBound { target, bound, .. } => {
             let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) };
@@ -1700,11 +1705,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
                         return false;
                     };
 
-                    rustc_type_ir::elaborate::supertrait_def_ids(interner, tr.into()).any(|tr| {
-                        tr.0.trait_items(db).items.iter().any(|(name, item)| {
-                            matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
-                        })
-                    })
+                    trait_or_supertrait_has_assoc_type(db, tr, assoc_name)
                 }
                 TypeBound::Use(_) | TypeBound::Lifetime(_) | TypeBound::Error => false,
             }
@@ -1717,7 +1718,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
     {
         ctx.store = maybe_parent_generics.store();
         for pred in maybe_parent_generics.where_predicates() {
-            if predicate(pred, &mut ctx) {
+            if has_relevant_bound(pred, &mut ctx) {
                 predicates.extend(
                     ctx.lower_where_predicate(
                         pred,
@@ -1757,6 +1758,27 @@ pub(crate) fn generic_predicates_for_param_cycle_result(
     StoredEarlyBinder::bind(Clauses::empty(DbInterner::new_no_crate(db)).store())
 }
 
+/// Check if this trait or any of its supertraits define an associated
+/// type with the given name.
+fn trait_or_supertrait_has_assoc_type(
+    db: &dyn HirDatabase,
+    tr: TraitId,
+    assoc_name: &Name,
+) -> bool {
+    for trait_id in all_super_traits(db, tr) {
+        if trait_id
+            .trait_items(db)
+            .items
+            .iter()
+            .any(|(name, item)| matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name)
+        {
+            return true;
+        }
+    }
+
+    false
+}
+
 #[inline]
 pub(crate) fn type_alias_bounds<'db>(
     db: &'db dyn HirDatabase,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
index df49d7999feea..a04c46f8eabd1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -2598,3 +2598,51 @@ trait ColumnLike {
     "#,
     );
 }
+
+#[test]
+fn issue_21006_generic_predicates_for_param_supertrait_cycle() {
+    check_no_mismatches(
+        r#"
+trait VCipherSuite {}
+
+trait CipherSuite
+where
+    OprfHash: Hash,
+{
+}
+
+type Bar = ::Hash;
+
+type OprfHash = ::Hash;
+
+impl Foo {
+    fn seal() {}
+}
+        "#,
+    );
+}
+
+#[test]
+fn issue_21006_self_assoc_trait() {
+    check_types(
+        r#"
+trait Baz {
+    fn baz(&self);
+}
+
+trait Foo {
+    type Assoc;
+}
+
+trait Bar: Foo
+where
+    Self::Assoc: Baz,
+{
+    fn bar(v: Self::Assoc) {
+        let _ = v.baz();
+        //  ^ ()
+    }
+}
+        "#,
+    );
+}

From 01ebc285e1300de9f43a02b80a5bc408dab7bb3c Mon Sep 17 00:00:00 2001
From: Alex Butler 
Date: Mon, 12 Jan 2026 12:28:17 +0000
Subject: [PATCH 053/131] smol_str: update changelog 0.3.5

---
 src/tools/rust-analyzer/lib/smol_str/CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/lib/smol_str/CHANGELOG.md b/src/tools/rust-analyzer/lib/smol_str/CHANGELOG.md
index b7da6d18a440c..4aa25fa13446f 100644
--- a/src/tools/rust-analyzer/lib/smol_str/CHANGELOG.md
+++ b/src/tools/rust-analyzer/lib/smol_str/CHANGELOG.md
@@ -1,6 +1,6 @@
 # Changelog
 
-## Unreleased
+## 0.3.5 - 2026-01-08
 - Optimise `SmolStr::clone` 4-5x speedup inline, 0.5x heap (slow down).
 
 ## 0.3.4 - 2025-10-23

From c20e6a12269678cf4012b5962ccfb73db18c940f Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Mon, 12 Jan 2026 20:26:24 +0800
Subject: [PATCH 054/131] Fix `mut` and `raw` not completing in `&x.foo()`

Example
---
```rust
fn main() {
    let _ = &$0x.foo();
}
```

**Before this PR**

```rust
...
kw loop
kw match
kw return
kw self::
...
```

**After this PR**

```rust
...
kw loop
kw match
kw mut
kw raw
kw return
kw self::
...
```
---
 .../ide-completion/src/context/analysis.rs    | 10 ++++----
 .../ide-completion/src/tests/expression.rs    | 25 ++++++++++++++++++-
 2 files changed, 29 insertions(+), 6 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index 65bae5b66e179..0db93b0837cda 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -1305,14 +1305,14 @@ fn classify_name_ref<'db>(
 
     let make_path_kind_expr = |expr: ast::Expr| {
         let it = expr.syntax();
+        let prev_token = iter::successors(it.first_token(), |it| it.prev_token())
+            .skip(1)
+            .find(|it| !it.kind().is_trivia());
         let in_block_expr = is_in_block(it);
         let (in_loop_body, innermost_breakable) = is_in_breakable(it).unzip();
         let after_if_expr = is_after_if_expr(it.clone());
-        let ref_expr_parent =
-            path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
-        let after_amp = non_trivia_sibling(it.clone().into(), Direction::Prev)
-            .map(|it| it.kind() == SyntaxKind::AMP)
-            .unwrap_or(false);
+        let after_amp = prev_token.as_ref().is_some_and(|it| it.kind() == SyntaxKind::AMP);
+        let ref_expr_parent = prev_token.and_then(|it| it.parent()).and_then(ast::RefExpr::cast);
         let (innermost_ret_ty, self_param) = {
             let find_ret_ty = |it: SyntaxNode| {
                 if let Some(item) = ast::Item::cast(it.clone()) {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
index 78f003dd210b2..ff005a29218b4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -706,7 +706,30 @@ fn completes_after_ref_expr() {
             kw while
             kw while let
         "#]],
-    )
+    );
+    check(
+        r#"fn main() { let _ = &$0x.foo() }"#,
+        expect![[r#"
+            fn main() fn()
+            bt u32     u32
+            kw const
+            kw crate::
+            kw false
+            kw for
+            kw if
+            kw if let
+            kw loop
+            kw match
+            kw mut
+            kw raw
+            kw return
+            kw self::
+            kw true
+            kw unsafe
+            kw while
+            kw while let
+        "#]],
+    );
 }
 
 #[test]

From 62e777ca76c62d34993dbf576957a6a9e224f034 Mon Sep 17 00:00:00 2001
From: Roberto Aloi 
Date: Mon, 12 Jan 2026 14:11:12 +0100
Subject: [PATCH 055/131] Fix overlapping cfg attributes for
 wasm32-unknown-emscripten target

---
 src/tools/rust-analyzer/crates/stdx/src/process.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/stdx/src/process.rs b/src/tools/rust-analyzer/crates/stdx/src/process.rs
index 2efeed45e44e0..7c4ae978b04a0 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/process.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/process.rs
@@ -76,7 +76,7 @@ pub fn spawn_with_streaming_output(
     Ok(Output { status, stdout, stderr })
 }
 
-#[cfg(unix)]
+#[cfg(all(unix, not(target_arch = "wasm32")))]
 mod imp {
     use std::{
         io::{self, prelude::*},

From 9a5aa90516602b288de34dd65161d90f2db202a6 Mon Sep 17 00:00:00 2001
From: Eric Huss 
Date: Mon, 12 Jan 2026 09:34:11 -0800
Subject: [PATCH 056/131] Add some clarifications and fixes for fmt syntax

This tries to clarify a few things regarding fmt syntax:

- The comment on `Parser::word` seems to be wrong, since
  underscore-prefixed words are just fine. This was changed in
  https://github.com/rust-lang/rust/pull/66847.
- I struggled to follow the description of the width argument. It
  referred to a "second argument", but I couldn't tell which argument
  that meant (which one is the first?). Either way, I rewrote the
  paragraph to be a little more explicit and to use shorter sentences
  (a brief usage sketch of the dollar syntax follows this list).
- The description of the precision argument wasn't really clear about
  the distinction of an Nth argument and a named argument. I added
  a sentence to try to emphasize the difference.
- `IDENTIFIER_OR_KEYWORD` was changed recently in
  https://github.com/rust-lang/reference/pull/2049 to include bare `_`.
  But fmt named arguments are not allowed to be a bare `_`.
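
As referenced above, a brief usage sketch of the dollar width/precision
syntax the rewritten docs describe (this is plain standard-library
`println!` behavior, nothing introduced by this change):

```rust
fn main() {
    // Width taken from positional argument 1 (must be a `usize`): prints "   5".
    println!("{:1$}", 5, 4);
    // Width taken from the named argument `width`: prints "   5".
    println!("{:width$}", 5, width = 4);
    // Precision taken from positional argument 1: prints "3.14".
    println!("{:.1$}", 3.14159, 2);
    // `.*` pulls the precision and then the value from the argument list: prints "3.14".
    println!("{:.*}", 2, 3.14159);
}
```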
---
 compiler/rustc_parse_format/src/lib.rs |  2 +-
 library/alloc/src/fmt.rs               | 15 ++++++++++-----
 2 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/compiler/rustc_parse_format/src/lib.rs b/compiler/rustc_parse_format/src/lib.rs
index 86326fc6536cd..a687a45480dcd 100644
--- a/compiler/rustc_parse_format/src/lib.rs
+++ b/compiler/rustc_parse_format/src/lib.rs
@@ -753,7 +753,7 @@ impl<'input> Parser<'input> {
     }
 
     /// Parses a word starting at the current position. A word is the same as a
-    /// Rust identifier, except that it can't start with `_` character.
+    /// Rust identifier or keyword, except that it can't be a bare `_` character.
     fn word(&mut self) -> &'input str {
         let index = self.input_vec_index;
         match self.peek() {
diff --git a/library/alloc/src/fmt.rs b/library/alloc/src/fmt.rs
index 3d7c580be8c95..e3ff2ba51aba0 100644
--- a/library/alloc/src/fmt.rs
+++ b/library/alloc/src/fmt.rs
@@ -136,9 +136,10 @@
 //! padding specified by fill/alignment will be used to take up the required
 //! space (see below).
 //!
-//! The value for the width can also be provided as a [`usize`] in the list of
-//! parameters by adding a postfix `$`, indicating that the second argument is
-//! a [`usize`] specifying the width.
+//! The width can also be provided dynamically by referencing another argument
+//! with a `$` suffix. Use `{:N$}` to reference the Nth positional argument
+//! (where N is an integer), or `{:name$}` to reference a named argument. The
+//! referenced argument must be of type [`usize`].
 //!
 //! Referring to an argument with the dollar syntax does not affect the "next
 //! argument" counter, so it's usually a good idea to refer to arguments by
@@ -236,7 +237,8 @@
 //!
 //! 2. An integer or name followed by dollar sign `.N$`:
 //!
-//!    use format *argument* `N` (which must be a `usize`) as the precision.
+//!    use the value of format *argument* `N` (which must be a `usize`) as the precision.
+//!    An integer refers to a positional argument, and a name refers to a named argument.
 //!
 //! 3. An asterisk `.*`:
 //!
@@ -363,7 +365,10 @@
 //! - `ws` is any character for which [`char::is_whitespace`] returns `true`, has no semantic
 //!   meaning and is completely optional,
 //! - `integer` is a decimal integer that may contain leading zeroes and must fit into an `usize` and
-//! - `identifier` is an `IDENTIFIER_OR_KEYWORD` (not an `IDENTIFIER`) as defined by the [Rust language reference](https://doc.rust-lang.org/reference/identifiers.html).
+//! - `identifier` is an `IDENTIFIER_OR_KEYWORD` (not an `IDENTIFIER`) as
+//!   defined by the [Rust language
+//!   reference](https://doc.rust-lang.org/reference/identifiers.html), except
+//!   for a bare `_`.
 //!
 //! # Formatting traits
 //!

From 2c32c0e8047eb5535f96c3b7e468cfe023568b31 Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Tue, 13 Jan 2026 14:56:51 +0200
Subject: [PATCH 057/131] Make `naked_asm!()` always return `!`

As it should.
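
For context, the pattern this affects, mirroring the test added below: a naked
function must consist of a single `naked_asm!()` invocation, so the macro has
to be typed as `!` for a non-`()` return type to infer correctly. A minimal
sketch:

```rust
// Previously rust-analyzer inferred `naked_asm!()` as `()`, which could
// produce a spurious type mismatch against the declared `-> !` return type.
#[unsafe(naked)]
extern "C" fn foo() -> ! {
    core::arch::naked_asm!("");
}
```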
---
 .../rust-analyzer/crates/hir-ty/src/infer/expr.rs  |  8 ++++++--
 .../crates/hir-ty/src/tests/simple.rs              | 14 ++++++++++++++
 .../crates/test-utils/src/minicore.rs              |  4 ++++
 3 files changed, 24 insertions(+), 2 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 62339779a5625..c57d41cc5f734 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -8,7 +8,7 @@ use hir_def::{
     expr_store::path::{GenericArgs as HirGenericArgs, Path},
     hir::{
         Array, AsmOperand, AsmOptions, BinaryOp, BindingAnnotation, Expr, ExprId, ExprOrPatId,
-        LabelId, Literal, Pat, PatId, Statement, UnaryOp,
+        InlineAsmKind, LabelId, Literal, Pat, PatId, Statement, UnaryOp,
     },
     resolver::ValueNs,
 };
@@ -1037,7 +1037,11 @@ impl<'db> InferenceContext<'_, 'db> {
                     // FIXME: `sym` should report for things that are not functions or statics.
                     AsmOperand::Sym(_) => (),
                 });
-                if diverge { self.types.types.never } else { self.types.types.unit }
+                if diverge || asm.kind == InlineAsmKind::NakedAsm {
+                    self.types.types.never
+                } else {
+                    self.types.types.unit
+                }
             }
         };
         // use a new type variable if we got unknown here
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index 6367521841abd..d02e455fc3dc9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -3983,3 +3983,17 @@ fn foo() {
         "#]],
     );
 }
+
+#[test]
+fn naked_asm_returns_never() {
+    check_no_mismatches(
+        r#"
+//- minicore: asm
+
+#[unsafe(naked)]
+extern "C" fn foo() -> ! {
+    core::arch::naked_asm!("");
+}
+    "#,
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index 01274a9835f40..580a619cf108d 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -1880,6 +1880,10 @@ mod arch {
     pub macro global_asm("assembly template", $(operands,)* $(options($(option),*))?) {
         /* compiler built-in */
     }
+    #[rustc_builtin_macro]
+    pub macro naked_asm("assembly template", $(operands,)* $(options($(option),*))?) {
+        /* compiler built-in */
+    }
 }
 // endregion:asm
 

From a2154802699c6de58702bcf4e3211ccb977cd3be Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Tue, 13 Jan 2026 19:00:03 +0800
Subject: [PATCH 058/131] Migrate `unwrap_block` assist to use SyntaxEditor

- Fix invalid code produced when unwrapping a match arm inside a let statement
- Fix multiple statements losing their indentation

Example
---
```rust
fn main() {
    let value = match rel_path {
        Ok(rel_path) => {$0
            let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
            Some((*id, rel_path))
        }
        Err(_) => None,
    };
}
```

**Before this PR**

```rust
fn main() {
    let value = let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
    let value = Some((*id, rel_path));
}
```

**After this PR**

```rust
fn main() {
    let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
    let value = Some((*id, rel_path));
}
```

---

```rust
fn main() {
    let mut a = {$0
        1;
        2;
        3
    };
}
```

**Before this PR**

```rust
fn main() {
    1;
2;
    let mut a = 3;
}
```

**After this PR**

```rust
fn main() {
    1;
    2;
    let mut a = 3;
}
```
---
 .../ide-assists/src/handlers/unwrap_block.rs  | 255 +++++++++---------
 1 file changed, 135 insertions(+), 120 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
index a83f6835ca615..e4f5e3523bd2a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
@@ -1,10 +1,12 @@
 use syntax::{
-    AstNode, SyntaxKind, T, TextRange,
+    AstNode, SyntaxElement, SyntaxKind, SyntaxNode, T,
     ast::{
         self,
         edit::{AstNodeEdit, IndentLevel},
         make,
     },
+    match_ast,
+    syntax_editor::{Element, Position, SyntaxEditor},
 };
 
 use crate::{AssistContext, AssistId, Assists};
@@ -27,123 +29,108 @@ use crate::{AssistContext, AssistId, Assists};
 // }
 // ```
 pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
-    let assist_id = AssistId::refactor_rewrite("unwrap_block");
-    let assist_label = "Unwrap block";
     let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
-    let mut block = ast::BlockExpr::cast(l_curly_token.parent_ancestors().nth(1)?)?;
+    let block = l_curly_token.parent_ancestors().nth(1).and_then(ast::BlockExpr::cast)?;
     let target = block.syntax().text_range();
-    let mut parent = block.syntax().parent()?;
-    if ast::MatchArm::can_cast(parent.kind()) {
-        parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))?
-    }
-
-    let kind = parent.kind();
-    if matches!(kind, SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT) {
-        acc.add(assist_id, assist_label, target, |builder| {
-            builder.replace(block.syntax().text_range(), update_expr_string(block.to_string()));
-        })
-    } else if matches!(kind, SyntaxKind::LET_STMT) {
-        let parent = ast::LetStmt::cast(parent)?;
-        let pattern = ast::Pat::cast(parent.syntax().first_child()?)?;
-        let ty = parent.ty();
-        let list = block.stmt_list()?;
-        let replaced = match list.syntax().last_child() {
-            Some(last) => {
-                let stmts: Vec = list.statements().collect();
-                let initializer = ast::Expr::cast(last)?;
-                let let_stmt = make::let_stmt(pattern, ty, Some(initializer));
-                if !stmts.is_empty() {
-                    let block = make::block_expr(stmts, None);
-                    format!("{}\n    {}", update_expr_string(block.to_string()), let_stmt)
-                } else {
-                    let_stmt.to_string()
-                }
-            }
-            None => {
-                let empty_tuple = make::ext::expr_unit();
-                make::let_stmt(pattern, ty, Some(empty_tuple)).to_string()
-            }
-        };
-        acc.add(assist_id, assist_label, target, |builder| {
-            builder.replace(parent.syntax().text_range(), replaced);
-        })
-    } else {
-        let parent = ast::Expr::cast(parent)?;
-        match parent.clone() {
-            ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (),
-            ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)),
-            ast::Expr::IfExpr(if_expr) => {
-                let then_branch = if_expr.then_branch()?;
-                if then_branch == block {
-                    if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) {
-                        // For `else if` blocks
-                        let ancestor_then_branch = ancestor.then_branch()?;
-
-                        return acc.add(assist_id, assist_label, target, |edit| {
-                            let range_to_del_else_if = TextRange::new(
-                                ancestor_then_branch.syntax().text_range().end(),
-                                l_curly_token.text_range().start(),
-                            );
-                            let range_to_del_rest = TextRange::new(
-                                then_branch.syntax().text_range().end(),
-                                if_expr.syntax().text_range().end(),
-                            );
-
-                            edit.delete(range_to_del_rest);
-                            edit.delete(range_to_del_else_if);
-                            edit.replace(
-                                target,
-                                update_expr_string_without_newline(then_branch.to_string()),
-                            );
-                        });
-                    }
-                } else {
-                    return acc.add(assist_id, assist_label, target, |edit| {
-                        let range_to_del = TextRange::new(
-                            then_branch.syntax().text_range().end(),
-                            l_curly_token.text_range().start(),
-                        );
-
-                        edit.delete(range_to_del);
-                        edit.replace(target, update_expr_string_without_newline(block.to_string()));
+    let mut container = block.syntax().clone();
+    let mut replacement = block.clone();
+    let mut prefer_container = None;
+
+    let from_indent = block.indent_level();
+    let into_indent = loop {
+        let parent = container.parent()?;
+        container = match_ast! {
+            match parent {
+                ast::ForExpr(it) => it.syntax().clone(),
+                ast::LoopExpr(it) => it.syntax().clone(),
+                ast::WhileExpr(it) => it.syntax().clone(),
+                ast::MatchArm(it) => it.parent_match().syntax().clone(),
+                ast::LetStmt(it) => {
+                    replacement = wrap_let(&it, replacement);
+                    prefer_container = Some(it.syntax().clone());
+                    it.syntax().clone()
+                },
+                ast::IfExpr(it) => {
+                    prefer_container.get_or_insert_with(|| {
+                        if let Some(else_branch) = it.else_branch()
+                            && *else_branch.syntax() == container
+                        {
+                            else_branch.syntax().clone()
+                        } else {
+                            it.syntax().clone()
+                        }
                     });
-                }
+                    it.syntax().clone()
+                },
+                ast::ExprStmt(it) => it.syntax().clone(),
+                ast::StmtList(it) => break it.indent_level(),
+                _ => return None,
             }
-            _ => return None,
         };
+    };
+    let replacement = replacement.stmt_list()?;
 
-        acc.add(assist_id, assist_label, target, |builder| {
-            builder.replace(parent.syntax().text_range(), update_expr_string(block.to_string()));
-        })
-    }
-}
+    acc.add(AssistId::refactor_rewrite("unwrap_block"), "Unwrap block", target, |builder| {
+        let mut edit = builder.make_editor(block.syntax());
+        let replacement = replacement.dedent(from_indent).indent(into_indent);
+        let container = prefer_container.unwrap_or(container);
 
-fn update_expr_string(expr_string: String) -> String {
-    update_expr_string_with_pat(expr_string, &[' ', '\n'])
-}
+        edit.replace_with_many(&container, extract_statements(replacement));
+        delete_else_before(container, &mut edit);
 
-fn update_expr_string_without_newline(expr_string: String) -> String {
-    update_expr_string_with_pat(expr_string, &[' '])
+        builder.add_file_edits(ctx.vfs_file_id(), edit);
+    })
 }
 
-fn update_expr_string_with_pat(expr_str: String, whitespace_pat: &[char]) -> String {
-    // Remove leading whitespace, index to remove the leading '{',
-    // then continue to remove leading whitespace.
-    // We cannot assume the `{` is the first character because there are block modifiers
-    // (`unsafe`, `async` etc.).
-    let after_open_brace_index = expr_str.find('{').map_or(0, |it| it + 1);
-    let expr_str = expr_str[after_open_brace_index..].trim_start_matches(whitespace_pat);
-
-    // Remove trailing whitespace, index [..expr_str.len() - 1] to remove the trailing '}',
-    // then continue to remove trailing whitespace.
-    let expr_str = expr_str.trim_end_matches(whitespace_pat);
-    let expr_str = expr_str[..expr_str.len() - 1].trim_end_matches(whitespace_pat);
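+/// Removes the `else` keyword (and adjacent whitespace) immediately preceding `container`,
+/// if any, replacing it with a newline at the container's indent level.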
+fn delete_else_before(container: SyntaxNode, edit: &mut SyntaxEditor) {
+    let Some(else_token) = container
+        .siblings_with_tokens(syntax::Direction::Prev)
+        .skip(1)
+        .map_while(|it| it.into_token())
+        .find(|it| it.kind() == T![else])
+    else {
+        return;
+    };
+    itertools::chain(else_token.prev_token(), else_token.next_token())
+        .filter(|it| it.kind() == SyntaxKind::WHITESPACE)
+        .for_each(|it| edit.delete(it));
+    let indent = IndentLevel::from_node(&container);
+    let newline = make::tokens::whitespace(&format!("\n{indent}"));
+    edit.replace(else_token, newline);
+}
+
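+/// Splices the `let` statement around the block's tail expression (pattern and `=` before it,
+/// `;` after it), so the tail expression becomes the initializer; returns the block unchanged on failure.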
+fn wrap_let(assign: &ast::LetStmt, replacement: ast::BlockExpr) -> ast::BlockExpr {
+    let try_wrap_assign = || {
+        let initializer = assign.initializer()?.syntax().syntax_element();
+        let replacement = replacement.clone_subtree();
+        let assign = assign.clone_for_update();
+        let tail_expr = replacement.tail_expr()?;
+        let before =
+            assign.syntax().children_with_tokens().take_while(|it| *it != initializer).collect();
+        let after = assign
+            .syntax()
+            .children_with_tokens()
+            .skip_while(|it| *it != initializer)
+            .skip(1)
+            .collect();
+
+        let mut edit = SyntaxEditor::new(replacement.syntax().clone());
+        edit.insert_all(Position::before(tail_expr.syntax()), before);
+        edit.insert_all(Position::after(tail_expr.syntax()), after);
+        ast::BlockExpr::cast(edit.finish().new_root().clone())
+    };
+    try_wrap_assign().unwrap_or(replacement)
+}
 
-    expr_str
-        .lines()
-        .map(|line| line.replacen("    ", "", 1)) // Delete indentation
-        .collect::<Vec<_>>()
-        .join("\n")
+fn extract_statements(stmt_list: ast::StmtList) -> Vec<SyntaxElement> {
+    let mut elements = stmt_list
+        .syntax()
+        .children_with_tokens()
+        .filter(|it| !matches!(it.kind(), T!['{'] | T!['}']))
+        .skip_while(|it| it.kind() == SyntaxKind::WHITESPACE)
+        .collect::<Vec<_>>();
+    while elements.pop_if(|it| it.kind() == SyntaxKind::WHITESPACE).is_some() {}
+    elements
 }
 
 #[cfg(test)]
@@ -593,6 +580,30 @@ fn main() {
         );
     }
 
+    #[test]
+    fn unwrap_match_arm_in_let() {
+        check_assist(
+            unwrap_block,
+            r#"
+fn main() {
+    let value = match rel_path {
+        Ok(rel_path) => {$0
+            let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+            Some((*id, rel_path))
+        }
+        Err(_) => None,
+    };
+}
+"#,
+            r#"
+fn main() {
+    let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+    let value = Some((*id, rel_path));
+}
+"#,
+        );
+    }
+
     #[test]
     fn simple_if_in_while_bad_cursor_position() {
         check_assist_not_applicable(
@@ -751,27 +762,27 @@ fn main() -> i32 {
             unwrap_block,
             r#"
 fn main() {
-    let x = {$0};
+    let x = {$0
+        bar
+    };
 }
 "#,
             r#"
 fn main() {
-    let x = ();
+    let x = bar;
 }
 "#,
         );
         check_assist(
             unwrap_block,
             r#"
-fn main() {
-    let x = {$0
-        bar
-    };
+fn main() -> i32 {
+    let _ = {$01; 2};
 }
 "#,
             r#"
-fn main() {
-    let x = bar;
+fn main() -> i32 {
+    1; let _ = 2;
 }
 "#,
         );
@@ -779,13 +790,12 @@ fn main() {
             unwrap_block,
             r#"
 fn main() -> i32 {
-    let _ = {$01; 2};
+    let mut a = {$01; 2};
 }
 "#,
             r#"
 fn main() -> i32 {
-    1;
-    let _ = 2;
+    1; let mut a = 2;
 }
 "#,
         );
@@ -793,13 +803,18 @@ fn main() -> i32 {
             unwrap_block,
             r#"
 fn main() -> i32 {
-    let mut a = {$01; 2};
+    let mut a = {$0
+        1;
+        2;
+        3
+    };
 }
 "#,
             r#"
 fn main() -> i32 {
     1;
-    let mut a = 2;
+    2;
+    let mut a = 3;
 }
 "#,
         );

From 8fb704c17510f22700d4a84fc7151f67d9d20a27 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Tue, 13 Jan 2026 19:57:37 +0100
Subject: [PATCH 059/131] fix: Hide renamed imports from macros in symbol index

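For illustration, the kind of macro-generated item this hides (a sketch of a
hypothetical derive expansion, not taken from a real macro):

```rust
// Derive expansions inside block modules often emit renamed imports and
// `extern crate ... as ...` items like these; the symbol collector now
// skips them when they originate from a macro.
const _: () = {
    extern crate core as _core;
    use core::marker::PhantomData as _PhantomData;
};
```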
---
 src/tools/rust-analyzer/crates/hir/src/symbols.rs | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index 4461659f5c4e1..c088f3aa0cc09 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -5,8 +5,8 @@ use std::marker::PhantomData;
 use base_db::FxIndexSet;
 use either::Either;
 use hir_def::{
-    AdtId, AssocItemId, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
-    ModuleDefId, ModuleId, TraitId,
+    AdtId, AssocItemId, AstIdLoc, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId,
+    Lookup, MacroId, ModuleDefId, ModuleId, TraitId,
     db::DefDatabase,
     item_scope::{ImportId, ImportOrExternCrate, ImportOrGlob},
     nameres::crate_def_map,
@@ -169,6 +169,7 @@ impl<'a> SymbolCollector<'a> {
 
     fn collect_from_module(&mut self, module_id: ModuleId) {
         let collect_pub_only = self.collect_pub_only;
+        let is_block_module = module_id.is_block_module(self.db);
         let push_decl = |this: &mut Self, def: ModuleDefId, name, vis| {
             if collect_pub_only && vis != Visibility::Public {
                 return;
@@ -240,6 +241,10 @@ impl<'a> SymbolCollector<'a> {
             let source = import_child_source_cache
                 .entry(i.use_)
                 .or_insert_with(|| i.use_.child_source(this.db));
+            if is_block_module && source.file_id.is_macro() {
+                // Macros tend to generate a lot of imports; the user really won't care about them
+                return;
+            }
             let Some(use_tree_src) = source.value.get(i.idx) else { return };
             let rename = use_tree_src.rename().and_then(|rename| rename.name());
             let name_syntax = match rename {
@@ -276,6 +281,12 @@ impl<'a> SymbolCollector<'a> {
                     return;
                 }
                 let loc = i.lookup(this.db);
+                if is_block_module && loc.ast_id().file_id.is_macro() {
+                    // Macros (especially derives) tend to generate renamed extern crate items;
+                    // the user really won't care about them
+                    return;
+                }
+
                 let source = loc.source(this.db);
                 let rename = source.value.rename().and_then(|rename| rename.name());
 

From 6948a66ad81e149d8a0af5dcc0888b9a44f3d2b4 Mon Sep 17 00:00:00 2001
From: Shoyu Vanilla 
Date: Wed, 14 Jan 2026 05:40:27 +0900
Subject: [PATCH 060/131] fix: Sync cast checks to rustc again

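One observable change, sketched from the new `PtrPtrAddingAutoTraits` error
and its diagnostic below (not an exhaustive summary of the synced checks):

```rust
trait Trait {}

// Dropping an auto trait through a pointer cast is still accepted, but
// adding one is now rejected, matching rustc's E0804.
fn add_auto(x: *mut dyn Trait) -> *mut (dyn Trait + Send) {
    x as *mut (dyn Trait + Send)
    //^ error: cannot add auto trait to dyn bound via pointer cast
}
```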
---
 .../crates/hir-ty/src/infer/cast.rs           | 270 +++++++++++++-----
 .../crates/hir-ty/src/infer/unify.rs          |  10 -
 .../src/handlers/invalid_cast.rs              |  10 +-
 3 files changed, 201 insertions(+), 89 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
index d073b06ccc8a5..d69b00adb7f7b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
@@ -2,8 +2,10 @@
 
 use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags};
 use rustc_ast_ir::Mutability;
+use rustc_hash::FxHashSet;
 use rustc_type_ir::{
-    Flags, InferTy, TypeFlags, UintTy,
+    InferTy, TypeVisitableExt, UintTy, elaborate,
+    error::TypeError,
     inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, Ty as _},
 };
 use stdx::never;
@@ -12,7 +14,10 @@ use crate::{
     InferenceDiagnostic,
     db::HirDatabase,
     infer::{AllowTwoPhase, InferenceContext, expr::ExprIsRead},
-    next_solver::{BoundExistentialPredicates, DbInterner, ParamTy, Ty, TyKind},
+    next_solver::{
+        BoundExistentialPredicates, ExistentialPredicate, ParamTy, Region, Ty, TyKind,
+        infer::traits::ObligationCause,
+    },
 };
 
 #[derive(Debug)]
@@ -66,12 +71,13 @@ pub enum CastError {
     DifferingKinds,
     SizedUnsizedCast,
     IllegalCast,
-    IntToFatCast,
+    IntToWideCast,
     NeedDeref,
     NeedViaPtr,
     NeedViaThinPtr,
     NeedViaInt,
     NonScalar,
+    PtrPtrAddingAutoTraits,
     // We don't want to report errors with unknown types currently.
     // UnknownCastPtrKind,
     // UnknownExprPtrKind,
@@ -137,22 +143,13 @@ impl<'db> CastCheck<'db> {
             return Ok(());
         }
 
-        if !self.cast_ty.flags().contains(TypeFlags::HAS_TY_INFER)
-            && !ctx.table.is_sized(self.cast_ty)
-        {
+        if !self.cast_ty.has_infer_types() && !ctx.table.is_sized(self.cast_ty) {
             return Err(InferenceDiagnostic::CastToUnsized {
                 expr: self.expr,
                 cast_ty: self.cast_ty.store(),
             });
         }
 
-        // Chalk doesn't support trait upcasting and fails to solve some obvious goals
-        // when the trait environment contains some recursive traits (See issue #18047)
-        // We skip cast checks for such cases for now, until the next-gen solver.
-        if contains_dyn_trait(self.cast_ty) {
-            return Ok(());
-        }
-
         self.do_check(ctx).map_err(|e| e.into_diagnostic(self.expr, self.expr_ty, self.cast_ty))
     }
 
@@ -162,22 +159,23 @@ impl<'db> CastCheck<'db> {
                 (Some(t_from), Some(t_cast)) => (t_from, t_cast),
                 (None, Some(t_cast)) => match self.expr_ty.kind() {
                     TyKind::FnDef(..) => {
-                        let sig =
-                            self.expr_ty.callable_sig(ctx.interner()).expect("FnDef had no sig");
-                        let sig = ctx.table.normalize_associated_types_in(sig);
+                        // rustc calls `FnCtxt::normalize` on this but it's a no-op in next-solver
+                        let sig = self.expr_ty.fn_sig(ctx.interner());
                         let fn_ptr = Ty::new_fn_ptr(ctx.interner(), sig);
-                        if ctx
-                            .coerce(
-                                self.source_expr.into(),
-                                self.expr_ty,
-                                fn_ptr,
-                                AllowTwoPhase::No,
-                                ExprIsRead::Yes,
-                            )
-                            .is_ok()
-                        {
-                        } else {
-                            return Err(CastError::IllegalCast);
+                        match ctx.coerce(
+                            self.source_expr.into(),
+                            self.expr_ty,
+                            fn_ptr,
+                            AllowTwoPhase::No,
+                            ExprIsRead::Yes,
+                        ) {
+                            Ok(_) => {}
+                            Err(TypeError::IntrinsicCast) => {
+                                return Err(CastError::IllegalCast);
+                            }
+                            Err(_) => {
+                                return Err(CastError::NonScalar);
+                            }
                         }
 
                         (CastTy::FnPtr, t_cast)
@@ -213,23 +211,41 @@ impl<'db> CastCheck<'db> {
         // rustc checks whether the `expr_ty` is foreign adt with `non_exhaustive` sym
 
         match (t_from, t_cast) {
+            // These types have invariants! can't cast into them.
             (_, CastTy::Int(Int::CEnum) | CastTy::FnPtr) => Err(CastError::NonScalar),
+
+            // * -> Bool
             (_, CastTy::Int(Int::Bool)) => Err(CastError::CastToBool),
-            (CastTy::Int(Int::U(UintTy::U8)), CastTy::Int(Int::Char)) => Ok(()),
+
+            // * -> Char
+            (CastTy::Int(Int::U(UintTy::U8)), CastTy::Int(Int::Char)) => Ok(()), // u8-char-cast
             (_, CastTy::Int(Int::Char)) => Err(CastError::CastToChar),
+
+            // prim -> float,ptr
             (CastTy::Int(Int::Bool | Int::CEnum | Int::Char), CastTy::Float) => {
                 Err(CastError::NeedViaInt)
             }
+
             (CastTy::Int(Int::Bool | Int::CEnum | Int::Char) | CastTy::Float, CastTy::Ptr(..))
             | (CastTy::Ptr(..) | CastTy::FnPtr, CastTy::Float) => Err(CastError::IllegalCast),
-            (CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, src, dst),
+
+            // ptr -> ptr
+            (CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, src, dst), // ptr-ptr-cast
+
+            // ptr-addr-cast
             (CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(ctx, src),
+            (CastTy::FnPtr, CastTy::Int(_)) => Ok(()),
+
+            // addr-ptr-cast
             (CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(ctx, dst),
+
+            // fn-ptr-cast
             (CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(ctx, dst),
+
+            // prim -> prim
             (CastTy::Int(Int::CEnum), CastTy::Int(_)) => Ok(()),
             (CastTy::Int(Int::Char | Int::Bool), CastTy::Int(_)) => Ok(()),
             (CastTy::Int(_) | CastTy::Float, CastTy::Int(_) | CastTy::Float) => Ok(()),
-            (CastTy::FnPtr, CastTy::Int(_)) => Ok(()),
         }
     }
 
@@ -241,10 +257,16 @@ impl<'db> CastCheck<'db> {
         t_cast: Ty<'db>,
         m_cast: Mutability,
     ) -> Result<(), CastError> {
-        // Mutability order is opposite to rustc. `Mut < Not`
-        if m_expr <= m_cast
+        let t_expr = ctx.table.try_structurally_resolve_type(t_expr);
+        let t_cast = ctx.table.try_structurally_resolve_type(t_cast);
+
+        if m_expr >= m_cast
             && let TyKind::Array(ety, _) = t_expr.kind()
+            && ctx.infcx().can_eq(ctx.table.param_env, ety, t_cast)
         {
+            // Due to historical reasons we allow directly casting references of
+            // arrays into raw pointers of their element type.
+
             // Coerce to a raw pointer so that we generate RawPtr in MIR.
             let array_ptr_type = Ty::new_ptr(ctx.interner(), t_expr, m_expr);
             if ctx
@@ -265,14 +287,9 @@ impl<'db> CastCheck<'db> {
                 );
             }
 
-            // This is a less strict condition than rustc's `demand_eqtype`,
-            // but false negative is better than false positive
-            if ctx
-                .coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, ExprIsRead::Yes)
-                .is_ok()
-            {
-                return Ok(());
-            }
+            // this will report a type mismatch if needed
+            let _ = ctx.demand_eqtype(self.expr.into(), ety, t_cast);
+            return Ok(());
         }
 
         Err(CastError::IllegalCast)
@@ -289,30 +306,147 @@ impl<'db> CastCheck<'db> {
 
         match (src_kind, dst_kind) {
             (Some(PointerKind::Error), _) | (_, Some(PointerKind::Error)) => Ok(()),
+
             // (_, None) => Err(CastError::UnknownCastPtrKind),
             // (None, _) => Err(CastError::UnknownExprPtrKind),
             (_, None) | (None, _) => Ok(()),
+
+            // Cast to thin pointer is OK
             (_, Some(PointerKind::Thin)) => Ok(()),
+
+            // thin -> fat? report invalid cast (don't complain about vtable kinds)
             (Some(PointerKind::Thin), _) => Err(CastError::SizedUnsizedCast),
+
+            // trait object -> trait object? need to do additional checks
             (Some(PointerKind::VTable(src_tty)), Some(PointerKind::VTable(dst_tty))) => {
                 match (src_tty.principal_def_id(), dst_tty.principal_def_id()) {
+                    // A<dyn Src<...> + SrcAuto> -> B<dyn Dst<...> + DstAuto>. need to make sure
+                    // - `Src` and `Dst` traits are the same
+                    // - traits have the same generic arguments
+                    // - projections are the same
+                    // - `SrcAuto` (+auto traits implied by `Src`) is a superset of `DstAuto`
+                    //
+                    // Note that trait upcasting goes through a different mechanism (`coerce_unsized`)
+                    // and is unaffected by this check.
                     (Some(src_principal), Some(dst_principal)) => {
                         if src_principal == dst_principal {
                             return Ok(());
                         }
-                        let src_principal = ctx.db.trait_signature(src_principal.0);
-                        let dst_principal = ctx.db.trait_signature(dst_principal.0);
-                        if src_principal.flags.contains(TraitFlags::AUTO)
-                            && dst_principal.flags.contains(TraitFlags::AUTO)
+
+                        // We need to reconstruct trait object types.
+                        // `m_src` and `m_dst` won't work for us here because they will potentially
+                        // contain wrappers, which we do not care about.
+                        //
+                        // e.g. we want to allow `dyn T -> (dyn T,)`, etc.
+                        //
+                        // We also need to skip auto traits to emit an FCW and not an error.
+                        let src_obj = Ty::new_dynamic(
+                            ctx.interner(),
+                            BoundExistentialPredicates::new_from_iter(
+                                ctx.interner(),
+                                src_tty.iter().filter(|pred| {
+                                    !matches!(
+                                        pred.skip_binder(),
+                                        ExistentialPredicate::AutoTrait(_)
+                                    )
+                                }),
+                            ),
+                            Region::new_erased(ctx.interner()),
+                        );
+                        let dst_obj = Ty::new_dynamic(
+                            ctx.interner(),
+                            BoundExistentialPredicates::new_from_iter(
+                                ctx.interner(),
+                                dst_tty.iter().filter(|pred| {
+                                    !matches!(
+                                        pred.skip_binder(),
+                                        ExistentialPredicate::AutoTrait(_)
+                                    )
+                                }),
+                            ),
+                            Region::new_erased(ctx.interner()),
+                        );
+
+                        // `dyn Src = dyn Dst`, this checks for matching traits/generics/projections
+                        // This is `fcx.demand_eqtype`, but inlined to give a better error.
+                        if ctx
+                            .table
+                            .at(&ObligationCause::dummy())
+                            .eq(src_obj, dst_obj)
+                            .map(|infer_ok| ctx.table.register_infer_ok(infer_ok))
+                            .is_err()
                         {
-                            Ok(())
-                        } else {
-                            Err(CastError::DifferingKinds)
+                            return Err(CastError::DifferingKinds);
                         }
+
+                        // Check that `SrcAuto` (+auto traits implied by `Src`) is a superset of `DstAuto`.
+                        // Emit an FCW otherwise.
+                        let src_auto: FxHashSet<_> = src_tty
+                            .auto_traits()
+                            .into_iter()
+                            .chain(
+                                elaborate::supertrait_def_ids(ctx.interner(), src_principal)
+                                    .filter(|trait_| {
+                                        ctx.db
+                                            .trait_signature(trait_.0)
+                                            .flags
+                                            .contains(TraitFlags::AUTO)
+                                    }),
+                            )
+                            .collect();
+
+                        let added = dst_tty
+                            .auto_traits()
+                            .into_iter()
+                            .any(|trait_| !src_auto.contains(&trait_));
+
+                        if added {
+                            return Err(CastError::PtrPtrAddingAutoTraits);
+                        }
+
+                        Ok(())
                     }
-                    _ => Err(CastError::Unknown),
+
+                    // dyn Auto -> dyn Auto'? ok.
+                    (None, None) => Ok(()),
+
+                    // dyn Trait -> dyn Auto? not ok (for now).
+                    //
+                    // Although dropping the principal is already allowed for unsizing coercions
+                    // (e.g. `*const (dyn Trait + Auto)` to `*const dyn Auto`), dropping it is
+                    // currently **NOT** allowed for (non-coercion) ptr-to-ptr casts (e.g
+                    // `*const Foo` to `*const Bar` where `Foo` has a `dyn Trait + Auto` tail
+                    // and `Bar` has a `dyn Auto` tail), because the underlying MIR operations
+                    // currently work very differently:
+                    //
+                    // * A MIR unsizing coercion on raw pointers to trait objects (`*const dyn Src`
+                    //   to `*const dyn Dst`) is currently equivalent to downcasting the source to
+                    //   the concrete sized type that it was originally unsized from first (via a
+                    //   ptr-to-ptr cast from `*const Src` to `*const T` with `T: Sized`) and then
+                    //   unsizing this thin pointer to the target type (unsizing `*const T` to
+                    //   `*const Dst`). In particular, this means that the pointer's metadata
+                    //   (vtable) will semantically change, e.g. for const eval and miri, even
+                    //   though the vtables will always be merged for codegen.
+                    //
+                    // * A MIR ptr-to-ptr cast is currently equivalent to a transmute and does not
+                    //   change the pointer metadata (vtable) at all.
+                    //
+                    // In addition to this potentially surprising difference between coercion and
+                    // non-coercion casts, casting away the principal with a MIR ptr-to-ptr cast
+                    // is currently considered undefined behavior:
+                    //
+                    // As a validity invariant of pointers to trait objects, we currently require
+                    // that the principal of the vtable in the pointer metadata exactly matches
+                    // the principal of the pointee type, where "no principal" is also considered
+                    // a kind of principal.
+                    (Some(_), None) => Err(CastError::DifferingKinds),
+
+                    // dyn Auto -> dyn Trait? not ok.
+                    (None, Some(_)) => Err(CastError::DifferingKinds),
                 }
             }
+
+            // fat -> fat? metadata kinds must match
             (Some(src_kind), Some(dst_kind)) if src_kind == dst_kind => Ok(()),
             (_, _) => Err(CastError::DifferingKinds),
         }
@@ -342,9 +476,9 @@ impl<'db> CastCheck<'db> {
             None => Ok(()),
             Some(PointerKind::Error) => Ok(()),
             Some(PointerKind::Thin) => Ok(()),
-            Some(PointerKind::VTable(_)) => Err(CastError::IntToFatCast),
-            Some(PointerKind::Length) => Err(CastError::IntToFatCast),
-            Some(PointerKind::OfAlias | PointerKind::OfParam(_)) => Err(CastError::IntToFatCast),
+            Some(PointerKind::VTable(_)) => Err(CastError::IntToWideCast),
+            Some(PointerKind::Length) => Err(CastError::IntToWideCast),
+            Some(PointerKind::OfAlias | PointerKind::OfParam(_)) => Err(CastError::IntToWideCast),
         }
     }
 
@@ -363,15 +497,20 @@ impl<'db> CastCheck<'db> {
     }
 }
 
+/// The kind of pointer and associated metadata (thin, length or vtable) - we
+/// only allow casts between wide pointers if their metadata have the same
+/// kind.
 #[derive(Debug, PartialEq, Eq)]
 enum PointerKind<'db> {
-    // thin pointer
+    /// No metadata attached, ie pointer to sized type or foreign type
     Thin,
-    // trait object
+    /// A trait object
     VTable(BoundExistentialPredicates<'db>),
-    // slice
+    /// Slice
     Length,
+    /// The unsize info of this projection or opaque type
     OfAlias,
+    /// The unsize info of this parameter
     OfParam(ParamTy),
     Error,
 }
@@ -439,24 +578,3 @@ fn pointer_kind<'db>(
         }
     }
 }
-
-fn contains_dyn_trait<'db>(ty: Ty<'db>) -> bool {
-    use std::ops::ControlFlow;
-
-    use rustc_type_ir::{TypeSuperVisitable, TypeVisitable, TypeVisitor};
-
-    struct DynTraitVisitor;
-
-    impl<'db> TypeVisitor<DbInterner<'db>> for DynTraitVisitor {
-        type Result = ControlFlow<()>;
-
-        fn visit_ty(&mut self, ty: Ty<'db>) -> ControlFlow<()> {
-            match ty.kind() {
-                TyKind::Dynamic(..) => ControlFlow::Break(()),
-                _ => ty.super_visit_with(self),
-            }
-        }
-    }
-
-    ty.visit_with(&mut DynTraitVisitor).is_break()
-}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index d55fc0ab0da66..2057159c46d28 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -261,16 +261,6 @@ impl<'db> InferenceTable<'db> {
         self.infer_ctxt.canonicalize_response(t)
     }
 
-    // FIXME: We should get rid of this method. We cannot deeply normalize during inference, only when finishing.
-    // Inference should use shallow normalization (`try_structurally_resolve_type()`) only, when needed.
-    pub(crate) fn normalize_associated_types_in<T>(&mut self, ty: T) -> T
-    where
-        T: TypeFoldable<DbInterner<'db>> + Clone,
-    {
-        let ty = self.resolve_vars_with_obligations(ty);
-        self.at(&ObligationCause::new()).deeply_normalize(ty.clone()).unwrap_or(ty)
-    }
-
     pub(crate) fn normalize_alias_ty(&mut self, alias: Ty<'db>) -> Ty<'db> {
         self.infer_ctxt
             .at(&ObligationCause::new(), self.param_env)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs
index a59077b757b10..7479f8147d2eb 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs
@@ -51,7 +51,7 @@ pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast<'_
             DiagnosticCode::RustcHardError("E0606"),
             format_ty!(ctx, "casting `{}` as `{}` is invalid", d.expr_ty, d.cast_ty),
         ),
-        CastError::IntToFatCast => (
+        CastError::IntToWideCast => (
             DiagnosticCode::RustcHardError("E0606"),
             format_ty!(ctx, "cannot cast `{}` to a fat pointer `{}`", d.expr_ty, d.cast_ty),
         ),
@@ -95,6 +95,10 @@ pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast<'_
             DiagnosticCode::RustcHardError("E0605"),
             format_ty!(ctx, "non-primitive cast: `{}` as `{}`", d.expr_ty, d.cast_ty),
         ),
+        CastError::PtrPtrAddingAutoTraits => (
+            DiagnosticCode::RustcHardError("E0804"),
+            "cannot add auto trait to dyn bound via pointer cast".to_owned(),
+        ),
         // CastError::UnknownCastPtrKind | CastError::UnknownExprPtrKind => (
         //     DiagnosticCode::RustcHardError("E0641"),
         //     "cannot cast to a pointer of an unknown kind".to_owned(),
@@ -444,8 +448,8 @@ fn main() {
     q as *const [i32];
   //^^^^^^^^^^^^^^^^^ error: cannot cast thin pointer `*const i32` to fat pointer `*const [i32]`
 
-    // FIXME: This should emit diagnostics but disabled to prevent many false positives
     let t: *mut (dyn Trait + 'static) = 0 as *mut _;
+                                      //^^^^^^^^^^^ error: cannot cast `usize` to a fat pointer `*mut (dyn Trait + 'static)`
 
     let mut fail: *const str = 0 as *const str;
                              //^^^^^^^^^^^^^^^ error: cannot cast `usize` to a fat pointer `*const str`
@@ -543,7 +547,7 @@ fn main() {
     fn ptr_to_trait_obj_ok() {
         check_diagnostics(
             r#"
-//- minicore: pointee
+//- minicore: pointee, send, sync
 trait Trait<'a> {}
 
 fn remove_auto<'a>(x: *mut (dyn Trait<'a> + Send)) -> *mut dyn Trait<'a> {

From 05939a8d38959b1ff418a0f54663eaba0f5e3108 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Wed, 14 Jan 2026 17:54:23 +0800
Subject: [PATCH 061/131] Fix false positive precedence in `(2 as i32) < 3`

Example
---
```rust
fn f() { _ = $0(1 as u32) << 10; }
```

**Before this PR**

Removing the parentheses produced a syntax error:

```rust
fn f() { _ = 1 as u32 << 10; }
```

**After this PR**

Assist not applicable
---
 .../src/handlers/remove_parentheses.rs        |  6 +++++
 .../crates/syntax/src/ast/prec.rs             | 24 +++++++++++++++----
 2 files changed, 26 insertions(+), 4 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
index aa4d2bcadb011..f07da489e23ae 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
@@ -321,6 +321,12 @@ mod tests {
         );
     }
 
+    #[test]
+    fn remove_parens_conflict_cast_before_l_angle() {
+        check_assist_not_applicable(remove_parentheses, r#"fn f() { _ = $0(1 as u32) << 10; }"#);
+        check_assist_not_applicable(remove_parentheses, r#"fn f() { _ = $0(1 as u32) < 10; }"#);
+    }
+
     #[test]
     fn remove_parens_double_paren_stmt() {
         check_assist(
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs
index 8c88224a761ac..d99cf492616e2 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs
@@ -154,6 +154,11 @@ fn check_ancestry(ancestor: &SyntaxNode, descendent: &SyntaxNode) -> bool {
     bail()
 }
 
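+/// Returns the first non-trivia token that follows `node`, if any.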
+fn next_token_of(node: &SyntaxNode) -> Option<SyntaxToken> {
+    let last = node.last_token()?;
+    skip_trivia_token(last.next_token()?, Direction::Next)
+}
+
 impl Expr {
     pub fn precedence(&self) -> ExprPrecedence {
         precedence(self)
@@ -197,6 +202,8 @@ impl Expr {
         if is_parent_call_expr && is_field_expr {
             return true;
         }
+        let place_of_parent =
+            || place_of.ancestors().find(|it| it.parent().is_none_or(|p| &p == parent.syntax()));
 
         // Special-case block weirdness
         if parent.child_is_followed_by_a_block() {
@@ -226,15 +233,24 @@ impl Expr {
         // For `&&`, we avoid introducing ` && ` into a binary chain.
 
         if self.precedence() == ExprPrecedence::Jump
-            && let Some(node) =
-                place_of.ancestors().find(|it| it.parent().is_none_or(|p| &p == parent.syntax()))
-            && let Some(next) =
-                node.last_token().and_then(|t| skip_trivia_token(t.next_token()?, Direction::Next))
+            && let Some(node) = place_of_parent()
+            && let Some(next) = next_token_of(&node)
             && matches!(next.kind(), T![||] | T![&&])
         {
             return true;
         }
 
+        // Special-case `2 as x < 3`
+        if let ast::Expr::CastExpr(it) = self
+            && let Some(ty) = it.ty()
+            && ty.syntax().last_token().and_then(|it| ast::NameLike::cast(it.parent()?)).is_some()
+            && let Some(node) = place_of_parent()
+            && let Some(next) = next_token_of(&node)
+            && matches!(next.kind(), T![<] | T![<<])
+        {
+            return true;
+        }
+
         if self.is_paren_like()
             || parent.is_paren_like()
             || self.is_prefix()

From 4b5b42c82c1cf2095fcb183c72406eb82c908b21 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Wed, 14 Jan 2026 16:34:31 +0530
Subject: [PATCH 062/131] remove postcard from legacy

---
 .../src/bidirectional_protocol.rs             |  9 +---
 .../proc-macro-api/src/legacy_protocol.rs     |  8 +---
 .../crates/proc-macro-api/src/lib.rs          |  3 --
 .../crates/proc-macro-api/src/process.rs      | 45 ++++++-------------
 .../crates/proc-macro-srv-cli/src/main.rs     |  5 ---
 .../proc-macro-srv-cli/src/main_loop.rs       |  1 -
 6 files changed, 16 insertions(+), 55 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
index e44723a6a3898..5996f882981c0 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
@@ -212,14 +212,7 @@ fn run_request(
     if let Some(err) = srv.exited() {
         return Err(err.clone());
     }
-
-    match srv.use_postcard() {
-        true => srv.run_bidirectional::<PostcardProtocol>(msg, callback),
-        false => Err(ServerError {
-            message: "bidirectional messaging does not support JSON".to_owned(),
-            io: None,
-        }),
-    }
+    srv.run_bidirectional::<PostcardProtocol>(msg, callback)
 }
 
 pub fn reject_subrequests(req: SubRequest) -> Result<SubResponse, ServerError> {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
index 4524d1b66bfe2..aabe5a0118519 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
@@ -19,7 +19,7 @@ use crate::{
     },
     process::ProcMacroServerProcess,
     transport::codec::Codec,
-    transport::codec::{json::JsonProtocol, postcard::PostcardProtocol},
+    transport::codec::json::JsonProtocol,
     version,
 };
 
@@ -148,11 +148,7 @@ fn send_task(srv: &ProcMacroServerProcess, req: Request) -> Result(send_request::, req)
-    } else {
-        srv.send_task::<_, _, JsonProtocol>(send_request::, req)
-    }
+    srv.send_task::<_, _, JsonProtocol>(send_request::, req)
 }
 
 /// Sends a request to the server and reads the response.
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index 822809943a364..01195c10feef5 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -49,8 +49,6 @@ pub mod version {
 pub enum ProtocolFormat {
     /// JSON-based legacy protocol (newline-delimited JSON).
     JsonLegacy,
-    /// Postcard-based legacy protocol (COBS-encoded postcard).
-    PostcardLegacy,
     /// Bidirectional postcard protocol with sub-request support.
     BidirectionalPostcardPrototype,
 }
@@ -59,7 +57,6 @@ impl fmt::Display for ProtocolFormat {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             ProtocolFormat::JsonLegacy => write!(f, "json-legacy"),
-            ProtocolFormat::PostcardLegacy => write!(f, "postcard-legacy"),
             ProtocolFormat::BidirectionalPostcardPrototype => {
                 write!(f, "bidirectional-postcard-prototype")
             }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index 4f87621587908..cd387dad0d0b3 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -43,7 +43,6 @@ impl std::fmt::Debug for ProcMacroServerProcess {
 #[derive(Debug, Clone)]
 pub(crate) enum Protocol {
     LegacyJson { mode: SpanMode },
-    LegacyPostcard { mode: SpanMode },
     BidirectionalPostcardPrototype { mode: SpanMode },
 }
 
@@ -136,7 +135,6 @@ impl ProcMacroServerProcess {
         {
             &[
                 Some(ProtocolFormat::BidirectionalPostcardPrototype),
-                Some(ProtocolFormat::PostcardLegacy),
                 Some(ProtocolFormat::JsonLegacy),
             ]
         } else {
@@ -155,9 +153,6 @@ impl ProcMacroServerProcess {
                         Some(ProtocolFormat::BidirectionalPostcardPrototype) => {
                             Protocol::BidirectionalPostcardPrototype { mode: SpanMode::Id }
                         }
-                        Some(ProtocolFormat::PostcardLegacy) => {
-                            Protocol::LegacyPostcard { mode: SpanMode::Id }
-                        }
                         Some(ProtocolFormat::JsonLegacy) | None => {
                             Protocol::LegacyJson { mode: SpanMode::Id }
                         }
@@ -185,7 +180,6 @@ impl ProcMacroServerProcess {
                     {
                         match &mut srv.protocol {
                             Protocol::LegacyJson { mode }
-                            | Protocol::LegacyPostcard { mode }
                             | Protocol::BidirectionalPostcardPrototype { mode } => *mode = new_mode,
                         }
                     }
@@ -208,10 +202,6 @@ impl ProcMacroServerProcess {
         self.exited.get().map(|it| &it.0)
     }
 
-    pub(crate) fn use_postcard(&self) -> bool {
-        matches!(self.protocol, Protocol::LegacyPostcard { .. })
-    }
-
     /// Retrieves the API version of the proc-macro server.
     pub(crate) fn version(&self) -> u32 {
         self.version
@@ -221,7 +211,6 @@ impl ProcMacroServerProcess {
     pub(crate) fn rust_analyzer_spans(&self) -> bool {
         match self.protocol {
             Protocol::LegacyJson { mode } => mode == SpanMode::RustAnalyzer,
-            Protocol::LegacyPostcard { mode } => mode == SpanMode::RustAnalyzer,
             Protocol::BidirectionalPostcardPrototype { mode } => mode == SpanMode::RustAnalyzer,
         }
     }
@@ -229,9 +218,7 @@ impl ProcMacroServerProcess {
     /// Checks the API version of the running proc-macro server.
     fn version_check(&self, callback: Option>) -> Result {
         match self.protocol {
-            Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
-                legacy_protocol::version_check(self)
-            }
+            Protocol::LegacyJson { .. } => legacy_protocol::version_check(self),
             Protocol::BidirectionalPostcardPrototype { .. } => {
                 let cb = callback.expect("callback required for bidirectional protocol");
                 bidirectional_protocol::version_check(self, cb)
@@ -245,9 +232,7 @@ impl ProcMacroServerProcess {
         callback: Option>,
     ) -> Result {
         match self.protocol {
-            Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
-                legacy_protocol::enable_rust_analyzer_spans(self)
-            }
+            Protocol::LegacyJson { .. } => legacy_protocol::enable_rust_analyzer_spans(self),
             Protocol::BidirectionalPostcardPrototype { .. } => {
                 let cb = callback.expect("callback required for bidirectional protocol");
                 bidirectional_protocol::enable_rust_analyzer_spans(self, cb)
@@ -262,9 +247,7 @@ impl ProcMacroServerProcess {
         callback: Option>,
     ) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
         match self.protocol {
-            Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
-                legacy_protocol::find_proc_macros(self, dylib_path)
-            }
+            Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
             Protocol::BidirectionalPostcardPrototype { .. } => {
                 let cb = callback.expect("callback required for bidirectional protocol");
                 bidirectional_protocol::find_proc_macros(self, dylib_path, cb)
@@ -285,18 +268,16 @@ impl ProcMacroServerProcess {
         callback: Option>,
     ) -> Result, ServerError> {
         match self.protocol {
-            Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
-                legacy_protocol::expand(
-                    proc_macro,
-                    subtree,
-                    attr,
-                    env,
-                    def_site,
-                    call_site,
-                    mixed_site,
-                    current_dir,
-                )
-            }
+            Protocol::LegacyJson { .. } => legacy_protocol::expand(
+                proc_macro,
+                subtree,
+                attr,
+                env,
+                def_site,
+                call_site,
+                mixed_site,
+                current_dir,
+            ),
             Protocol::BidirectionalPostcardPrototype { .. } => bidirectional_protocol::expand(
                 proc_macro,
                 subtree,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
index a246d4d3f28f9..928753659f1cc 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
@@ -67,7 +67,6 @@ impl ValueEnum for ProtocolFormatArg {
     fn value_variants<'a>() -> &'a [Self] {
         &[
             ProtocolFormatArg(ProtocolFormat::JsonLegacy),
-            ProtocolFormatArg(ProtocolFormat::PostcardLegacy),
             ProtocolFormatArg(ProtocolFormat::BidirectionalPostcardPrototype),
         ]
     }
@@ -75,9 +74,6 @@ impl ValueEnum for ProtocolFormatArg {
     fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
         match self.0 {
             ProtocolFormat::JsonLegacy => Some(clap::builder::PossibleValue::new("json-legacy")),
-            ProtocolFormat::PostcardLegacy => {
-                Some(clap::builder::PossibleValue::new("postcard-legacy"))
-            }
             ProtocolFormat::BidirectionalPostcardPrototype => {
                 Some(clap::builder::PossibleValue::new("bidirectional-postcard-prototype"))
             }
@@ -87,7 +83,6 @@ impl ValueEnum for ProtocolFormatArg {
     fn from_str(input: &str, _ignore_case: bool) -> Result<Self, String> {
         match input {
             "json-legacy" => Ok(ProtocolFormatArg(ProtocolFormat::JsonLegacy)),
-            "postcard-legacy" => Ok(ProtocolFormatArg(ProtocolFormat::PostcardLegacy)),
             "bidirectional-postcard-prototype" => {
                 Ok(ProtocolFormatArg(ProtocolFormat::BidirectionalPostcardPrototype))
             }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs
index 5180ede9fb900..70e1e091c1975 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs
@@ -41,7 +41,6 @@ pub fn run(
 ) -> io::Result<()> {
     match format {
         ProtocolFormat::JsonLegacy => run_old::(stdin, stdout),
-        ProtocolFormat::PostcardLegacy => run_old::(stdin, stdout),
         ProtocolFormat::BidirectionalPostcardPrototype => {
             run_new::(stdin, stdout)
         }

From 31817f6deaf57c58f713bb61666d32bbd8306a3b Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Wed, 14 Jan 2026 16:34:57 +0530
Subject: [PATCH 063/131] remove flatten from ExpandMacro message in
 bidirectional messages

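Note: `#[serde(flatten)]` makes the outer struct serialize as a map, which a
non-self-describing format such as postcard handles poorly. A minimal sketch
of the issue (hypothetical types, assuming the `serde` crate and `postcard`
with its `alloc` feature):

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Data { n: u32 }

#[derive(Serialize, Deserialize)]
struct Msg {
    #[serde(flatten)] // forces map serialization with unknown length
    data: Data,
}

fn main() {
    // With the attribute, postcard has to encode a map of unknown length;
    // a plain nested `data: Data` field round-trips without trouble.
    let encoded = postcard::to_allocvec(&Msg { data: Data { n: 1 } });
    println!("flattened encode result: {encoded:?}");
}
```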
---
 .../crates/proc-macro-api/src/bidirectional_protocol/msg.rs      | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
index 57e7b1ee8f684..c56ed51916948 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
@@ -70,7 +70,6 @@ pub struct ExpandMacro {
     pub lib: Utf8PathBuf,
     pub env: Vec<(String, String)>,
     pub current_dir: Option,
-    #[serde(flatten)]
     pub data: ExpandMacroData,
 }
 

From 98e1f7103b5d6bfcfcad849fa30a19614303160d Mon Sep 17 00:00:00 2001
From: Roberto Aloi 
Date: Wed, 14 Jan 2026 13:15:09 +0100
Subject: [PATCH 064/131] Bump camino to 1.2.2

---
 src/tools/rust-analyzer/Cargo.lock | 4 ++--
 src/tools/rust-analyzer/Cargo.toml | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index d6c6250e13dc7..a2a18cf8eeea1 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -178,9 +178,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
 
 [[package]]
 name = "camino"
-version = "1.2.0"
+version = "1.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1de8bc0aa9e9385ceb3bf0c152e3a9b9544f6c4a912c8ae504e80c1f0368603"
+checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
 dependencies = [
  "serde_core",
 ]
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index 8003cb2fba8ed..04b513b38b582 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -107,7 +107,7 @@ anyhow = "1.0.98"
 arrayvec = "0.7.6"
 bitflags = "2.9.1"
 cargo_metadata = "0.23.0"
-camino = "1.1.10"
+camino = "1.2.2"
 crossbeam-channel = "0.5.15"
 dissimilar = "1.0.10"
 dot = "0.1.4"

From 0d8aa8991c33f7dec592856760b3a1c32251bfba Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Wed, 14 Jan 2026 09:16:17 +0100
Subject: [PATCH 065/131] fix: Fix path symbol search not respecting re-exports

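A sketch of the lookup this enables, mirroring the test fixture added below:

```rust
//- /lib.rs crate:main
mod inner;
pub use inner::nested;

//- /inner.rs
pub mod nested {
    pub struct NestedStruct;
}
```

A workspace-symbol query for `main::nested::NestedStruct` now resolves through
the `pub use` re-export instead of only via `main::inner::nested`.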
---
 src/tools/rust-analyzer/crates/hir/src/lib.rs | 17 ++++
 .../crates/ide-db/src/symbol_index.rs         | 97 ++++++++++++++-----
 2 files changed, 91 insertions(+), 23 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 78be5a7e8fa94..252d71fb80a46 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -610,6 +610,23 @@ impl Module {
         res
     }
 
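+    /// Returns the modules visible in this module's item scope, including ones brought in
+    /// by `use` re-exports. With `pub_only`, only publicly visible modules are returned.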
+    pub fn modules_in_scope(&self, db: &dyn HirDatabase, pub_only: bool) -> Vec<(Name, Module)> {
+        let def_map = self.id.def_map(db);
+        let scope = &def_map[self.id].scope;
+
+        let mut res = Vec::new();
+
+        for (name, item) in scope.types() {
+            if let ModuleDefId::ModuleId(m) = item.def
+                && (!pub_only || item.vis == Visibility::Public)
+            {
+                res.push((name.clone(), Module { id: m }));
+            }
+        }
+
+        res
+    }
+
     /// Returns a `ModuleScope`: a set of items, visible in this module.
     pub fn scope(
         self,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
index c95b541748ecc..d7f4c66f465bd 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -35,6 +35,7 @@ use hir::{
     import_map::{AssocSearchMode, SearchMode},
     symbols::{FileSymbol, SymbolCollector},
 };
+use itertools::Itertools;
 use rayon::prelude::*;
 use salsa::Update;
 
@@ -224,12 +225,10 @@ pub fn world_symbols(db: &RootDatabase, mut query: Query) -> Vec>
     // Search for crates by name (handles "::" and "::foo" queries)
     let indices: Vec<_> = if query.is_crate_search() {
         query.only_types = false;
-        query.libs = true;
         vec![SymbolIndex::extern_prelude_symbols(db)]
         // If we have a path filter, resolve it to target modules
     } else if !query.path_filter.is_empty() {
         query.only_types = false;
-        query.libs = true;
         let target_modules = resolve_path_to_modules(
             db,
             &query.path_filter,
@@ -313,11 +312,11 @@ fn resolve_path_to_modules(
 
     // If anchor_to_crate is true, first segment MUST be a crate name
     // If anchor_to_crate is false, first segment could be a crate OR a module in local crates
-    let mut candidate_modules: Vec<Module> = vec![];
+    let mut candidate_modules: Vec<(Module, bool)> = vec![];
 
     // Add crate root modules for matching crates
     for krate in matching_crates {
-        candidate_modules.push(krate.root_module(db));
+        candidate_modules.push((krate.root_module(db), krate.origin(db).is_local()));
     }
 
     // If not anchored to crate, also search for modules matching first segment in local crates
@@ -329,7 +328,7 @@ fn resolve_path_to_modules(
                     if let Some(name) = child.name(db)
                         && names_match(name.as_str(), first_segment)
                     {
-                        candidate_modules.push(child);
+                        candidate_modules.push((child, true));
                     }
                 }
             }
@@ -340,11 +339,14 @@ fn resolve_path_to_modules(
     for segment in rest_segments {
         candidate_modules = candidate_modules
             .into_iter()
-            .flat_map(|module| {
-                module.children(db).filter(|child| {
-                    child.name(db).is_some_and(|name| names_match(name.as_str(), segment))
-                })
+            .flat_map(|(module, local)| {
+                module
+                    .modules_in_scope(db, !local)
+                    .into_iter()
+                    .filter(|(name, _)| names_match(name.as_str(), segment))
+                    .map(move |(_, module)| (module, local))
             })
+            .unique()
             .collect();
 
         if candidate_modules.is_empty() {
@@ -352,7 +354,7 @@ fn resolve_path_to_modules(
         }
     }
 
-    candidate_modules
+    candidate_modules.into_iter().map(|(module, _)| module).collect()
 }
 
 #[derive(Default)]
@@ -839,7 +841,7 @@ pub struct Foo;
         assert_eq!(item, "foo");
         assert!(anchor);
 
-        // Trailing :: (module browsing)
+        // Trailing ::
         let (path, item, anchor) = Query::parse_path_query("foo::");
         assert_eq!(path, vec!["foo"]);
         assert_eq!(item, "");
@@ -909,7 +911,7 @@ pub mod nested {
     }
 
     #[test]
-    fn test_module_browsing() {
+    fn test_path_search_module() {
         let (mut db, _) = RootDatabase::with_many_files(
             r#"
 //- /lib.rs crate:main
@@ -1066,20 +1068,11 @@ pub fn root_fn() {}
         let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
         assert!(names.contains(&"RootItem"), "Expected RootItem at crate root in {:?}", names);
 
-        // Browse crate root
         let query = Query::new("mylib::".to_owned());
         let symbols = world_symbols(&db, query);
         let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
-        assert!(
-            names.contains(&"RootItem"),
-            "Expected RootItem when browsing crate root in {:?}",
-            names
-        );
-        assert!(
-            names.contains(&"root_fn"),
-            "Expected root_fn when browsing crate root in {:?}",
-            names
-        );
+        assert!(names.contains(&"RootItem"), "Expected RootItem {:?}", names);
+        assert!(names.contains(&"root_fn"), "Expected root_fn {:?}", names);
     }
 
     #[test]
@@ -1163,4 +1156,62 @@ pub struct FooStruct;
         let symbols = world_symbols(&db, query);
         assert!(symbols.is_empty(), "Expected empty results for non-matching crate pattern");
     }
+
+    #[test]
+    fn test_path_search_with_use_reexport() {
+        // Test that module resolution works for `use` items (re-exports), not just `mod` items
+        let (mut db, _) = RootDatabase::with_many_files(
+            r#"
+//- /lib.rs crate:main
+mod inner;
+pub use inner::nested;
+
+//- /inner.rs
+pub mod nested {
+    pub struct NestedStruct;
+    pub fn nested_fn() {}
+}
+"#,
+        );
+
+        let mut local_roots = FxHashSet::default();
+        local_roots.insert(WORKSPACE);
+        LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+        // Search via the re-exported path (main::nested::NestedStruct)
+        // This should work because `nested` is in scope via `pub use inner::nested`
+        let query = Query::new("main::nested::NestedStruct".to_owned());
+        let symbols = world_symbols(&db, query);
+        let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+        assert!(
+            names.contains(&"NestedStruct"),
+            "Expected NestedStruct via re-exported path in {:?}",
+            names
+        );
+
+        // Also verify the original path still works
+        let query = Query::new("main::inner::nested::NestedStruct".to_owned());
+        let symbols = world_symbols(&db, query);
+        let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+        assert!(
+            names.contains(&"NestedStruct"),
+            "Expected NestedStruct via original path in {:?}",
+            names
+        );
+
+        // Browse the re-exported module
+        let query = Query::new("main::nested::".to_owned());
+        let symbols = world_symbols(&db, query);
+        let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+        assert!(
+            names.contains(&"NestedStruct"),
+            "Expected NestedStruct when browsing re-exported module in {:?}",
+            names
+        );
+        assert!(
+            names.contains(&"nested_fn"),
+            "Expected nested_fn when browsing re-exported module in {:?}",
+            names
+        );
+    }
 }

From 6ecee2a415db5f3abf32414372e8325d3ff47f7b Mon Sep 17 00:00:00 2001
From: Wilfred Hughes 
Date: Wed, 14 Jan 2026 18:26:02 +0000
Subject: [PATCH 066/131] internal: Improve docs for discoverConfig

Add concrete examples of CLI invocations and JSONL outputs, use BUCK
for consistency with the first example, and polish the wording.
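
For readers implementing a discover command, the shape of the expected
output can be sketched with a tiny, hypothetical program (not part of this
change) that prints the documented JSONL events: a `progress` line while
working, then a `finished` line. The buildfile and project contents below
are placeholders taken from the docs, and the sketch assumes the
`serde_json` crate.

```rust
// Hypothetical discover command: one JSON object per line (JSONL),
// matching the documented `DiscoverProjectData` shape.
use serde_json::json;

fn main() {
    // Progress event, emitted while the project description is being built.
    println!("{}", json!({
        "kind": "progress",
        "message": "generating rust-project.json"
    }));

    // Finished event: the buildfile plus the rust-project.json contents
    // ("sysroot"/"crates" are elided placeholders, as in the docs).
    println!("{}", json!({
        "kind": "finished",
        "buildfile": "rust-analyzer/BUCK",
        "project": {
            "sysroot": "foo",
            "crates": []
        }
    }));
}
```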
---
 .../crates/rust-analyzer/src/config.rs        | 116 +++++++++++-------
 .../docs/book/src/configuration_generated.md  | 116 +++++++++++-------
 .../docs/book/src/non_cargo_based_projects.md |   2 +-
 .../rust-analyzer/editors/code/package.json   |   2 +-
 4 files changed, 142 insertions(+), 94 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 28ac94e4deb61..8d6b19a84caac 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -478,14 +478,26 @@ config_data! {
         typing_triggerChars: Option = Some("=.".to_owned()),
 
 
-        /// Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].
+        /// Configure a command that rust-analyzer can invoke to
+        /// obtain configuration.
         ///
-        /// [`DiscoverWorkspaceConfig`] also requires setting `progressLabel` and `filesToWatch`.
-        /// `progressLabel` is used for the title in progress indicators, whereas `filesToWatch`
-        /// is used to determine which build system-specific files should be watched in order to
-        /// reload rust-analyzer.
+        /// This is an alternative to manually generating
+        /// `rust-project.json`: it enables rust-analyzer to generate
+        /// rust-project.json on the fly, and regenerate it when
+        /// switching or modifying projects.
+        ///
+        /// This is an object with three fields:
+        ///
+        /// * `command`: the shell command to invoke
+        ///
+        /// * `filesToWatch`: which build system-specific files should
+        /// be watched to trigger regenerating the configuration
+        ///
+        /// * `progressLabel`: the name of the command, used in
+        /// progress indicators in the IDE
+        ///
+        /// Here's an example of a valid configuration:
         ///
-        /// Below is an example of a valid configuration:
         /// ```json
         /// "rust-analyzer.workspace.discoverConfig": {
         ///     "command": [
@@ -500,12 +512,49 @@ config_data! {
         /// }
         /// ```
         ///
-        /// ## Workspace Discovery Protocol
+        /// ## Argument Substitutions
+        ///
+        /// If `command` includes the argument `{arg}`, that argument will be substituted
+        /// with the JSON-serialized form of the following enum:
+        ///
+        /// ```norun
+        /// #[derive(PartialEq, Clone, Debug, Serialize)]
+        /// #[serde(rename_all = "camelCase")]
+        /// pub enum DiscoverArgument {
+        ///    Path(AbsPathBuf),
+        ///    Buildfile(AbsPathBuf),
+        /// }
+        /// ```
+        ///
+        /// rust-analyzer will use the path invocation to find and
+        /// generate a `rust-project.json` and therefore a
+        /// workspace. Example:
+        ///
+        ///
+        /// ```norun
+        /// rust-project develop-json '{ "path": "myproject/src/main.rs" }'
+        /// ```
+        ///
+        /// rust-analyzer will use build file invocations to update an
+        /// existing workspace. For example, with a build file and
+        /// the configuration above:
+        ///
+        ///
+        /// ```norun
+        /// rust-project develop-json '{ "buildfile": "myproject/BUCK" }'
+        /// ```
+        ///
+        /// As a reference for implementors, buck2's `rust-project`
+        /// will likely be useful:
+        /// .
+        ///
+        /// ## Discover Command Output
         ///
         /// **Warning**: This format is provisional and subject to change.
         ///
-        /// [`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to
-        /// `DiscoverProjectData::Finished`:
+        /// The discover command should output JSON objects, one per
+        /// line (JSONL format). These objects should correspond to
+        /// this Rust data type:
         ///
         /// ```norun
         /// #[derive(Debug, Clone, Deserialize, Serialize)]
@@ -518,7 +567,14 @@ config_data! {
         /// }
         /// ```
         ///
-        /// As JSON, `DiscoverProjectData::Finished` is:
+        /// For example, a progress event:
+        ///
+        /// ```json
+        /// {"kind":"progress","message":"generating rust-project.json"}
+        /// ```
+        ///
+        /// A finished event can look like this (expanded and
+        /// commented for readability):
         ///
         /// ```json
         /// {
@@ -526,7 +582,7 @@ config_data! {
         ///     "kind": "finished",
         ///     // the file used by a non-Cargo build system to define
         ///     // a package or target.
-        ///     "buildfile": "rust-analyzer/BUILD",
+        ///     "buildfile": "rust-analyzer/BUCK",
         ///     // the contents of a rust-project.json, elided for brevity
         ///     "project": {
         ///         "sysroot": "foo",
@@ -535,41 +591,9 @@ config_data! {
         /// }
         /// ```
         ///
-        /// It is encouraged, but not required, to use the other variants on `DiscoverProjectData`
-        /// to provide a more polished end-user experience.
-        ///
-        /// `DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be
-        /// substituted with the JSON-serialized form of the following enum:
-        ///
-        /// ```norun
-        /// #[derive(PartialEq, Clone, Debug, Serialize)]
-        /// #[serde(rename_all = "camelCase")]
-        /// pub enum DiscoverArgument {
-        ///    Path(AbsPathBuf),
-        ///    Buildfile(AbsPathBuf),
-        /// }
-        /// ```
-        ///
-        /// The JSON representation of `DiscoverArgument::Path` is:
-        ///
-        /// ```json
-        /// {
-        ///     "path": "src/main.rs"
-        /// }
-        /// ```
-        ///
-        /// Similarly, the JSON representation of `DiscoverArgument::Buildfile` is:
-        ///
-        /// ```json
-        /// {
-        ///     "buildfile": "BUILD"
-        /// }
-        /// ```
-        ///
-        /// `DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and
-        /// therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an
-        /// existing workspace. As a reference for implementors, buck2's `rust-project` will likely
-        /// be useful: .
+        /// Only the finished event is required, but the other
+        /// variants are encouraged to give users more feedback about
+        /// progress or errors.
         workspace_discoverConfig: Option = None,
     }
 }
diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
index c4124aaae0753..9bc412631039c 100644
--- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
+++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
@@ -1619,14 +1619,26 @@ though Cargo might be the eventual consumer.
 
 Default: `null`
 
-Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].
+Configure a command that rust-analyzer can invoke to
+obtain configuration.
 
-[`DiscoverWorkspaceConfig`] also requires setting `progressLabel` and `filesToWatch`.
-`progressLabel` is used for the title in progress indicators, whereas `filesToWatch`
-is used to determine which build system-specific files should be watched in order to
-reload rust-analyzer.
+This is an alternative to manually generating
+`rust-project.json`: it enables rust-analyzer to generate
+rust-project.json on the fly, and regenerate it when
+switching or modifying projects.
+
+This is an object with three fields:
+
+* `command`: the shell command to invoke
+
+* `filesToWatch`: which build system-specific files should
+be watched to trigger regenerating the configuration
+
+* `progressLabel`: the name of the command, used in
+progress indicators in the IDE
+
+Here's an example of a valid configuration:
 
-Below is an example of a valid configuration:
 ```json
 "rust-analyzer.workspace.discoverConfig": {
     "command": [
@@ -1641,12 +1653,49 @@ Below is an example of a valid configuration:
 }
 ```
 
-## Workspace Discovery Protocol
+## Argument Substitutions
+
+If `command` includes the argument `{arg}`, that argument will be substituted
+with the JSON-serialized form of the following enum:
+
+```norun
+#[derive(PartialEq, Clone, Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub enum DiscoverArgument {
+   Path(AbsPathBuf),
+   Buildfile(AbsPathBuf),
+}
+```
+
+rust-analyzer will use the path invocation to find and
+generate a `rust-project.json` and therefore a
+workspace. Example:
+
+
+```norun
+rust-project develop-json '{ "path": "myproject/src/main.rs" }'
+```
+
+rust-analyzer will use build file invocations to update an
+existing workspace. For example, with a build file and
+the configuration above:
+
+
+```norun
+rust-project develop-json '{ "buildfile": "myproject/BUCK" }'
+```
+
+As a reference for implementors, buck2's `rust-project`
+will likely be useful:
+.
+
+## Discover Command Output
 
 **Warning**: This format is provisional and subject to change.
 
-[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to
-`DiscoverProjectData::Finished`:
+The discover command should output JSON objects, one per
+line (JSONL format). These objects should correspond to
+this Rust data type:
 
 ```norun
 #[derive(Debug, Clone, Deserialize, Serialize)]
@@ -1659,7 +1708,14 @@ enum DiscoverProjectData {
 }
 ```
 
-As JSON, `DiscoverProjectData::Finished` is:
+For example, a progress event:
+
+```json
+{"kind":"progress","message":"generating rust-project.json"}
+```
+
+A finished event can look like this (expanded and
+commented for readability):
 
 ```json
 {
@@ -1667,7 +1723,7 @@ As JSON, `DiscoverProjectData::Finished` is:
     "kind": "finished",
     // the file used by a non-Cargo build system to define
     // a package or target.
-    "buildfile": "rust-analyzer/BUILD",
+    "buildfile": "rust-analyzer/BUCK",
     // the contents of a rust-project.json, elided for brevity
     "project": {
         "sysroot": "foo",
@@ -1676,41 +1732,9 @@ As JSON, `DiscoverProjectData::Finished` is:
 }
 ```
 
-It is encouraged, but not required, to use the other variants on `DiscoverProjectData`
-to provide a more polished end-user experience.
-
-`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be
-substituted with the JSON-serialized form of the following enum:
-
-```norun
-#[derive(PartialEq, Clone, Debug, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub enum DiscoverArgument {
-   Path(AbsPathBuf),
-   Buildfile(AbsPathBuf),
-}
-```
-
-The JSON representation of `DiscoverArgument::Path` is:
-
-```json
-{
-    "path": "src/main.rs"
-}
-```
-
-Similarly, the JSON representation of `DiscoverArgument::Buildfile` is:
-
-```json
-{
-    "buildfile": "BUILD"
-}
-```
-
-`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and
-therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an
-existing workspace. As a reference for implementors, buck2's `rust-project` will likely
-be useful: .
+Only the finished event is required, but the other
+variants are encouraged to give users more feedback about
+progress or errors.
 
 
 ## rust-analyzer.workspace.symbol.search.excludeImports {#workspace.symbol.search.excludeImports}
diff --git a/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md b/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md
index a48b025c7b3a6..f1f10ae336534 100644
--- a/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md
+++ b/src/tools/rust-analyzer/docs/book/src/non_cargo_based_projects.md
@@ -237,7 +237,7 @@ There are four ways to feed `rust-project.json` to rust-analyzer:
     [`"rust-analyzer.workspace.discoverConfig": … }`](./configuration.md#workspace.discoverConfig)
     to specify a workspace discovery command to generate project descriptions
     on-the-fly. Please note that the command output is message-oriented and must
-    follow [the discovery protocol](./configuration.md#workspace-discovery-protocol).
+    be JSONL [as described in the configuration docs](./configuration.md#workspace.discoverConfig).
 
 -   Place `rust-project.json` file at the root of the project, and
     rust-analyzer will discover it.
diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json
index 0d91378706a40..a197b7abd84c0 100644
--- a/src/tools/rust-analyzer/editors/code/package.json
+++ b/src/tools/rust-analyzer/editors/code/package.json
@@ -3135,7 +3135,7 @@
                 "title": "Workspace",
                 "properties": {
                     "rust-analyzer.workspace.discoverConfig": {
-                        "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progressLabel` and `filesToWatch`.\n`progressLabel` is used for the title in progress indicators, whereas `filesToWatch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n        \"command\": [\n                \"rust-project\",\n                \"develop-json\",\n                \"{arg}\"\n        ],\n        \"progressLabel\": \"buck2/rust-project\",\n        \"filesToWatch\": [\n                \"BUCK\"\n        ]\n}\n```\n\n## Workspace Discovery Protocol\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to\n`DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n        Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n        Error { error: String, source: Option },\n        Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n        // the internally-tagged representation of the enum.\n        \"kind\": \"finished\",\n        // the file used by a non-Cargo build system to define\n        // a package or target.\n        \"buildfile\": \"rust-analyzer/BUILD\",\n        // the contents of a rust-project.json, elided for brevity\n        \"project\": {\n                \"sysroot\": \"foo\",\n                \"crates\": []\n        }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on `DiscoverProjectData`\nto provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be\nsubstituted with the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n     Path(AbsPathBuf),\n     Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n        \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n        \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and\ntherefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an\nexisting workspace. As a reference for implementors, buck2's `rust-project` will likely\nbe useful: .",
+                        "markdownDescription": "Configure a command that rust-analyzer can invoke to\nobtain configuration.\n\nThis is an alternative to manually generating\n`rust-project.json`: it enables rust-analyzer to generate\nrust-project.json on the fly, and regenerate it when\nswitching or modifying projects.\n\nThis is an object with three fields:\n\n* `command`: the shell command to invoke\n\n* `filesToWatch`: which build system-specific files should\nbe watched to trigger regenerating the configuration\n\n* `progressLabel`: the name of the command, used in\nprogress indicators in the IDE\n\nHere's an example of a valid configuration:\n\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n        \"command\": [\n                \"rust-project\",\n                \"develop-json\",\n                \"{arg}\"\n        ],\n        \"progressLabel\": \"buck2/rust-project\",\n        \"filesToWatch\": [\n                \"BUCK\"\n        ]\n}\n```\n\n## Argument Substitutions\n\nIf `command` includes the argument `{arg}`, that argument will be substituted\nwith the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n     Path(AbsPathBuf),\n     Buildfile(AbsPathBuf),\n}\n```\n\nrust-analyzer will use the path invocation to find and\ngenerate a `rust-project.json` and therefore a\nworkspace. Example:\n\n\n```norun\nrust-project develop-json '{ \"path\": \"myproject/src/main.rs\" }'\n```\n\nrust-analyzer will use build file invocations to update an\nexisting workspace. Example:\n\nOr with a build file and the configuration above:\n\n```norun\nrust-project develop-json '{ \"buildfile\": \"myproject/BUCK\" }'\n```\n\nAs a reference for implementors, buck2's `rust-project`\nwill likely be useful:\n.\n\n## Discover Command Output\n\n**Warning**: This format is provisional and subject to change.\n\nThe discover command should output JSON objects, one per\nline (JSONL format). These objects should correspond to\nthis Rust data type:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n        Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n        Error { error: String, source: Option },\n        Progress { message: String },\n}\n```\n\nFor example, a progress event:\n\n```json\n{\"kind\":\"progress\",\"message\":\"generating rust-project.json\"}\n```\n\nA finished event can look like this (expanded and\ncommented for readability):\n\n```json\n{\n        // the internally-tagged representation of the enum.\n        \"kind\": \"finished\",\n        // the file used by a non-Cargo build system to define\n        // a package or target.\n        \"buildfile\": \"rust-analyzer/BUCK\",\n        // the contents of a rust-project.json, elided for brevity\n        \"project\": {\n                \"sysroot\": \"foo\",\n                \"crates\": []\n        }\n}\n```\n\nOnly the finished event is required, but the other\nvariants are encouraged to give users more feedback about\nprogress or errors.",
                         "default": null,
                         "anyOf": [
                             {

From c31add03870e80556703be3c9544109328559379 Mon Sep 17 00:00:00 2001
From: The rustc-josh-sync Cronjob Bot 
Date: Thu, 15 Jan 2026 04:21:53 +0000
Subject: [PATCH 067/131] Prepare for merging from rust-lang/rust

This updates the rust-version file to b6fdaf2a15736cbccf248b532f48e33179614d40.
---
 src/tools/rust-analyzer/rust-version | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version
index b53a66c667517..a6ccd9bab3930 100644
--- a/src/tools/rust-analyzer/rust-version
+++ b/src/tools/rust-analyzer/rust-version
@@ -1 +1 @@
-44a5b55557c26353f388400d7da95527256fe260
+b6fdaf2a15736cbccf248b532f48e33179614d40

From d85965634996e60e80440b4fef7415b6379d2487 Mon Sep 17 00:00:00 2001
From: lummax 
Date: Thu, 15 Jan 2026 12:54:01 +0100
Subject: [PATCH 068/131] fix: lookup flycheck by ID instead of vector index

After the recent introduction of per-package flycheck for JSON projects, the code
assumed that `world.flycheck` indices matched `world.workspaces` indices.
However, not all workspaces have flycheck enabled (e.g., JSON projects
without a flycheck template configured), so the flycheck vector can be
shorter than the workspaces vector.

This caused an index-out-of-bounds panic when saving a file in a JSON
project without flycheck configured:

  thread 'Worker' panicked at notification.rs:
  index out of bounds: the len is 0 but the index is 0

Fix by looking up the flycheck handle by its ID (which is the workspace
index set during spawn) rather than using the workspace index directly
as a vector index.
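
To illustrate the failure mode and the fix outside of rust-analyzer, here is
a simplified, self-contained sketch; the type and the values are made up for
the example and are not the actual rust-analyzer code:

```rust
// Handles remember the workspace index they were spawned for as their id,
// but the handle vector can be shorter than the workspace list when some
// workspaces have flycheck disabled.
struct FlycheckHandle {
    id: usize, // workspace index assigned at spawn time
}

fn main() {
    // Three workspaces, but only workspaces 0 and 2 got a flycheck handle.
    let flycheck = vec![FlycheckHandle { id: 0 }, FlycheckHandle { id: 2 }];
    let idx = 2; // workspace index of the package owning the saved file

    // Buggy lookup: treats the workspace index as a vector index.
    // `flycheck[idx]` would be `flycheck[2]` and panic, since len is 2.
    assert!(flycheck.get(idx).is_none());

    // Fixed lookup: find the handle whose id matches the workspace index,
    // and simply skip the restart when no such handle exists.
    let handle = flycheck.iter().find(|fc| fc.id == idx);
    assert_eq!(handle.map(|fc| fc.id), Some(2));
}
```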
---
 .../src/handlers/notification.rs              | 23 ++++++++++++-------
 1 file changed, 15 insertions(+), 8 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
index d956010433301..6cc40677fb514 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
@@ -387,14 +387,21 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
                                         } => false,
                                     });
                                 if let Some(idx) = package_workspace_idx {
-                                    let workspace_deps =
-                                        world.all_workspace_dependencies_for_package(&package);
-                                    world.flycheck[idx].restart_for_package(
-                                        package,
-                                        target,
-                                        workspace_deps,
-                                        saved_file.clone(),
-                                    );
+                                    // flycheck handles are indexed by their ID (which is the workspace index),
+                                    // but not all workspaces have flycheck enabled (e.g., JSON projects without
+                                    // a flycheck template). Find the flycheck handle by its ID.
+                                    if let Some(flycheck) =
+                                        world.flycheck.iter().find(|fc| fc.id() == idx)
+                                    {
+                                        let workspace_deps =
+                                            world.all_workspace_dependencies_for_package(&package);
+                                        flycheck.restart_for_package(
+                                            package,
+                                            target,
+                                            workspace_deps,
+                                            saved_file.clone(),
+                                        );
+                                    }
                                 }
                             }
                         }

From 4870a5fb692e0fb796a4db6a8259a22b047e14e9 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Thu, 1 Jan 2026 09:41:09 +0530
Subject: [PATCH 069/131] add worker abstraction

---
 .../src/bidirectional_protocol.rs             | 12 +--
 .../proc-macro-api/src/legacy_protocol.rs     |  4 +-
 .../crates/proc-macro-api/src/lib.rs          | 17 ++--
 .../crates/proc-macro-api/src/process.rs      | 94 +++++++++++++++++--
 4 files changed, 103 insertions(+), 24 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
index 5996f882981c0..643ba98f5176e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
@@ -21,7 +21,7 @@ use crate::{
             serialize_span_data_index_map,
         },
     },
-    process::ProcMacroServerProcess,
+    process::{ProcMacroWorker, SynIO},
     transport::codec::postcard::PostcardProtocol,
     version,
 };
@@ -84,7 +84,7 @@ fn wrap_decode(err: io::Error) -> ServerError {
 }
 
 pub(crate) fn version_check(
-    srv: &ProcMacroServerProcess,
+    srv: &dyn ProcMacroWorker,
     callback: SubCallback<'_>,
 ) -> Result {
     let request = BidirectionalMessage::Request(Request::ApiVersionCheck {});
@@ -101,7 +101,7 @@ pub(crate) fn version_check(
 
 /// Enable support for rust-analyzer span mode if the server supports it.
 pub(crate) fn enable_rust_analyzer_spans(
-    srv: &ProcMacroServerProcess,
+    srv: &dyn ProcMacroWorker,
     callback: SubCallback<'_>,
 ) -> Result {
     let request = BidirectionalMessage::Request(Request::SetConfig(ServerConfig {
@@ -120,7 +120,7 @@ pub(crate) fn enable_rust_analyzer_spans(
 
 /// Finds proc-macros in a given dynamic library.
 pub(crate) fn find_proc_macros(
-    srv: &ProcMacroServerProcess,
+    srv: &dyn ProcMacroWorker,
     dylib_path: &AbsPath,
     callback: SubCallback<'_>,
 ) -> Result, String>, ServerError> {
@@ -175,7 +175,7 @@ pub(crate) fn expand(
         current_dir: Some(current_dir),
     })));
 
-    let response_payload = run_request(&proc_macro.process, task, callback)?;
+    let response_payload = run_request(proc_macro.process.as_ref(), task, callback)?;
 
     match response_payload {
         BidirectionalMessage::Response(Response::ExpandMacro(it)) => Ok(it
@@ -205,7 +205,7 @@ pub(crate) fn expand(
 }
 
 fn run_request(
-    srv: &ProcMacroServerProcess,
+    srv: &dyn ProcMacroWorker,
     msg: BidirectionalMessage,
     callback: SubCallback<'_>,
 ) -> Result {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
index aabe5a0118519..56bf863a88e92 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
@@ -113,7 +113,7 @@ pub(crate) fn expand(
         current_dir: Some(current_dir),
     };
 
-    let response = send_task(&proc_macro.process, Request::ExpandMacro(Box::new(task)))?;
+    let response = send_task(proc_macro.process.as_ref(), Request::ExpandMacro(Box::new(task)))?;
 
     match response {
         Response::ExpandMacro(it) => Ok(it
@@ -143,7 +143,7 @@ pub(crate) fn expand(
 }
 
 /// Sends a request to the proc-macro server and waits for a response.
-fn send_task(srv: &ProcMacroServerProcess, req: Request) -> Result {
+fn send_task(srv: &dyn ProcMacroWorker, req: Request) -> Result {
     if let Some(server_error) = srv.exited() {
         return Err(server_error.clone());
     }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index 01195c10feef5..dd0c89103a2bf 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -27,7 +27,10 @@ use span::{ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
 use std::{fmt, io, sync::Arc, time::SystemTime};
 
 pub use crate::transport::codec::Codec;
-use crate::{bidirectional_protocol::SubCallback, process::ProcMacroServerProcess};
+use crate::{
+    bidirectional_protocol::SubCallback,
+    process::{ProcMacroServerProcess, ProcMacroWorker},
+};
 
 /// The versions of the server protocol
 pub mod version {
@@ -85,7 +88,7 @@ pub struct ProcMacroClient {
     ///
     /// That means that concurrent salsa requests may block each other when expanding proc macros,
     /// which is unfortunate, but simple and good enough for the time being.
-    process: Arc,
+    worker: Arc,
     path: AbsPathBuf,
 }
 
@@ -107,7 +110,7 @@ impl MacroDylib {
 /// we share a single expander process for all macros within a workspace.
 #[derive(Debug, Clone)]
 pub struct ProcMacro {
-    process: Arc,
+    process: Arc,
     dylib_path: Arc,
     name: Box,
     kind: ProcMacroKind,
@@ -171,7 +174,7 @@ impl ProcMacroClient {
         version: Option<&Version>,
     ) -> io::Result {
         let process = ProcMacroServerProcess::run(spawn, version, || "".to_owned())?;
-        Ok(ProcMacroClient { process: Arc::new(process), path: process_path.to_owned() })
+        Ok(ProcMacroClient { worker: Arc::new(process), path: process_path.to_owned() })
     }
 
     /// Returns the absolute path to the proc-macro server.
@@ -186,7 +189,7 @@ impl ProcMacroClient {
         callback: Option>,
     ) -> Result, ServerError> {
         let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
-        let macros = self.process.find_proc_macros(&dylib.path, callback)?;
+        let macros = self.worker.find_proc_macros(&dylib.path, callback)?;
 
         let dylib_path = Arc::new(dylib.path);
         let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
@@ -196,7 +199,7 @@ impl ProcMacroClient {
             Ok(macros) => Ok(macros
                 .into_iter()
                 .map(|(name, kind)| ProcMacro {
-                    process: self.process.clone(),
+                    process: self.worker.clone(),
                     name: name.into(),
                     kind,
                     dylib_path: dylib_path.clone(),
@@ -209,7 +212,7 @@ impl ProcMacroClient {
 
     /// Checks if the proc-macro server has exited.
     pub fn exited(&self) -> Option<&ServerError> {
-        self.process.exited()
+        self.worker.exited()
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index cd387dad0d0b3..a206e9fc5d905 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -1,8 +1,9 @@
 //! Handle process life-time and message passing for proc-macro client
 
 use std::{
+    fmt::Debug,
     io::{self, BufRead, BufReader, Read, Write},
-    panic::AssertUnwindSafe,
+    panic::{AssertUnwindSafe, RefUnwindSafe},
     process::{Child, ChildStdin, ChildStdout, Command, Stdio},
     sync::{Arc, Mutex, OnceLock},
 };
@@ -74,12 +75,79 @@ impl ProcessExit for Process {
 }
 
 /// Maintains the state of the proc-macro server process.
-struct ProcessSrvState {
+pub(crate) struct ProcessSrvState {
     process: Box,
     stdin: Box,
     stdout: Box,
 }
 
+impl ProcMacroWorker for ProcMacroServerProcess {
+    fn find_proc_macros(
+        &self,
+        dylib_path: &AbsPath,
+        callback: Option>,
+    ) -> Result, String>, ServerError> {
+        ProcMacroServerProcess::find_proc_macros(self, dylib_path, callback)
+    }
+
+    fn expand(
+        &self,
+        proc_macro: &ProcMacro,
+        subtree: tt::SubtreeView<'_>,
+        attr: Option>,
+        env: Vec<(String, String)>,
+        def_site: Span,
+        call_site: Span,
+        mixed_site: Span,
+        current_dir: String,
+        callback: Option>,
+    ) -> Result, ServerError> {
+        ProcMacroServerProcess::expand(
+            self,
+            proc_macro,
+            subtree,
+            attr,
+            env,
+            def_site,
+            call_site,
+            mixed_site,
+            current_dir,
+            callback,
+        )
+    }
+
+    fn exited(&self) -> Option<&ServerError> {
+        ProcMacroServerProcess::exited(self)
+    }
+
+    fn version(&self) -> u32 {
+        ProcMacroServerProcess::version(self)
+    }
+
+    fn rust_analyzer_spans(&self) -> bool {
+        ProcMacroServerProcess::rust_analyzer_spans(self)
+    }
+
+    fn enable_rust_analyzer_spans(
+        &self,
+        callback: Option>,
+    ) -> Result {
+        ProcMacroServerProcess::enable_rust_analyzer_spans(self, callback)
+    }
+
+    fn use_postcard(&self) -> bool {
+        ProcMacroServerProcess::use_postcard(self)
+    }
+
+    fn state(&self) -> &Mutex {
+        &self.state
+    }
+
+    fn get_exited(&self) -> &OnceLock> {
+        &self.exited
+    }
+}
+
 impl ProcMacroServerProcess {
     /// Starts the proc-macro server and performs a version check
     pub(crate) fn spawn<'a>(
@@ -291,9 +359,13 @@ impl ProcMacroServerProcess {
             ),
         }
     }
+}
+
+pub(crate) struct SynIO;
 
+impl SynIO {
     pub(crate) fn send_task(
-        &self,
+        proc_macro_worker: &dyn ProcMacroWorker,
         send: impl FnOnce(
             &mut dyn Write,
             &mut dyn BufRead,
@@ -302,7 +374,7 @@ impl ProcMacroServerProcess {
         ) -> Result, ServerError>,
         req: Request,
     ) -> Result {
-        self.with_locked_io::(|writer, reader, buf| {
+        SynIO::with_locked_io::(proc_macro_worker, |writer, reader, buf| {
             send(writer, reader, req, buf).and_then(|res| {
                 res.ok_or_else(|| {
                     let message = "proc-macro server did not respond with data".to_owned();
@@ -319,10 +391,10 @@ impl ProcMacroServerProcess {
     }
 
     pub(crate) fn with_locked_io(
-        &self,
+        proc_macro_worker: &dyn ProcMacroWorker,
         f: impl FnOnce(&mut dyn Write, &mut dyn BufRead, &mut C::Buf) -> Result,
     ) -> Result {
-        let state = &mut *self.state.lock().unwrap();
+        let state = &mut *proc_macro_worker.state().lock().unwrap();
         let mut buf = C::Buf::default();
 
         f(&mut state.stdin, &mut state.stdout, &mut buf).map_err(|e| {
@@ -330,7 +402,11 @@ impl ProcMacroServerProcess {
                 match state.process.exit_err() {
                     None => e,
                     Some(server_error) => {
-                        self.exited.get_or_init(|| AssertUnwindSafe(server_error)).0.clone()
+                        proc_macro_worker
+                            .get_exited()
+                            .get_or_init(|| AssertUnwindSafe(server_error))
+                            .0
+                            .clone()
                     }
                 }
             } else {
@@ -340,11 +416,11 @@ impl ProcMacroServerProcess {
     }
 
     pub(crate) fn run_bidirectional(
-        &self,
+        proc_macro_worker: &dyn ProcMacroWorker,
         initial: BidirectionalMessage,
         callback: SubCallback<'_>,
     ) -> Result {
-        self.with_locked_io::(|writer, reader, buf| {
+        SynIO::with_locked_io::(proc_macro_worker, |writer, reader, buf| {
             bidirectional_protocol::run_conversation::(writer, reader, buf, initial, callback)
         })
     }

From 98d4496a1e9f03a5193e85304f069757d8032768 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Thu, 1 Jan 2026 13:39:04 +0530
Subject: [PATCH 070/131] add termination flag to ProcMacroServerProcess

---
 .../crates/proc-macro-api/src/process.rs      | 20 ++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index a206e9fc5d905..efb8e0e84a1f4 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -29,6 +29,7 @@ pub(crate) struct ProcMacroServerProcess {
     protocol: Protocol,
     /// Populated when the server exits.
     exited: OnceLock>,
+    single_use: bool,
 }
 
 impl std::fmt::Debug for ProcMacroServerProcess {
@@ -146,6 +147,10 @@ impl ProcMacroWorker for ProcMacroServerProcess {
     fn get_exited(&self) -> &OnceLock> {
         &self.exited
     }
+
+    fn is_reusable(&self) -> bool {
+        !self.single_use
+    }
 }
 
 impl ProcMacroServerProcess {
@@ -226,6 +231,7 @@ impl ProcMacroServerProcess {
                         }
                     },
                     exited: OnceLock::new(),
+                    single_use,
                 })
             };
             let mut srv = create_srv()?;
@@ -335,7 +341,7 @@ impl ProcMacroServerProcess {
         current_dir: String,
         callback: Option>,
     ) -> Result, ServerError> {
-        match self.protocol {
+        let result = match self.protocol {
             Protocol::LegacyJson { .. } => legacy_protocol::expand(
                 proc_macro,
                 subtree,
@@ -357,6 +363,18 @@ impl ProcMacroServerProcess {
                 current_dir,
                 callback.expect("callback required for bidirectional protocol"),
             ),
+        };
+
+        if self.is_reusable() {
+            self.terminate();
+        }
+
+        result
+    }
+
+    fn terminate(&self) {
+        if let Ok(mut state) = self.state.lock() {
+            let _ = state.process.child.kill();
         }
     }
 }

From 9fb5d34626d2ce137167e556d53c01f04e410329 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Thu, 1 Jan 2026 16:05:33 +0530
Subject: [PATCH 071/131] add pool of processes

---
 .../src/bidirectional_protocol.rs             |  10 +-
 .../proc-macro-api/src/legacy_protocol.rs     |   2 +-
 .../crates/proc-macro-api/src/lib.rs          |  31 +---
 .../crates/proc-macro-api/src/pool.rs         |  61 ++++++++
 .../crates/proc-macro-api/src/process.rs      | 147 ++++++------------
 5 files changed, 122 insertions(+), 129 deletions(-)
 create mode 100644 src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
index 643ba98f5176e..137f2dafc0de8 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
@@ -21,7 +21,7 @@ use crate::{
             serialize_span_data_index_map,
         },
     },
-    process::{ProcMacroWorker, SynIO},
+    process::ProcMacroServerProcess,
     transport::codec::postcard::PostcardProtocol,
     version,
 };
@@ -84,7 +84,7 @@ fn wrap_decode(err: io::Error) -> ServerError {
 }
 
 pub(crate) fn version_check(
-    srv: &dyn ProcMacroWorker,
+    srv: &ProcMacroServerProcess,
     callback: SubCallback<'_>,
 ) -> Result {
     let request = BidirectionalMessage::Request(Request::ApiVersionCheck {});
@@ -101,7 +101,7 @@ pub(crate) fn version_check(
 
 /// Enable support for rust-analyzer span mode if the server supports it.
 pub(crate) fn enable_rust_analyzer_spans(
-    srv: &dyn ProcMacroWorker,
+    srv: &ProcMacroServerProcess,
     callback: SubCallback<'_>,
 ) -> Result {
     let request = BidirectionalMessage::Request(Request::SetConfig(ServerConfig {
@@ -120,7 +120,7 @@ pub(crate) fn enable_rust_analyzer_spans(
 
 /// Finds proc-macros in a given dynamic library.
 pub(crate) fn find_proc_macros(
-    srv: &dyn ProcMacroWorker,
+    srv: &ProcMacroServerProcess,
     dylib_path: &AbsPath,
     callback: SubCallback<'_>,
 ) -> Result, String>, ServerError> {
@@ -205,7 +205,7 @@ pub(crate) fn expand(
 }
 
 fn run_request(
-    srv: &dyn ProcMacroWorker,
+    srv: &ProcMacroServerProcess,
     msg: BidirectionalMessage,
     callback: SubCallback<'_>,
 ) -> Result {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
index 56bf863a88e92..7b546cf7aef61 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
@@ -143,7 +143,7 @@ pub(crate) fn expand(
 }
 
 /// Sends a request to the proc-macro server and waits for a response.
-fn send_task(srv: &dyn ProcMacroWorker, req: Request) -> Result {
+fn send_task(srv: &ProcMacroServerProcess, req: Request) -> Result {
     if let Some(server_error) = srv.exited() {
         return Err(server_error.clone());
     }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index dd0c89103a2bf..ffae28f92c05e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -18,6 +18,7 @@ extern crate rustc_driver as _;
 
 pub mod bidirectional_protocol;
 pub mod legacy_protocol;
+pub mod pool;
 pub mod process;
 pub mod transport;
 
@@ -29,7 +30,8 @@ use std::{fmt, io, sync::Arc, time::SystemTime};
 pub use crate::transport::codec::Codec;
 use crate::{
     bidirectional_protocol::SubCallback,
-    process::{ProcMacroServerProcess, ProcMacroWorker},
+    pool::{ProcMacroServerPool, default_pool_size},
+    process::ProcMacroServerProcess,
 };
 
 /// The versions of the server protocol
@@ -88,7 +90,7 @@ pub struct ProcMacroClient {
     ///
     /// That means that concurrent salsa requests may block each other when expanding proc macros,
     /// which is unfortunate, but simple and good enough for the time being.
-    worker: Arc,
+    pool: Arc,
     path: AbsPathBuf,
 }
 
@@ -110,7 +112,7 @@ impl MacroDylib {
 /// we share a single expander process for all macros within a workspace.
 #[derive(Debug, Clone)]
 pub struct ProcMacro {
-    process: Arc,
+    process: Arc,
     dylib_path: Arc,
     name: Box,
     kind: ProcMacroKind,
@@ -188,31 +190,12 @@ impl ProcMacroClient {
         dylib: MacroDylib,
         callback: Option>,
     ) -> Result, ServerError> {
-        let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
-        let macros = self.worker.find_proc_macros(&dylib.path, callback)?;
-
-        let dylib_path = Arc::new(dylib.path);
-        let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
-            .ok()
-            .and_then(|metadata| metadata.modified().ok());
-        match macros {
-            Ok(macros) => Ok(macros
-                .into_iter()
-                .map(|(name, kind)| ProcMacro {
-                    process: self.worker.clone(),
-                    name: name.into(),
-                    kind,
-                    dylib_path: dylib_path.clone(),
-                    dylib_last_modified,
-                })
-                .collect()),
-            Err(message) => Err(ServerError { message, io: None }),
-        }
+        self.pool.load_dylib(&dylib, callback)
     }
 
     /// Checks if the proc-macro server has exited.
     pub fn exited(&self) -> Option<&ServerError> {
-        self.worker.exited()
+        self.pool.exited()
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
new file mode 100644
index 0000000000000..685bc05be62af
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -0,0 +1,61 @@
+use std::sync::Arc;
+
+use crate::{
+    MacroDylib, ProcMacro, ServerError, bidirectional_protocol::SubCallback,
+    process::ProcMacroServerProcess,
+};
+
+#[derive(Debug)]
+pub(crate) struct ProcMacroServerPool {
+    workers: Vec>,
+}
+
+impl ProcMacroServerPool {
+    pub(crate) fn new(workers: Vec>) -> Self {
+        Self { workers }
+    }
+}
+
+impl ProcMacroServerPool {
+    pub(crate) fn exited(&self) -> Option<&ServerError> {
+        for worker in &self.workers {
+            if let Some(e) = worker.exited() {
+                return Some(e);
+            }
+        }
+        None
+    }
+
+    pub(crate) fn load_dylib(
+        &self,
+        dylib: &MacroDylib,
+        _callback: Option>,
+    ) -> Result, ServerError> {
+        let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
+        let mut all_macros = Vec::new();
+
+        for worker in &self.workers {
+            let dylib_path = Arc::new(dylib.path.clone());
+            let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
+                .ok()
+                .and_then(|metadata| metadata.modified().ok());
+            let macros = worker.load_dylib(&dylib.path, None)?;
+
+            for (name, kind) in macros {
+                all_macros.push(ProcMacro {
+                    process: worker.clone(),
+                    name: name.into(),
+                    kind,
+                    dylib_path: Arc::new(dylib.path.clone()),
+                    dylib_last_modified,
+                });
+            }
+        }
+
+        Ok(all_macros)
+    }
+}
+
+pub(crate) fn default_pool_size() -> usize {
+    std::thread::available_parallelism().map(|n| n.get()).unwrap_or(1).min(4)
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index efb8e0e84a1f4..30877c5cf491b 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -3,7 +3,7 @@
 use std::{
     fmt::Debug,
     io::{self, BufRead, BufReader, Read, Write},
-    panic::{AssertUnwindSafe, RefUnwindSafe},
+    panic::AssertUnwindSafe,
     process::{Child, ChildStdin, ChildStdout, Command, Stdio},
     sync::{Arc, Mutex, OnceLock},
 };
@@ -82,77 +82,6 @@ pub(crate) struct ProcessSrvState {
     stdout: Box,
 }
 
-impl ProcMacroWorker for ProcMacroServerProcess {
-    fn find_proc_macros(
-        &self,
-        dylib_path: &AbsPath,
-        callback: Option>,
-    ) -> Result, String>, ServerError> {
-        ProcMacroServerProcess::find_proc_macros(self, dylib_path, callback)
-    }
-
-    fn expand(
-        &self,
-        proc_macro: &ProcMacro,
-        subtree: tt::SubtreeView<'_>,
-        attr: Option>,
-        env: Vec<(String, String)>,
-        def_site: Span,
-        call_site: Span,
-        mixed_site: Span,
-        current_dir: String,
-        callback: Option>,
-    ) -> Result, ServerError> {
-        ProcMacroServerProcess::expand(
-            self,
-            proc_macro,
-            subtree,
-            attr,
-            env,
-            def_site,
-            call_site,
-            mixed_site,
-            current_dir,
-            callback,
-        )
-    }
-
-    fn exited(&self) -> Option<&ServerError> {
-        ProcMacroServerProcess::exited(self)
-    }
-
-    fn version(&self) -> u32 {
-        ProcMacroServerProcess::version(self)
-    }
-
-    fn rust_analyzer_spans(&self) -> bool {
-        ProcMacroServerProcess::rust_analyzer_spans(self)
-    }
-
-    fn enable_rust_analyzer_spans(
-        &self,
-        callback: Option>,
-    ) -> Result {
-        ProcMacroServerProcess::enable_rust_analyzer_spans(self, callback)
-    }
-
-    fn use_postcard(&self) -> bool {
-        ProcMacroServerProcess::use_postcard(self)
-    }
-
-    fn state(&self) -> &Mutex {
-        &self.state
-    }
-
-    fn get_exited(&self) -> &OnceLock> {
-        &self.exited
-    }
-
-    fn is_reusable(&self) -> bool {
-        !self.single_use
-    }
-}
-
 impl ProcMacroServerProcess {
     /// Starts the proc-macro server and performs a version check
     pub(crate) fn spawn<'a>(
@@ -220,7 +149,11 @@ impl ProcMacroServerProcess {
                 let (process, stdin, stdout) = spawn(format)?;
 
                 io::Result::Ok(ProcMacroServerProcess {
-                    state: Mutex::new(ProcessSrvState { process, stdin, stdout }),
+                    state: Mutex::new(ProcessSrvState {
+                        process,
+                        stdin,
+                        stdout,
+                    }),
                     version: 0,
                     protocol: match format {
                         Some(ProtocolFormat::BidirectionalPostcardPrototype) => {
@@ -271,6 +204,37 @@ impl ProcMacroServerProcess {
         Err(err.unwrap())
     }
 
+    pub(crate) fn load_dylib(
+        &self,
+        dylib_path: &AbsPath,
+        callback: Option>,
+    ) -> Result, ServerError> {
+        let _state = self.state.lock().unwrap();
+
+        // if state.loaded_dylibs.contains(dylib_path) {
+        //     // Already loaded in this worker
+        //     return Ok(Vec::new());
+        // }
+
+        let result = match self.protocol {
+            Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
+                legacy_protocol::find_proc_macros(self, dylib_path)?
+            }
+            Protocol::BidirectionalPostcardPrototype { .. } => {
+                let cb = callback.expect("callback required");
+                bidirectional_protocol::find_proc_macros(self, dylib_path, cb)?
+            }
+        };
+
+        match result {
+            Ok(macros) => {
+                // state.loaded_dylibs.insert(dylib_path.to_owned());
+                Ok(macros)
+            }
+            Err(message) => Err(ServerError { message, io: None }),
+        }
+    }
+
     /// Returns the server error if the process has exited.
     pub(crate) fn exited(&self) -> Option<&ServerError> {
         self.exited.get().map(|it| &it.0)
@@ -314,21 +278,6 @@ impl ProcMacroServerProcess {
         }
     }
 
-    /// Finds proc-macros in a given dynamic library.
-    pub(crate) fn find_proc_macros(
-        &self,
-        dylib_path: &AbsPath,
-        callback: Option>,
-    ) -> Result, String>, ServerError> {
-        match self.protocol {
-            Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
-            Protocol::BidirectionalPostcardPrototype { .. } => {
-                let cb = callback.expect("callback required for bidirectional protocol");
-                bidirectional_protocol::find_proc_macros(self, dylib_path, cb)
-            }
-        }
-    }
-
     pub(crate) fn expand(
         &self,
         proc_macro: &ProcMacro,
@@ -365,25 +314,25 @@ impl ProcMacroServerProcess {
             ),
         };
 
-        if self.is_reusable() {
+        if !self.is_reusable() {
             self.terminate();
         }
 
         result
     }
 
+    fn is_reusable(&self) -> bool {
+        self.single_use
+    }
+
     fn terminate(&self) {
         if let Ok(mut state) = self.state.lock() {
             let _ = state.process.child.kill();
         }
     }
-}
 
-pub(crate) struct SynIO;
-
-impl SynIO {
     pub(crate) fn send_task(
-        proc_macro_worker: &dyn ProcMacroWorker,
+        &self,
         send: impl FnOnce(
             &mut dyn Write,
             &mut dyn BufRead,
@@ -392,7 +341,7 @@ impl SynIO {
         ) -> Result, ServerError>,
         req: Request,
     ) -> Result {
-        SynIO::with_locked_io::(proc_macro_worker, |writer, reader, buf| {
+        self.with_locked_io::(|writer, reader, buf| {
             send(writer, reader, req, buf).and_then(|res| {
                 res.ok_or_else(|| {
                     let message = "proc-macro server did not respond with data".to_owned();
@@ -409,10 +358,10 @@ impl SynIO {
     }
 
     pub(crate) fn with_locked_io(
-        proc_macro_worker: &dyn ProcMacroWorker,
+        &self,
         f: impl FnOnce(&mut dyn Write, &mut dyn BufRead, &mut C::Buf) -> Result,
     ) -> Result {
-        let state = &mut *proc_macro_worker.state().lock().unwrap();
+        let state = &mut *self.state.lock().unwrap();
         let mut buf = C::Buf::default();
 
         f(&mut state.stdin, &mut state.stdout, &mut buf).map_err(|e| {
@@ -434,11 +383,11 @@ impl SynIO {
     }
 
     pub(crate) fn run_bidirectional(
-        proc_macro_worker: &dyn ProcMacroWorker,
+        &self,
         initial: BidirectionalMessage,
         callback: SubCallback<'_>,
     ) -> Result {
-        SynIO::with_locked_io::(proc_macro_worker, |writer, reader, buf| {
+        self.with_locked_io::(|writer, reader, buf| {
             bidirectional_protocol::run_conversation::(writer, reader, buf, initial, callback)
         })
     }

From c685aa912349b96f21485c434d03ee288cfceb9e Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Fri, 2 Jan 2026 11:03:20 +0530
Subject: [PATCH 072/131] direct client calls via pool

---
 .../src/bidirectional_protocol.rs             | 11 +--
 .../proc-macro-api/src/legacy_protocol.rs     | 11 +--
 .../crates/proc-macro-api/src/lib.rs          | 21 +-----
 .../crates/proc-macro-api/src/pool.rs         | 72 ++++++++++++++++---
 .../crates/proc-macro-api/src/process.rs      | 65 ++++++-----------
 5 files changed, 98 insertions(+), 82 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
index 137f2dafc0de8..cd1f6f6f1f335 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
@@ -138,6 +138,7 @@ pub(crate) fn find_proc_macros(
 
 pub(crate) fn expand(
     proc_macro: &ProcMacro,
+    process: &ProcMacroServerProcess,
     subtree: tt::SubtreeView<'_>,
     attr: Option>,
     env: Vec<(String, String)>,
@@ -147,7 +148,7 @@ pub(crate) fn expand(
     current_dir: String,
     callback: SubCallback<'_>,
 ) -> Result, crate::ServerError> {
-    let version = proc_macro.process.version();
+    let version = process.version();
     let mut span_data_table = SpanDataIndexMap::default();
     let def_site = span_data_table.insert_full(def_site).0;
     let call_site = span_data_table.insert_full(call_site).0;
@@ -164,7 +165,7 @@ pub(crate) fn expand(
                 call_site,
                 mixed_site,
             },
-            span_data_table: if proc_macro.process.rust_analyzer_spans() {
+            span_data_table: if process.rust_analyzer_spans() {
                 serialize_span_data_index_map(&span_data_table)
             } else {
                 Vec::new()
@@ -175,13 +176,13 @@ pub(crate) fn expand(
         current_dir: Some(current_dir),
     })));
 
-    let response_payload = run_request(proc_macro.process.as_ref(), task, callback)?;
+    let response_payload = run_request(process, task, callback)?;
 
     match response_payload {
         BidirectionalMessage::Response(Response::ExpandMacro(it)) => Ok(it
             .map(|tree| {
                 let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table);
-                if proc_macro.needs_fixup_change() {
+                if proc_macro.needs_fixup_change(process) {
                     proc_macro.change_fixup_to_match_old_server(&mut expanded);
                 }
                 expanded
@@ -194,7 +195,7 @@ pub(crate) fn expand(
                     version,
                     &deserialize_span_data_index_map(&resp.span_data_table),
                 );
-                if proc_macro.needs_fixup_change() {
+                if proc_macro.needs_fixup_change(process) {
                     proc_macro.change_fixup_to_match_old_server(&mut expanded);
                 }
                 expanded
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
index 7b546cf7aef61..412d207303241 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
@@ -77,6 +77,7 @@ pub(crate) fn find_proc_macros(
 
 pub(crate) fn expand(
     proc_macro: &ProcMacro,
+    process: &ProcMacroServerProcess,
     subtree: tt::SubtreeView<'_>,
     attr: Option>,
     env: Vec<(String, String)>,
@@ -85,7 +86,7 @@ pub(crate) fn expand(
     mixed_site: Span,
     current_dir: String,
 ) -> Result, crate::ServerError> {
-    let version = proc_macro.process.version();
+    let version = process.version();
     let mut span_data_table = SpanDataIndexMap::default();
     let def_site = span_data_table.insert_full(def_site).0;
     let call_site = span_data_table.insert_full(call_site).0;
@@ -102,7 +103,7 @@ pub(crate) fn expand(
                 call_site,
                 mixed_site,
             },
-            span_data_table: if proc_macro.process.rust_analyzer_spans() {
+            span_data_table: if process.rust_analyzer_spans() {
                 serialize_span_data_index_map(&span_data_table)
             } else {
                 Vec::new()
@@ -113,13 +114,13 @@ pub(crate) fn expand(
         current_dir: Some(current_dir),
     };
 
-    let response = send_task(proc_macro.process.as_ref(), Request::ExpandMacro(Box::new(task)))?;
+    let response = send_task(process, Request::ExpandMacro(Box::new(task)))?;
 
     match response {
         Response::ExpandMacro(it) => Ok(it
             .map(|tree| {
                 let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table);
-                if proc_macro.needs_fixup_change() {
+                if proc_macro.needs_fixup_change(process) {
                     proc_macro.change_fixup_to_match_old_server(&mut expanded);
                 }
                 expanded
@@ -132,7 +133,7 @@ pub(crate) fn expand(
                     version,
                     &deserialize_span_data_index_map(&resp.span_data_table),
                 );
-                if proc_macro.needs_fixup_change() {
+                if proc_macro.needs_fixup_change(process) {
                     proc_macro.change_fixup_to_match_old_server(&mut expanded);
                 }
                 expanded
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index ffae28f92c05e..fe17e14024cc5 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -112,7 +112,7 @@ impl MacroDylib {
 /// we share a single expander process for all macros within a workspace.
 #[derive(Debug, Clone)]
 pub struct ProcMacro {
-    process: Arc,
+    process: ProcMacroServerPool,
     dylib_path: Arc,
     name: Box,
     kind: ProcMacroKind,
@@ -126,7 +126,6 @@ impl PartialEq for ProcMacro {
             && self.kind == other.kind
             && self.dylib_path == other.dylib_path
             && self.dylib_last_modified == other.dylib_last_modified
-            && Arc::ptr_eq(&self.process, &other.process)
     }
 }
 
@@ -210,8 +209,8 @@ impl ProcMacro {
         self.kind
     }
 
-    fn needs_fixup_change(&self) -> bool {
-        let version = self.process.version();
+    fn needs_fixup_change(&self, process: &ProcMacroServerProcess) -> bool {
+        let version = process.version();
         (version::RUST_ANALYZER_SPAN_SUPPORT..version::HASHED_AST_ID).contains(&version)
     }
 
@@ -241,20 +240,6 @@ impl ProcMacro {
         current_dir: String,
         callback: Option>,
     ) -> Result, ServerError> {
-        let (mut subtree, mut attr) = (subtree, attr);
-        let (mut subtree_changed, mut attr_changed);
-        if self.needs_fixup_change() {
-            subtree_changed = tt::TopSubtree::from_subtree(subtree);
-            self.change_fixup_to_match_old_server(&mut subtree_changed);
-            subtree = subtree_changed.view();
-
-            if let Some(attr) = &mut attr {
-                attr_changed = tt::TopSubtree::from_subtree(*attr);
-                self.change_fixup_to_match_old_server(&mut attr_changed);
-                *attr = attr_changed.view();
-            }
-        }
-
         self.process.expand(
             self,
             subtree,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index 685bc05be62af..4639374f3e243 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -1,29 +1,38 @@
 use std::sync::Arc;
 
+use tt::Span;
+
 use crate::{
     MacroDylib, ProcMacro, ServerError, bidirectional_protocol::SubCallback,
     process::ProcMacroServerProcess,
 };
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub(crate) struct ProcMacroServerPool {
-    workers: Vec>,
+    workers: Arc<[ProcMacroServerProcess]>,
 }
 
 impl ProcMacroServerPool {
-    pub(crate) fn new(workers: Vec>) -> Self {
-        Self { workers }
+    pub(crate) fn new(workers: Vec) -> Self {
+        Self { workers: workers.into() }
     }
 }
 
 impl ProcMacroServerPool {
     pub(crate) fn exited(&self) -> Option<&ServerError> {
-        for worker in &self.workers {
-            if let Some(e) = worker.exited() {
-                return Some(e);
+        for worker in &*self.workers {
+            worker.exited()?;
+        }
+        self.workers[0].exited()
+    }
+
+    fn pick_process(&self) -> &ProcMacroServerProcess {
+        for workers in &*self.workers {
+            if workers.can_use() {
+                return workers;
             }
         }
-        None
+        &self.workers[0]
     }
 
     pub(crate) fn load_dylib(
@@ -34,16 +43,16 @@ impl ProcMacroServerPool {
         let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
         let mut all_macros = Vec::new();
 
-        for worker in &self.workers {
+        for worker in &*self.workers {
             let dylib_path = Arc::new(dylib.path.clone());
             let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
                 .ok()
                 .and_then(|metadata| metadata.modified().ok());
-            let macros = worker.load_dylib(&dylib.path, None)?;
+            let macros = worker.find_proc_macros(&dylib.path, None)?.unwrap();
 
             for (name, kind) in macros {
                 all_macros.push(ProcMacro {
-                    process: worker.clone(),
+                    process: self.clone(),
                     name: name.into(),
                     kind,
                     dylib_path: Arc::new(dylib.path.clone()),
@@ -54,6 +63,47 @@ impl ProcMacroServerPool {
 
         Ok(all_macros)
     }
+
+    pub(crate) fn expand(
+        &self,
+        proc_macro: &ProcMacro,
+        subtree: tt::SubtreeView<'_>,
+        attr: Option>,
+        env: Vec<(String, String)>,
+        def_site: Span,
+        call_site: Span,
+        mixed_site: Span,
+        current_dir: String,
+        callback: Option>,
+    ) -> Result, ServerError> {
+        let process = self.pick_process();
+
+        let (mut subtree, mut attr) = (subtree, attr);
+        let (mut subtree_changed, mut attr_changed);
+        if proc_macro.needs_fixup_change(process) {
+            subtree_changed = tt::TopSubtree::from_subtree(subtree);
+            proc_macro.change_fixup_to_match_old_server(&mut subtree_changed);
+            subtree = subtree_changed.view();
+
+            if let Some(attr) = &mut attr {
+                attr_changed = tt::TopSubtree::from_subtree(*attr);
+                proc_macro.change_fixup_to_match_old_server(&mut attr_changed);
+                *attr = attr_changed.view();
+            }
+        }
+
+        process.expand(
+            proc_macro,
+            subtree,
+            attr,
+            env,
+            def_site,
+            call_site,
+            mixed_site,
+            current_dir,
+            callback,
+        )
+    }
 }
 
 pub(crate) fn default_pool_size() -> usize {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index 30877c5cf491b..a41bb58e74beb 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -5,7 +5,10 @@ use std::{
     io::{self, BufRead, BufReader, Read, Write},
     panic::AssertUnwindSafe,
     process::{Child, ChildStdin, ChildStdout, Command, Stdio},
-    sync::{Arc, Mutex, OnceLock},
+    sync::{
+        Arc, Mutex, OnceLock,
+        atomic::{AtomicBool, Ordering},
+    },
 };
 
 use paths::AbsPath;
@@ -29,7 +32,7 @@ pub(crate) struct ProcMacroServerProcess {
     protocol: Protocol,
     /// Populated when the server exits.
     exited: OnceLock>,
-    single_use: bool,
+    can_use: AtomicBool,
 }
 
 impl std::fmt::Debug for ProcMacroServerProcess {
@@ -149,11 +152,7 @@ impl ProcMacroServerProcess {
                 let (process, stdin, stdout) = spawn(format)?;
 
                 io::Result::Ok(ProcMacroServerProcess {
-                    state: Mutex::new(ProcessSrvState {
-                        process,
-                        stdin,
-                        stdout,
-                    }),
+                    state: Mutex::new(ProcessSrvState { process, stdin, stdout }),
                     version: 0,
                     protocol: match format {
                         Some(ProtocolFormat::BidirectionalPostcardPrototype) => {
@@ -164,7 +163,7 @@ impl ProcMacroServerProcess {
                         }
                     },
                     exited: OnceLock::new(),
-                    single_use,
+                    can_use: AtomicBool::new(true),
                 })
             };
             let mut srv = create_srv()?;
@@ -204,34 +203,20 @@ impl ProcMacroServerProcess {
         Err(err.unwrap())
     }
 
-    pub(crate) fn load_dylib(
+    /// Finds proc-macros in a given dynamic library.
+    pub(crate) fn find_proc_macros(
         &self,
         dylib_path: &AbsPath,
         callback: Option>,
-    ) -> Result, ServerError> {
-        let _state = self.state.lock().unwrap();
-
-        // if state.loaded_dylibs.contains(dylib_path) {
-        //     // Already loaded in this worker
-        //     return Ok(Vec::new());
-        // }
-
-        let result = match self.protocol {
+    ) -> Result, String>, ServerError> {
+        match self.protocol {
             Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
-                legacy_protocol::find_proc_macros(self, dylib_path)?
+                legacy_protocol::find_proc_macros(self, dylib_path)
             }
             Protocol::BidirectionalPostcardPrototype { .. } => {
-                let cb = callback.expect("callback required");
-                bidirectional_protocol::find_proc_macros(self, dylib_path, cb)?
-            }
-        };
-
-        match result {
-            Ok(macros) => {
-                // state.loaded_dylibs.insert(dylib_path.to_owned());
-                Ok(macros)
+                let cb = callback.expect("callback required for bidirectional protocol");
+                bidirectional_protocol::find_proc_macros(self, dylib_path, cb)
             }
-            Err(message) => Err(ServerError { message, io: None }),
         }
     }
 
@@ -290,9 +275,11 @@ impl ProcMacroServerProcess {
         current_dir: String,
         callback: Option>,
     ) -> Result, ServerError> {
+        self.can_use.store(false, Ordering::Release);
         let result = match self.protocol {
             Protocol::LegacyJson { .. } => legacy_protocol::expand(
                 proc_macro,
+                    self,
                 subtree,
                 attr,
                 env,
@@ -303,6 +290,7 @@ impl ProcMacroServerProcess {
             ),
             Protocol::BidirectionalPostcardPrototype { .. } => bidirectional_protocol::expand(
                 proc_macro,
+                self,
                 subtree,
                 attr,
                 env,
@@ -314,23 +302,10 @@ impl ProcMacroServerProcess {
             ),
         };
 
-        if !self.is_reusable() {
-            self.terminate();
-        }
-
+        self.can_use.store(true, Ordering::Release);
         result
     }
 
-    fn is_reusable(&self) -> bool {
-        self.single_use
-    }
-
-    fn terminate(&self) {
-        if let Ok(mut state) = self.state.lock() {
-            let _ = state.process.child.kill();
-        }
-    }
-
     pub(crate) fn send_task(
         &self,
         send: impl FnOnce(
@@ -391,6 +366,10 @@ impl ProcMacroServerProcess {
             bidirectional_protocol::run_conversation::(writer, reader, buf, initial, callback)
         })
     }
+
+    pub(crate) fn can_use(&self) -> bool {
+        self.can_use.load(Ordering::Acquire)
+    }
 }
 
 /// Manages the execution of the proc-macro server process.

From 82e758acc1a76b2a8d8083121b031fc2b3e3a653 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Fri, 2 Jan 2026 11:46:00 +0530
Subject: [PATCH 073/131] add a better process picker and improve dylib loading
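
The picker now selects the worker with the fewest in-flight requests, tracked by a
per-worker `AtomicU32`. A small sketch of the idea with placeholder types (the real pool
also skips exited workers and propagates errors):

```rust
// Sketch of least-loaded selection over a per-worker active-request counter.
use std::sync::atomic::{AtomicU32, Ordering};

struct Worker {
    id: usize,
    active: AtomicU32,
}

fn pick(workers: &[Worker]) -> &Worker {
    workers
        .iter()
        .min_by_key(|w| w.active.load(Ordering::Acquire))
        .expect("worker pool must not be empty")
}

fn main() {
    let workers = [
        Worker { id: 0, active: AtomicU32::new(2) },
        Worker { id: 1, active: AtomicU32::new(0) },
    ];
    // Each request bumps the counter for its duration, so the next caller
    // sees an up-to-date load figure.
    let w = pick(&workers);
    assert_eq!(w.id, 1);
    w.active.fetch_add(1, Ordering::AcqRel);
    w.active.fetch_sub(1, Ordering::AcqRel);
}
```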

---
 .../crates/proc-macro-api/src/pool.rs         | 50 ++++++++++---------
 .../crates/proc-macro-api/src/process.rs      | 14 +++---
 2 files changed, 33 insertions(+), 31 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index 4639374f3e243..4215b0f2c04f1 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -27,12 +27,10 @@ impl ProcMacroServerPool {
     }
 
     fn pick_process(&self) -> &ProcMacroServerProcess {
-        for workers in &*self.workers {
-            if workers.can_use() {
-                return workers;
-            }
-        }
-        &self.workers[0]
+        self.workers
+            .iter()
+            .min_by_key(|w| w.number_of_active_req())
+            .expect("worker pool must not be empty")
     }
 
     pub(crate) fn load_dylib(
@@ -41,27 +39,31 @@ impl ProcMacroServerPool {
         _callback: Option>,
     ) -> Result, ServerError> {
         let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
-        let mut all_macros = Vec::new();
 
-        for worker in &*self.workers {
-            let dylib_path = Arc::new(dylib.path.clone());
-            let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
-                .ok()
-                .and_then(|metadata| metadata.modified().ok());
-            let macros = worker.find_proc_macros(&dylib.path, None)?.unwrap();
-
-            for (name, kind) in macros {
-                all_macros.push(ProcMacro {
-                    process: self.clone(),
-                    name: name.into(),
-                    kind,
-                    dylib_path: Arc::new(dylib.path.clone()),
-                    dylib_last_modified,
-                });
-            }
+        let dylib_path = Arc::new(dylib.path.clone());
+        let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
+            .ok()
+            .and_then(|metadata| metadata.modified().ok());
+
+        let first = &self.workers[0];
+        let macros = first.find_proc_macros(&dylib.path, None)?.unwrap();
+
+        for worker in &self.workers[1..] {
+            let _ = worker.find_proc_macros(&dylib.path, None)?;
         }
 
-        Ok(all_macros)
+        let result = macros
+            .into_iter()
+            .map(|(name, kind)| ProcMacro {
+                process: self.clone(),
+                name: name.into(),
+                kind,
+                dylib_path: dylib_path.clone(),
+                dylib_last_modified,
+            })
+            .collect();
+
+        Ok(result)
     }
 
     pub(crate) fn expand(
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index a41bb58e74beb..775d59174f1cb 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -7,7 +7,7 @@ use std::{
     process::{Child, ChildStdin, ChildStdout, Command, Stdio},
     sync::{
         Arc, Mutex, OnceLock,
-        atomic::{AtomicBool, Ordering},
+        atomic::{AtomicU32, Ordering},
     },
 };
 
@@ -32,7 +32,7 @@ pub(crate) struct ProcMacroServerProcess {
     protocol: Protocol,
     /// Populated when the server exits.
     exited: OnceLock>,
-    can_use: AtomicBool,
+    active: AtomicU32,
 }
 
 impl std::fmt::Debug for ProcMacroServerProcess {
@@ -163,7 +163,7 @@ impl ProcMacroServerProcess {
                         }
                     },
                     exited: OnceLock::new(),
-                    can_use: AtomicBool::new(true),
+                    active: AtomicU32::new(0),
                 })
             };
             let mut srv = create_srv()?;
@@ -275,7 +275,7 @@ impl ProcMacroServerProcess {
         current_dir: String,
         callback: Option>,
     ) -> Result, ServerError> {
-        self.can_use.store(false, Ordering::Release);
+        self.active.fetch_add(1, Ordering::AcqRel);
         let result = match self.protocol {
             Protocol::LegacyJson { .. } => legacy_protocol::expand(
                 proc_macro,
@@ -302,7 +302,7 @@ impl ProcMacroServerProcess {
             ),
         };
 
-        self.can_use.store(true, Ordering::Release);
+        self.active.fetch_sub(1, Ordering::AcqRel);
         result
     }
 
@@ -367,8 +367,8 @@ impl ProcMacroServerProcess {
         })
     }
 
-    pub(crate) fn can_use(&self) -> bool {
-        self.can_use.load(Ordering::Acquire)
+    pub(crate) fn number_of_active_req(&self) -> u32 {
+        self.active.load(Ordering::Acquire)
     }
 }
 

From 922bc7e4d49da5656c901e0a2d4a9805f413dcf8 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Fri, 2 Jan 2026 11:56:19 +0530
Subject: [PATCH 074/131] rename process to pool in ProcMacro struct

---
 src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs  | 4 ++--
 src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index fe17e14024cc5..09999ea5081a0 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -112,7 +112,7 @@ impl MacroDylib {
 /// we share a single expander process for all macros within a workspace.
 #[derive(Debug, Clone)]
 pub struct ProcMacro {
-    process: ProcMacroServerPool,
+    pool: ProcMacroServerPool,
     dylib_path: Arc,
     name: Box,
     kind: ProcMacroKind,
@@ -240,7 +240,7 @@ impl ProcMacro {
         current_dir: String,
         callback: Option>,
     ) -> Result, ServerError> {
-        self.process.expand(
+        self.pool.expand(
             self,
             subtree,
             attr,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index 4215b0f2c04f1..eef8d0194dc57 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -55,7 +55,7 @@ impl ProcMacroServerPool {
         let result = macros
             .into_iter()
             .map(|(name, kind)| ProcMacro {
-                process: self.clone(),
+                pool: self.clone(),
                 name: name.into(),
                 kind,
                 dylib_path: dylib_path.clone(),

From c4c336ad7c291686542826bfd5b0bc8f2e298777 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Fri, 2 Jan 2026 12:11:11 +0530
Subject: [PATCH 075/131] add module doc comment to pool

---
 src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index eef8d0194dc57..fe4649441b890 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -1,3 +1,4 @@
+//! This module represents Process Pool
 use std::sync::Arc;
 
 use tt::Span;

From c8a3551bd162c6374d4c49bfc06070488edb44ed Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sat, 3 Jan 2026 11:32:08 +0530
Subject: [PATCH 076/131] change callback from FnMut to Fn as we only transform
 messages and do not mutate state
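
The sub-request callback only maps a request to a response and never mutates captured
state, so a shared `&dyn Fn` is enough and call sites can drop the `&mut` borrow. A
minimal sketch with placeholder `SubRequest`/`SubResponse` variants:

```rust
// Sketch of the callback-type change; the point is that a read-only
// transformation can be passed as `&dyn Fn(..)` instead of `&mut dyn FnMut(..)`.
enum SubRequest {
    FilePath { file_id: u32 },
}

enum SubResponse {
    FilePathResult { path: String },
}

type SubCallback<'a> = &'a dyn Fn(SubRequest) -> Result<SubResponse, String>;

fn handle(cb: SubCallback<'_>, req: SubRequest) -> Result<SubResponse, String> {
    cb(req)
}

fn main() {
    // No `mut` binding and no exclusive borrow needed at the call site.
    let cb = |req: SubRequest| -> Result<SubResponse, String> {
        match req {
            SubRequest::FilePath { file_id } => {
                Ok(SubResponse::FilePathResult { path: format!("file-{file_id}.rs") })
            }
        }
    };
    if let Ok(SubResponse::FilePathResult { path }) =
        handle(&cb, SubRequest::FilePath { file_id: 1 })
    {
        println!("{path}");
    }
}
```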

---
 src/tools/rust-analyzer/crates/load-cargo/src/lib.rs        | 6 +++---
 .../crates/proc-macro-api/src/bidirectional_protocol.rs     | 2 +-
 .../rust-analyzer/crates/proc-macro-api/src/process.rs      | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
index 8342492a33a4c..ccc9aa4291ec4 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -435,7 +435,7 @@ pub fn load_proc_macro(
 ) -> ProcMacroLoadResult {
     let res: Result, _> = (|| {
         let dylib = MacroDylib::new(path.to_path_buf());
-        let vec = server.load_dylib(dylib, Some(&mut reject_subrequests)).map_err(|e| {
+        let vec = server.load_dylib(dylib, Some(&reject_subrequests)).map_err(|e| {
             ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
         })?;
         if vec.is_empty() {
@@ -541,7 +541,7 @@ impl ProcMacroExpander for Expander {
         mixed_site: Span,
         current_dir: String,
     ) -> Result {
-        let mut cb = |req| match req {
+        let cb = |req| match req {
             SubRequest::LocalFilePath { file_id } => {
                 let file_id = FileId::from_raw(file_id);
                 let source_root_id = db.file_source_root(file_id).source_root_id(db);
@@ -613,7 +613,7 @@ impl ProcMacroExpander for Expander {
             call_site,
             mixed_site,
             current_dir,
-            Some(&mut cb),
+            Some(&cb),
         ) {
             Ok(Ok(subtree)) => Ok(subtree),
             Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err)),
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
index cd1f6f6f1f335..25266c46fe895 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
@@ -28,7 +28,7 @@ use crate::{
 
 pub mod msg;
 
-pub type SubCallback<'a> = &'a mut dyn FnMut(SubRequest) -> Result;
+pub type SubCallback<'a> = &'a dyn Fn(SubRequest) -> Result;
 
 pub fn run_conversation(
     writer: &mut dyn Write,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index 775d59174f1cb..c1b95fa7f10e7 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -168,7 +168,7 @@ impl ProcMacroServerProcess {
             };
             let mut srv = create_srv()?;
             tracing::info!("sending proc-macro server version check");
-            match srv.version_check(Some(&mut reject_subrequests)) {
+            match srv.version_check(Some(&reject_subrequests)) {
                 Ok(v) if v > version::CURRENT_API_VERSION => {
                     let process_version = binary_server_version();
                     err = Some(io::Error::other(format!(
@@ -182,7 +182,7 @@ impl ProcMacroServerProcess {
                     srv.version = v;
                     if srv.version >= version::RUST_ANALYZER_SPAN_SUPPORT
                         && let Ok(new_mode) =
-                            srv.enable_rust_analyzer_spans(Some(&mut reject_subrequests))
+                            srv.enable_rust_analyzer_spans(Some(&reject_subrequests))
                     {
                         match &mut srv.protocol {
                             Protocol::LegacyJson { mode }

From 66bca6a25214332ff559acb7678d3ce423279e77 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sat, 3 Jan 2026 11:36:09 +0530
Subject: [PATCH 077/131] propagate error from load dylibs
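
`find_proc_macros` returns a nested result (a transport error on the outside, a
server-reported message on the inside); instead of `unwrap`ing the inner layer, both are
now surfaced. A small sketch of the flattening with simplified placeholder types:

```rust
// Sketch of flattening the nested result: the outer layer is a transport/IO
// failure, the inner layer a message reported by the server.
#[derive(Debug)]
struct ServerError {
    message: String,
    io: Option<std::io::Error>,
}

fn find_proc_macros() -> Result<Result<Vec<(String, String)>, String>, ServerError> {
    // Pretend the transport worked but the server rejected the dylib.
    Ok(Err("not a proc-macro dylib".to_owned()))
}

fn load_dylib() -> Result<Vec<(String, String)>, ServerError> {
    // `?` propagates the transport error; `map_err` turns the server-side
    // message into a ServerError instead of panicking on unwrap.
    let macros = find_proc_macros()?.map_err(|message| ServerError { message, io: None })?;
    Ok(macros)
}

fn main() {
    match load_dylib() {
        Ok(macros) => println!("loaded {} proc-macros", macros.len()),
        Err(err) => eprintln!("failed to load dylib: {}", err.message),
    }
}
```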

---
 .../crates/proc-macro-api/src/pool.rs         | 28 ++++++++++---------
 1 file changed, 15 insertions(+), 13 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index fe4649441b890..20389f666883f 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -37,23 +37,27 @@ impl ProcMacroServerPool {
     pub(crate) fn load_dylib(
         &self,
         dylib: &MacroDylib,
-        _callback: Option>,
+        callback: Option>,
     ) -> Result, ServerError> {
-        let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
+        let _span = tracing::info_span!("ProcMacroServer::load_dylib").entered();
 
         let dylib_path = Arc::new(dylib.path.clone());
-        let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
-            .ok()
-            .and_then(|metadata| metadata.modified().ok());
+        let dylib_last_modified =
+            std::fs::metadata(dylib_path.as_path()).ok().and_then(|m| m.modified().ok());
+
+        let (first, rest) = self.workers.split_first().expect("worker pool must not be empty");
 
-        let first = &self.workers[0];
-        let macros = first.find_proc_macros(&dylib.path, None)?.unwrap();
+        let macros = first
+            .find_proc_macros(&dylib.path, callback)?
+            .map_err(|e| ServerError { message: e, io: None })?;
 
-        for worker in &self.workers[1..] {
-            let _ = worker.find_proc_macros(&dylib.path, None)?;
+        for worker in rest {
+            worker
+                .find_proc_macros(&dylib.path, callback)?
+                .map_err(|e| ServerError { message: e, io: None })?;
         }
 
-        let result = macros
+        Ok(macros
             .into_iter()
             .map(|(name, kind)| ProcMacro {
                 pool: self.clone(),
@@ -62,9 +66,7 @@ impl ProcMacroServerPool {
                 dylib_path: dylib_path.clone(),
                 dylib_last_modified,
             })
-            .collect();
-
-        Ok(result)
+            .collect())
     }
 
     pub(crate) fn expand(

From 09c91b79a84faebb2e06094f8c8d1592a371a3e4 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sat, 3 Jan 2026 13:43:23 +0530
Subject: [PATCH 078/131] pick workers which have not exited

---
 .../rust-analyzer/crates/proc-macro-api/src/pool.rs    | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index 20389f666883f..c75e9742a5d13 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -27,11 +27,15 @@ impl ProcMacroServerPool {
         self.workers[0].exited()
     }
 
-    fn pick_process(&self) -> &ProcMacroServerProcess {
+    fn pick_process(&self) -> Result<&ProcMacroServerProcess, ServerError> {
         self.workers
             .iter()
+            .filter(|w| w.exited().is_none())
             .min_by_key(|w| w.number_of_active_req())
-            .expect("worker pool must not be empty")
+            .ok_or_else(|| ServerError {
+                message: "all proc-macro server workers have exited".into(),
+                io: None,
+            })
     }
 
     pub(crate) fn load_dylib(
@@ -81,7 +85,7 @@ impl ProcMacroServerPool {
         current_dir: String,
         callback: Option>,
     ) -> Result, ServerError> {
-        let process = self.pick_process();
+        let process = self.pick_process()?;
 
         let (mut subtree, mut attr) = (subtree, attr);
         let (mut subtree_changed, mut attr_changed);

From 0936597b3ea2944869d1a0b2058746e295eeb16d Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sun, 4 Jan 2026 05:02:15 +0530
Subject: [PATCH 079/131] add version to pool
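
All workers in one pool speak the same protocol version, so the version is captured once
at pool construction and `needs_fixup_change` no longer needs a specific process handle.
A minimal sketch under that assumption, with illustrative types:

```rust
// Minimal sketch: every worker in a pool is spawned with the same protocol
// version, so the pool caches it once. Types are illustrative stand-ins.
use std::sync::Arc;

struct Worker {
    version: u32,
}

#[derive(Clone)]
struct Pool {
    workers: Arc<[Worker]>,
    version: u32, // cached at construction; no need to pick a worker first
}

impl Pool {
    fn new(workers: Vec<Worker>) -> Self {
        let version = workers[0].version;
        Self { workers: workers.into(), version }
    }

    fn version(&self) -> u32 {
        self.version
    }
}

fn main() {
    let pool = Pool::new(vec![Worker { version: 5 }, Worker { version: 5 }]);
    // Version-dependent behaviour (e.g. span fixups) can key off the pool alone.
    let needs_fixup = (3..7).contains(&pool.version());
    println!("workers: {}, needs fixup: {needs_fixup}", pool.workers.len());
}
```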

---
 .../src/bidirectional_protocol.rs             |  4 ++--
 .../proc-macro-api/src/legacy_protocol.rs     |  4 ++--
 .../crates/proc-macro-api/src/lib.rs          | 18 ++++++++++++--
 .../crates/proc-macro-api/src/pool.rs         | 24 +++++++------------
 4 files changed, 28 insertions(+), 22 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
index 25266c46fe895..b5f43e1d3726d 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
@@ -182,7 +182,7 @@ pub(crate) fn expand(
         BidirectionalMessage::Response(Response::ExpandMacro(it)) => Ok(it
             .map(|tree| {
                 let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table);
-                if proc_macro.needs_fixup_change(process) {
+                if proc_macro.needs_fixup_change() {
                     proc_macro.change_fixup_to_match_old_server(&mut expanded);
                 }
                 expanded
@@ -195,7 +195,7 @@ pub(crate) fn expand(
                     version,
                     &deserialize_span_data_index_map(&resp.span_data_table),
                 );
-                if proc_macro.needs_fixup_change(process) {
+                if proc_macro.needs_fixup_change() {
                     proc_macro.change_fixup_to_match_old_server(&mut expanded);
                 }
                 expanded
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
index 412d207303241..eedf66d460868 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs
@@ -120,7 +120,7 @@ pub(crate) fn expand(
         Response::ExpandMacro(it) => Ok(it
             .map(|tree| {
                 let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table);
-                if proc_macro.needs_fixup_change(process) {
+                if proc_macro.needs_fixup_change() {
                     proc_macro.change_fixup_to_match_old_server(&mut expanded);
                 }
                 expanded
@@ -133,7 +133,7 @@ pub(crate) fn expand(
                     version,
                     &deserialize_span_data_index_map(&resp.span_data_table),
                 );
-                if proc_macro.needs_fixup_change(process) {
+                if proc_macro.needs_fixup_change() {
                     proc_macro.change_fixup_to_match_old_server(&mut expanded);
                 }
                 expanded
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index 09999ea5081a0..4874e63244c45 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -209,8 +209,8 @@ impl ProcMacro {
         self.kind
     }
 
-    fn needs_fixup_change(&self, process: &ProcMacroServerProcess) -> bool {
-        let version = process.version();
+    fn needs_fixup_change(&self) -> bool {
+        let version = self.pool.version();
         (version::RUST_ANALYZER_SPAN_SUPPORT..version::HASHED_AST_ID).contains(&version)
     }
 
@@ -240,6 +240,20 @@ impl ProcMacro {
         current_dir: String,
         callback: Option>,
     ) -> Result, ServerError> {
+        let (mut subtree, mut attr) = (subtree, attr);
+        let (mut subtree_changed, mut attr_changed);
+        if self.needs_fixup_change() {
+            subtree_changed = tt::TopSubtree::from_subtree(subtree);
+            self.change_fixup_to_match_old_server(&mut subtree_changed);
+            subtree = subtree_changed.view();
+
+            if let Some(attr) = &mut attr {
+                attr_changed = tt::TopSubtree::from_subtree(*attr);
+                self.change_fixup_to_match_old_server(&mut attr_changed);
+                *attr = attr_changed.view();
+            }
+        }
+
         self.pool.expand(
             self,
             subtree,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index c75e9742a5d13..fd8b726f820e8 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -1,4 +1,4 @@
-//! This module represents Process Pool
+//! A pool of proc-macro server processes
 use std::sync::Arc;
 
 use tt::Span;
@@ -11,11 +11,13 @@ use crate::{
 #[derive(Debug, Clone)]
 pub(crate) struct ProcMacroServerPool {
     workers: Arc<[ProcMacroServerProcess]>,
+    version: u32,
 }
 
 impl ProcMacroServerPool {
     pub(crate) fn new(workers: Vec) -> Self {
-        Self { workers: workers.into() }
+        let version = workers[0].version();
+        Self { workers: workers.into(), version }
     }
 }
 
@@ -87,20 +89,6 @@ impl ProcMacroServerPool {
     ) -> Result, ServerError> {
         let process = self.pick_process()?;
 
-        let (mut subtree, mut attr) = (subtree, attr);
-        let (mut subtree_changed, mut attr_changed);
-        if proc_macro.needs_fixup_change(process) {
-            subtree_changed = tt::TopSubtree::from_subtree(subtree);
-            proc_macro.change_fixup_to_match_old_server(&mut subtree_changed);
-            subtree = subtree_changed.view();
-
-            if let Some(attr) = &mut attr {
-                attr_changed = tt::TopSubtree::from_subtree(*attr);
-                proc_macro.change_fixup_to_match_old_server(&mut attr_changed);
-                *attr = attr_changed.view();
-            }
-        }
-
         process.expand(
             proc_macro,
             subtree,
@@ -113,6 +101,10 @@ impl ProcMacroServerPool {
             callback,
         )
     }
+
+    pub(crate) fn version(&self) -> u32 {
+        self.version
+    }
 }
 
 pub(crate) fn default_pool_size() -> usize {

From 263015a4a4d4cbbbc1e7cd0e118c10e0a8e740df Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sun, 4 Jan 2026 05:50:16 +0530
Subject: [PATCH 080/131] remove expand from pool

---
 .../crates/proc-macro-api/src/lib.rs          |  2 +-
 .../crates/proc-macro-api/src/pool.rs         | 31 +------------------
 2 files changed, 2 insertions(+), 31 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index 4874e63244c45..ad462ff31a1b8 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -254,7 +254,7 @@ impl ProcMacro {
             }
         }
 
-        self.pool.expand(
+        self.pool.pick_process()?.expand(
             self,
             subtree,
             attr,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index fd8b726f820e8..13a4b5ee8f9eb 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -1,8 +1,6 @@
 //! A pool of proc-macro server processes
 use std::sync::Arc;
 
-use tt::Span;
-
 use crate::{
     MacroDylib, ProcMacro, ServerError, bidirectional_protocol::SubCallback,
     process::ProcMacroServerProcess,
@@ -29,7 +27,7 @@ impl ProcMacroServerPool {
         self.workers[0].exited()
     }
 
-    fn pick_process(&self) -> Result<&ProcMacroServerProcess, ServerError> {
+    pub(crate) fn pick_process(&self) -> Result<&ProcMacroServerProcess, ServerError> {
         self.workers
             .iter()
             .filter(|w| w.exited().is_none())
@@ -75,33 +73,6 @@ impl ProcMacroServerPool {
             .collect())
     }
 
-    pub(crate) fn expand(
-        &self,
-        proc_macro: &ProcMacro,
-        subtree: tt::SubtreeView<'_>,
-        attr: Option>,
-        env: Vec<(String, String)>,
-        def_site: Span,
-        call_site: Span,
-        mixed_site: Span,
-        current_dir: String,
-        callback: Option>,
-    ) -> Result, ServerError> {
-        let process = self.pick_process()?;
-
-        process.expand(
-            proc_macro,
-            subtree,
-            attr,
-            env,
-            def_site,
-            call_site,
-            mixed_site,
-            current_dir,
-            callback,
-        )
-    }
-
     pub(crate) fn version(&self) -> u32 {
         self.version
     }

From e3e7c2905442499a6c3eb778be14730dc16d82af Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Mon, 5 Jan 2026 09:41:39 +0530
Subject: [PATCH 081/131] remove default pool size from pool

---
 src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index 13a4b5ee8f9eb..0cb505aa40a90 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -77,7 +77,3 @@ impl ProcMacroServerPool {
         self.version
     }
 }
-
-pub(crate) fn default_pool_size() -> usize {
-    std::thread::available_parallelism().map(|n| n.get()).unwrap_or(1).min(4)
-}

From 96ecd1773c56e11def32667a3d70f4b29563f137 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Mon, 5 Jan 2026 09:44:15 +0530
Subject: [PATCH 082/131] add num processes option to config
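
`NumProcesses` accepts either the string `"physical"` or a plain number thanks to the
variant-level `#[serde(untagged)]`. A small sketch of the expected deserialization,
assuming `serde` and `serde_json` as dependencies (the `effective` helper and the
hardcoded core count are illustrative only):

```rust
// Sketch of how the new setting deserializes; the enum mirrors the shape
// added in this patch ("physical" keyword or a plain number).
use serde::Deserialize;

#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "snake_case")]
enum NumProcesses {
    Physical,
    #[serde(untagged)]
    Concrete(usize),
}

fn effective(n: &NumProcesses) -> usize {
    match n {
        // 0 and "physical" both mean "use the physical core count".
        NumProcesses::Concrete(0) | NumProcesses::Physical => 8, // stand-in for num_cpus::get_physical()
        NumProcesses::Concrete(n) => *n,
    }
}

fn main() {
    let a: NumProcesses = serde_json::from_str("\"physical\"").unwrap();
    let b: NumProcesses = serde_json::from_str("4").unwrap();
    assert_eq!(a, NumProcesses::Physical);
    assert_eq!(b, NumProcesses::Concrete(4));
    assert_eq!(effective(&b), 4);
}
```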

---
 .../crates/rust-analyzer/src/config.rs          | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 28ac94e4deb61..cb6552c32ffba 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -380,6 +380,8 @@ config_data! {
         /// The number of worker threads in the main loop. The default `null` means to pick
         /// automatically.
         numThreads: Option = None,
+        /// The number of proc-macro-srv processes 
+        proc_macro_processes: NumProcesses = NumProcesses::Concrete(1),
 
         /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
         procMacro_attributes_enable: bool = true,
@@ -2641,6 +2643,13 @@ impl Config {
         }
     }
 
+    pub fn proc_macro_num_processes(&self) -> usize {
+        match self.proc_macro_processes() {
+            NumProcesses::Concrete(0) | NumProcesses::Physical => num_cpus::get_physical(),
+            &NumProcesses::Concrete(n) => n,
+        }
+    }
+
     pub fn main_loop_num_threads(&self) -> usize {
         match self.numThreads() {
             Some(NumThreads::Concrete(0)) | None | Some(NumThreads::Physical) => {
@@ -3077,6 +3086,14 @@ pub enum NumThreads {
     Concrete(usize),
 }
 
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
+#[serde(rename_all = "snake_case")]
+pub enum NumProcesses {
+    Physical,
+    #[serde(untagged)]
+    Concrete(usize),
+}
+
 macro_rules! _default_val {
     ($default:expr, $ty:ty) => {{
         let default_: $ty = $default;

From 9d5e60005addf5eb8635884f051771a8119f0bb2 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Mon, 5 Jan 2026 09:44:44 +0530
Subject: [PATCH 083/131] add proc_macro_processes to load config

---
 .../crates/load-cargo/src/lib.rs              | 24 ++++++++++++++-----
 1 file changed, 18 insertions(+), 6 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
index ccc9aa4291ec4..c2935d94a8a71 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -45,6 +45,7 @@ pub struct LoadCargoConfig {
     pub load_out_dirs_from_check: bool,
     pub with_proc_macro_server: ProcMacroServerChoice,
     pub prefill_caches: bool,
+    pub proc_macro_processes: usize,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -113,15 +114,25 @@ pub fn load_workspace_into_db(
     let proc_macro_server = match &load_config.with_proc_macro_server {
         ProcMacroServerChoice::Sysroot => ws.find_sysroot_proc_macro_srv().map(|it| {
             it.and_then(|it| {
-                ProcMacroClient::spawn(&it, extra_env, ws.toolchain.as_ref()).map_err(Into::into)
+                ProcMacroClient::spawn(
+                    &it,
+                    extra_env,
+                    ws.toolchain.as_ref(),
+                    load_config.proc_macro_processes,
+                )
+                .map_err(Into::into)
             })
             .map_err(|e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()))
         }),
-        ProcMacroServerChoice::Explicit(path) => {
-            Some(ProcMacroClient::spawn(path, extra_env, ws.toolchain.as_ref()).map_err(|e| {
-                ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())
-            }))
-        }
+        ProcMacroServerChoice::Explicit(path) => Some(
+            ProcMacroClient::spawn(
+                path,
+                extra_env,
+                ws.toolchain.as_ref(),
+                load_config.proc_macro_processes,
+            )
+            .map_err(|e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())),
+        ),
         ProcMacroServerChoice::None => Some(Err(ProcMacroLoadingError::Disabled)),
     };
     match &proc_macro_server {
@@ -657,6 +668,7 @@ mod tests {
             load_out_dirs_from_check: false,
             with_proc_macro_server: ProcMacroServerChoice::None,
             prefill_caches: false,
+            proc_macro_processes: 1,
         };
         let (db, _vfs, _proc_macro) =
             load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();

From 721361f2898b4e3c299e3687b8b4581b2e6c2b48 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Mon, 5 Jan 2026 09:45:06 +0530
Subject: [PATCH 084/131] update all cli workflows

---
 .../crates/proc-macro-api/src/lib.rs          |  5 ++---
 .../rust-analyzer/src/cli/analysis_stats.rs   |  1 +
 .../rust-analyzer/src/cli/diagnostics.rs      |  1 +
 .../crates/rust-analyzer/src/cli/lsif.rs      |  1 +
 .../rust-analyzer/src/cli/prime_caches.rs     |  1 +
 .../crates/rust-analyzer/src/cli/run_tests.rs |  1 +
 .../rust-analyzer/src/cli/rustc_tests.rs      |  1 +
 .../crates/rust-analyzer/src/cli/scip.rs      |  1 +
 .../crates/rust-analyzer/src/cli/ssr.rs       |  2 ++
 .../src/cli/unresolved_references.rs          |  1 +
 .../src/integrated_benchmarks.rs              |  3 +++
 .../crates/rust-analyzer/src/reload.rs        | 20 +++++++++++--------
 12 files changed, 27 insertions(+), 11 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index ad462ff31a1b8..2c0008ae1d825 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -29,9 +29,7 @@ use std::{fmt, io, sync::Arc, time::SystemTime};
 
 pub use crate::transport::codec::Codec;
 use crate::{
-    bidirectional_protocol::SubCallback,
-    pool::{ProcMacroServerPool, default_pool_size},
-    process::ProcMacroServerProcess,
+    bidirectional_protocol::SubCallback, pool::ProcMacroServerPool, process::ProcMacroServerProcess,
 };
 
 /// The versions of the server protocol
@@ -155,6 +153,7 @@ impl ProcMacroClient {
             Item = (impl AsRef, &'a Option>),
         > + Clone,
         version: Option<&Version>,
+        num_process: usize,
     ) -> io::Result {
         let process = ProcMacroServerProcess::spawn(process_path, env, version)?;
         Ok(ProcMacroClient { process: Arc::new(process), path: process_path.to_owned() })
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index a02d1a78564f6..1995d38898916 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -91,6 +91,7 @@ impl flags::AnalysisStats {
                 }
             },
             prefill_caches: false,
+            proc_macro_processes: 1,
         };
 
         let build_scripts_time = if self.disable_build_scripts {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
index 776069f155f04..575c77f8428ca 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -41,6 +41,7 @@ impl flags::Diagnostics {
             load_out_dirs_from_check: !self.disable_build_scripts,
             with_proc_macro_server,
             prefill_caches: false,
+            proc_macro_processes: 1,
         };
         let (db, _vfs, _proc_macro) =
             load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
index f3b0699d55157..e5e238db63618 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
@@ -293,6 +293,7 @@ impl flags::Lsif {
             load_out_dirs_from_check: true,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
+            proc_macro_processes: 1,
         };
         let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(self.path));
         let root = ProjectManifest::discover_single(&path)?;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/prime_caches.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/prime_caches.rs
index 467d8a53884a1..d5da6791797b8 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/prime_caches.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/prime_caches.rs
@@ -38,6 +38,7 @@ impl flags::PrimeCaches {
             // we want to ensure that this command, not `load_workspace_at`,
             // is responsible for that work.
             prefill_caches: false,
+            proc_macro_processes: config.proc_macro_num_processes(),
         };
 
         let root = AbsPathBuf::assert_utf8(std::env::current_dir()?.join(root));
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
index 82ace8c8b315a..d4a56d773e7de 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
@@ -23,6 +23,7 @@ impl flags::RunTests {
             load_out_dirs_from_check: true,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
+            proc_macro_processes: 1,
         };
         let (ref db, _vfs, _proc_macro) =
             load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
index 249566d2ac161..e8c6c5f4d4f70 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -103,6 +103,7 @@ impl Tester {
             load_out_dirs_from_check: false,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
+            proc_macro_processes: 1,
         };
         let (db, _vfs, _proc_macro) =
             load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index 271d2507bcfea..ed0476697c9cc 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -52,6 +52,7 @@ impl flags::Scip {
             load_out_dirs_from_check: true,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: true,
+            proc_macro_processes: config.proc_macro_num_processes(),
         };
         let cargo_config = config.cargo(None);
         let (db, vfs, _) = load_workspace_at(
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index 39186831459cc..5c69bda723fbd 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -20,6 +20,7 @@ impl flags::Ssr {
             load_out_dirs_from_check: true,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
+            proc_macro_processes: 1,
         };
         let (ref db, vfs, _proc_macro) = load_workspace_at(
             &std::env::current_dir()?,
@@ -56,6 +57,7 @@ impl flags::Search {
             load_out_dirs_from_check: true,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
+            proc_macro_processes: 1,
         };
         let (ref db, _vfs, _proc_macro) = load_workspace_at(
             &std::env::current_dir()?,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
index 294add682d012..49c6fcb91ebfc 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -44,6 +44,7 @@ impl flags::UnresolvedReferences {
             load_out_dirs_from_check: !self.disable_build_scripts,
             with_proc_macro_server,
             prefill_caches: false,
+            proc_macro_processes: config.proc_macro_num_processes(),
         };
         let (db, vfs, _proc_macro) =
             load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
index c61825b99fecc..d16ca2fb48ac0 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -53,6 +53,7 @@ fn integrated_highlighting_benchmark() {
         load_out_dirs_from_check: true,
         with_proc_macro_server: ProcMacroServerChoice::Sysroot,
         prefill_caches: false,
+        proc_macro_processes: 1,
     };
 
     let (db, vfs, _proc_macro) = {
@@ -121,6 +122,7 @@ fn integrated_completion_benchmark() {
         load_out_dirs_from_check: true,
         with_proc_macro_server: ProcMacroServerChoice::Sysroot,
         prefill_caches: true,
+        proc_macro_processes: 1,
     };
 
     let (db, vfs, _proc_macro) = {
@@ -322,6 +324,7 @@ fn integrated_diagnostics_benchmark() {
         load_out_dirs_from_check: true,
         with_proc_macro_server: ProcMacroServerChoice::Sysroot,
         prefill_caches: true,
+        proc_macro_processes: 1,
     };
 
     let (db, vfs, _proc_macro) = {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index ccafbd7b30b9d..83f4a19b39fad 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -701,15 +701,19 @@ impl GlobalState {
                     _ => Default::default(),
                 };
                 info!("Using proc-macro server at {path}");
+                let num_process = self.config.proc_macro_num_processes();
 
-                Some(ProcMacroClient::spawn(&path, &env, ws.toolchain.as_ref()).map_err(|err| {
-                    tracing::error!(
-                        "Failed to run proc-macro server from path {path}, error: {err:?}",
-                    );
-                    anyhow::format_err!(
-                        "Failed to run proc-macro server from path {path}, error: {err:?}",
-                    )
-                }))
+                Some(
+                    ProcMacroClient::spawn(&path, &env, ws.toolchain.as_ref(), num_process)
+                        .map_err(|err| {
+                            tracing::error!(
+                                "Failed to run proc-macro server from path {path}, error: {err:?}",
+                            );
+                            anyhow::format_err!(
+                                "Failed to run proc-macro server from path {path}, error: {err:?}",
+                            )
+                        }),
+                )
             }))
         }
 

From 0587cbdd6fc2321564394828d6172fd268fc6617 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Mon, 5 Jan 2026 09:56:02 +0530
Subject: [PATCH 085/131] optimize pick_process to short-circuit and return as
 early as possible when an idle worker is found

---
 .../crates/proc-macro-api/src/pool.rs         | 28 +++++++++++++------
 .../crates/rust-analyzer/src/config.rs        |  2 +-
 2 files changed, 21 insertions(+), 9 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index 0cb505aa40a90..a637bc0e480a4 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -28,14 +28,26 @@ impl ProcMacroServerPool {
     }
 
     pub(crate) fn pick_process(&self) -> Result<&ProcMacroServerProcess, ServerError> {
-        self.workers
-            .iter()
-            .filter(|w| w.exited().is_none())
-            .min_by_key(|w| w.number_of_active_req())
-            .ok_or_else(|| ServerError {
-                message: "all proc-macro server workers have exited".into(),
-                io: None,
-            })
+        let mut best: Option<&ProcMacroServerProcess> = None;
+        let mut best_load = u32::MAX;
+
+        for w in self.workers.iter().filter(|w| w.exited().is_none()) {
+            let load = w.number_of_active_req();
+
+            if load == 0 {
+                return Ok(w);
+            }
+
+            if load < best_load {
+                best = Some(w);
+                best_load = load;
+            }
+        }
+
+        best.ok_or_else(|| ServerError {
+            message: "all proc-macro server workers have exited".into(),
+            io: None,
+        })
     }
 
     pub(crate) fn load_dylib(
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index cb6552c32ffba..409f2468a7b11 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -380,7 +380,7 @@ config_data! {
         /// The number of worker threads in the main loop. The default `null` means to pick
         /// automatically.
         numThreads: Option = None,
-        /// The number of proc-macro-srv processes 
+        /// The number of proc-macro-srv processes
         proc_macro_processes: NumProcesses = NumProcesses::Concrete(1),
 
         /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.

From b49417eea9fcb252ffe62d35068375534d553af0 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Mon, 5 Jan 2026 10:22:49 +0530
Subject: [PATCH 086/131] fix test and update some autogen files

---
 .../crates/rust-analyzer/src/config.rs        | 24 ++++++++++++++++--
 .../docs/book/src/configuration_generated.md  | 10 ++++++++
 .../rust-analyzer/editors/code/package.json   | 25 +++++++++++++++++++
 3 files changed, 57 insertions(+), 2 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 409f2468a7b11..015e6df96f2f5 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -380,8 +380,6 @@ config_data! {
         /// The number of worker threads in the main loop. The default `null` means to pick
         /// automatically.
         numThreads: Option = None,
-        /// The number of proc-macro-srv processes
-        proc_macro_processes: NumProcesses = NumProcesses::Concrete(1),
 
         /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
         procMacro_attributes_enable: bool = true,
@@ -392,6 +390,12 @@ config_data! {
         /// Internal config, path to proc-macro server executable.
         procMacro_server: Option = None,
 
+        /// Number of proc-macro server processes to spawn.
+        ///
+        /// Controls how many independent `proc-macro-srv` processes rust-analyzer
+        /// runs in parallel to handle macro expansion.
+        proc_macro_processes: NumProcesses = NumProcesses::Concrete(1),
+
         /// The path where to save memory profiling output.
         ///
         /// **Note:** Memory profiling is not enabled by default in rust-analyzer builds, you need to build
@@ -3920,6 +3924,22 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
                 },
             ],
         },
+        "NumProcesses" => set! {
+            "anyOf": [
+                {
+                    "type": "number",
+                    "minimum": 0,
+                    "maximum": 255
+                },
+                {
+                    "type": "string",
+                    "enum": ["physical"],
+                    "enumDescriptions": [
+                        "Use the number of physical cores",
+                    ],
+                },
+            ],
+        },
         "Option" => set! {
             "anyOf": [
                 {
diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
index c4124aaae0753..d3f41fb152e10 100644
--- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
+++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
@@ -1325,6 +1325,16 @@ Default: `null`
 Internal config, path to proc-macro server executable.
 
 
+## rust-analyzer.proc.macro.processes {#proc.macro.processes}
+
+Default: `1`
+
+Number of proc-macro server processes to spawn.
+
+Controls how many independent `proc-macro-srv` processes rust-analyzer
+runs in parallel to handle macro expansion.
+
+
 ## rust-analyzer.profiling.memoryProfile {#profiling.memoryProfile}
 
 Default: `null`
diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json
index 0d91378706a40..a1266c4a67ed3 100644
--- a/src/tools/rust-analyzer/editors/code/package.json
+++ b/src/tools/rust-analyzer/editors/code/package.json
@@ -2783,6 +2783,31 @@
                     }
                 }
             },
+            {
+                "title": "Proc",
+                "properties": {
+                    "rust-analyzer.proc.macro.processes": {
+                        "markdownDescription": "Number of proc-macro server processes to spawn.\n\nControls how many independent `proc-macro-srv` processes rust-analyzer\nruns in parallel to handle macro expansion.",
+                        "default": 1,
+                        "anyOf": [
+                            {
+                                "type": "number",
+                                "minimum": 0,
+                                "maximum": 255
+                            },
+                            {
+                                "type": "string",
+                                "enum": [
+                                    "physical"
+                                ],
+                                "enumDescriptions": [
+                                    "Use the number of physical cores"
+                                ]
+                            }
+                        ]
+                    }
+                }
+            },
             {
                 "title": "Profiling",
                 "properties": {

From a81da31f4096bf754eed0c40b384ef2f5b4d854c Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Mon, 5 Jan 2026 10:32:00 +0530
Subject: [PATCH 087/131] rename proc_macro_processes to procMacro_processes

---
 .../crates/rust-analyzer/src/config.rs        | 10 +++----
 .../docs/book/src/configuration_generated.md  | 16 +++++------
 .../rust-analyzer/editors/code/package.json   | 28 +++++++++----------
 3 files changed, 27 insertions(+), 27 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 015e6df96f2f5..98495f6150dad 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -387,14 +387,14 @@ config_data! {
         /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
         procMacro_enable: bool = true,
 
-        /// Internal config, path to proc-macro server executable.
-        procMacro_server: Option = None,
-
         /// Number of proc-macro server processes to spawn.
         ///
         /// Controls how many independent `proc-macro-srv` processes rust-analyzer
         /// runs in parallel to handle macro expansion.
-        proc_macro_processes: NumProcesses = NumProcesses::Concrete(1),
+        procMacro_processes: NumProcesses = NumProcesses::Concrete(1),
+
+        /// Internal config, path to proc-macro server executable.
+        procMacro_server: Option = None,
 
         /// The path where to save memory profiling output.
         ///
@@ -2648,7 +2648,7 @@ impl Config {
     }
 
     pub fn proc_macro_num_processes(&self) -> usize {
-        match self.proc_macro_processes() {
+        match self.procMacro_processes() {
             NumProcesses::Concrete(0) | NumProcesses::Physical => num_cpus::get_physical(),
             &NumProcesses::Concrete(n) => n,
         }
diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
index d3f41fb152e10..5b1a2e111196e 100644
--- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
+++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
@@ -1318,14 +1318,7 @@ These proc-macros will be ignored when trying to expand them.
 This config takes a map of crate names with the exported proc-macro names to ignore as values.
 
 
-## rust-analyzer.procMacro.server {#procMacro.server}
-
-Default: `null`
-
-Internal config, path to proc-macro server executable.
-
-
-## rust-analyzer.proc.macro.processes {#proc.macro.processes}
+## rust-analyzer.procMacro.processes {#procMacro.processes}
 
 Default: `1`
 
@@ -1335,6 +1328,13 @@ Controls how many independent `proc-macro-srv` processes rust-analyzer
 runs in parallel to handle macro expansion.
 
 
+## rust-analyzer.procMacro.server {#procMacro.server}
+
+Default: `null`
+
+Internal config, path to proc-macro server executable.
+
+
 ## rust-analyzer.profiling.memoryProfile {#profiling.memoryProfile}
 
 Default: `null`
diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json
index a1266c4a67ed3..406e41767f6d7 100644
--- a/src/tools/rust-analyzer/editors/code/package.json
+++ b/src/tools/rust-analyzer/editors/code/package.json
@@ -2773,20 +2773,7 @@
             {
                 "title": "Proc Macro",
                 "properties": {
-                    "rust-analyzer.procMacro.server": {
-                        "markdownDescription": "Internal config, path to proc-macro server executable.",
-                        "default": null,
-                        "type": [
-                            "null",
-                            "string"
-                        ]
-                    }
-                }
-            },
-            {
-                "title": "Proc",
-                "properties": {
-                    "rust-analyzer.proc.macro.processes": {
+                    "rust-analyzer.procMacro.processes": {
                         "markdownDescription": "Number of proc-macro server processes to spawn.\n\nControls how many independent `proc-macro-srv` processes rust-analyzer\nruns in parallel to handle macro expansion.",
                         "default": 1,
                         "anyOf": [
@@ -2808,6 +2795,19 @@
                     }
                 }
             },
+            {
+                "title": "Proc Macro",
+                "properties": {
+                    "rust-analyzer.procMacro.server": {
+                        "markdownDescription": "Internal config, path to proc-macro server executable.",
+                        "default": null,
+                        "type": [
+                            "null",
+                            "string"
+                        ]
+                    }
+                }
+            },
             {
                 "title": "Profiling",
                 "properties": {

From 8da5de0ca02c316fee3fb97c11052e3d70a32bef Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sun, 11 Jan 2026 15:46:03 +0530
Subject: [PATCH 088/131] rebased changes

---
 .../crates/proc-macro-api/src/lib.rs          | 26 +++++++++++++++----
 .../crates/proc-macro-api/src/process.rs      | 13 +++-------
 2 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index 2c0008ae1d825..3acd0b292a310 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -155,8 +155,15 @@ impl ProcMacroClient {
         version: Option<&Version>,
         num_process: usize,
     ) -> io::Result {
-        let process = ProcMacroServerProcess::spawn(process_path, env, version)?;
-        Ok(ProcMacroClient { process: Arc::new(process), path: process_path.to_owned() })
+        let pool_size = num_process;
+        let mut workers = Vec::with_capacity(pool_size);
+        for _ in 0..pool_size {
+            let worker = ProcMacroServerProcess::spawn(process_path, env.clone(), version)?;
+            workers.push(worker);
+        }
+
+        let pool = ProcMacroServerPool::new(workers);
+        Ok(ProcMacroClient { pool: Arc::new(pool), path: process_path.to_owned() })
     }
 
     /// Invokes `spawn` and returns a client connected to the resulting read and write handles.
@@ -170,11 +177,20 @@ impl ProcMacroClient {
             Box,
             Box,
             Box,
-        )>,
+        )> + Clone,
         version: Option<&Version>,
+        num_process: usize,
     ) -> io::Result {
-        let process = ProcMacroServerProcess::run(spawn, version, || "".to_owned())?;
-        Ok(ProcMacroClient { worker: Arc::new(process), path: process_path.to_owned() })
+        let pool_size = num_process;
+        let mut workers = Vec::with_capacity(pool_size);
+        for _ in 0..pool_size {
+            let worker =
+                ProcMacroServerProcess::run(spawn.clone(), version, || "".to_owned())?;
+            workers.push(worker);
+        }
+
+        let pool = ProcMacroServerPool::new(workers);
+        Ok(ProcMacroClient { pool: Arc::new(pool), path: process_path.to_owned() })
     }
 
     /// Returns the absolute path to the proc-macro server.
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index c1b95fa7f10e7..2f5bef69abd57 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -210,9 +210,8 @@ impl ProcMacroServerProcess {
         callback: Option>,
     ) -> Result, String>, ServerError> {
         match self.protocol {
-            Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
-                legacy_protocol::find_proc_macros(self, dylib_path)
-            }
+            Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
+
             Protocol::BidirectionalPostcardPrototype { .. } => {
                 let cb = callback.expect("callback required for bidirectional protocol");
                 bidirectional_protocol::find_proc_macros(self, dylib_path, cb)
@@ -279,7 +278,7 @@ impl ProcMacroServerProcess {
         let result = match self.protocol {
             Protocol::LegacyJson { .. } => legacy_protocol::expand(
                 proc_macro,
-                    self,
+                self,
                 subtree,
                 attr,
                 env,
@@ -344,11 +343,7 @@ impl ProcMacroServerProcess {
                 match state.process.exit_err() {
                     None => e,
                     Some(server_error) => {
-                        proc_macro_worker
-                            .get_exited()
-                            .get_or_init(|| AssertUnwindSafe(server_error))
-                            .0
-                            .clone()
+                        self.exited.get_or_init(|| AssertUnwindSafe(server_error)).0.clone()
                     }
                 }
             } else {

From c31698b6958b5c818e4f4c86d3e4d12e128152f9 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Sat, 17 Jan 2026 19:24:19 +0800
Subject: [PATCH 089/131] Improve move_guard to avoid a redundant block

Example
---
```rust
fn main() {
    match 92 {
        x $0if x > 10 => {
            let _ = true;
            false
        },
        _ => true
    }
}
```

**Before this PR**

```rust
fn main() {
    match 92 {
        x => if x > 10 {
            {
                let _ = true;
                false
            }
        },
        _ => true
    }
}
```

**After this PR**

```rust
fn main() {
    match 92 {
        x => if x > 10 {
            let _ = true;
            false
        },
        _ => true
    }
}
```
---
 .../ide-assists/src/handlers/move_guard.rs    | 35 ++++++++++++++++---
 .../crates/ide-assists/src/utils.rs           | 11 ++++++
 2 files changed, 42 insertions(+), 4 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
index 31baa63372ff0..84f02bdfdba67 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
@@ -49,7 +49,7 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>)
 
     let guard_condition = guard.condition()?.reset_indent();
     let arm_expr = match_arm.expr()?;
-    let then_branch = make::block_expr(None, Some(arm_expr.reset_indent().indent(1.into())));
+    let then_branch = crate::utils::wrap_block(&arm_expr);
     let if_expr = make::expr_if(guard_condition, then_branch, None).indent(arm_expr.indent_level());
 
     let target = guard.syntax().text_range();
@@ -344,6 +344,35 @@ fn main() {
         );
     }
 
+    #[test]
+    fn move_guard_to_block_arm_body_works() {
+        check_assist(
+            move_guard_to_arm_body,
+            r#"
+fn main() {
+    match 92 {
+        x $0if x > 10 => {
+            let _ = true;
+            false
+        },
+        _ => true
+    }
+}
+"#,
+            r#"
+fn main() {
+    match 92 {
+        x => if x > 10 {
+            let _ = true;
+            false
+        },
+        _ => true
+    }
+}
+"#,
+        );
+    }
+
     #[test]
     fn move_let_guard_to_arm_body_works() {
         check_assist(
@@ -395,9 +424,7 @@ fn main() {
             && true
             && true {
             {
-                {
-                    false
-                }
+                false
             }
         },
         _ => true
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index 9a96374c00af3..4b8c193057934 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -86,6 +86,17 @@ pub fn extract_trivial_expression(block_expr: &ast::BlockExpr) -> Option ast::BlockExpr {
+    if let ast::Expr::BlockExpr(block) = expr
+        && let Some(first) = block.syntax().first_token()
+        && first.kind() == T!['{']
+    {
+        block.reset_indent()
+    } else {
+        make::block_expr(None, Some(expr.reset_indent().indent(1.into())))
+    }
+}
+
 /// This is a method with a heuristics to support test methods annotated with custom test annotations, such as
 /// `#[test_case(...)]`, `#[tokio::test]` and similar.
 /// Also a regular `#[test]` annotation is supported.

From cbad6dd11772b3d02a4eaca4e049f66907bb72a1 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 16 Jan 2026 16:23:56 +0100
Subject: [PATCH 090/131] fix: Do not show sysroot dependencies in symbol
 search

---
 src/tools/rust-analyzer/crates/base-db/src/input.rs       | 3 ++-
 src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs | 8 +++++++-
 .../rust-analyzer/crates/project-model/src/workspace.rs   | 6 +++++-
 3 files changed, 14 insertions(+), 3 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
index 240f1264917a0..94793a3618e18 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/input.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -221,6 +221,7 @@ pub enum LangCrateOrigin {
     ProcMacro,
     Std,
     Test,
+    Dependency,
     Other,
 }
 
@@ -245,7 +246,7 @@ impl fmt::Display for LangCrateOrigin {
             LangCrateOrigin::ProcMacro => "proc_macro",
             LangCrateOrigin::Std => "std",
             LangCrateOrigin::Test => "test",
-            LangCrateOrigin::Other => "other",
+            LangCrateOrigin::Other | LangCrateOrigin::Dependency => "other",
         };
         f.write_str(text)
     }
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
index d7f4c66f465bd..183f6b6495375 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -27,7 +27,7 @@ use std::{
     ops::ControlFlow,
 };
 
-use base_db::{LibraryRoots, LocalRoots, RootQueryDb, SourceRootId};
+use base_db::{CrateOrigin, LangCrateOrigin, LibraryRoots, LocalRoots, RootQueryDb, SourceRootId};
 use fst::{Automaton, Streamer, raw::IndexedValue};
 use hir::{
     Crate, Module,
@@ -446,6 +446,12 @@ impl<'db> SymbolIndex<'db> {
                     {
                         continue;
                     }
+                    if let CrateOrigin::Lang(LangCrateOrigin::Dependency | LangCrateOrigin::Other) =
+                        krate.origin(db)
+                    {
+                        // don't show dependencies of the sysroot
+                        continue;
+                    }
                     collector.push_crate_root(krate);
                 }
 
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
index fa3a79e041e0f..8f15f7e1507c6 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -1161,6 +1161,8 @@ fn project_json_to_crate_graph(
                                 name: Some(name.canonical_name().to_owned()),
                             }
                         }
+                    } else if is_sysroot {
+                        CrateOrigin::Lang(LangCrateOrigin::Dependency)
                     } else {
                         CrateOrigin::Local { repo: None, name: None }
                     },
@@ -1294,6 +1296,8 @@ fn cargo_to_crate_graph(
                             name: Some(Symbol::intern(&pkg_data.name)),
                         }
                     }
+                } else if cargo.is_sysroot() {
+                    CrateOrigin::Lang(LangCrateOrigin::Dependency)
                 } else {
                     CrateOrigin::Library {
                         repo: pkg_data.repository.clone(),
@@ -1717,7 +1721,7 @@ fn extend_crate_graph_with_sysroot(
                     !matches!(lang_crate, LangCrateOrigin::Test | LangCrateOrigin::Alloc),
                 )),
                 LangCrateOrigin::ProcMacro => libproc_macro = Some(cid),
-                LangCrateOrigin::Other => (),
+                LangCrateOrigin::Other | LangCrateOrigin::Dependency => (),
             }
         }
     }

From bfbee86a2de8e281db70b08367ce8245082431a2 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 16 Jan 2026 13:35:31 +0100
Subject: [PATCH 091/131] feat: Trigger flycheck if non-workspace files get
 modified

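The new check boils down to: restart flycheck when a changed watched file does not live under some configured workspace root (and check-on-save is enabled). A standalone sketch of that condition using `std::path` (the real code works on the `AbsPathBuf` roots from the config):

```rust
use std::path::Path;

// Mirrors the condition added to handle_did_change_watched_files.
fn should_trigger_flycheck(workspace_roots: &[&Path], changed: &Path) -> bool {
    workspace_roots.iter().any(|root| !changed.starts_with(root))
}

fn main() {
    let roots = [Path::new("/work/project")];
    // Changes inside the workspace are already handled by the normal save flow.
    assert!(!should_trigger_flycheck(&roots, Path::new("/work/project/src/lib.rs")));
    // Changes outside it (e.g. an edited path dependency) now restart flycheck.
    assert!(should_trigger_flycheck(&roots, Path::new("/deps/foo/src/lib.rs")));
}
```
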
Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com>
Co-authored-by: dino 
---
 .../crates/rust-analyzer/src/config.rs              |  6 ++++++
 .../rust-analyzer/src/handlers/notification.rs      | 13 +++++++++++++
 2 files changed, 19 insertions(+)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 28ac94e4deb61..7382edfa96f77 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -1043,6 +1043,7 @@ pub struct Config {
     /// The workspace roots as registered by the LSP client
     workspace_roots: Vec,
     caps: ClientCapabilities,
+    /// The LSP root path, deprecated in favor of `workspace_roots`
     root_path: AbsPathBuf,
     snippets: Vec,
     client_info: Option,
@@ -1366,6 +1367,10 @@ impl Config {
 
         self.discovered_projects_from_command.push(ProjectJsonFromCommand { data, buildfile });
     }
+
+    pub fn workspace_roots(&self) -> &[AbsPathBuf] {
+        &self.workspace_roots
+    }
 }
 
 #[derive(Default, Debug)]
@@ -1742,6 +1747,7 @@ impl Config {
     }
 
     pub fn root_path(&self) -> &AbsPathBuf {
+        // We should probably use `workspace_roots` here if set
         &self.root_path
     }
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
index 6cc40677fb514..138310b78f627 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
@@ -289,11 +289,24 @@ pub(crate) fn handle_did_change_watched_files(
     state: &mut GlobalState,
     params: DidChangeWatchedFilesParams,
 ) -> anyhow::Result<()> {
+    // we want to trigger flycheck if a file outside of our workspaces has changed,
+    // as to reduce stale diagnostics when outside changes happen
+    let mut trigger_flycheck = false;
     for change in params.changes.iter().unique_by(|&it| &it.uri) {
         if let Ok(path) = from_proto::abs_path(&change.uri) {
+            if !trigger_flycheck {
+                trigger_flycheck =
+                    state.config.workspace_roots().iter().any(|root| !path.starts_with(root));
+            }
             state.loader.handle.invalidate(path);
         }
     }
+
+    if trigger_flycheck && state.config.check_on_save(None) {
+        for flycheck in state.flycheck.iter() {
+            flycheck.restart_workspace(None);
+        }
+    }
     Ok(())
 }
 

From ebcbff2a2e79d0c36442bb19a9eb154dc202f2d5 Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Mon, 19 Jan 2026 04:50:23 +0200
Subject: [PATCH 092/131] Do not mix the order of builtin/regular derives in
 "Expand macro recursively"

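The cursor-to-expansion lookup counts commas in the `#[derive(...)]` list to compute an index; flattening the expansion list dropped the slots that have no regular expansion (the builtin derives) and shifted that index. A minimal illustration, with strings standing in for the real expansion results:

```rust
fn main() {
    // Slot 0: builtin `Debug` derive (no expansion kept on this path).
    // Slot 1: the proc-macro derive the cursor is on.
    let expansions: Vec<Option<&str>> = vec![None, Some("expanded DeriveIdentity")];
    let idx = 1; // computed by counting commas before the cursor

    // Old behaviour: flattening removes the None slot, so index 1 finds nothing.
    let flattened: Vec<_> = expansions.iter().flatten().collect();
    assert_eq!(flattened.get(idx), None);

    // New behaviour: slots stay aligned with the derive list.
    assert_eq!(expansions.get(idx), Some(&Some("expanded DeriveIdentity")));
}
```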
---
 .../rust-analyzer/crates/hir/src/semantics.rs   |  7 +++++--
 .../crates/ide/src/expand_macro.rs              | 17 ++++++++++++++++-
 2 files changed, 21 insertions(+), 3 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index e55b693ef0186..98f5739600f31 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -641,11 +641,14 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
 
-    pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option>> {
+    pub fn expand_derive_macro(
+        &self,
+        attr: &ast::Attr,
+    ) -> Option>>> {
         let res: Vec<_> = self
             .derive_macro_calls(attr)?
             .into_iter()
-            .flat_map(|call| {
+            .map(|call| {
                 let file_id = call?.left()?;
                 let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
                 let root_node = value.0.syntax_node();
diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
index ba8b3aa9cafea..44285d9315af4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
@@ -63,7 +63,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
             .take_while(|it| it != &token)
             .filter(|it| it.kind() == T![,])
             .count();
-        let ExpandResult { err, value: expansion } = expansions.get(idx)?.clone();
+        let ExpandResult { err, value: expansion } = expansions.get(idx)?.clone()?;
         let expansion_file_id = sema.hir_file_for(&expansion).macro_file()?;
         let expansion_span_map = db.expansion_span_map(expansion_file_id);
         let mut expansion = format(
@@ -848,4 +848,19 @@ struct S {
                 u32"#]],
         );
     }
+
+    #[test]
+    fn regression_21489() {
+        check(
+            r#"
+//- proc_macros: derive_identity
+//- minicore: derive, fmt
+#[derive(Debug, proc_macros::DeriveIdentity$0)]
+struct Foo;
+        "#,
+            expect![[r#"
+                proc_macros::DeriveIdentity
+                struct Foo;"#]],
+        );
+    }
 }

From 4dad9b90819bac5aeb50182d77ee0a90c5a196d5 Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Mon, 19 Jan 2026 05:21:54 +0200
Subject: [PATCH 093/131] Insert type vars and normalize for the type of a used
 `static`

They have their own special path, so they slipped through.
---
 .../crates/hir-ty/src/infer/path.rs           |  1 +
 .../crates/hir-ty/src/tests/simple.rs         | 43 +++++++++++++++++++
 2 files changed, 44 insertions(+)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
index b11650bbcd9a8..ef1a610a323d8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -93,6 +93,7 @@ impl<'db> InferenceContext<'_, 'db> {
         if let GenericDefId::StaticId(_) = generic_def {
             // `Static` is the kind of item that can never be generic currently. We can just skip the binders to get its type.
             let ty = self.db.value_ty(value_def)?.skip_binder();
+            let ty = self.process_remote_user_written_ty(ty);
             return Some(ValuePathResolution::NonGeneric(ty));
         };
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index 28759bcbae613..d2a4149bc630f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -3997,3 +3997,46 @@ extern "C" fn foo() -> ! {
     "#,
     );
 }
+
+#[test]
+fn regression_21478() {
+    check_infer(
+        r#"
+//- minicore: unsize, coerce_unsized
+struct LazyLock(T);
+
+impl LazyLock {
+    const fn new() -> Self {
+        loop {}
+    }
+
+    fn force(this: &Self) -> &T {
+        loop {}
+    }
+}
+
+static VALUES_LAZY_LOCK: LazyLock<[u32; { 0 }]> = LazyLock::new();
+
+fn foo() {
+    let _ = LazyLock::force(&VALUES_LAZY_LOCK);
+}
+    "#,
+        expect![[r#"
+            73..96 '{     ...     }': LazyLock
+            83..90 'loop {}': !
+            88..90 '{}': ()
+            111..115 'this': &'? LazyLock
+            130..153 '{     ...     }': &'? T
+            140..147 'loop {}': !
+            145..147 '{}': ()
+            207..220 'LazyLock::new': fn new<[u32; _]>() -> LazyLock<[u32; _]>
+            207..222 'LazyLock::new()': LazyLock<[u32; _]>
+            234..285 '{     ...CK); }': ()
+            244..245 '_': &'? [u32; _]
+            248..263 'LazyLock::force': fn force<[u32; _]>(&'? LazyLock<[u32; _]>) -> &'? [u32; _]
+            248..282 'LazyLo..._LOCK)': &'? [u32; _]
+            264..281 '&VALUE...Y_LOCK': &'? LazyLock<[u32; _]>
+            265..281 'VALUES...Y_LOCK': LazyLock<[u32; _]>
+        "#]],
+    );
+}

From 1285b1b13c28e5e11637884e741b6b7e8f36efc8 Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Mon, 19 Jan 2026 07:14:48 +0200
Subject: [PATCH 094/131] Ensure correct capturing of async fn params even when
 they use weird patterns

rustc does the same.
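
Conceptually, the lowering now rebinds each parameter whose pattern would not by itself capture the value, moves the fresh binding into the generated `async` block, and restores the original pattern with a `let` inside the block. A rough hand-written equivalent (the names are made up for this sketch; the real lowering uses generated bindings):

```rust
use std::future::Future;

// As written: the argument has no binding, but it must still be moved into
// the returned future and dropped only when the future completes.
async fn takes_wildcard(_: String) {}

// Roughly what the lowering produces: a fresh binding in the signature,
// with the original pattern re-applied inside the async block.
fn takes_wildcard_lowered(param0: String) -> impl Future<Output = ()> {
    async move {
        let _ = param0; // the original `_` pattern, now inside the future
    }
}

fn main() {
    let _fut_a = takes_wildcard(String::from("x"));
    let _fut_b = takes_wildcard_lowered(String::from("x"));
}
```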
---
 .../crates/hir-def/src/expr_store/lower.rs    | 66 ++++++++++++++-----
 .../hir-def/src/expr_store/tests/body.rs      | 18 +++++
 .../crates/hir-ty/src/tests/regression.rs     |  1 -
 .../crates/hir-ty/src/tests/simple.rs         |  1 -
 .../crates/hir-ty/src/tests/traits.rs         |  3 -
 5 files changed, 68 insertions(+), 21 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
index 4ae4271b92f5a..79222615929fa 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
@@ -150,6 +150,7 @@ pub(super) fn lower_body(
     };
 
     let body_expr = collector.collect(
+        &mut params,
         body,
         if is_async_fn {
             Awaitable::Yes
@@ -903,24 +904,57 @@ impl<'db> ExprCollector<'db> {
         })
     }
 
-    fn collect(&mut self, expr: Option, awaitable: Awaitable) -> ExprId {
+    /// An `async fn` needs to capture all parameters in the generated `async` block, even if they have
+    /// non-captured patterns such as wildcards (to ensure consistent drop order).
+    fn lower_async_fn(&mut self, params: &mut Vec, body: ExprId) -> ExprId {
+        let mut statements = Vec::new();
+        for param in params {
+            let name = match self.store.pats[*param] {
+                Pat::Bind { id, .. }
+                    if matches!(
+                        self.store.bindings[id].mode,
+                        BindingAnnotation::Unannotated | BindingAnnotation::Mutable
+                    ) =>
+                {
+                    // If this is a direct binding, we can leave it as-is, as it'll always be captured anyway.
+                    continue;
+                }
+                Pat::Bind { id, .. } => {
+                    // If this is a `ref` binding, we can't leave it as is but we can at least reuse the name, for better display.
+                    self.store.bindings[id].name.clone()
+                }
+                _ => self.generate_new_name(),
+            };
+            let binding_id =
+                self.alloc_binding(name.clone(), BindingAnnotation::Mutable, HygieneId::ROOT);
+            let pat_id = self.alloc_pat_desugared(Pat::Bind { id: binding_id, subpat: None });
+            let expr = self.alloc_expr_desugared(Expr::Path(name.into()));
+            statements.push(Statement::Let {
+                pat: *param,
+                type_ref: None,
+                initializer: Some(expr),
+                else_branch: None,
+            });
+            *param = pat_id;
+        }
+
+        self.alloc_expr_desugared(Expr::Async {
+            id: None,
+            statements: statements.into_boxed_slice(),
+            tail: Some(body),
+        })
+    }
+
+    fn collect(
+        &mut self,
+        params: &mut Vec,
+        expr: Option,
+        awaitable: Awaitable,
+    ) -> ExprId {
         self.awaitable_context.replace(awaitable);
         self.with_label_rib(RibKind::Closure, |this| {
-            if awaitable == Awaitable::Yes {
-                match expr {
-                    Some(e) => {
-                        let syntax_ptr = AstPtr::new(&e);
-                        let expr = this.collect_expr(e);
-                        this.alloc_expr_desugared_with_ptr(
-                            Expr::Async { id: None, statements: Box::new([]), tail: Some(expr) },
-                            syntax_ptr,
-                        )
-                    }
-                    None => this.missing_expr(),
-                }
-            } else {
-                this.collect_expr_opt(expr)
-            }
+            let body = this.collect_expr_opt(expr);
+            if awaitable == Awaitable::Yes { this.lower_async_fn(params, body) } else { body }
         })
     }
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs
index 504c310684d65..8f857aeeff957 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs
@@ -659,3 +659,21 @@ fn main() {
         }"#]]
     .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
 }
+
+#[test]
+fn async_fn_weird_param_patterns() {
+    let (db, body, def) = lower(
+        r#"
+async fn main(&self, param1: i32, ref mut param2: i32, _: i32, param4 @ _: i32, 123: i32) {}
+"#,
+    );
+
+    expect![[r#"
+        fn main(self, param1, mut param2, mut 0, param4 @ _, mut 1) async {
+            let ref mut param2 = param2;
+            let _ = 0;
+            let 123 = 1;
+            {}
+        }"#]]
+    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
index a04c46f8eabd1..4f1480c393667 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -2235,7 +2235,6 @@ async fn f() -> Bar {}
 "#,
         expect![[r#"
             64..66 '{}': ()
-            64..66 '{}': impl Future
         "#]],
     );
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index 28759bcbae613..80e21450c7a98 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -2139,7 +2139,6 @@ async fn main() {
         "#,
         expect![[r#"
             16..193 '{     ...2 }; }': ()
-            16..193 '{     ...2 }; }': impl Future
             26..27 'x': i32
             30..43 'unsafe { 92 }': i32
             39..41 '92': i32
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index b825a0a8f0e5a..390553c0d7a95 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -4869,7 +4869,6 @@ async fn baz i32>(c: T) {
         expect![[r#"
             37..38 'a': T
             43..83 '{     ...ait; }': ()
-            43..83 '{     ...ait; }': impl Future
             53..57 'fut1': >::CallRefFuture<'?>
             60..61 'a': T
             60..64 'a(0)': >::CallRefFuture<'?>
@@ -4878,7 +4877,6 @@ async fn baz i32>(c: T) {
             70..80 'fut1.await': i32
             124..129 'mut b': T
             134..174 '{     ...ait; }': ()
-            134..174 '{     ...ait; }': impl Future
             144..148 'fut2': >::CallRefFuture<'?>
             151..152 'b': T
             151..155 'b(0)': >::CallRefFuture<'?>
@@ -4887,7 +4885,6 @@ async fn baz i32>(c: T) {
             161..171 'fut2.await': i32
             216..217 'c': T
             222..262 '{     ...ait; }': ()
-            222..262 '{     ...ait; }': impl Future
             232..236 'fut3': >::CallOnceFuture
             239..240 'c': T
             239..243 'c(0)': >::CallOnceFuture

From 740eb6b59fa663e9f1b721d7f6995dcec4af4bc9 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Fri, 16 Jan 2026 01:16:57 +0530
Subject: [PATCH 095/131] remove non-descriptive field annotations from the
 bidirectional message definition

---
 .../proc-macro-api/src/bidirectional_protocol/msg.rs   | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
index c56ed51916948..2644cd406b637 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
@@ -84,18 +84,14 @@ pub struct ExpandMacroData {
     pub macro_body: FlatTree,
     pub macro_name: String,
     pub attributes: Option,
-    #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
     #[serde(default)]
     pub has_global_spans: ExpnGlobals,
-
-    #[serde(skip_serializing_if = "Vec::is_empty")]
     #[serde(default)]
     pub span_data_table: Vec,
 }
 
 #[derive(Clone, Copy, Default, Debug, Serialize, Deserialize)]
 pub struct ExpnGlobals {
-    #[serde(skip_serializing)]
     #[serde(default)]
     pub serialize: bool,
     pub def_site: usize,
@@ -103,10 +99,4 @@ pub struct ExpnGlobals {
     pub mixed_site: usize,
 }
 
-impl ExpnGlobals {
-    fn skip_serializing_if(&self) -> bool {
-        !self.serialize
-    }
-}
-
 impl Message for BidirectionalMessage {}

From ffa2dadf0eeab0a484688373cffe9f1939173084 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sun, 18 Jan 2026 19:34:03 +0530
Subject: [PATCH 096/131] remove the `serialize` field from ExpnGlobals

---
 .../crates/proc-macro-api/src/bidirectional_protocol.rs   | 8 +-------
 .../proc-macro-api/src/bidirectional_protocol/msg.rs      | 2 --
 2 files changed, 1 insertion(+), 9 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
index b5f43e1d3726d..a13bff7d7d025 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol.rs
@@ -23,7 +23,6 @@ use crate::{
     },
     process::ProcMacroServerProcess,
     transport::codec::postcard::PostcardProtocol,
-    version,
 };
 
 pub mod msg;
@@ -159,12 +158,7 @@ pub(crate) fn expand(
             macro_name: proc_macro.name.to_string(),
             attributes: attr
                 .map(|subtree| FlatTree::from_subtree(subtree, version, &mut span_data_table)),
-            has_global_spans: ExpnGlobals {
-                serialize: version >= version::HAS_GLOBAL_SPANS,
-                def_site,
-                call_site,
-                mixed_site,
-            },
+            has_global_spans: ExpnGlobals { def_site, call_site, mixed_site },
             span_data_table: if process.rust_analyzer_spans() {
                 serialize_span_data_index_map(&span_data_table)
             } else {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
index 2644cd406b637..d030498e59c43 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
@@ -92,8 +92,6 @@ pub struct ExpandMacroData {
 
 #[derive(Clone, Copy, Default, Debug, Serialize, Deserialize)]
 pub struct ExpnGlobals {
-    #[serde(default)]
-    pub serialize: bool,
     pub def_site: usize,
     pub call_site: usize,
     pub mixed_site: usize,

From 8fd55694389332c648a76c0ad533ba9e01ecc1fe Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sun, 18 Jan 2026 20:23:30 +0530
Subject: [PATCH 097/131] add bidirectional protocol flow to the test utilities

---
 .../proc-macro-srv-cli/tests/common/utils.rs  | 570 +++++++++++-------
 1 file changed, 357 insertions(+), 213 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs
index 722e92eec7e52..63b3a74aa4e8b 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs
@@ -1,213 +1,357 @@
-use std::{
-    collections::VecDeque,
-    io::{self, BufRead, Read, Write},
-    sync::{Arc, Condvar, Mutex},
-    thread,
-};
-
-use paths::Utf8PathBuf;
-use proc_macro_api::{
-    legacy_protocol::msg::{FlatTree, Message, Request, Response, SpanDataIndexMap},
-    transport::codec::json::JsonProtocol,
-};
-use span::{Edition, EditionedFileId, FileId, Span, SpanAnchor, SyntaxContext, TextRange};
-use tt::{Delimiter, DelimiterKind, TopSubtreeBuilder};
-
-/// Shared state for an in-memory byte channel.
-#[derive(Default)]
-struct ChannelState {
-    buffer: VecDeque,
-    closed: bool,
-}
-
-type InMemoryChannel = Arc<(Mutex, Condvar)>;
-
-/// Writer end of an in-memory channel.
-pub(crate) struct ChannelWriter {
-    state: InMemoryChannel,
-}
-
-impl Write for ChannelWriter {
-    fn write(&mut self, buf: &[u8]) -> io::Result {
-        let (lock, cvar) = &*self.state;
-        let mut state = lock.lock().unwrap();
-        if state.closed {
-            return Err(io::Error::new(io::ErrorKind::BrokenPipe, "channel closed"));
-        }
-        state.buffer.extend(buf);
-        cvar.notify_all();
-        Ok(buf.len())
-    }
-
-    fn flush(&mut self) -> io::Result<()> {
-        Ok(())
-    }
-}
-
-impl Drop for ChannelWriter {
-    fn drop(&mut self) {
-        let (lock, cvar) = &*self.state;
-        let mut state = lock.lock().unwrap();
-        state.closed = true;
-        cvar.notify_all();
-    }
-}
-
-/// Reader end of an in-memory channel.
-pub(crate) struct ChannelReader {
-    state: InMemoryChannel,
-    internal_buf: Vec,
-}
-
-impl Read for ChannelReader {
-    fn read(&mut self, buf: &mut [u8]) -> io::Result {
-        let (lock, cvar) = &*self.state;
-        let mut state = lock.lock().unwrap();
-
-        while state.buffer.is_empty() && !state.closed {
-            state = cvar.wait(state).unwrap();
-        }
-
-        if state.buffer.is_empty() && state.closed {
-            return Ok(0);
-        }
-
-        let to_read = buf.len().min(state.buffer.len());
-        for (dst, src) in buf.iter_mut().zip(state.buffer.drain(..to_read)) {
-            *dst = src;
-        }
-        Ok(to_read)
-    }
-}
-
-impl BufRead for ChannelReader {
-    fn fill_buf(&mut self) -> io::Result<&[u8]> {
-        let (lock, cvar) = &*self.state;
-        let mut state = lock.lock().unwrap();
-
-        while state.buffer.is_empty() && !state.closed {
-            state = cvar.wait(state).unwrap();
-        }
-
-        self.internal_buf.clear();
-        self.internal_buf.extend(&state.buffer);
-        Ok(&self.internal_buf)
-    }
-
-    fn consume(&mut self, amt: usize) {
-        let (lock, _) = &*self.state;
-        let mut state = lock.lock().unwrap();
-        let to_drain = amt.min(state.buffer.len());
-        drop(state.buffer.drain(..to_drain));
-    }
-}
-
-/// Creates a connected pair of channels for bidirectional communication.
-fn create_channel_pair() -> (ChannelWriter, ChannelReader, ChannelWriter, ChannelReader) {
-    // Channel for client -> server communication
-    let client_to_server = Arc::new((
-        Mutex::new(ChannelState { buffer: VecDeque::new(), closed: false }),
-        Condvar::new(),
-    ));
-    let client_writer = ChannelWriter { state: client_to_server.clone() };
-    let server_reader = ChannelReader { state: client_to_server, internal_buf: Vec::new() };
-
-    // Channel for server -> client communication
-    let server_to_client = Arc::new((
-        Mutex::new(ChannelState { buffer: VecDeque::new(), closed: false }),
-        Condvar::new(),
-    ));
-
-    let server_writer = ChannelWriter { state: server_to_client.clone() };
-    let client_reader = ChannelReader { state: server_to_client, internal_buf: Vec::new() };
-
-    (client_writer, client_reader, server_writer, server_reader)
-}
-
-pub(crate) fn proc_macro_test_dylib_path() -> Utf8PathBuf {
-    let path = proc_macro_test::PROC_MACRO_TEST_LOCATION;
-    if path.is_empty() {
-        panic!("proc-macro-test dylib not available (requires nightly toolchain)");
-    }
-    path.into()
-}
-
-/// Runs a test with the server in a background thread.
-pub(crate) fn with_server(test_fn: F) -> R
-where
-    F: FnOnce(&mut dyn Write, &mut dyn BufRead) -> R,
-{
-    let (mut client_writer, mut client_reader, mut server_writer, mut server_reader) =
-        create_channel_pair();
-
-    let server_handle = thread::spawn(move || {
-        proc_macro_srv_cli::main_loop::run(
-            &mut server_reader,
-            &mut server_writer,
-            proc_macro_api::ProtocolFormat::JsonLegacy,
-        )
-    });
-
-    let result = test_fn(&mut client_writer, &mut client_reader);
-
-    // Close the client writer to signal the server to stop
-    drop(client_writer);
-
-    // Wait for server to finish
-    match server_handle.join() {
-        Ok(Ok(())) => {}
-        Ok(Err(e)) => {
-            // IO error from server is expected when client disconnects
-            if matches!(
-                e.kind(),
-                io::ErrorKind::BrokenPipe
-                    | io::ErrorKind::UnexpectedEof
-                    | io::ErrorKind::InvalidData
-            ) {
-                panic!("Server error: {e}");
-            }
-        }
-        Err(e) => std::panic::resume_unwind(e),
-    }
-
-    result
-}
-
-/// Sends a request and reads the response using JSON protocol.
-pub(crate) fn request(
-    writer: &mut dyn Write,
-    reader: &mut dyn BufRead,
-    request: Request,
-) -> Response {
-    request.write::(writer).expect("failed to write request");
-
-    let mut buf = String::new();
-    Response::read::(reader, &mut buf)
-        .expect("failed to read response")
-        .expect("no response received")
-}
-
-/// Creates a simple empty token tree suitable for testing.
-pub(crate) fn create_empty_token_tree(
-    version: u32,
-    span_data_table: &mut SpanDataIndexMap,
-) -> FlatTree {
-    let anchor = SpanAnchor {
-        file_id: EditionedFileId::new(FileId::from_raw(0), Edition::CURRENT),
-        ast_id: span::ROOT_ERASED_FILE_AST_ID,
-    };
-    let span = Span {
-        range: TextRange::empty(0.into()),
-        anchor,
-        ctx: SyntaxContext::root(Edition::CURRENT),
-    };
-
-    let builder = TopSubtreeBuilder::new(Delimiter {
-        open: span,
-        close: span,
-        kind: DelimiterKind::Invisible,
-    });
-    let tt = builder.build();
-
-    FlatTree::from_subtree(tt.view(), version, span_data_table)
-}
+use std::{
+    collections::VecDeque,
+    io::{self, BufRead, Read, Write},
+    sync::{Arc, Condvar, Mutex},
+    thread,
+};
+
+use paths::Utf8PathBuf;
+use proc_macro_api::{
+    ServerError,
+    bidirectional_protocol::msg::{
+        BidirectionalMessage, Request as BiRequest, Response as BiResponse, SubRequest, SubResponse,
+    },
+    legacy_protocol::msg::{FlatTree, Message, Request, Response, SpanDataIndexMap},
+    transport::codec::{json::JsonProtocol, postcard::PostcardProtocol},
+};
+use span::{Edition, EditionedFileId, FileId, Span, SpanAnchor, SyntaxContext, TextRange};
+use tt::{Delimiter, DelimiterKind, TopSubtreeBuilder};
+
+/// Shared state for an in-memory byte channel.
+#[derive(Default)]
+struct ChannelState {
+    buffer: VecDeque,
+    closed: bool,
+}
+
+type InMemoryChannel = Arc<(Mutex, Condvar)>;
+
+/// Writer end of an in-memory channel.
+pub(crate) struct ChannelWriter {
+    state: InMemoryChannel,
+}
+
+impl Write for ChannelWriter {
+    fn write(&mut self, buf: &[u8]) -> io::Result {
+        let (lock, cvar) = &*self.state;
+        let mut state = lock.lock().unwrap();
+        if state.closed {
+            return Err(io::Error::new(io::ErrorKind::BrokenPipe, "channel closed"));
+        }
+        state.buffer.extend(buf);
+        cvar.notify_all();
+        Ok(buf.len())
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        Ok(())
+    }
+}
+
+impl Drop for ChannelWriter {
+    fn drop(&mut self) {
+        let (lock, cvar) = &*self.state;
+        let mut state = lock.lock().unwrap();
+        state.closed = true;
+        cvar.notify_all();
+    }
+}
+
+/// Reader end of an in-memory channel.
+pub(crate) struct ChannelReader {
+    state: InMemoryChannel,
+    internal_buf: Vec,
+}
+
+impl Read for ChannelReader {
+    fn read(&mut self, buf: &mut [u8]) -> io::Result {
+        let (lock, cvar) = &*self.state;
+        let mut state = lock.lock().unwrap();
+
+        while state.buffer.is_empty() && !state.closed {
+            state = cvar.wait(state).unwrap();
+        }
+
+        if state.buffer.is_empty() && state.closed {
+            return Ok(0);
+        }
+
+        let to_read = buf.len().min(state.buffer.len());
+        for (dst, src) in buf.iter_mut().zip(state.buffer.drain(..to_read)) {
+            *dst = src;
+        }
+        Ok(to_read)
+    }
+}
+
+impl BufRead for ChannelReader {
+    fn fill_buf(&mut self) -> io::Result<&[u8]> {
+        let (lock, cvar) = &*self.state;
+        let mut state = lock.lock().unwrap();
+
+        while state.buffer.is_empty() && !state.closed {
+            state = cvar.wait(state).unwrap();
+        }
+
+        self.internal_buf.clear();
+        self.internal_buf.extend(&state.buffer);
+        Ok(&self.internal_buf)
+    }
+
+    fn consume(&mut self, amt: usize) {
+        let (lock, _) = &*self.state;
+        let mut state = lock.lock().unwrap();
+        let to_drain = amt.min(state.buffer.len());
+        drop(state.buffer.drain(..to_drain));
+    }
+}
+
+/// Creates a connected pair of channels for bidirectional communication.
+fn create_channel_pair() -> (ChannelWriter, ChannelReader, ChannelWriter, ChannelReader) {
+    // Channel for client -> server communication
+    let client_to_server = Arc::new((
+        Mutex::new(ChannelState { buffer: VecDeque::new(), closed: false }),
+        Condvar::new(),
+    ));
+    let client_writer = ChannelWriter { state: client_to_server.clone() };
+    let server_reader = ChannelReader { state: client_to_server, internal_buf: Vec::new() };
+
+    // Channel for server -> client communication
+    let server_to_client = Arc::new((
+        Mutex::new(ChannelState { buffer: VecDeque::new(), closed: false }),
+        Condvar::new(),
+    ));
+
+    let server_writer = ChannelWriter { state: server_to_client.clone() };
+    let client_reader = ChannelReader { state: server_to_client, internal_buf: Vec::new() };
+
+    (client_writer, client_reader, server_writer, server_reader)
+}
+
+pub(crate) fn proc_macro_test_dylib_path() -> Utf8PathBuf {
+    let path = proc_macro_test::PROC_MACRO_TEST_LOCATION;
+    if path.is_empty() {
+        panic!("proc-macro-test dylib not available (requires nightly toolchain)");
+    }
+    path.into()
+}
+
+/// Creates a simple empty token tree suitable for testing.
+pub(crate) fn create_empty_token_tree(
+    version: u32,
+    span_data_table: &mut SpanDataIndexMap,
+) -> FlatTree {
+    let anchor = SpanAnchor {
+        file_id: EditionedFileId::new(FileId::from_raw(0), Edition::CURRENT),
+        ast_id: span::ROOT_ERASED_FILE_AST_ID,
+    };
+    let span = Span {
+        range: TextRange::empty(0.into()),
+        anchor,
+        ctx: SyntaxContext::root(Edition::CURRENT),
+    };
+
+    let builder = TopSubtreeBuilder::new(Delimiter {
+        open: span,
+        close: span,
+        kind: DelimiterKind::Invisible,
+    });
+    let tt = builder.build();
+
+    FlatTree::from_subtree(tt.view(), version, span_data_table)
+}
+
+pub(crate) fn with_server(format: proc_macro_api::ProtocolFormat, test_fn: F) -> R
+where
+    F: FnOnce(&mut dyn Write, &mut dyn BufRead) -> R,
+{
+    let (mut client_writer, mut client_reader, mut server_writer, mut server_reader) =
+        create_channel_pair();
+
+    let server_handle = thread::spawn(move || {
+        proc_macro_srv_cli::main_loop::run(&mut server_reader, &mut server_writer, format)
+    });
+
+    let result = test_fn(&mut client_writer, &mut client_reader);
+
+    drop(client_writer);
+
+    match server_handle.join() {
+        Ok(Ok(())) => {}
+        Ok(Err(e)) => {
+            if !matches!(
+                e.kind(),
+                io::ErrorKind::BrokenPipe
+                    | io::ErrorKind::UnexpectedEof
+                    | io::ErrorKind::InvalidData
+            ) {
+                panic!("Server error: {e}");
+            }
+        }
+        Err(e) => std::panic::resume_unwind(e),
+    }
+
+    result
+}
+
+trait TestProtocol {
+    type Request;
+    type Response;
+
+    fn send(&self, writer: &mut dyn Write, req: Self::Request);
+    fn drive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> Self::Response;
+}
+
+struct JsonLegacy;
+
+impl TestProtocol for JsonLegacy {
+    type Request = Request;
+    type Response = Response;
+
+    fn send(&self, writer: &mut dyn Write, req: Request) {
+        req.write::(writer).expect("failed to write request");
+    }
+
+    fn drive(&self, reader: &mut dyn BufRead, _writer: &mut dyn Write) -> Response {
+        let mut buf = String::new();
+        Response::read::(reader, &mut buf)
+            .expect("failed to read response")
+            .expect("no response received")
+    }
+}
+
+struct PostcardBidirectional
+where
+    F: Fn(SubRequest) -> Result,
+{
+    callback: F,
+}
+
+impl TestProtocol for PostcardBidirectional
+where
+    F: Fn(SubRequest) -> Result,
+{
+    type Request = BiRequest;
+    type Response = BiResponse;
+
+    fn send(&self, writer: &mut dyn Write, req: BiRequest) {
+        let msg = BidirectionalMessage::Request(req);
+        msg.write::(writer).expect("failed to write request");
+    }
+
+    fn drive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> BiResponse {
+        let mut buf = Vec::new();
+
+        loop {
+            let msg = BidirectionalMessage::read::(reader, &mut buf)
+                .expect("failed to read message")
+                .expect("no message received");
+
+            match msg {
+                BidirectionalMessage::Response(resp) => return resp,
+                BidirectionalMessage::SubRequest(sr) => {
+                    let reply = (self.callback)(sr).expect("subrequest callback failed");
+                    let msg = BidirectionalMessage::SubResponse(reply);
+                    msg.write::(writer).expect("failed to write subresponse");
+                }
+                other => panic!("unexpected message: {other:?}"),
+            }
+        }
+    }
+}
+
+pub(crate) fn request(
+    writer: &mut dyn Write,
+    reader: &mut dyn BufRead,
+    request: impl Into,
+    callback: Option<&dyn Fn(SubRequest) -> Result>,
+) -> AutoResponse {
+    let protocol = match callback {
+        None => AutoProtocol::Legacy(JsonLegacy),
+        Some(cb) => AutoProtocol::Bidirectional(PostcardBidirectional { callback: cb }),
+    };
+
+    protocol.send(writer, request.into());
+    protocol.drive(reader, writer)
+}
+
+enum AutoProtocol
+where
+    F: Fn(SubRequest) -> Result,
+{
+    Legacy(JsonLegacy),
+    Bidirectional(PostcardBidirectional),
+}
+
+impl TestProtocol for AutoProtocol
+where
+    F: Fn(SubRequest) -> Result,
+{
+    type Request = AutoRequest;
+    type Response = AutoResponse;
+
+    fn send(&self, writer: &mut dyn Write, req: AutoRequest) {
+        match (self, req) {
+            (AutoProtocol::Legacy(p), AutoRequest::Legacy(r)) => {
+                p.send(writer, r);
+            }
+            (AutoProtocol::Bidirectional(p), AutoRequest::Bidirectional(r)) => {
+                p.send(writer, r);
+            }
+            (AutoProtocol::Legacy(_), AutoRequest::Bidirectional(_)) => {
+                panic!("bidirectional request used with legacy protocol");
+            }
+            (AutoProtocol::Bidirectional(_), AutoRequest::Legacy(_)) => {
+                panic!("legacy request used with bidirectional protocol");
+            }
+        }
+    }
+
+    fn drive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> AutoResponse {
+        match self {
+            AutoProtocol::Legacy(p) => AutoResponse::Legacy(p.drive(reader, writer)),
+            AutoProtocol::Bidirectional(p) => AutoResponse::Bidirectional(p.drive(reader, writer)),
+        }
+    }
+}
+
+pub(crate) enum AutoRequest {
+    Legacy(Request),
+    Bidirectional(BiRequest),
+}
+
+#[derive(Debug)]
+pub(crate) enum AutoResponse {
+    Legacy(Response),
+    Bidirectional(BiResponse),
+}
+
+impl From for AutoRequest {
+    fn from(req: Request) -> AutoRequest {
+        AutoRequest::Legacy(req)
+    }
+}
+
+impl From for AutoRequest {
+    fn from(req: BiRequest) -> AutoRequest {
+        AutoRequest::Bidirectional(req)
+    }
+}
+
+impl From for Response {
+    fn from(res: AutoResponse) -> Response {
+        match res {
+            AutoResponse::Legacy(res) => res,
+            _ => panic!("Should be legacy response"),
+        }
+    }
+}
+
+impl From for BiResponse {
+    fn from(res: AutoResponse) -> BiResponse {
+        match res {
+            AutoResponse::Bidirectional(res) => res,
+            _ => panic!("Should be bidirectional response"),
+        }
+    }
+}

From ee35fd6cb206e00b0e43b7e676a1aed8eb013684 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sun, 18 Jan 2026 20:23:40 +0530
Subject: [PATCH 098/131] add bidirectional test

---
 .../tests/bidirectional_postcard.rs           | 223 ++++++++++++++++++
 1 file changed, 223 insertions(+)
 create mode 100644 src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs

diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
new file mode 100644
index 0000000000000..08e44bad37239
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
@@ -0,0 +1,223 @@
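+//! Integration tests for the proc-macro-srv-cli main loop over the
+//! bidirectional postcard protocol, driven through in-memory channels.
+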
+#![cfg(feature = "sysroot-abi")]
+
+mod common {
+    pub(crate) mod utils;
+}
+
+use common::utils::{create_empty_token_tree, proc_macro_test_dylib_path, request, with_server};
+use expect_test::expect;
+use proc_macro_api::{
+    ProtocolFormat::BidirectionalPostcardPrototype,
+    bidirectional_protocol::{
+        msg::{ExpandMacro, ExpandMacroData, ExpnGlobals, Request, Response},
+        reject_subrequests,
+    },
+    legacy_protocol::msg::{PanicMessage, ServerConfig, SpanDataIndexMap, SpanMode},
+    version::CURRENT_API_VERSION,
+};
+
+#[test]
+fn test_bidi_version_check_bidirectional() {
+    with_server(BidirectionalPostcardPrototype, |writer, reader| {
+        let response =
+            request(writer, reader, Request::ApiVersionCheck {}, Some(&reject_subrequests)).into();
+
+        match response {
+            Response::ApiVersionCheck(version) => {
+                assert_eq!(version, CURRENT_API_VERSION);
+            }
+            other => panic!("unexpected response: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_bidi_list_macros() {
+    with_server(BidirectionalPostcardPrototype, |writer, reader| {
+        let dylib_path = proc_macro_test_dylib_path();
+        let response =
+            request(writer, reader, Request::ListMacros { dylib_path }, Some(&reject_subrequests))
+                .into();
+
+        let Response::ListMacros(Ok(macros)) = response else {
+            panic!("expected successful ListMacros response");
+        };
+
+        let mut macro_list: Vec<_> =
+            macros.iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect();
+        macro_list.sort();
+        let macro_list_str = macro_list.join("\n");
+
+        expect![[r#"
+            DeriveEmpty [CustomDerive]
+            DeriveError [CustomDerive]
+            DerivePanic [CustomDerive]
+            DeriveReemit [CustomDerive]
+            attr_error [Attr]
+            attr_noop [Attr]
+            attr_panic [Attr]
+            fn_like_clone_tokens [Bang]
+            fn_like_error [Bang]
+            fn_like_mk_idents [Bang]
+            fn_like_mk_literals [Bang]
+            fn_like_noop [Bang]
+            fn_like_panic [Bang]
+            fn_like_span_join [Bang]
+            fn_like_span_line_column [Bang]
+            fn_like_span_ops [Bang]"#]]
+        .assert_eq(&macro_list_str);
+    });
+}
+
+#[test]
+fn test_bidi_list_macros_invalid_path() {
+    with_server(BidirectionalPostcardPrototype, |writer, reader| {
+        let response = request(
+            writer,
+            reader,
+            Request::ListMacros { dylib_path: "/nonexistent/path/to/dylib.so".into() },
+            Some(&reject_subrequests),
+        )
+        .into();
+
+        match response {
+            Response::ListMacros(Err(e)) => assert!(
+                e.starts_with("Cannot create expander for /nonexistent/path/to/dylib.so"),
+                "{e}"
+            ),
+            other => panic!("expected error response, got: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_bidi_set_config() {
+    with_server(BidirectionalPostcardPrototype, |writer, reader| {
+        let config = ServerConfig { span_mode: SpanMode::Id };
+        let response =
+            request(writer, reader, Request::SetConfig(config), Some(&reject_subrequests)).into();
+
+        match response {
+            Response::SetConfig(returned_config) => {
+                assert_eq!(returned_config.span_mode, SpanMode::Id);
+            }
+            other => panic!("unexpected response: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_bidi_set_config_rust_analyzer_mode() {
+    with_server(BidirectionalPostcardPrototype, |writer, reader| {
+        let config = ServerConfig { span_mode: SpanMode::RustAnalyzer };
+        let response =
+            request(writer, reader, Request::SetConfig(config), Some(&reject_subrequests)).into();
+
+        match response {
+            Response::SetConfig(returned_config) => {
+                assert_eq!(returned_config.span_mode, SpanMode::RustAnalyzer);
+            }
+            other => panic!("unexpected response: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_bidi_expand_macro_panic() {
+    with_server(BidirectionalPostcardPrototype, |writer, reader| {
+        let dylib_path = proc_macro_test_dylib_path();
+
+        let mut span_data_table = SpanDataIndexMap::default();
+        let macro_body =
+            common::utils::create_empty_token_tree(CURRENT_API_VERSION, &mut span_data_table);
+
+        let request1 = Request::ExpandMacro(Box::new(ExpandMacro {
+            lib: dylib_path,
+            env: vec![],
+            current_dir: None,
+            data: ExpandMacroData {
+                macro_body,
+                macro_name: "fn_like_panic".to_owned(),
+                attributes: None,
+                has_global_spans: ExpnGlobals { def_site: 0, call_site: 0, mixed_site: 0 },
+                span_data_table: vec![],
+            },
+        }));
+
+        let response = request(writer, reader, request1, Some(&reject_subrequests)).into();
+
+        match response {
+            Response::ExpandMacro(Err(PanicMessage(msg))) => {
+                assert!(msg.contains("fn_like_panic"), "panic message should mention macro name");
+            }
+            other => panic!("expected panic response, got: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_bidi_basic_call_flow() {
+    with_server(BidirectionalPostcardPrototype, |writer, reader| {
+        let dylib_path = proc_macro_test_dylib_path();
+
+        let response1 =
+            request(writer, reader, Request::ApiVersionCheck {}, Some(&reject_subrequests)).into();
+        assert!(matches!(response1, Response::ApiVersionCheck(_)));
+
+        let response2 = request(
+            writer,
+            reader,
+            Request::SetConfig(ServerConfig { span_mode: SpanMode::Id }),
+            Some(&reject_subrequests),
+        )
+        .into();
+        assert!(matches!(response2, Response::SetConfig(_)));
+
+        let response3 = request(
+            writer,
+            reader,
+            Request::ListMacros { dylib_path: dylib_path.clone() },
+            Some(&reject_subrequests),
+        )
+        .into();
+        assert!(matches!(response3, Response::ListMacros(Ok(_))));
+    });
+}
+
+#[test]
+fn test_bidi_expand_nonexistent_macro() {
+    with_server(BidirectionalPostcardPrototype, |writer, reader| {
+        let dylib_path = proc_macro_test_dylib_path();
+
+        let version_response =
+            request(writer, reader, Request::ApiVersionCheck {}, Some(&reject_subrequests)).into();
+        let Response::ApiVersionCheck(version) = version_response else {
+            panic!("expected version check response");
+        };
+
+        let mut span_data_table = SpanDataIndexMap::default();
+        let macro_body = create_empty_token_tree(version, &mut span_data_table);
+
+        let expand_request = Request::ExpandMacro(Box::new(ExpandMacro {
+            lib: dylib_path,
+            env: vec![],
+            current_dir: None,
+            data: ExpandMacroData {
+                macro_body,
+                macro_name: "NonexistentMacro".to_owned(),
+                attributes: None,
+                has_global_spans: ExpnGlobals { def_site: 0, call_site: 0, mixed_site: 0 },
+                span_data_table: vec![],
+            },
+        }));
+
+        let response = request(writer, reader, expand_request, Some(&reject_subrequests)).into();
+
+        match response {
+            Response::ExpandMacro(Err(PanicMessage(msg))) => {
+                expect!["proc-macro `NonexistentMacro` is missing"].assert_eq(&msg)
+            }
+            other => panic!("expected error for nonexistent macro, got: {other:?}"),
+        }
+    });
+}

From a151d7dc5617ecd8c0cf8fbb688654c66e9776b6 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sun, 18 Jan 2026 20:23:49 +0530
Subject: [PATCH 099/131] adapt json test

---
 .../proc-macro-srv-cli/tests/legacy_json.rs   | 457 +++++++++---------
 1 file changed, 233 insertions(+), 224 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs
index 1fa886219a8a4..8daee7b2bceb1 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs
@@ -1,224 +1,233 @@
-//! Integration tests for the proc-macro-srv-cli main loop.
-//!
-//! These tests exercise the full client-server RPC procedure using in-memory
-//! channels without needing to spawn the actual server and client processes.
-
-#![cfg(feature = "sysroot-abi")]
-
-mod common {
-    pub(crate) mod utils;
-}
-
-use common::utils::{create_empty_token_tree, proc_macro_test_dylib_path, request, with_server};
-use expect_test::expect;
-use proc_macro_api::{
-    legacy_protocol::msg::{
-        ExpandMacro, ExpandMacroData, ExpnGlobals, PanicMessage, Request, Response, ServerConfig,
-        SpanDataIndexMap, SpanMode,
-    },
-    version::CURRENT_API_VERSION,
-};
-
-#[test]
-fn test_version_check() {
-    with_server(|writer, reader| {
-        let response = request(writer, reader, Request::ApiVersionCheck {});
-
-        match response {
-            Response::ApiVersionCheck(version) => {
-                assert_eq!(version, CURRENT_API_VERSION);
-            }
-            other => panic!("unexpected response: {other:?}"),
-        }
-    });
-}
-
-#[test]
-fn test_list_macros() {
-    with_server(|writer, reader| {
-        let dylib_path = proc_macro_test_dylib_path();
-        let response = request(writer, reader, Request::ListMacros { dylib_path });
-
-        let Response::ListMacros(Ok(macros)) = response else {
-            panic!("expected successful ListMacros response");
-        };
-
-        let mut macro_list: Vec<_> =
-            macros.iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect();
-        macro_list.sort();
-        let macro_list_str = macro_list.join("\n");
-
-        expect![[r#"
-            DeriveEmpty [CustomDerive]
-            DeriveError [CustomDerive]
-            DerivePanic [CustomDerive]
-            DeriveReemit [CustomDerive]
-            attr_error [Attr]
-            attr_noop [Attr]
-            attr_panic [Attr]
-            fn_like_clone_tokens [Bang]
-            fn_like_error [Bang]
-            fn_like_mk_idents [Bang]
-            fn_like_mk_literals [Bang]
-            fn_like_noop [Bang]
-            fn_like_panic [Bang]
-            fn_like_span_join [Bang]
-            fn_like_span_line_column [Bang]
-            fn_like_span_ops [Bang]"#]]
-        .assert_eq(&macro_list_str);
-    });
-}
-
-#[test]
-fn test_list_macros_invalid_path() {
-    with_server(|writer, reader| {
-        let response = request(
-            writer,
-            reader,
-            Request::ListMacros { dylib_path: "/nonexistent/path/to/dylib.so".into() },
-        );
-
-        match response {
-            Response::ListMacros(Err(e)) => assert!(
-                e.starts_with("Cannot create expander for /nonexistent/path/to/dylib.so"),
-                "{e}"
-            ),
-            other => panic!("expected error response, got: {other:?}"),
-        }
-    });
-}
-
-#[test]
-fn test_set_config() {
-    with_server(|writer, reader| {
-        let config = ServerConfig { span_mode: SpanMode::Id };
-        let response = request(writer, reader, Request::SetConfig(config));
-
-        match response {
-            Response::SetConfig(returned_config) => {
-                assert_eq!(returned_config.span_mode, SpanMode::Id);
-            }
-            other => panic!("unexpected response: {other:?}"),
-        }
-    });
-}
-
-#[test]
-fn test_set_config_rust_analyzer_mode() {
-    with_server(|writer, reader| {
-        let config = ServerConfig { span_mode: SpanMode::RustAnalyzer };
-        let response = request(writer, reader, Request::SetConfig(config));
-
-        match response {
-            Response::SetConfig(returned_config) => {
-                assert_eq!(returned_config.span_mode, SpanMode::RustAnalyzer);
-            }
-            other => panic!("unexpected response: {other:?}"),
-        }
-    });
-}
-
-#[test]
-fn test_expand_macro_panic() {
-    with_server(|writer, reader| {
-        let dylib_path = proc_macro_test_dylib_path();
-
-        let version_response = request(writer, reader, Request::ApiVersionCheck {});
-        let Response::ApiVersionCheck(version) = version_response else {
-            panic!("expected version check response");
-        };
-
-        let mut span_data_table = SpanDataIndexMap::default();
-        let macro_body = create_empty_token_tree(version, &mut span_data_table);
-
-        let expand_request = Request::ExpandMacro(Box::new(ExpandMacro {
-            lib: dylib_path,
-            env: vec![],
-            current_dir: None,
-            data: ExpandMacroData {
-                macro_body,
-                macro_name: "fn_like_panic".to_owned(),
-                attributes: None,
-                has_global_spans: ExpnGlobals {
-                    serialize: version >= 3,
-                    def_site: 0,
-                    call_site: 0,
-                    mixed_site: 0,
-                },
-                span_data_table: vec![],
-            },
-        }));
-
-        let response = request(writer, reader, expand_request);
-
-        match response {
-            Response::ExpandMacro(Err(PanicMessage(msg))) => {
-                assert!(msg.contains("fn_like_panic"), "panic message should mention the macro");
-            }
-            Response::ExpandMacro(Ok(_)) => {
-                panic!("expected panic, but macro succeeded");
-            }
-            other => panic!("unexpected response: {other:?}"),
-        }
-    });
-}
-
-#[test]
-fn test_basic_call_flow() {
-    with_server(|writer, reader| {
-        let dylib_path = proc_macro_test_dylib_path();
-
-        let response1 = request(writer, reader, Request::ApiVersionCheck {});
-        assert!(matches!(response1, Response::ApiVersionCheck(_)));
-
-        let response2 =
-            request(writer, reader, Request::SetConfig(ServerConfig { span_mode: SpanMode::Id }));
-        assert!(matches!(response2, Response::SetConfig(_)));
-
-        let response3 =
-            request(writer, reader, Request::ListMacros { dylib_path: dylib_path.clone() });
-        assert!(matches!(response3, Response::ListMacros(Ok(_))));
-    });
-}
-
-#[test]
-fn test_expand_nonexistent_macro() {
-    with_server(|writer, reader| {
-        let dylib_path = proc_macro_test_dylib_path();
-
-        let version_response = request(writer, reader, Request::ApiVersionCheck {});
-        let Response::ApiVersionCheck(version) = version_response else {
-            panic!("expected version check response");
-        };
-
-        let mut span_data_table = SpanDataIndexMap::default();
-        let macro_body = create_empty_token_tree(version, &mut span_data_table);
-
-        let expand_request = Request::ExpandMacro(Box::new(ExpandMacro {
-            lib: dylib_path,
-            env: vec![],
-            current_dir: None,
-            data: ExpandMacroData {
-                macro_body,
-                macro_name: "NonexistentMacro".to_owned(),
-                attributes: None,
-                has_global_spans: ExpnGlobals {
-                    serialize: version >= 3,
-                    def_site: 0,
-                    call_site: 0,
-                    mixed_site: 0,
-                },
-                span_data_table: vec![],
-            },
-        }));
-
-        let response = request(writer, reader, expand_request);
-
-        match response {
-            Response::ExpandMacro(Err(PanicMessage(msg))) => {
-                expect!["proc-macro `NonexistentMacro` is missing"].assert_eq(&msg)
-            }
-            other => panic!("expected error for nonexistent macro, got: {other:?}"),
-        }
-    });
-}
+//! Integration tests for the proc-macro-srv-cli main loop.
+//!
+//! These tests exercise the full client-server RPC procedure using in-memory
+//! channels without needing to spawn the actual server and client processes.
+
+#![cfg(feature = "sysroot-abi")]
+
+mod common {
+    pub(crate) mod utils;
+}
+
+use common::utils::{create_empty_token_tree, proc_macro_test_dylib_path, request, with_server};
+use expect_test::expect;
+use proc_macro_api::{
+    ProtocolFormat::JsonLegacy,
+    legacy_protocol::msg::{
+        ExpandMacro, ExpandMacroData, ExpnGlobals, PanicMessage, Request, Response, ServerConfig,
+        SpanDataIndexMap, SpanMode,
+    },
+    version::CURRENT_API_VERSION,
+};
+
+#[test]
+fn test_version_check() {
+    with_server(JsonLegacy, |writer, reader| {
+        let response = request(writer, reader, Request::ApiVersionCheck {}, None).into();
+
+        match response {
+            Response::ApiVersionCheck(version) => {
+                assert_eq!(version, CURRENT_API_VERSION);
+            }
+            other => panic!("unexpected response: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_list_macros() {
+    with_server(JsonLegacy, |writer, reader| {
+        let dylib_path = proc_macro_test_dylib_path();
+        let response = request(writer, reader, Request::ListMacros { dylib_path }, None).into();
+
+        let Response::ListMacros(Ok(macros)) = response else {
+            panic!("expected successful ListMacros response");
+        };
+
+        let mut macro_list: Vec<_> =
+            macros.iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect();
+        macro_list.sort();
+        let macro_list_str = macro_list.join("\n");
+
+        expect![[r#"
+            DeriveEmpty [CustomDerive]
+            DeriveError [CustomDerive]
+            DerivePanic [CustomDerive]
+            DeriveReemit [CustomDerive]
+            attr_error [Attr]
+            attr_noop [Attr]
+            attr_panic [Attr]
+            fn_like_clone_tokens [Bang]
+            fn_like_error [Bang]
+            fn_like_mk_idents [Bang]
+            fn_like_mk_literals [Bang]
+            fn_like_noop [Bang]
+            fn_like_panic [Bang]
+            fn_like_span_join [Bang]
+            fn_like_span_line_column [Bang]
+            fn_like_span_ops [Bang]"#]]
+        .assert_eq(&macro_list_str);
+    });
+}
+
+#[test]
+fn test_list_macros_invalid_path() {
+    with_server(JsonLegacy, |writer, reader| {
+        let response = request(
+            writer,
+            reader,
+            Request::ListMacros { dylib_path: "/nonexistent/path/to/dylib.so".into() },
+            None,
+        )
+        .into();
+
+        match response {
+            Response::ListMacros(Err(e)) => assert!(
+                e.starts_with("Cannot create expander for /nonexistent/path/to/dylib.so"),
+                "{e}"
+            ),
+            other => panic!("expected error response, got: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_set_config() {
+    with_server(JsonLegacy, |writer, reader| {
+        let config = ServerConfig { span_mode: SpanMode::Id };
+        let response = request(writer, reader, Request::SetConfig(config), None).into();
+
+        match response {
+            Response::SetConfig(returned_config) => {
+                assert_eq!(returned_config.span_mode, SpanMode::Id);
+            }
+            other => panic!("unexpected response: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_set_config_rust_analyzer_mode() {
+    with_server(JsonLegacy, |writer, reader| {
+        let config = ServerConfig { span_mode: SpanMode::RustAnalyzer };
+        let response = request(writer, reader, Request::SetConfig(config), None).into();
+
+        match response {
+            Response::SetConfig(returned_config) => {
+                assert_eq!(returned_config.span_mode, SpanMode::RustAnalyzer);
+            }
+            other => panic!("unexpected response: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_expand_macro_panic() {
+    with_server(JsonLegacy, |writer, reader| {
+        let dylib_path = proc_macro_test_dylib_path();
+
+        let version_response = request(writer, reader, Request::ApiVersionCheck {}, None).into();
+        let Response::ApiVersionCheck(version) = version_response else {
+            panic!("expected version check response");
+        };
+
+        let mut span_data_table = SpanDataIndexMap::default();
+        let macro_body = create_empty_token_tree(version, &mut span_data_table);
+
+        let expand_request = Request::ExpandMacro(Box::new(ExpandMacro {
+            lib: dylib_path,
+            env: vec![],
+            current_dir: None,
+            data: ExpandMacroData {
+                macro_body,
+                macro_name: "fn_like_panic".to_owned(),
+                attributes: None,
+                has_global_spans: ExpnGlobals {
+                    serialize: version >= 3,
+                    def_site: 0,
+                    call_site: 0,
+                    mixed_site: 0,
+                },
+                span_data_table: vec![],
+            },
+        }));
+
+        let response = request(writer, reader, expand_request, None).into();
+
+        match response {
+            Response::ExpandMacro(Err(PanicMessage(msg))) => {
+                assert!(msg.contains("fn_like_panic"), "panic message should mention the macro");
+            }
+            Response::ExpandMacro(Ok(_)) => {
+                panic!("expected panic, but macro succeeded");
+            }
+            other => panic!("unexpected response: {other:?}"),
+        }
+    });
+}
+
+#[test]
+fn test_basic_call_flow() {
+    with_server(JsonLegacy, |writer, reader| {
+        let dylib_path = proc_macro_test_dylib_path();
+
+        let response1 = request(writer, reader, Request::ApiVersionCheck {}, None).into();
+        assert!(matches!(response1, Response::ApiVersionCheck(_)));
+
+        let response2 = request(
+            writer,
+            reader,
+            Request::SetConfig(ServerConfig { span_mode: SpanMode::Id }),
+            None,
+        )
+        .into();
+        assert!(matches!(response2, Response::SetConfig(_)));
+
+        let response3 =
+            request(writer, reader, Request::ListMacros { dylib_path: dylib_path.clone() }, None)
+                .into();
+        assert!(matches!(response3, Response::ListMacros(Ok(_))));
+    });
+}
+
+#[test]
+fn test_expand_nonexistent_macro() {
+    with_server(JsonLegacy, |writer, reader| {
+        let dylib_path = proc_macro_test_dylib_path();
+
+        let version_response = request(writer, reader, Request::ApiVersionCheck {}, None).into();
+        let Response::ApiVersionCheck(version) = version_response else {
+            panic!("expected version check response");
+        };
+
+        let mut span_data_table = SpanDataIndexMap::default();
+        let macro_body = create_empty_token_tree(version, &mut span_data_table);
+
+        let expand_request = Request::ExpandMacro(Box::new(ExpandMacro {
+            lib: dylib_path,
+            env: vec![],
+            current_dir: None,
+            data: ExpandMacroData {
+                macro_body,
+                macro_name: "NonexistentMacro".to_owned(),
+                attributes: None,
+                has_global_spans: ExpnGlobals {
+                    serialize: version >= 3,
+                    def_site: 0,
+                    call_site: 0,
+                    mixed_site: 0,
+                },
+                span_data_table: vec![],
+            },
+        }));
+
+        let response = request(writer, reader, expand_request, None).into();
+
+        match response {
+            Response::ExpandMacro(Err(PanicMessage(msg))) => {
+                expect!["proc-macro `NonexistentMacro` is missing"].assert_eq(&msg)
+            }
+            other => panic!("expected error for nonexistent macro, got: {other:?}"),
+        }
+    });
+}

From 095b0138028d51e45ca987e0b0b1dcf4e16a876d Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Sun, 18 Jan 2026 20:38:58 +0530
Subject: [PATCH 100/131] rename send and drive to request and receive and
 remove auto*

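With the `Auto*` wrappers gone, each protocol gets a dedicated helper. A sketch
of the resulting call sites, taken from the updated tests:

```rust
// legacy JSON protocol
let response = request_legacy(writer, reader, Request::ApiVersionCheck {});

// bidirectional postcard protocol, with a callback answering (here: rejecting)
// server-issued subrequests
let response =
    request_bidirectional(writer, reader, Request::ApiVersionCheck {}, reject_subrequests);
```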
---
 .../tests/bidirectional_postcard.rs           |  48 +++----
 .../proc-macro-srv-cli/tests/common/utils.rs  | 118 ++++--------------
 .../proc-macro-srv-cli/tests/legacy_json.rs   |  37 +++---
 3 files changed, 67 insertions(+), 136 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
index 08e44bad37239..33ca1d791de7e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
@@ -4,7 +4,9 @@ mod common {
     pub(crate) mod utils;
 }
 
-use common::utils::{create_empty_token_tree, proc_macro_test_dylib_path, request, with_server};
+use common::utils::{
+    create_empty_token_tree, proc_macro_test_dylib_path, request_bidirectional, with_server,
+};
 use expect_test::expect;
 use proc_macro_api::{
     ProtocolFormat::BidirectionalPostcardPrototype,
@@ -20,7 +22,7 @@ use proc_macro_api::{
 fn test_bidi_version_check_bidirectional() {
     with_server(BidirectionalPostcardPrototype, |writer, reader| {
         let response =
-            request(writer, reader, Request::ApiVersionCheck {}, Some(&reject_subrequests)).into();
+            request_bidirectional(writer, reader, Request::ApiVersionCheck {}, reject_subrequests);
 
         match response {
             Response::ApiVersionCheck(version) => {
@@ -35,9 +37,12 @@ fn test_bidi_version_check_bidirectional() {
 fn test_bidi_list_macros() {
     with_server(BidirectionalPostcardPrototype, |writer, reader| {
         let dylib_path = proc_macro_test_dylib_path();
-        let response =
-            request(writer, reader, Request::ListMacros { dylib_path }, Some(&reject_subrequests))
-                .into();
+        let response = request_bidirectional(
+            writer,
+            reader,
+            Request::ListMacros { dylib_path },
+            &reject_subrequests,
+        );
 
         let Response::ListMacros(Ok(macros)) = response else {
             panic!("expected successful ListMacros response");
@@ -72,13 +77,12 @@ fn test_bidi_list_macros() {
 #[test]
 fn test_bidi_list_macros_invalid_path() {
     with_server(BidirectionalPostcardPrototype, |writer, reader| {
-        let response = request(
+        let response = request_bidirectional(
             writer,
             reader,
             Request::ListMacros { dylib_path: "/nonexistent/path/to/dylib.so".into() },
-            Some(&reject_subrequests),
-        )
-        .into();
+            reject_subrequests,
+        );
 
         match response {
             Response::ListMacros(Err(e)) => assert!(
@@ -95,7 +99,7 @@ fn test_bidi_set_config() {
     with_server(BidirectionalPostcardPrototype, |writer, reader| {
         let config = ServerConfig { span_mode: SpanMode::Id };
         let response =
-            request(writer, reader, Request::SetConfig(config), Some(&reject_subrequests)).into();
+            request_bidirectional(writer, reader, Request::SetConfig(config), reject_subrequests);
 
         match response {
             Response::SetConfig(returned_config) => {
@@ -111,7 +115,7 @@ fn test_bidi_set_config_rust_analyzer_mode() {
     with_server(BidirectionalPostcardPrototype, |writer, reader| {
         let config = ServerConfig { span_mode: SpanMode::RustAnalyzer };
         let response =
-            request(writer, reader, Request::SetConfig(config), Some(&reject_subrequests)).into();
+            request_bidirectional(writer, reader, Request::SetConfig(config), reject_subrequests);
 
         match response {
             Response::SetConfig(returned_config) => {
@@ -144,7 +148,7 @@ fn test_bidi_expand_macro_panic() {
             },
         }));
 
-        let response = request(writer, reader, request1, Some(&reject_subrequests)).into();
+        let response = request_bidirectional(writer, reader, request1, reject_subrequests);
 
         match response {
             Response::ExpandMacro(Err(PanicMessage(msg))) => {
@@ -161,25 +165,23 @@ fn test_bidi_basic_call_flow() {
         let dylib_path = proc_macro_test_dylib_path();
 
         let response1 =
-            request(writer, reader, Request::ApiVersionCheck {}, Some(&reject_subrequests)).into();
+            request_bidirectional(writer, reader, Request::ApiVersionCheck {}, reject_subrequests);
         assert!(matches!(response1, Response::ApiVersionCheck(_)));
 
-        let response2 = request(
+        let response2 = request_bidirectional(
             writer,
             reader,
             Request::SetConfig(ServerConfig { span_mode: SpanMode::Id }),
-            Some(&reject_subrequests),
-        )
-        .into();
+            reject_subrequests,
+        );
         assert!(matches!(response2, Response::SetConfig(_)));
 
-        let response3 = request(
+        let response3 = request_bidirectional(
             writer,
             reader,
             Request::ListMacros { dylib_path: dylib_path.clone() },
-            Some(&reject_subrequests),
-        )
-        .into();
+            reject_subrequests,
+        );
         assert!(matches!(response3, Response::ListMacros(Ok(_))));
     });
 }
@@ -190,7 +192,7 @@ fn test_bidi_expand_nonexistent_macro() {
         let dylib_path = proc_macro_test_dylib_path();
 
         let version_response =
-            request(writer, reader, Request::ApiVersionCheck {}, Some(&reject_subrequests)).into();
+            request_bidirectional(writer, reader, Request::ApiVersionCheck {}, reject_subrequests);
         let Response::ApiVersionCheck(version) = version_response else {
             panic!("expected version check response");
         };
@@ -211,7 +213,7 @@ fn test_bidi_expand_nonexistent_macro() {
             },
         }));
 
-        let response = request(writer, reader, expand_request, Some(&reject_subrequests)).into();
+        let response = request_bidirectional(writer, reader, expand_request, reject_subrequests);
 
         match response {
             Response::ExpandMacro(Err(PanicMessage(msg))) => {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs
index 63b3a74aa4e8b..85c394734b33b 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/common/utils.rs
@@ -198,21 +198,22 @@ trait TestProtocol {
     type Request;
     type Response;
 
-    fn send(&self, writer: &mut dyn Write, req: Self::Request);
-    fn drive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> Self::Response;
+    fn request(&self, writer: &mut dyn Write, req: Self::Request);
+    fn receive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> Self::Response;
 }
 
+#[allow(dead_code)]
 struct JsonLegacy;
 
 impl TestProtocol for JsonLegacy {
     type Request = Request;
     type Response = Response;
 
-    fn send(&self, writer: &mut dyn Write, req: Request) {
+    fn request(&self, writer: &mut dyn Write, req: Request) {
         req.write::(writer).expect("failed to write request");
     }
 
-    fn drive(&self, reader: &mut dyn BufRead, _writer: &mut dyn Write) -> Response {
+    fn receive(&self, reader: &mut dyn BufRead, _writer: &mut dyn Write) -> Response {
         let mut buf = String::new();
         Response::read::(reader, &mut buf)
             .expect("failed to read response")
@@ -220,6 +221,7 @@ impl TestProtocol for JsonLegacy {
     }
 }
 
+#[allow(dead_code)]
 struct PostcardBidirectional
 where
     F: Fn(SubRequest) -> Result,
@@ -234,12 +236,12 @@ where
     type Request = BiRequest;
     type Response = BiResponse;
 
-    fn send(&self, writer: &mut dyn Write, req: BiRequest) {
+    fn request(&self, writer: &mut dyn Write, req: BiRequest) {
         let msg = BidirectionalMessage::Request(req);
         msg.write::(writer).expect("failed to write request");
     }
 
-    fn drive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> BiResponse {
+    fn receive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> BiResponse {
         let mut buf = Vec::new();
 
         loop {
@@ -260,98 +262,28 @@ where
     }
 }
 
-pub(crate) fn request(
+#[allow(dead_code)]
+pub(crate) fn request_legacy(
     writer: &mut dyn Write,
     reader: &mut dyn BufRead,
-    request: impl Into,
-    callback: Option<&dyn Fn(SubRequest) -> Result>,
-) -> AutoResponse {
-    let protocol = match callback {
-        None => AutoProtocol::Legacy(JsonLegacy),
-        Some(cb) => AutoProtocol::Bidirectional(PostcardBidirectional { callback: cb }),
-    };
-
-    protocol.send(writer, request.into());
-    protocol.drive(reader, writer)
-}
-
-enum AutoProtocol
-where
-    F: Fn(SubRequest) -> Result,
-{
-    Legacy(JsonLegacy),
-    Bidirectional(PostcardBidirectional),
+    request: Request,
+) -> Response {
+    let protocol = JsonLegacy;
+    protocol.request(writer, request);
+    protocol.receive(reader, writer)
 }
 
-impl TestProtocol for AutoProtocol
+#[allow(dead_code)]
+pub(crate) fn request_bidirectional(
+    writer: &mut dyn Write,
+    reader: &mut dyn BufRead,
+    request: BiRequest,
+    callback: F,
+) -> BiResponse
 where
     F: Fn(SubRequest) -> Result,
 {
-    type Request = AutoRequest;
-    type Response = AutoResponse;
-
-    fn send(&self, writer: &mut dyn Write, req: AutoRequest) {
-        match (self, req) {
-            (AutoProtocol::Legacy(p), AutoRequest::Legacy(r)) => {
-                p.send(writer, r);
-            }
-            (AutoProtocol::Bidirectional(p), AutoRequest::Bidirectional(r)) => {
-                p.send(writer, r);
-            }
-            (AutoProtocol::Legacy(_), AutoRequest::Bidirectional(_)) => {
-                panic!("bidirectional request used with legacy protocol");
-            }
-            (AutoProtocol::Bidirectional(_), AutoRequest::Legacy(_)) => {
-                panic!("legacy request used with bidirectional protocol");
-            }
-        }
-    }
-
-    fn drive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> AutoResponse {
-        match self {
-            AutoProtocol::Legacy(p) => AutoResponse::Legacy(p.drive(reader, writer)),
-            AutoProtocol::Bidirectional(p) => AutoResponse::Bidirectional(p.drive(reader, writer)),
-        }
-    }
-}
-
-pub(crate) enum AutoRequest {
-    Legacy(Request),
-    Bidirectional(BiRequest),
-}
-
-#[derive(Debug)]
-pub(crate) enum AutoResponse {
-    Legacy(Response),
-    Bidirectional(BiResponse),
-}
-
-impl From for AutoRequest {
-    fn from(req: Request) -> AutoRequest {
-        AutoRequest::Legacy(req)
-    }
-}
-
-impl From for AutoRequest {
-    fn from(req: BiRequest) -> AutoRequest {
-        AutoRequest::Bidirectional(req)
-    }
-}
-
-impl From for Response {
-    fn from(res: AutoResponse) -> Response {
-        match res {
-            AutoResponse::Legacy(res) => res,
-            _ => panic!("Should be legacy response"),
-        }
-    }
-}
-
-impl From for BiResponse {
-    fn from(res: AutoResponse) -> BiResponse {
-        match res {
-            AutoResponse::Bidirectional(res) => res,
-            _ => panic!("Should be bidirectional response"),
-        }
-    }
+    let protocol = PostcardBidirectional { callback };
+    protocol.request(writer, request);
+    protocol.receive(reader, writer)
 }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs
index 8daee7b2bceb1..c0dbfd1679f7b 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs
@@ -9,7 +9,9 @@ mod common {
     pub(crate) mod utils;
 }
 
-use common::utils::{create_empty_token_tree, proc_macro_test_dylib_path, request, with_server};
+use common::utils::{
+    create_empty_token_tree, proc_macro_test_dylib_path, request_legacy, with_server,
+};
 use expect_test::expect;
 use proc_macro_api::{
     ProtocolFormat::JsonLegacy,
@@ -23,7 +25,7 @@ use proc_macro_api::{
 #[test]
 fn test_version_check() {
     with_server(JsonLegacy, |writer, reader| {
-        let response = request(writer, reader, Request::ApiVersionCheck {}, None).into();
+        let response = request_legacy(writer, reader, Request::ApiVersionCheck {});
 
         match response {
             Response::ApiVersionCheck(version) => {
@@ -38,7 +40,7 @@ fn test_version_check() {
 fn test_list_macros() {
     with_server(JsonLegacy, |writer, reader| {
         let dylib_path = proc_macro_test_dylib_path();
-        let response = request(writer, reader, Request::ListMacros { dylib_path }, None).into();
+        let response = request_legacy(writer, reader, Request::ListMacros { dylib_path });
 
         let Response::ListMacros(Ok(macros)) = response else {
             panic!("expected successful ListMacros response");
@@ -73,13 +75,11 @@ fn test_list_macros() {
 #[test]
 fn test_list_macros_invalid_path() {
     with_server(JsonLegacy, |writer, reader| {
-        let response = request(
+        let response = request_legacy(
             writer,
             reader,
             Request::ListMacros { dylib_path: "/nonexistent/path/to/dylib.so".into() },
-            None,
-        )
-        .into();
+        );
 
         match response {
             Response::ListMacros(Err(e)) => assert!(
@@ -95,7 +95,7 @@ fn test_list_macros_invalid_path() {
 fn test_set_config() {
     with_server(JsonLegacy, |writer, reader| {
         let config = ServerConfig { span_mode: SpanMode::Id };
-        let response = request(writer, reader, Request::SetConfig(config), None).into();
+        let response = request_legacy(writer, reader, Request::SetConfig(config));
 
         match response {
             Response::SetConfig(returned_config) => {
@@ -110,7 +110,7 @@ fn test_set_config() {
 fn test_set_config_rust_analyzer_mode() {
     with_server(JsonLegacy, |writer, reader| {
         let config = ServerConfig { span_mode: SpanMode::RustAnalyzer };
-        let response = request(writer, reader, Request::SetConfig(config), None).into();
+        let response = request_legacy(writer, reader, Request::SetConfig(config));
 
         match response {
             Response::SetConfig(returned_config) => {
@@ -126,7 +126,7 @@ fn test_expand_macro_panic() {
     with_server(JsonLegacy, |writer, reader| {
         let dylib_path = proc_macro_test_dylib_path();
 
-        let version_response = request(writer, reader, Request::ApiVersionCheck {}, None).into();
+        let version_response = request_legacy(writer, reader, Request::ApiVersionCheck {});
         let Response::ApiVersionCheck(version) = version_response else {
             panic!("expected version check response");
         };
@@ -152,7 +152,7 @@ fn test_expand_macro_panic() {
             },
         }));
 
-        let response = request(writer, reader, expand_request, None).into();
+        let response = request_legacy(writer, reader, expand_request);
 
         match response {
             Response::ExpandMacro(Err(PanicMessage(msg))) => {
@@ -171,21 +171,18 @@ fn test_basic_call_flow() {
     with_server(JsonLegacy, |writer, reader| {
         let dylib_path = proc_macro_test_dylib_path();
 
-        let response1 = request(writer, reader, Request::ApiVersionCheck {}, None).into();
+        let response1 = request_legacy(writer, reader, Request::ApiVersionCheck {});
         assert!(matches!(response1, Response::ApiVersionCheck(_)));
 
-        let response2 = request(
+        let response2 = request_legacy(
             writer,
             reader,
             Request::SetConfig(ServerConfig { span_mode: SpanMode::Id }),
-            None,
-        )
-        .into();
+        );
         assert!(matches!(response2, Response::SetConfig(_)));
 
         let response3 =
-            request(writer, reader, Request::ListMacros { dylib_path: dylib_path.clone() }, None)
-                .into();
+            request_legacy(writer, reader, Request::ListMacros { dylib_path: dylib_path.clone() });
         assert!(matches!(response3, Response::ListMacros(Ok(_))));
     });
 }
@@ -195,7 +192,7 @@ fn test_expand_nonexistent_macro() {
     with_server(JsonLegacy, |writer, reader| {
         let dylib_path = proc_macro_test_dylib_path();
 
-        let version_response = request(writer, reader, Request::ApiVersionCheck {}, None).into();
+        let version_response = request_legacy(writer, reader, Request::ApiVersionCheck {});
         let Response::ApiVersionCheck(version) = version_response else {
             panic!("expected version check response");
         };
@@ -221,7 +218,7 @@ fn test_expand_nonexistent_macro() {
             },
         }));
 
-        let response = request(writer, reader, expand_request, None).into();
+        let response = request_legacy(writer, reader, expand_request);
 
         match response {
             Response::ExpandMacro(Err(PanicMessage(msg))) => {

From 29aad0493b082c5d36a006fa39656d61b2c3ddc3 Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Mon, 19 Jan 2026 09:58:15 +0200
Subject: [PATCH 101/131] Cache `Clauses::empty()`

---
 .../rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs
index 5758e2dc7e93c..6f4fae7073178 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs
@@ -273,9 +273,8 @@ impl<'db> std::fmt::Debug for Clauses<'db> {
 
 impl<'db> Clauses<'db> {
     #[inline]
-    pub fn empty(_interner: DbInterner<'db>) -> Self {
-        // FIXME: Get from a static.
-        Self::new_from_slice(&[])
+    pub fn empty(interner: DbInterner<'db>) -> Self {
+        interner.default_types().empty.clauses
     }
 
     #[inline]

From e07da1fb86ce0ccbe6c5b542f545fd304efb2e3f Mon Sep 17 00:00:00 2001
From: Youseok Yang 
Date: Tue, 20 Jan 2026 07:16:59 +0000
Subject: [PATCH 102/131] feat(hir-ty): add method references_only_ty_error to
 detect type errors

Add a new method `references_only_ty_error` to the `Ty` implementation
to determine if a type contains only type errors, ignoring const and
lifetime errors. Enhance test suite for const generic method resolution.
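
As a rough illustration (adapted from the regression test added in this patch;
the `Between`/`Parser` definitions live there, so this is a sketch rather than
a standalone example), a receiver whose type still carries a const-generic
error no longer blocks method resolution:

```rust
fn test_at_least() {
    // receiver type is `Between<1, _, char>`: it contains a const error,
    // but no type error
    let num = '9'.at_least::<1>();
    // previously skipped because `references_non_lt_error` also counts const
    // errors; with `references_only_ty_error` this call now resolves
    let _ver = num.sep_once(".", num);
}
```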
---
 .../hir-ty/src/method_resolution/probe.rs     |  6 +-
 .../crates/hir-ty/src/next_solver/ty.rs       | 19 +++++
 .../hir-ty/src/tests/regression/new_solver.rs | 71 ++++++++++++++++++-
 3 files changed, 92 insertions(+), 4 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs
index 42a590e8b4cb3..fdd501723fb53 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs
@@ -1246,9 +1246,9 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
             .filter(|step| step.reachable_via_deref)
             .filter(|step| {
                 debug!("pick_all_method: step={:?}", step);
-                // skip types that are from a type error or that would require dereferencing
-                // a raw pointer
-                !step.self_ty.value.value.references_non_lt_error() && !step.from_unsafe_deref
+                // Skip types with type errors (but not const/lifetime errors, which are
+                // often spurious due to incomplete const evaluation) and raw pointer derefs.
+                !step.self_ty.value.value.references_only_ty_error() && !step.from_unsafe_deref
             })
             .try_for_each(|step| {
                 let InferOk { value: self_ty, obligations: instantiate_self_ty_obligations } = self
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
index 66a24d3949908..1173028a1092d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
@@ -508,6 +508,11 @@ impl<'db> Ty<'db> {
         references_non_lt_error(&self)
     }
 
+    /// Whether the type contains a type error (ignoring const and lifetime errors).
+    pub fn references_only_ty_error(self) -> bool {
+        references_only_ty_error(&self)
+    }
+
     pub fn callable_sig(self, interner: DbInterner<'db>) -> Option>> {
         match self.kind() {
             TyKind::FnDef(callable, args) => {
@@ -777,6 +782,20 @@ impl<'db> TypeVisitor> for ReferencesNonLifetimeError {
     }
 }
 
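+/// Whether `t` contains a type error (`TyKind::Error`), ignoring const and
+/// lifetime errors.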
+pub fn references_only_ty_error<'db, T: TypeVisitableExt<DbInterner<'db>>>(t: &T) -> bool {
+    t.references_error() && t.visit_with(&mut ReferencesOnlyTyError).is_break()
+}
+
+struct ReferencesOnlyTyError;
+
+impl<'db> TypeVisitor<DbInterner<'db>> for ReferencesOnlyTyError {
+    type Result = ControlFlow<()>;
+
+    fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result {
+        if ty.is_ty_error() { ControlFlow::Break(()) } else { ty.super_visit_with(self) }
+    }
+}
+
 impl<'db> std::fmt::Debug for Ty<'db> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         self.inner().internee.fmt(f)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs
index be6ab23ad761e..f47a26d429fdf 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs
@@ -471,7 +471,76 @@ fn foo() {
             244..246 '_x': {unknown}
             249..257 'to_bytes': fn to_bytes() -> [u8; _]
             249..259 'to_bytes()': [u8; _]
-            249..268 'to_byt..._vec()': {unknown}
+            249..268 'to_byt..._vec()': Vec<<[u8; _] as Foo>::Item>
+        "#]],
+    );
+}
+
+#[test]
+fn regression_21315() {
+    check_infer(
+        r#"
+struct Consts;
+impl Consts { const MAX: usize = 0; }
+
+struct Between(T);
+
+impl Between {
+    fn sep_once(self, _sep: &str, _other: Self) -> Self {
+        self
+    }
+}
+
+trait Parser: Sized {
+    fn at_least(self) -> Between {
+        Between(self)
+    }
+    fn at_most(self) -> Between<0, N, Self> {
+        Between(self)
+    }
+}
+
+impl Parser for char {}
+
+fn test_at_least() {
+    let num = '9'.at_least::<1>();
+    let _ver = num.sep_once(".", num);
+}
+
+fn test_at_most() {
+    let num = '9'.at_most::<1>();
+}
+    "#,
+        expect![[r#"
+            48..49 '0': usize
+            182..186 'self': Between
+            188..192 '_sep': &'? str
+            200..206 '_other': Between
+            222..242 '{     ...     }': Between
+            232..236 'self': Between
+            300..304 'self': Self
+            343..372 '{     ...     }': Between
+            353..360 'Between': fn Between(Self) -> Between
+            353..366 'Between(self)': Between
+            361..365 'self': Self
+            404..408 'self': Self
+            433..462 '{     ...     }': Between<0, N, Self>
+            443..450 'Between': fn Between<0, N, Self>(Self) -> Between<0, N, Self>
+            443..456 'Between(self)': Between<0, N, Self>
+            451..455 'self': Self
+            510..587 '{     ...um); }': ()
+            520..523 'num': Between<1, _, char>
+            526..529 ''9'': char
+            526..545 ''9'.at...:<1>()': Between<1, _, char>
+            555..559 '_ver': Between<1, _, char>
+            562..565 'num': Between<1, _, char>
+            562..584 'num.se..., num)': Between<1, _, char>
+            575..578 '"."': &'static str
+            580..583 'num': Between<1, _, char>
+            607..644 '{     ...>(); }': ()
+            617..620 'num': Between<0, 1, char>
+            623..626 ''9'': char
+            623..641 ''9'.at...:<1>()': Between<0, 1, char>
         "#]],
     );
 }

From 61b9b33c1cd8857c2e79caba782ed13edb654100 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Tue, 20 Jan 2026 23:02:34 +0800
Subject: [PATCH 103/131] Fix demorgan applicable on pattern conditional

Example
---
```rust
fn f() {
    if let 1 = 1 &&$0 true { }
}
```

**Before this PR**

```rust
fn f() {
    if !(!let 1 = 1 || false) { }
}
```

**After this PR**

Assist not applicable
---
 .../ide-assists/src/handlers/apply_demorgan.rs   | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
index d193e8a9d8dc6..80d0a6da12434 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -3,7 +3,7 @@ use std::collections::VecDeque;
 use ide_db::{
     assists::GroupLabel,
     famous_defs::FamousDefs,
-    syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+    syntax_helpers::node_ext::{for_each_tail_expr, is_pattern_cond, walk_expr},
 };
 use syntax::{
     NodeOrToken, SyntaxKind, T,
@@ -69,6 +69,10 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
         }
     }
 
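+    // Conditions containing `let` patterns (e.g. `if let p = e && cond`) cannot
+    // be negated, so De Morgan's law is not applicable here.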
+    if is_pattern_cond(bin_expr.clone().into()) {
+        return None;
+    }
+
     let op = bin_expr.op_kind()?;
     let (inv_token, prec) = match op {
         ast::BinaryOp::LogicOp(ast::LogicOp::And) => (SyntaxKind::PIPE2, ExprPrecedence::LOr),
@@ -375,6 +379,16 @@ fn f() { !(S <= S || S < S) }
         )
     }
 
+    #[test]
+    fn demorgan_doesnt_handles_pattern() {
+        check_assist_not_applicable(
+            apply_demorgan,
+            r#"
+fn f() { if let 1 = 1 &&$0 true { } }
+"#,
+        );
+    }
+
     #[test]
     fn demorgan_on_not() {
         check_assist(

From 43111396e36bf29344535f2394b906d689ed9b40 Mon Sep 17 00:00:00 2001
From: Manuel Drehwald 
Date: Fri, 9 Jan 2026 14:09:17 -0800
Subject: [PATCH 104/131] move initialization of omp/ol runtimes into
 global_ctor/dtor

---
 compiler/rustc_codegen_llvm/src/builder.rs    | 13 ---
 .../src/builder/gpu_offload.rs                | 91 ++++++++++++-------
 compiler/rustc_codegen_llvm/src/common.rs     |  4 +
 compiler/rustc_codegen_llvm/src/intrinsic.rs  |  5 +-
 4 files changed, 68 insertions(+), 45 deletions(-)

diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs
index 9379faf1156fc..35bf629ae81ac 100644
--- a/compiler/rustc_codegen_llvm/src/builder.rs
+++ b/compiler/rustc_codegen_llvm/src/builder.rs
@@ -188,19 +188,6 @@ impl<'a, 'll, CX: Borrow>> GenericBuilder<'a, 'll, CX> {
             load
         }
     }
-
-    fn memset(&mut self, ptr: &'ll Value, fill_byte: &'ll Value, size: &'ll Value, align: Align) {
-        unsafe {
-            llvm::LLVMRustBuildMemSet(
-                self.llbuilder,
-                ptr,
-                align.bytes() as c_uint,
-                fill_byte,
-                size,
-                false,
-            );
-        }
-    }
 }
 
 /// Empty string, to be used where LLVM expects an instruction name, indicating
diff --git a/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs b/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
index f1735b9a0f586..0cf4c1d4f8c78 100644
--- a/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
+++ b/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
@@ -19,8 +19,6 @@ pub(crate) struct OffloadGlobals<'ll> {
     pub launcher_fn: &'ll llvm::Value,
     pub launcher_ty: &'ll llvm::Type,
 
-    pub bin_desc: &'ll llvm::Type,
-
     pub kernel_args_ty: &'ll llvm::Type,
 
     pub offload_entry_ty: &'ll llvm::Type,
@@ -31,8 +29,6 @@ pub(crate) struct OffloadGlobals<'ll> {
 
     pub ident_t_global: &'ll llvm::Value,
 
-    pub register_lib: &'ll llvm::Value,
-    pub unregister_lib: &'ll llvm::Value,
     pub init_rtls: &'ll llvm::Value,
 }
 
@@ -44,15 +40,6 @@ impl<'ll> OffloadGlobals<'ll> {
         let (begin_mapper, _, end_mapper, mapper_fn_ty) = gen_tgt_data_mappers(cx);
         let ident_t_global = generate_at_one(cx);
 
-        let tptr = cx.type_ptr();
-        let ti32 = cx.type_i32();
-        let tgt_bin_desc_ty = vec![ti32, tptr, tptr, tptr];
-        let bin_desc = cx.type_named_struct("struct.__tgt_bin_desc");
-        cx.set_struct_body(bin_desc, &tgt_bin_desc_ty, false);
-
-        let reg_lib_decl = cx.type_func(&[cx.type_ptr()], cx.type_void());
-        let register_lib = declare_offload_fn(&cx, "__tgt_register_lib", reg_lib_decl);
-        let unregister_lib = declare_offload_fn(&cx, "__tgt_unregister_lib", reg_lib_decl);
         let init_ty = cx.type_func(&[], cx.type_void());
         let init_rtls = declare_offload_fn(cx, "__tgt_init_all_rtls", init_ty);
 
@@ -63,20 +50,77 @@ impl<'ll> OffloadGlobals<'ll> {
         OffloadGlobals {
             launcher_fn,
             launcher_ty,
-            bin_desc,
             kernel_args_ty,
             offload_entry_ty,
             begin_mapper,
             end_mapper,
             mapper_fn_ty,
             ident_t_global,
-            register_lib,
-            unregister_lib,
             init_rtls,
         }
     }
 }
 
+// We need to register offload before using it, and we should unregister it once we are done, for
+// good measure. Previously we did so before and after each individual offload intrinsic call, but
+// that comes at a performance cost. The repeated (un)register calls might also confuse the LLVM
+// ompOpt pass, which tries to move operations to a better location. The easiest solution, which we
+// copy from clang, is to have those two calls exactly once, in the global ctor/dtor section of the
+// final binary.
+pub(crate) fn register_offload<'ll>(cx: &CodegenCx<'ll, '_>) {
+    let reg_lib_decl = cx.type_func(&[cx.type_ptr()], cx.type_void());
+    let register_lib = declare_offload_fn(&cx, "__tgt_register_lib", reg_lib_decl);
+    let unregister_lib = declare_offload_fn(&cx, "__tgt_unregister_lib", reg_lib_decl);
+
+    let ptr_null = cx.const_null(cx.type_ptr());
+    let const_struct = cx.const_struct(&[cx.get_const_i32(0), ptr_null, ptr_null, ptr_null], false);
+    let omp_descriptor =
+        add_global(cx, ".omp_offloading.descriptor", const_struct, InternalLinkage);
+    // @.omp_offloading.descriptor = internal constant %__tgt_bin_desc { i32 1, ptr @.omp_offloading.device_images, ptr @__start_llvm_offload_entries, ptr @__stop_llvm_offload_entries }
+    // @.omp_offloading.descriptor = internal constant %__tgt_bin_desc { i32 0, ptr null, ptr null, ptr null }
+
+    let atexit = cx.type_func(&[cx.type_ptr()], cx.type_i32());
+    let atexit_fn = declare_offload_fn(cx, "atexit", atexit);
+
+    let desc_ty = cx.type_func(&[], cx.type_void());
+    let reg_name = ".omp_offloading.descriptor_reg";
+    let unreg_name = ".omp_offloading.descriptor_unreg";
+    let desc_reg_fn = declare_offload_fn(cx, reg_name, desc_ty);
+    let desc_unreg_fn = declare_offload_fn(cx, unreg_name, desc_ty);
+    llvm::set_linkage(desc_reg_fn, InternalLinkage);
+    llvm::set_linkage(desc_unreg_fn, InternalLinkage);
+    llvm::set_section(desc_reg_fn, c".text.startup");
+    llvm::set_section(desc_unreg_fn, c".text.startup");
+
+    // define internal void @.omp_offloading.descriptor_reg() section ".text.startup" {
+    // entry:
+    //   call void @__tgt_register_lib(ptr @.omp_offloading.descriptor)
+    //   %0 = call i32 @atexit(ptr @.omp_offloading.descriptor_unreg)
+    //   ret void
+    // }
+    let bb = Builder::append_block(cx, desc_reg_fn, "entry");
+    let mut a = Builder::build(cx, bb);
+    a.call(reg_lib_decl, None, None, register_lib, &[omp_descriptor], None, None);
+    a.call(atexit, None, None, atexit_fn, &[desc_unreg_fn], None, None);
+    a.ret_void();
+
+    // define internal void @.omp_offloading.descriptor_unreg() section ".text.startup" {
+    // entry:
+    //   call void @__tgt_unregister_lib(ptr @.omp_offloading.descriptor)
+    //   ret void
+    // }
+    let bb = Builder::append_block(cx, desc_unreg_fn, "entry");
+    let mut a = Builder::build(cx, bb);
+    a.call(reg_lib_decl, None, None, unregister_lib, &[omp_descriptor], None, None);
+    a.ret_void();
+
+    // @llvm.global_ctors = appending global [1 x { i32, ptr, ptr }] [{ i32, ptr, ptr } { i32 101, ptr @.omp_offloading.descriptor_reg, ptr null }]
+    let args = vec![cx.get_const_i32(101), desc_reg_fn, ptr_null];
+    let const_struct = cx.const_struct(&args, false);
+    let arr = cx.const_array(cx.val_ty(const_struct), &[const_struct]);
+    add_global(cx, "llvm.global_ctors", arr, AppendingLinkage);
+}
+
 pub(crate) struct OffloadKernelDims<'ll> {
     num_workgroups: &'ll Value,
     threads_per_block: &'ll Value,
@@ -487,9 +531,6 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
     let tgt_decl = offload_globals.launcher_fn;
     let tgt_target_kernel_ty = offload_globals.launcher_ty;
 
-    // %struct.__tgt_bin_desc = type { i32, ptr, ptr, ptr }
-    let tgt_bin_desc = offload_globals.bin_desc;
-
     let tgt_kernel_decl = offload_globals.kernel_args_ty;
     let begin_mapper_decl = offload_globals.begin_mapper;
     let end_mapper_decl = offload_globals.end_mapper;
@@ -513,12 +554,9 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
     }
 
     // Step 0)
-    // %struct.__tgt_bin_desc = type { i32, ptr, ptr, ptr }
-    // %6 = alloca %struct.__tgt_bin_desc, align 8
     unsafe {
         llvm::LLVMRustPositionBuilderPastAllocas(&builder.llbuilder, builder.llfn());
     }
-    let tgt_bin_desc_alloca = builder.direct_alloca(tgt_bin_desc, Align::EIGHT, "EmptyDesc");
 
     let ty = cx.type_array(cx.type_ptr(), num_args);
     // Baseptr are just the input pointer to the kernel, stored in a local alloca
@@ -536,7 +574,6 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
     unsafe {
         llvm::LLVMPositionBuilderAtEnd(&builder.llbuilder, bb);
     }
-    builder.memset(tgt_bin_desc_alloca, cx.get_const_i8(0), cx.get_const_i64(32), Align::EIGHT);
 
     // Now we allocate once per function param, a copy to be passed to one of our maps.
     let mut vals = vec![];
@@ -574,15 +611,9 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
         geps.push(gep);
     }
 
-    let mapper_fn_ty = cx.type_func(&[cx.type_ptr()], cx.type_void());
-    let register_lib_decl = offload_globals.register_lib;
-    let unregister_lib_decl = offload_globals.unregister_lib;
     let init_ty = cx.type_func(&[], cx.type_void());
     let init_rtls_decl = offload_globals.init_rtls;
 
-    // FIXME(offload): Later we want to add them to the wrapper code, rather than our main function.
-    // call void @__tgt_register_lib(ptr noundef %6)
-    builder.call(mapper_fn_ty, None, None, register_lib_decl, &[tgt_bin_desc_alloca], None, None);
     // call void @__tgt_init_all_rtls()
     builder.call(init_ty, None, None, init_rtls_decl, &[], None, None);
 
@@ -679,6 +710,4 @@ pub(crate) fn gen_call_handling<'ll, 'tcx>(
         num_args,
         s_ident_t,
     );
-
-    builder.call(mapper_fn_ty, None, None, unregister_lib_decl, &[tgt_bin_desc_alloca], None, None);
 }
diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs
index b0cf9925019d2..f2261ab79340f 100644
--- a/compiler/rustc_codegen_llvm/src/common.rs
+++ b/compiler/rustc_codegen_llvm/src/common.rs
@@ -124,6 +124,10 @@ impl<'ll, CX: Borrow>> GenericCx<'ll, CX> {
     pub(crate) fn const_null(&self, t: &'ll Type) -> &'ll Value {
         unsafe { llvm::LLVMConstNull(t) }
     }
+
+    pub(crate) fn const_struct(&self, elts: &[&'ll Value], packed: bool) -> &'ll Value {
+        struct_in_context(self.llcx(), elts, packed)
+    }
 }
 
 impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs
index 20eac4cf92c20..97bc929dff32a 100644
--- a/compiler/rustc_codegen_llvm/src/intrinsic.rs
+++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs
@@ -30,7 +30,9 @@ use tracing::debug;
 use crate::abi::FnAbiLlvmExt;
 use crate::builder::Builder;
 use crate::builder::autodiff::{adjust_activity_to_abi, generate_enzyme_call};
-use crate::builder::gpu_offload::{OffloadKernelDims, gen_call_handling, gen_define_handling};
+use crate::builder::gpu_offload::{
+    OffloadKernelDims, gen_call_handling, gen_define_handling, register_offload,
+};
 use crate::context::CodegenCx;
 use crate::declare::declare_raw_fn;
 use crate::errors::{
@@ -1410,6 +1412,7 @@ fn codegen_offload<'ll, 'tcx>(
             return;
         }
     };
+    register_offload(cx);
     let offload_data = gen_define_handling(&cx, &metadata, target_symbol, offload_globals);
     gen_call_handling(bx, &offload_data, &args, &types, &metadata, offload_globals, &offload_dims);
 }

From 8e937d4f4a105d01603e0e2d9b8a94ebce9f1079 Mon Sep 17 00:00:00 2001
From: Roberto Aloi 
Date: Wed, 21 Jan 2026 10:52:07 +0100
Subject: [PATCH 105/131] Bump notify from 8.0.0 to 8.2.0

---
 src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
index bd6c8331e66c2..ce7ea53b53734 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
@@ -16,7 +16,7 @@ doctest = false
 tracing.workspace = true
 walkdir = "2.5.0"
 crossbeam-channel.workspace = true
-notify = "8.0.0"
+notify = "8.2.0"
 rayon = "1.10.0"
 
 stdx.workspace = true

From 6753155bd67536b5469113e957adb93eff75dd27 Mon Sep 17 00:00:00 2001
From: The rustc-josh-sync Cronjob Bot 
Date: Thu, 22 Jan 2026 04:25:38 +0000
Subject: [PATCH 106/131] Prepare for merging from rust-lang/rust

This updates the rust-version file to 004d710faff53f8764a1cf69d87a5a5963850b60.
---
 src/tools/rust-analyzer/rust-version | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version
index a6ccd9bab3930..1fe86330b4a8e 100644
--- a/src/tools/rust-analyzer/rust-version
+++ b/src/tools/rust-analyzer/rust-version
@@ -1 +1 @@
-b6fdaf2a15736cbccf248b532f48e33179614d40
+004d710faff53f8764a1cf69d87a5a5963850b60

From 4e9e37ee5889dbcfd8af30f6693adfe1c61c6f95 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 23 Jan 2026 11:57:14 +0100
Subject: [PATCH 107/131] internal: Add tests for rust-lang/rust#146972

---
 .../crates/hir-def/src/nameres/tests.rs       |  2 +-
 .../hir-def/src/nameres/tests/imports.rs      | 63 +++++++++++++++++++
 .../hir-def/src/nameres/tests/primitives.rs   | 23 -------
 3 files changed, 64 insertions(+), 24 deletions(-)
 create mode 100644 src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/imports.rs
 delete mode 100644 src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs

diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
index 23d60d58f085c..fe55252e25404 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
@@ -1,8 +1,8 @@
 mod globs;
+mod imports;
 mod incremental;
 mod macros;
 mod mod_resolution;
-mod primitives;
 
 use base_db::RootQueryDb;
 use expect_test::{Expect, expect};
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/imports.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/imports.rs
new file mode 100644
index 0000000000000..b1960b785a836
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/imports.rs
@@ -0,0 +1,63 @@
+use super::*;
+
+#[test]
+fn kw_path_renames() {
+    check(
+        r#"
+macro_rules! m {
+    () => {
+        pub use $crate as dollar_crate;
+        pub use $crate::{self as self_dollar_crate};
+    };
+}
+
+pub use self as this;
+pub use crate as krate;
+
+pub use crate::{self as self_krate};
+m!();
+
+mod foo {
+    pub use super as zuper;
+    pub use super::{self as self_zuper};
+}
+"#,
+        expect![[r#"
+            crate
+            - dollar_crate : type (import)
+            - foo : type
+            - krate : type (import)
+            - self_dollar_crate : type (import)
+            - self_krate : type (import)
+            - this : type (import)
+            - (legacy) m : macro!
+
+            crate::foo
+            - self_zuper : type (import)
+            - zuper : type (import)
+            - (legacy) m : macro!
+        "#]],
+    );
+}
+
+#[test]
+fn primitive_reexport() {
+    check(
+        r#"
+//- /lib.rs
+mod foo;
+use foo::int;
+
+//- /foo.rs
+pub use i32 as int;
+"#,
+        expect![[r#"
+            crate
+            - foo : type
+            - int : type (import)
+
+            crate::foo
+            - int : type (import)
+        "#]],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs
deleted file mode 100644
index 861690238d475..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-use super::*;
-
-#[test]
-fn primitive_reexport() {
-    check(
-        r#"
-//- /lib.rs
-mod foo;
-use foo::int;
-
-//- /foo.rs
-pub use i32 as int;
-"#,
-        expect![[r#"
-            crate
-            - foo : type
-            - int : type (import)
-
-            crate::foo
-            - int : type (import)
-        "#]],
-    );
-}

From bd91c887b5873e5fbc94f3b492c1af3355d10fe9 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Sat, 24 Jan 2026 11:39:14 +0800
Subject: [PATCH 108/131] Improve extract_function name

If the name contains `_`, it is likely to be descriptive

Example
---
```rust
fn foo(kind: i32) {
    let is_complex = $0kind != 0$0;
}
```

**Before this PR**

```rust
fn foo(kind: i32) {
    let is_complex = fun_name(kind);
}

fn fun_name(kind: i32) -> bool {
    kind != 0
}
```

**After this PR**

```rust
fn foo(kind: i32) {
    let is_complex = is_complex(kind);
}

fn is_complex(kind: i32) -> bool {
    kind != 0
}
```
---
 .../src/handlers/extract_function.rs          | 26 +++++++++++++++----
 1 file changed, 21 insertions(+), 5 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
index 294e5f7da8b3c..2230c391cbade 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -120,7 +120,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 
             let params = body.extracted_function_params(ctx, &container_info, locals_used);
 
-            let name = make_function_name(&semantics_scope);
+            let name = make_function_name(&semantics_scope, &body);
 
             let fun = Function {
                 name,
@@ -241,7 +241,10 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
     )
 }
 
-fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef {
+fn make_function_name(
+    semantics_scope: &hir::SemanticsScope<'_>,
+    body: &FunctionBody,
+) -> ast::NameRef {
     let mut names_in_scope = vec![];
     semantics_scope.process_all_names(&mut |name, _| {
         names_in_scope.push(
@@ -252,7 +255,10 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef
 
     let default_name = "fun_name";
 
-    let mut name = default_name.to_owned();
+    let mut name = body
+        .suggest_name()
+        .filter(|name| name.contains('_'))
+        .unwrap_or_else(|| default_name.to_owned());
     let mut counter = 0;
     while names_in_scope.contains(&name) {
         counter += 1;
@@ -779,6 +785,16 @@ impl FunctionBody {
     fn contains_node(&self, node: &SyntaxNode) -> bool {
         self.contains_range(node.text_range())
     }
+
+    fn suggest_name(&self) -> Option<String> {
+        if let Some(ast::Pat::IdentPat(pat)) = self.parent().and_then(ast::LetStmt::cast)?.pat()
+            && let Some(name) = pat.name().and_then(|it| it.ident_token())
+        {
+            Some(name.text().to_owned())
+        } else {
+            None
+        }
+    }
 }
 
 impl FunctionBody {
@@ -5430,12 +5446,12 @@ impl Struct {
 
 impl Trait for Struct {
     fn bar(&self) -> i32 {
-        let three_squared = fun_name();
+        let three_squared = three_squared();
         self.0 + three_squared
     }
 }
 
-fn $0fun_name() -> i32 {
+fn $0three_squared() -> i32 {
     3 * 3
 }
 "#,

From 3f3959310695b6bb7b55d980ddb3ff8fcdbdecba Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Sat, 24 Jan 2026 12:42:33 +0800
Subject: [PATCH 109/131] fix: Fix incorrect continue for
 convert_range_for_to_while

Example
---
```rust
fn foo() {
    $0for mut i in 3..7 {
        foo(i);
        continue;
        bar(i);
    }
}
```

**Before this PR**

This may cause an infinite loop

```rust
fn foo() {
    let mut i = 3;
    while i < 7 {
        foo(i);
        continue;
        bar(i);
        i += 1;
    }
}
```

**After this PR**

```rust
fn foo() {
    let mut i = 3;
    while i < 7 {
        'cont: {
            foo(i);
            break 'cont;
            bar(i);
        }
        i += 1;
    }
}
```
---
 .../handlers/convert_range_for_to_while.rs    | 157 +++++++++++++++++-
 1 file changed, 151 insertions(+), 6 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_range_for_to_while.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_range_for_to_while.rs
index ba577b217df78..2e649f14be26e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_range_for_to_while.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_range_for_to_while.rs
@@ -1,13 +1,15 @@
 use ide_db::assists::AssistId;
 use itertools::Itertools;
 use syntax::{
-    AstNode, T,
+    AstNode, SyntaxElement,
+    SyntaxKind::WHITESPACE,
+    T,
     algo::previous_non_trivia_token,
     ast::{
         self, HasArgList, HasLoopBody, HasName, RangeItem, edit::AstNodeEdit, make,
         syntax_factory::SyntaxFactory,
     },
-    syntax_editor::{Element, Position},
+    syntax_editor::{Element, Position, SyntaxEditor},
 };
 
 use crate::assist_context::{AssistContext, Assists};
@@ -40,8 +42,8 @@ pub(crate) fn convert_range_for_to_while(acc: &mut Assists, ctx: &AssistContext<
     let iterable = for_.iterable()?;
     let (start, end, step, inclusive) = extract_range(&iterable)?;
     let name = pat.name()?;
-    let body = for_.loop_body()?;
-    let last = previous_non_trivia_token(body.stmt_list()?.r_curly_token()?)?;
+    let body = for_.loop_body()?.stmt_list()?;
+    let label = for_.label();
 
     let description = if end.is_some() {
         "Replace with while expression"
@@ -90,8 +92,10 @@ pub(crate) fn convert_range_for_to_while(acc: &mut Assists, ctx: &AssistContext<
             );
 
             let op = ast::BinaryOp::Assignment { op: Some(ast::ArithOp::Add) };
-            edit.insert_all(
-                Position::after(last),
+            process_loop_body(
+                body,
+                label,
+                &mut edit,
                 vec![
                     make.whitespace(&format!("\n{}", indent + 1)).syntax_element(),
                     make.expr_bin(var_expr, op, step).syntax().syntax_element(),
@@ -121,6 +125,86 @@ fn extract_range(iterable: &ast::Expr) -> Option<(ast::Expr, Option,
     })
 }
 
+fn process_loop_body(
+    body: ast::StmtList,
+    label: Option<ast::Label>,
+    edit: &mut SyntaxEditor,
+    incrementer: Vec<SyntaxElement>,
+) -> Option<()> {
+    let last = previous_non_trivia_token(body.r_curly_token()?)?.syntax_element();
+
+    let new_body = body.indent(1.into()).clone_subtree();
+    let mut continues = vec![];
+    collect_continue_to(
+        &mut continues,
+        &label.and_then(|it| it.lifetime()),
+        new_body.syntax(),
+        false,
+    );
+
+    if continues.is_empty() {
+        edit.insert_all(Position::after(last), incrementer);
+        return Some(());
+    }
+
+    let mut children = body
+        .syntax()
+        .children_with_tokens()
+        .filter(|it| !matches!(it.kind(), WHITESPACE | T!['{'] | T!['}']));
+    let first = children.next()?;
+    let block_content = first.clone()..=children.last().unwrap_or(first);
+
+    let continue_label = make::lifetime("'cont");
+    let break_expr = make::expr_break(Some(continue_label.clone()), None).clone_for_update();
+    let mut new_edit = SyntaxEditor::new(new_body.syntax().clone());
+    for continue_expr in &continues {
+        new_edit.replace(continue_expr.syntax(), break_expr.syntax());
+    }
+    let new_body = new_edit.finish().new_root().clone();
+    let elements = itertools::chain(
+        [
+            continue_label.syntax().clone_for_update().syntax_element(),
+            make::token(T![:]).syntax_element(),
+            make::tokens::single_space().syntax_element(),
+            new_body.syntax_element(),
+        ],
+        incrementer,
+    );
+    edit.replace_all(block_content, elements.collect());
+
+    Some(())
+}
+
+fn collect_continue_to(
+    acc: &mut Vec<ast::ContinueExpr>,
+    label: &Option<ast::Lifetime>,
+    node: &syntax::SyntaxNode,
+    only_label: bool,
+) {
+    let match_label = |it: &Option<ast::Lifetime>, label: &Option<ast::Lifetime>| match (it, label)
+    {
+        (None, _) => !only_label,
+        (Some(a), Some(b)) if a.text() == b.text() => true,
+        _ => false,
+    };
+    if let Some(expr) = ast::ContinueExpr::cast(node.clone())
+        && match_label(&expr.lifetime(), label)
+    {
+        acc.push(expr);
+    } else if let Some(any_loop) = ast::AnyHasLoopBody::cast(node.clone()) {
+        if match_label(label, &any_loop.label().and_then(|it| it.lifetime())) {
+            return;
+        }
+        for children in node.children() {
+            collect_continue_to(acc, label, &children, true);
+        }
+    } else {
+        for children in node.children() {
+            collect_continue_to(acc, label, &children, only_label);
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use crate::tests::{check_assist, check_assist_not_applicable};
@@ -219,6 +303,67 @@ fn foo() {
         );
     }
 
+    #[test]
+    fn test_convert_range_for_to_while_with_continue() {
+        check_assist(
+            convert_range_for_to_while,
+            "
+fn foo() {
+    $0for mut i in 3..7 {
+        foo(i);
+        continue;
+        loop { break; continue }
+        bar(i);
+    }
+}
+            ",
+            "
+fn foo() {
+    let mut i = 3;
+    while i < 7 {
+        'cont: {
+            foo(i);
+            break 'cont;
+            loop { break; continue }
+            bar(i);
+        }
+        i += 1;
+    }
+}
+            ",
+        );
+
+        check_assist(
+            convert_range_for_to_while,
+            "
+fn foo() {
+    'x: $0for mut i in 3..7 {
+        foo(i);
+        continue 'x;
+        loop { break; continue 'x }
+        'x: loop { continue 'x }
+        bar(i);
+    }
+}
+            ",
+            "
+fn foo() {
+    let mut i = 3;
+    'x: while i < 7 {
+        'cont: {
+            foo(i);
+            break 'cont;
+            loop { break; break 'cont }
+            'x: loop { continue 'x }
+            bar(i);
+        }
+        i += 1;
+    }
+}
+            ",
+        );
+    }
+
     #[test]
     fn test_convert_range_for_to_while_step_by() {
         check_assist(

From cb603ad5906baa50bf4e1410a7e6a13ba5da2051 Mon Sep 17 00:00:00 2001
From: Pavan Kumar Sunkara 
Date: Sun, 25 Jan 2026 06:37:15 +0530
Subject: [PATCH 110/131] internal: Use parser expect where possible

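For context, `p.expect(kind)` is assumed to behave like `p.eat(kind)` plus an automatic
error, which is why the expected-token errors in the `.rast` files now name the
`SyntaxKind` (`L_BRACK`, `R_BRACK`) instead of quoting the token text. A rough sketch,
not the actual parser source:

```rust
// Hypothetical shape of the helper, for illustration only.
fn expect(p: &mut Parser<'_>, kind: SyntaxKind) -> bool {
    if p.eat(kind) {
        return true; // token was present and has been consumed
    }
    p.error(format!("expected {kind:?}")); // reports the SyntaxKind name
    false
}
```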
---
 .../crates/parser/src/grammar/attributes.rs            | 10 +++-------
 .../test_data/parser/err/0002_duplicate_shebang.rast   |  2 +-
 .../test_data/parser/err/0005_attribute_recover.rast   |  2 +-
 .../parser/err/0032_match_arms_inner_attrs.rast        |  6 +++---
 4 files changed, 8 insertions(+), 12 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
index ccb556b2ccacb..54b5c8a275a8e 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
@@ -24,15 +24,11 @@ fn attr(p: &mut Parser<'_>, inner: bool) {
         p.bump(T![!]);
     }
 
-    if p.eat(T!['[']) {
+    if p.expect(T!['[']) {
         meta(p);
-
-        if !p.eat(T![']']) {
-            p.error("expected `]`");
-        }
-    } else {
-        p.error("expected `[`");
+        p.expect(T![']']);
     }
+
     attr.complete(p, ATTR);
 }
 
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
index 7ee1ecfbb1591..60cc690f7c986 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
@@ -28,7 +28,7 @@ SOURCE_FILE
         NAME_REF
           IDENT "rusti"
   WHITESPACE "\n"
-error 23: expected `[`
+error 23: expected L_BRACK
 error 23: expected an item
 error 27: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
 error 28: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast
index 6ff072e207cda..77b4d06321d5a 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast
@@ -58,5 +58,5 @@ SOURCE_FILE
           R_CURLY "}"
   WHITESPACE "\n"
 error 53: expected R_PAREN
-error 53: expected `]`
+error 53: expected R_BRACK
 error 53: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
index 327bf94a49e63..b657e9834156a 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
@@ -192,14 +192,14 @@ SOURCE_FILE
         WHITESPACE "\n"
         R_CURLY "}"
   WHITESPACE "\n"
-error 52: expected `[`
+error 52: expected L_BRACK
 error 52: expected pattern
 error 53: expected FAT_ARROW
 error 78: expected `,`
-error 161: expected `[`
+error 161: expected L_BRACK
 error 161: expected pattern
 error 162: expected FAT_ARROW
-error 232: expected `[`
+error 232: expected L_BRACK
 error 232: expected pattern
 error 233: expected FAT_ARROW
 error 250: expected `,`

From 2a3614b5557b89da3d92b17c3497ac6e954360b8 Mon Sep 17 00:00:00 2001
From: bit-aloo 
Date: Mon, 26 Jan 2026 05:44:04 +0530
Subject: [PATCH 111/131] correct ungrammar path in patch

---
 src/tools/rust-analyzer/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index 04b513b38b582..2288933a96ccb 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -42,7 +42,7 @@ debug = 2
 # lsp-server = { path = "lib/lsp-server" }
 
 
-# ungrammar = { path = "lin/ungrammar" }
+# ungrammar = { path = "lib/ungrammar" }
 
 # salsa = { path = "../salsa" }
 # salsa-macros = { path = "../salsa/components/salsa-macros" }

From 9fd291ed7ee4d1313cfdaa5a5651c77b0a57fd33 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Mon, 19 Jan 2026 20:09:30 +0800
Subject: [PATCH 112/131] Fix `else` not being completed before a tuple

Example
---
```rust
fn foo() -> (i32, i32) {
    if foo {} el$0
    (2, 3)
}
```

**Before this PR**

```rust
...
kw crate::
kw false
kw for
...
```

**After this PR**

```rust
...
kw crate::
kw else
kw else if
kw false
kw for
...
```
---
 .../ide-completion/src/context/analysis.rs    |  7 ++---
 .../ide-completion/src/tests/expression.rs    | 26 +++++++++++++++++++
 2 files changed, 30 insertions(+), 3 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index 0db93b0837cda..8842d29c8d905 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -2030,9 +2030,10 @@ fn is_after_if_expr(node: SyntaxNode) -> bool {
         Some(stmt) => stmt.syntax().clone(),
         None => node,
     };
-    let prev_sibling =
-        non_trivia_sibling(node.into(), Direction::Prev).and_then(NodeOrToken::into_node);
-    iter::successors(prev_sibling, |it| it.last_child_or_token()?.into_node())
+    let Some(prev_token) = previous_non_trivia_token(node) else { return false };
+    prev_token
+        .parent_ancestors()
+        .take_while(|it| it.text_range().end() == prev_token.text_range().end())
         .find_map(ast::IfExpr::cast)
         .is_some()
 }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
index ff005a29218b4..df39591a33460 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -2182,6 +2182,32 @@ fn foo() { match () { () => if foo {} $0, _ => (), } }
             kw ref
         "#]],
     );
+    check(
+        r#"
+fn foo() -> (i32, i32) { if foo {} el$0 (2, 3) }
+"#,
+        expect![[r#"
+            fn foo fn() -> (i32, i32)
+            bt u32                u32
+            kw const
+            kw crate::
+            kw else
+            kw else if
+            kw false
+            kw for
+            kw if
+            kw if let
+            kw loop
+            kw match
+            kw return
+            kw self::
+            kw true
+            kw unsafe
+            kw while
+            kw while let
+            ex foo()
+        "#]],
+    );
     // FIXME: support else completion after ast::RecordExprField
 }
 

From 2f49df3140280f1cdcaeb653c3fb9774de080f19 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Mon, 26 Jan 2026 15:41:43 +0800
Subject: [PATCH 113/131] Improve extract_function name filter predicate to a length condition

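With this change, a binding name is only reused for the extracted function when it is
longer than two characters; a hypothetical example (not taken from the test suite):

```rust
fn foo(a: i32, b: i32) {
    let x = $0a + b$0;   // "x" fails the length check, the function stays `fun_name`
    let sum = $0a * b$0; // "sum" passes the length check and names the new function
}
```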
---
 .../crates/ide-assists/src/handlers/extract_function.rs         | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
index 2230c391cbade..f2363c6f7ba27 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -257,7 +257,7 @@ fn make_function_name(
 
     let mut name = body
         .suggest_name()
-        .filter(|name| name.contains('_'))
+        .filter(|name| name.len() > 2)
         .unwrap_or_else(|| default_name.to_owned());
     let mut counter = 0;
     while names_in_scope.contains(&name) {

From 38bb09eeff34272c36dfbaeeb31e62f9e0674698 Mon Sep 17 00:00:00 2001
From: Edwin Cheng 
Date: Mon, 26 Jan 2026 16:45:48 +0800
Subject: [PATCH 114/131] Fix rust-src installation command in FAQ

Correct the command to install rust-src in the FAQ.
---
 src/tools/rust-analyzer/docs/book/src/faq.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/docs/book/src/faq.md b/src/tools/rust-analyzer/docs/book/src/faq.md
index 8c143ab949357..9eeb2ae555390 100644
--- a/src/tools/rust-analyzer/docs/book/src/faq.md
+++ b/src/tools/rust-analyzer/docs/book/src/faq.md
@@ -4,7 +4,7 @@
 
 rust-analyzer fails to resolve `None`, and thinks you are binding to a variable
 named `None`. That's usually a sign of a corrupted sysroot. Try removing and re-installing
-it: `rustup component remove rust-src` then `rustup component install rust-src`.
+it: `rustup component remove rust-src` then `rustup component add rust-src`.
 
 ### Rust Analyzer and Cargo compete over the build lock
 

From b75c58d4b3495021b414778d8b91c12285d3d7ee Mon Sep 17 00:00:00 2001
From: kouhe <25522053+kouhe3@users.noreply.github.com>
Date: Mon, 5 Jan 2026 19:10:34 +0800
Subject: [PATCH 115/131] Implement default field values `..` syntax

- Added `RecordSpread` enum to distinguish between no spread, field defaults, and spread expressions
- Updated `FieldData` to include `default_value` field
- Modified record literal lowering to handle default field values
- Updated diagnostics to check for missing fields considering defaults
- Added methods to get matched fields for records for completions
- Enhanced hover support for struct rest patterns
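For reference, a minimal sketch of the syntax this targets (the unstable
`default_field_values` feature; the example is illustrative and not taken from the patch):

```rust
#![feature(default_field_values)]

struct Config {
    width: u32 = 80,       // field with a declared default
    verbose: bool = false, // field with a declared default
    name: String,          // no default, must always be written
}

fn make() -> Config {
    // A bare `..` (RecordSpread::FieldDefaults) fills the remaining fields from their
    // declared defaults, so only `name` is required here and no missing-fields
    // diagnostic should fire.
    Config { name: String::from("demo"), .. }
}
```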
---
 .../crates/hir-def/src/expr_store.rs          |  10 +-
 .../crates/hir-def/src/expr_store/lower.rs    |  14 ++-
 .../src/expr_store/lower/format_args.rs       |   5 +-
 .../crates/hir-def/src/expr_store/pretty.rs   |  20 ++-
 .../rust-analyzer/crates/hir-def/src/hir.rs   |   9 +-
 .../crates/hir-def/src/signatures.rs          |  12 +-
 .../crates/hir-ty/src/diagnostics/expr.rs     |  46 +++++--
 .../hir-ty/src/infer/closure/analysis.rs      |   4 +-
 .../crates/hir-ty/src/infer/expr.rs           |   6 +-
 .../crates/hir-ty/src/infer/mutability.rs     |  10 +-
 .../crates/hir-ty/src/mir/lower.rs            |  11 +-
 .../rust-analyzer/crates/hir/src/semantics.rs |  18 +++
 .../crates/hir/src/source_analyzer.rs         | 118 ++++++++++++++++--
 .../src/handlers/expand_rest_pattern.rs       |   6 +-
 .../ide-completion/src/completions/expr.rs    |   2 +-
 .../ide-completion/src/completions/record.rs  |  14 +--
 .../crates/ide-completion/src/tests/record.rs |  40 +++++-
 .../src/handlers/missing_fields.rs            |  77 ++++++++++++
 .../crates/ide/src/hover/render.rs            |   8 +-
 19 files changed, 355 insertions(+), 75 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs
index 10cd460d1d36b..edbfd42d1314e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs
@@ -32,7 +32,7 @@ use crate::{
     expr_store::path::Path,
     hir::{
         Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, LabelId, Pat,
-        PatId, RecordFieldPat, Statement,
+        PatId, RecordFieldPat, RecordSpread, Statement,
     },
     nameres::{DefMap, block_def_map},
     type_ref::{LifetimeRef, LifetimeRefId, PathId, TypeRef, TypeRefId},
@@ -575,8 +575,8 @@ impl ExpressionStore {
                 for field in fields.iter() {
                     f(field.expr);
                 }
-                if let &Some(expr) = spread {
-                    f(expr);
+                if let RecordSpread::Expr(expr) = spread {
+                    f(*expr);
                 }
             }
             Expr::Closure { body, .. } => {
@@ -706,8 +706,8 @@ impl ExpressionStore {
                 for field in fields.iter() {
                     f(field.expr);
                 }
-                if let &Some(expr) = spread {
-                    f(expr);
+                if let RecordSpread::Expr(expr) = spread {
+                    f(*expr);
                 }
             }
             Expr::Closure { body, .. } => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
index 79222615929fa..4fbf6d951779e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
@@ -47,7 +47,7 @@ use crate::{
     hir::{
         Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
         Expr, ExprId, Item, Label, LabelId, Literal, MatchArm, Movability, OffsetOf, Pat, PatId,
-        RecordFieldPat, RecordLitField, Statement, generics::GenericParams,
+        RecordFieldPat, RecordLitField, RecordSpread, Statement, generics::GenericParams,
     },
     item_scope::BuiltinShadowMode,
     item_tree::FieldsShape,
@@ -1266,10 +1266,16 @@ impl<'db> ExprCollector<'db> {
                             Some(RecordLitField { name, expr })
                         })
                         .collect();
-                    let spread = nfl.spread().map(|s| self.collect_expr(s));
+                    let spread_expr = nfl.spread().map(|s| self.collect_expr(s));
+                    let has_spread_syntax = nfl.dotdot_token().is_some();
+                    let spread = match (spread_expr, has_spread_syntax) {
+                        (None, false) => RecordSpread::None,
+                        (None, true) => RecordSpread::FieldDefaults,
+                        (Some(expr), _) => RecordSpread::Expr(expr),
+                    };
                     Expr::RecordLit { path, fields, spread }
                 } else {
-                    Expr::RecordLit { path, fields: Box::default(), spread: None }
+                    Expr::RecordLit { path, fields: Box::default(), spread: RecordSpread::None }
                 };
 
                 self.alloc_expr(record_lit, syntax_ptr)
@@ -1995,7 +2001,7 @@ impl<'db> ExprCollector<'db> {
         }
     }
 
-    fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
+    pub fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
         match expr {
             Some(expr) => self.collect_expr(expr),
             None => self.missing_expr(),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/format_args.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/format_args.rs
index 7ef84f27f6641..51616afb3892c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/format_args.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/format_args.rs
@@ -10,7 +10,8 @@ use crate::{
     builtin_type::BuiltinUint,
     expr_store::{HygieneId, lower::ExprCollector, path::Path},
     hir::{
-        Array, BindingAnnotation, Expr, ExprId, Literal, Pat, RecordLitField, Statement,
+        Array, BindingAnnotation, Expr, ExprId, Literal, Pat, RecordLitField, RecordSpread,
+        Statement,
         format_args::{
             self, FormatAlignment, FormatArgs, FormatArgsPiece, FormatArgument, FormatArgumentKind,
             FormatArgumentsCollector, FormatCount, FormatDebugHex, FormatOptions,
@@ -869,7 +870,7 @@ impl<'db> ExprCollector<'db> {
             self.alloc_expr_desugared(Expr::RecordLit {
                 path: self.lang_path(lang_items.FormatPlaceholder).map(Box::new),
                 fields: Box::new([position, flags, precision, width]),
-                spread: None,
+                spread: RecordSpread::None,
             })
         } else {
             let format_placeholder_new =
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
index f5ef8e1a35953..35f3cd114e36d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
@@ -16,7 +16,8 @@ use crate::{
     attrs::AttrFlags,
     expr_store::path::{GenericArg, GenericArgs},
     hir::{
-        Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
+        Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, RecordSpread,
+        Statement,
         generics::{GenericParams, WherePredicate},
     },
     lang_item::LangItemTarget,
@@ -139,7 +140,7 @@ pub fn print_variant_body_hir(db: &dyn DefDatabase, owner: VariantId, edition: E
     }
 
     for (_, data) in fields.fields().iter() {
-        let FieldData { name, type_ref, visibility, is_unsafe } = data;
+        let FieldData { name, type_ref, visibility, is_unsafe, default_value: _ } = data;
         match visibility {
             crate::item_tree::RawVisibility::Module(interned, _visibility_explicitness) => {
                 w!(p, "pub(in {})", interned.display(db, p.edition))
@@ -679,10 +680,17 @@ impl Printer<'_> {
                         p.print_expr(field.expr);
                         wln!(p, ",");
                     }
-                    if let Some(spread) = spread {
-                        w!(p, "..");
-                        p.print_expr(*spread);
-                        wln!(p);
+                    match spread {
+                        RecordSpread::None => {}
+                        RecordSpread::FieldDefaults => {
+                            w!(p, "..");
+                            wln!(p);
+                        }
+                        RecordSpread::Expr(spread_expr) => {
+                            w!(p, "..");
+                            p.print_expr(*spread_expr);
+                            wln!(p);
+                        }
                     }
                 });
                 w!(self, "}}");
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
index 53be0de7d9c31..7781a8fe54ee0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
@@ -187,6 +187,13 @@ impl From for Literal {
     }
 }
 
+#[derive(Debug, Clone, Eq, PartialEq, Copy)]
+pub enum RecordSpread {
+    None,
+    FieldDefaults,
+    Expr(ExprId),
+}
+
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum Expr {
     /// This is produced if the syntax tree does not have a required expression piece.
@@ -259,7 +266,7 @@ pub enum Expr {
     RecordLit {
         path: Option<Box<Path>>,
         fields: Box<[RecordLitField]>,
-        spread: Option<ExprId>,
+        spread: RecordSpread,
     },
     Field {
         expr: ExprId,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
index 0dd88edbfb087..37c8f762fe5d0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
@@ -12,7 +12,7 @@ use intern::{Symbol, sym};
 use la_arena::{Arena, Idx};
 use rustc_abi::{IntegerType, ReprOptions};
 use syntax::{
-    NodeOrToken, SyntaxNodePtr, T,
+    AstNode, NodeOrToken, SyntaxNodePtr, T,
     ast::{self, HasGenericParams, HasName, HasVisibility, IsString},
 };
 use thin_vec::ThinVec;
@@ -754,6 +754,7 @@ pub struct FieldData {
     pub type_ref: TypeRefId,
     pub visibility: RawVisibility,
     pub is_unsafe: bool,
+    pub default_value: Option<ExprId>,
 }
 
 pub type LocalFieldId = Idx<FieldData>;
@@ -903,7 +904,14 @@ fn lower_fields(
                     .filter_map(NodeOrToken::into_token)
                     .any(|token| token.kind() == T![unsafe]);
                 let name = field_name(idx, &field);
-                arena.alloc(FieldData { name, type_ref, visibility, is_unsafe });
+
+                // Check if field has default value (only for record fields)
+                let default_value = ast::RecordField::cast(field.syntax().clone())
+                    .and_then(|rf| rf.eq_token().is_some().then_some(rf.expr()))
+                    .flatten()
+                    .map(|expr| col.collect_expr_opt(Some(expr)));
+
+                arena.alloc(FieldData { name, type_ref, visibility, is_unsafe, default_value });
                 idx += 1;
             }
             Err(cfg) => {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
index dd1fc3b36ef8d..4e1bb6f4c533a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -41,7 +41,7 @@ use crate::{
 pub(crate) use hir_def::{
     LocalFieldId, VariantId,
     expr_store::Body,
-    hir::{Expr, ExprId, MatchArm, Pat, PatId, Statement},
+    hir::{Expr, ExprId, MatchArm, Pat, PatId, RecordSpread, Statement},
 };
 
 pub enum BodyValidationDiagnostic {
@@ -123,7 +123,7 @@ impl<'db> ExprValidator<'db> {
         }
 
         for (id, expr) in body.exprs() {
-            if let Some((variant, missed_fields, true)) =
+            if let Some((variant, missed_fields)) =
                 record_literal_missing_fields(db, self.infer, id, expr)
             {
                 self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
@@ -154,7 +154,7 @@ impl<'db> ExprValidator<'db> {
         }
 
         for (id, pat) in body.pats() {
-            if let Some((variant, missed_fields, true)) =
+            if let Some((variant, missed_fields)) =
                 record_pattern_missing_fields(db, self.infer, id, pat)
             {
                 self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
@@ -557,9 +557,9 @@ pub fn record_literal_missing_fields(
     infer: &InferenceResult,
     id: ExprId,
     expr: &Expr,
-) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
-    let (fields, exhaustive) = match expr {
-        Expr::RecordLit { fields, spread, .. } => (fields, spread.is_none()),
+) -> Option<(VariantId, Vec<LocalFieldId>)> {
+    let (fields, spread) = match expr {
+        Expr::RecordLit { fields, spread, .. } => (fields, spread),
         _ => return None,
     };
 
@@ -571,15 +571,28 @@ pub fn record_literal_missing_fields(
     let variant_data = variant_def.fields(db);
 
     let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+    // Don't report a field as missing if:
+    // - the literal has a `..expr` spread, or
+    // - the field has a default value and the literal uses a bare `..`, or
+    // - the field is already written in the literal.
     let missed_fields: Vec<LocalFieldId> = variant_data
         .fields()
         .iter()
-        .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+        .filter_map(|(f, d)| {
+            if specified_fields.contains(&d.name)
+                || matches!(spread, RecordSpread::Expr(_))
+                || (d.default_value.is_some() && matches!(spread, RecordSpread::FieldDefaults))
+            {
+                None
+            } else {
+                Some(f)
+            }
+        })
         .collect();
     if missed_fields.is_empty() {
         return None;
     }
-    Some((variant_def, missed_fields, exhaustive))
+    Some((variant_def, missed_fields))
 }
 
 pub fn record_pattern_missing_fields(
@@ -587,9 +600,9 @@ pub fn record_pattern_missing_fields(
     infer: &InferenceResult,
     id: PatId,
     pat: &Pat,
-) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
-    let (fields, exhaustive) = match pat {
-        Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
+) -> Option<(VariantId, Vec<LocalFieldId>)> {
+    let (fields, ellipsis) = match pat {
+        Pat::Record { path: _, args, ellipsis } => (args, *ellipsis),
         _ => return None,
     };
 
@@ -601,15 +614,22 @@ pub fn record_pattern_missing_fields(
     let variant_data = variant_def.fields(db);
 
     let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+    // Don't report a field as missing if:
+    // - the field is already written in the pattern, or
+    // - the pattern ends with `..`.
     let missed_fields: Vec<LocalFieldId> = variant_data
         .fields()
         .iter()
-        .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+        .filter_map(
+            |(f, d)| {
+                if specified_fields.contains(&d.name) || ellipsis { None } else { Some(f) }
+            },
+        )
         .collect();
     if missed_fields.is_empty() {
         return None;
     }
-    Some((variant_def, missed_fields, exhaustive))
+    Some((variant_def, missed_fields))
 }
 
 fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs
index b25901cc3b995..5a3eba1a71aef 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs
@@ -8,7 +8,7 @@ use hir_def::{
     expr_store::path::Path,
     hir::{
         Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId,
-        Statement, UnaryOp,
+        RecordSpread, Statement, UnaryOp,
     },
     item_tree::FieldsShape,
     resolver::ValueNs,
@@ -627,7 +627,7 @@ impl<'db> InferenceContext<'_, 'db> {
                 self.consume_expr(expr);
             }
             Expr::RecordLit { fields, spread, .. } => {
-                if let &Some(expr) = spread {
+                if let RecordSpread::Expr(expr) = *spread {
                     self.consume_expr(expr);
                 }
                 self.consume_exprs(fields.iter().map(|it| it.expr));
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index c57d41cc5f734..9f2d9d25b9572 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -8,7 +8,7 @@ use hir_def::{
     expr_store::path::{GenericArgs as HirGenericArgs, Path},
     hir::{
         Array, AsmOperand, AsmOptions, BinaryOp, BindingAnnotation, Expr, ExprId, ExprOrPatId,
-        InlineAsmKind, LabelId, Literal, Pat, PatId, Statement, UnaryOp,
+        InlineAsmKind, LabelId, Literal, Pat, PatId, RecordSpread, Statement, UnaryOp,
     },
     resolver::ValueNs,
 };
@@ -657,8 +657,8 @@ impl<'db> InferenceContext<'_, 'db> {
                         }
                     }
                 }
-                if let Some(expr) = spread {
-                    self.infer_expr(*expr, &Expectation::has_type(ty), ExprIsRead::Yes);
+                if let RecordSpread::Expr(expr) = *spread {
+                    self.infer_expr(expr, &Expectation::has_type(ty), ExprIsRead::Yes);
                 }
                 ty
             }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
index 729ed214daea8..45fa141b6d3d3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
@@ -2,7 +2,8 @@
 //! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar.
 
 use hir_def::hir::{
-    Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement, UnaryOp,
+    Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, RecordSpread,
+    Statement, UnaryOp,
 };
 use rustc_ast_ir::Mutability;
 
@@ -132,8 +133,11 @@ impl<'db> InferenceContext<'_, 'db> {
             Expr::Become { expr } => {
                 self.infer_mut_expr(*expr, Mutability::Not);
             }
-            Expr::RecordLit { path: _, fields, spread } => {
-                self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
+            Expr::RecordLit { path: _, fields, spread, .. } => {
+                self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr));
+                if let RecordSpread::Expr(expr) = *spread {
+                    self.infer_mut_expr(expr, Mutability::Not);
+                }
             }
             &Expr::Index { base, index } => {
                 if mutability == Mutability::Mut {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
index 1579f00e92666..199db7a3e7187 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -9,7 +9,7 @@ use hir_def::{
     expr_store::{Body, ExpressionStore, HygieneId, path::Path},
     hir::{
         ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm,
-        Pat, PatId, RecordFieldPat, RecordLitField,
+        Pat, PatId, RecordFieldPat, RecordLitField, RecordSpread,
     },
     item_tree::FieldsShape,
     lang_item::LangItems,
@@ -867,16 +867,17 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
             }
             Expr::Become { .. } => not_supported!("tail-calls"),
             Expr::Yield { .. } => not_supported!("yield"),
-            Expr::RecordLit { fields, path, spread } => {
-                let spread_place = match spread {
-                    &Some(it) => {
+            Expr::RecordLit { fields, path, spread, .. } => {
+                let spread_place = match *spread {
+                    RecordSpread::Expr(it) => {
                         let Some((p, c)) = self.lower_expr_as_place(current, it, true)? else {
                             return Ok(None);
                         };
                         current = c;
                         Some(p)
                     }
-                    None => None,
+                    RecordSpread::None => None,
+                    RecordSpread::FieldDefaults => not_supported!("empty record spread"),
                 };
                 let variant_id =
                     self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index 98f5739600f31..4bc757da44174 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -2003,6 +2003,15 @@ impl<'db> SemanticsImpl<'db> {
             .unwrap_or_default()
     }
 
+    pub fn record_literal_matched_fields(
+        &self,
+        literal: &ast::RecordExpr,
+    ) -> Vec<(Field, Type<'db>)> {
+        self.analyze(literal.syntax())
+            .and_then(|it| it.record_literal_matched_fields(self.db, literal))
+            .unwrap_or_default()
+    }
+
     pub fn record_pattern_missing_fields(
         &self,
         pattern: &ast::RecordPat,
@@ -2012,6 +2021,15 @@ impl<'db> SemanticsImpl<'db> {
             .unwrap_or_default()
     }
 
+    pub fn record_pattern_matched_fields(
+        &self,
+        pattern: &ast::RecordPat,
+    ) -> Vec<(Field, Type<'db>)> {
+        self.analyze(pattern.syntax())
+            .and_then(|it| it.record_pattern_matched_fields(self.db, pattern))
+            .unwrap_or_default()
+    }
+
     fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
         let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
         f(&mut ctx)
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index 6ba7a42c19460..4e85e299a97f3 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -17,7 +17,7 @@ use hir_def::{
         path::Path,
         scope::{ExprScopes, ScopeId},
     },
-    hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
+    hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat, PatId},
     lang_item::LangItems,
     nameres::MacroSubNs,
     resolver::{HasResolver, Resolver, TypeNs, ValueNs, resolver_for_scope},
@@ -44,6 +44,7 @@ use hir_ty::{
 };
 use intern::sym;
 use itertools::Itertools;
+use rustc_hash::FxHashSet;
 use rustc_type_ir::{
     AliasTyKind,
     inherent::{AdtDef, IntoKind, Ty as _},
@@ -1241,21 +1242,31 @@ impl<'db> SourceAnalyzer<'db> {
         let body = self.store()?;
         let infer = self.infer()?;
 
-        let expr_id = self.expr_id(literal.clone().into())?;
-        let substs = infer.expr_or_pat_ty(expr_id).as_adt()?.1;
-
-        let (variant, missing_fields, _exhaustive) = match expr_id {
-            ExprOrPatId::ExprId(expr_id) => {
-                record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?
-            }
-            ExprOrPatId::PatId(pat_id) => {
-                record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?
-            }
-        };
+        let expr_id = self.expr_id(literal.clone().into())?.as_expr()?;
+        let substs = infer.expr_ty(expr_id).as_adt()?.1;
+        let (variant, missing_fields) =
+            record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
         let res = self.missing_fields(db, substs, variant, missing_fields);
         Some(res)
     }
 
+    pub(crate) fn record_literal_matched_fields(
+        &self,
+        db: &'db dyn HirDatabase,
+        literal: &ast::RecordExpr,
+    ) -> Option<Vec<(Field, Type<'db>)>> {
+        let body = self.store()?;
+        let infer = self.infer()?;
+
+        let expr_id = self.expr_id(literal.clone().into())?.as_expr()?;
+        let substs = infer.expr_ty(expr_id).as_adt()?.1;
+        let (variant, matched_fields) =
+            record_literal_matched_fields(db, infer, expr_id, &body[expr_id])?;
+
+        let res = self.missing_fields(db, substs, variant, matched_fields);
+        Some(res)
+    }
+
     pub(crate) fn record_pattern_missing_fields(
         &self,
         db: &'db dyn HirDatabase,
@@ -1267,12 +1278,29 @@ impl<'db> SourceAnalyzer<'db> {
         let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
         let substs = infer.pat_ty(pat_id).as_adt()?.1;
 
-        let (variant, missing_fields, _exhaustive) =
+        let (variant, missing_fields) =
             record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
         let res = self.missing_fields(db, substs, variant, missing_fields);
         Some(res)
     }
 
+    pub(crate) fn record_pattern_matched_fields(
+        &self,
+        db: &'db dyn HirDatabase,
+        pattern: &ast::RecordPat,
+    ) -> Option<Vec<(Field, Type<'db>)>> {
+        let body = self.store()?;
+        let infer = self.infer()?;
+
+        let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
+        let substs = infer.pat_ty(pat_id).as_adt()?.1;
+
+        let (variant, matched_fields) =
+            record_pattern_matched_fields(db, infer, pat_id, &body[pat_id])?;
+        let res = self.missing_fields(db, substs, variant, matched_fields);
+        Some(res)
+    }
+
     fn missing_fields(
         &self,
         db: &'db dyn HirDatabase,
@@ -1810,3 +1838,67 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
     let ctx = span_map.span_at(name.value.text_range().start()).ctx;
     HygieneId::new(ctx.opaque_and_semiopaque(db))
 }
+
+fn record_literal_matched_fields(
+    db: &dyn HirDatabase,
+    infer: &InferenceResult,
+    id: ExprId,
+    expr: &Expr,
+) -> Option<(VariantId, Vec<LocalFieldId>)> {
+    let (fields, _spread) = match expr {
+        Expr::RecordLit { fields, spread, .. } => (fields, spread),
+        _ => return None,
+    };
+
+    let variant_def = infer.variant_resolution_for_expr(id)?;
+    if let VariantId::UnionId(_) = variant_def {
+        return None;
+    }
+
+    let variant_data = variant_def.fields(db);
+
+    let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+    // Suggest fields if:
+    // - they are not already written in the code
+    let matched_fields: Vec<LocalFieldId> = variant_data
+        .fields()
+        .iter()
+        .filter_map(|(f, d)| (!specified_fields.contains(&d.name)).then_some(f))
+        .collect();
+    if matched_fields.is_empty() {
+        return None;
+    }
+    Some((variant_def, matched_fields))
+}
+
+fn record_pattern_matched_fields(
+    db: &dyn HirDatabase,
+    infer: &InferenceResult,
+    id: PatId,
+    pat: &Pat,
+) -> Option<(VariantId, Vec<LocalFieldId>)> {
+    let (fields, _ellipsis) = match pat {
+        Pat::Record { path: _, args, ellipsis } => (args, *ellipsis),
+        _ => return None,
+    };
+
+    let variant_def = infer.variant_resolution_for_pat(id)?;
+    if let VariantId::UnionId(_) = variant_def {
+        return None;
+    }
+
+    let variant_data = variant_def.fields(db);
+
+    let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+    // Suggest fields if:
+    // - they are not already written in the code
+    let matched_fields: Vec<LocalFieldId> = variant_data
+        .fields()
+        .iter()
+        .filter_map(|(f, d)| if !specified_fields.contains(&d.name) { Some(f) } else { None })
+        .collect();
+    if matched_fields.is_empty() {
+        return None;
+    }
+    Some((variant_def, matched_fields))
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs
index b746099e72791..867ac48518646 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs
@@ -33,8 +33,8 @@ fn expand_record_rest_pattern(
     record_pat: ast::RecordPat,
     rest_pat: ast::RestPat,
 ) -> Option<()> {
-    let missing_fields = ctx.sema.record_pattern_missing_fields(&record_pat);
-    if missing_fields.is_empty() {
+    let matched_fields = ctx.sema.record_pattern_matched_fields(&record_pat);
+    if matched_fields.is_empty() {
         cov_mark::hit!(no_missing_fields);
         return None;
     }
@@ -53,7 +53,7 @@ fn expand_record_rest_pattern(
         |builder| {
             let make = SyntaxFactory::with_mappings();
             let mut editor = builder.make_editor(rest_pat.syntax());
-            let new_fields = old_field_list.fields().chain(missing_fields.iter().map(|(f, _)| {
+            let new_fields = old_field_list.fields().chain(matched_fields.iter().map(|(f, _)| {
                 make.record_pat_field_shorthand(
                     make.ident_pat(
                         false,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
index 77734c5d6f98f..8c532e0f4d04f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
@@ -340,7 +340,7 @@ pub(crate) fn complete_expr_path(
                             let missing_fields =
                                 ctx.sema.record_literal_missing_fields(record_expr);
                             if !missing_fields.is_empty() {
-                                add_default_update(acc, ctx, ty);
+                                add_default_update(acc, ctx, ty.as_ref());
                             }
                         }
                     };
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
index c5bfdcb8b7347..12c564af5cba4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
@@ -36,7 +36,7 @@ pub(crate) fn complete_record_pattern_fields(
                     true => return,
                 }
             }
-            _ => ctx.sema.record_pattern_missing_fields(record_pat),
+            _ => ctx.sema.record_pattern_matched_fields(record_pat),
         };
         complete_fields(acc, ctx, missing_fields);
     }
@@ -69,14 +69,14 @@ pub(crate) fn complete_record_expr_fields(
             }
         }
         _ => {
-            let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
+            let suggest_fields = ctx.sema.record_literal_matched_fields(record_expr);
             let update_exists = record_expr
                 .record_expr_field_list()
                 .is_some_and(|list| list.dotdot_token().is_some());
 
-            if !missing_fields.is_empty() && !update_exists {
+            if !suggest_fields.is_empty() && !update_exists {
                 cov_mark::hit!(functional_update_field);
-                add_default_update(acc, ctx, ty);
+                add_default_update(acc, ctx, ty.as_ref());
             }
             if dot_prefix {
                 cov_mark::hit!(functional_update_one_dot);
@@ -90,7 +90,7 @@ pub(crate) fn complete_record_expr_fields(
                 item.add_to(acc, ctx.db);
                 return;
             }
-            missing_fields
+            suggest_fields
         }
     };
     complete_fields(acc, ctx, missing_fields);
@@ -99,11 +99,11 @@ pub(crate) fn complete_record_expr_fields(
 pub(crate) fn add_default_update(
     acc: &mut Completions,
     ctx: &CompletionContext<'_>,
-    ty: Option<hir::TypeInfo<'_>>,
+    ty: Option<&hir::TypeInfo<'_>>,
 ) {
     let default_trait = ctx.famous_defs().core_default_Default();
     let impls_default_trait = default_trait
-        .zip(ty.as_ref())
+        .zip(ty)
         .is_some_and(|(default_trait, ty)| ty.original.impls_trait(ctx.db, default_trait, &[]));
     if impls_default_trait {
         // FIXME: This should make use of scope_def like completions so we get all the other goodies
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
index d9be6556fa5ba..045b2d03b0515 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
@@ -286,6 +286,24 @@ fn main() {
     );
 }
 
+#[test]
+fn functional_update_fields_completion() {
+    // Complete fields before functional update `..`
+    check(
+        r#"
+struct Point { x: i32 = 0, y: i32 = 0 }
+
+fn main() {
+    let p = Point { $0, .. };
+}
+"#,
+        expect![[r#"
+            fd x i32
+            fd y i32
+        "#]],
+    );
+}
+
 #[test]
 fn empty_union_literal() {
     check(
@@ -302,7 +320,27 @@ fn foo() {
             fd bar f32
             fd foo u32
         "#]],
-    )
+    );
+}
+
+#[test]
+fn record_pattern_field_with_rest_pat() {
+    // When .. is present, complete all unspecified fields (even those with default values)
+    check(
+        r#"
+struct UserInfo { id: i32, age: f32, email: u64 }
+
+fn foo(u1: UserInfo) {
+    let UserInfo { id, $0, .. } = u1;
+}
+"#,
+        expect![[r#"
+            fd age   f32
+            fd email u64
+            kw mut
+            kw ref
+        "#]],
+    );
 }
 
 #[test]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 2a251382d465f..d5f25dfaf208e 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -857,4 +857,81 @@ pub struct Claims {
         "#,
         );
     }
+
+    #[test]
+    fn test_default_field_values_basic() {
+        // This should work without errors - only field 'b' is required
+        check_diagnostics(
+            r#"
+#![feature(default_field_values)]
+struct Struct {
+    a: usize = 0,
+    b: usize,
+}
+
+fn main() {
+    Struct { b: 1, .. };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_default_field_values_missing_field_error() {
+        // This should report a missing field error because email is required
+        check_diagnostics(
+            r#"
+#![feature(default_field_values)]
+struct UserInfo {
+    id: i32,
+    age: f32 = 1.0,
+    email: String,
+}
+
+fn main() {
+    UserInfo { id: 20, .. };
+//  ^^^^^^^^💡 error: missing structure fields:
+//         |- email
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_default_field_values_requires_spread_syntax() {
+        // without `..` should report missing fields
+        check_diagnostics(
+            r#"
+#![feature(default_field_values)]
+struct Point {
+    x: i32 = 0,
+    y: i32 = 0,
+}
+
+fn main() {
+    Point { x: 0 };
+//  ^^^^^💡 error: missing structure fields:
+//      |- y
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_default_field_values_pattern_matching() {
+        check_diagnostics(
+            r#"
+#![feature(default_field_values)]
+struct Point {
+    x: i32 = 0,
+    y: i32 = 0,
+    z: i32,
+}
+
+fn main() {
+    let Point { x, .. } = Point { z: 5, .. };
+}
+"#,
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index feac5fff84a7a..15ea92d1c6ec5 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -272,9 +272,9 @@ pub(super) fn struct_rest_pat(
     edition: Edition,
     display_target: DisplayTarget,
 ) -> HoverResult {
-    let missing_fields = sema.record_pattern_missing_fields(pattern);
+    let matched_fields = sema.record_pattern_matched_fields(pattern);
 
-    // if there are no missing fields, the end result is a hover that shows ".."
+    // if there are no matched fields, the end result is a hover that shows ".."
     // should be left in to indicate that there are no more fields in the pattern
     // example, S {a: 1, b: 2, ..} when struct S {a: u32, b: u32}
 
@@ -285,13 +285,13 @@ pub(super) fn struct_rest_pat(
             targets.push(item);
         }
     };
-    for (_, t) in &missing_fields {
+    for (_, t) in &matched_fields {
         walk_and_push_ty(sema.db, t, &mut push_new_def);
     }
 
     res.markup = {
         let mut s = String::from(".., ");
-        for (f, _) in &missing_fields {
+        for (f, _) in &matched_fields {
             s += f.display(sema.db, display_target).to_string().as_ref();
             s += ", ";
         }

From 2c2602bf0d1edc47cb886dbe4be24a247eb64ab5 Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Mon, 26 Jan 2026 16:13:06 +0200
Subject: [PATCH 116/131] Fix macro matching of `meta` then `=>` or `==`

The parser declared the meta invalid because it consumed the lone `=` from `=>`/`==`, which is incorrect.
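
For example (mirroring the `meta_fat_arrow` test added below), this invocation now matches:

```rust
macro_rules! m {
    ( $m:meta => ) => {};
}

// Before this fix, matching failed because the `=` of `=>` was
// consumed while parsing `$m:meta`.
m! { foo => }
```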
---
 .../src/macro_expansion_tests/mbe/matching.rs | 20 +++++++++++++++++++
 .../crates/parser/src/grammar/attributes.rs   |  2 +-
 2 files changed, 21 insertions(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
index e33a366769b09..bbadcf8794bf0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
@@ -237,3 +237,23 @@ fn test() {
 "#]],
     );
 }
+
+#[test]
+fn meta_fat_arrow() {
+    check(
+        r#"
+macro_rules! m {
+    ( $m:meta => ) => {};
+}
+
+m! { foo => }
+    "#,
+        expect![[r#"
+macro_rules! m {
+    ( $m:meta => ) => {};
+}
+
+
+    "#]],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
index 54b5c8a275a8e..c0cf43a87bf77 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
@@ -70,7 +70,7 @@ pub(super) fn meta(p: &mut Parser<'_>) {
     paths::attr_path(p);
 
     match p.current() {
-        T![=] => {
+        T![=] if !p.at(T![=>]) && !p.at(T![==]) => {
             p.bump(T![=]);
             if expressions::expr(p).is_none() {
                 p.error("expected expression");

From cd8fe54a1a743c0bf88f8a9c28ae8554345f3d58 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Mon, 26 Jan 2026 01:46:13 +0800
Subject: [PATCH 117/131] Fix semicolon for toggle_macro_delimiter

Example
---
```rust
macro_rules! sth {
    () => {};
}

sth!$0{ }
```

(the old test `sth!{};` is a syntax error in item position)

**Before this PR**

```rust
macro_rules! sth {
    () => {};
}

sth![ ]
```

**After this PR**

```rust
macro_rules! sth {
    () => {};
}

sth![ ];
```
---
 .../src/handlers/toggle_macro_delimiter.rs    | 149 +++++++++++++++++-
 1 file changed, 145 insertions(+), 4 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
index bf1546986ed27..60b0797f028a9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
@@ -86,7 +86,14 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
                 }
                 MacroDelims::LCur | MacroDelims::RCur => {
                     editor.replace(ltoken, make.token(T!['[']));
-                    editor.replace(rtoken, make.token(T![']']));
+                    if semicolon.is_some() || !needs_semicolon(token_tree) {
+                        editor.replace(rtoken, make.token(T![']']));
+                    } else {
+                        editor.replace_with_many(
+                            rtoken,
+                            vec![make.token(T![']']).into(), make.token(T![;]).into()],
+                        );
+                    }
                 }
             }
             editor.add_mappings(make.finish_with_mappings());
@@ -103,6 +110,30 @@ fn macro_semicolon(makro: &ast::MacroCall) -> Option {
     })
 }
 
+fn needs_semicolon(tt: ast::TokenTree) -> bool {
+    (|| {
+        let call = ast::MacroCall::cast(tt.syntax().parent()?)?;
+        let container = call.syntax().parent()?;
+        let kind = container.kind();
+
+        if call.semicolon_token().is_some() {
+            return Some(false);
+        }
+
+        Some(
+            ast::ItemList::can_cast(kind)
+                || ast::SourceFile::can_cast(kind)
+                || ast::AssocItemList::can_cast(kind)
+                || ast::ExternItemList::can_cast(kind)
+                || ast::MacroItems::can_cast(kind)
+                || ast::MacroExpr::can_cast(kind)
+                    && ast::ExprStmt::cast(container.parent()?)
+                        .is_some_and(|it| it.semicolon_token().is_none()),
+        )
+    })()
+    .unwrap_or(false)
+}
+
 #[cfg(test)]
 mod tests {
     use crate::tests::{check_assist, check_assist_not_applicable};
@@ -161,7 +192,7 @@ macro_rules! sth {
     () => {};
 }
 
-sth!$0{ };
+sth!$0{ }
             "#,
             r#"
 macro_rules! sth {
@@ -170,7 +201,117 @@ macro_rules! sth {
 
 sth![ ];
             "#,
-        )
+        );
+
+        check_assist(
+            toggle_macro_delimiter,
+            r#"
+macro_rules! sth {
+    () => {};
+}
+
+fn foo() -> i32 {
+    sth!$0{ }
+    2
+}
+            "#,
+            r#"
+macro_rules! sth {
+    () => {};
+}
+
+fn foo() -> i32 {
+    sth![ ];
+    2
+}
+            "#,
+        );
+
+        check_assist(
+            toggle_macro_delimiter,
+            r#"
+macro_rules! sth {
+    () => {2};
+}
+
+fn foo() {
+    sth!$0{ };
+}
+            "#,
+            r#"
+macro_rules! sth {
+    () => {2};
+}
+
+fn foo() {
+    sth![ ];
+}
+            "#,
+        );
+
+        check_assist(
+            toggle_macro_delimiter,
+            r#"
+macro_rules! sth {
+    () => {2};
+}
+
+fn foo() -> i32 {
+    sth!$0{ }
+}
+            "#,
+            r#"
+macro_rules! sth {
+    () => {2};
+}
+
+fn foo() -> i32 {
+    sth![ ]
+}
+            "#,
+        );
+
+        check_assist(
+            toggle_macro_delimiter,
+            r#"
+macro_rules! sth {
+    () => {};
+}
+impl () {
+    sth!$0{}
+}
+            "#,
+            r#"
+macro_rules! sth {
+    () => {};
+}
+impl () {
+    sth![];
+}
+            "#,
+        );
+
+        check_assist(
+            toggle_macro_delimiter,
+            r#"
+macro_rules! sth {
+    () => {2};
+}
+
+fn foo() -> i32 {
+    bar(sth!$0{ })
+}
+            "#,
+            r#"
+macro_rules! sth {
+    () => {2};
+}
+
+fn foo() -> i32 {
+    bar(sth![ ])
+}
+            "#,
+        );
     }
 
     #[test]
@@ -204,7 +345,7 @@ mod abc {
         () => {};
     }
 
-    sth!$0{ };
+    sth!$0{ }
 }
             "#,
             r#"

From 6664fc61946f5bc3c2f8e372310ce83ac41e2ab0 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Tue, 27 Jan 2026 08:57:25 +0100
Subject: [PATCH 118/131] minor: Downgrade noisy log

---
 src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index 62a3b3a17bdfc..71c4b2accce98 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -309,10 +309,10 @@ impl GlobalState {
 
         let event_dbg_msg = format!("{event:?}");
         tracing::debug!(?loop_start, ?event, "handle_event");
-        if tracing::enabled!(tracing::Level::INFO) {
+        if tracing::enabled!(tracing::Level::TRACE) {
             let task_queue_len = self.task_pool.handle.len();
             if task_queue_len > 0 {
-                tracing::info!("task queue len: {}", task_queue_len);
+                tracing::trace!("task queue len: {}", task_queue_len);
             }
         }
 

From 89d7695040cb21b5c29d4f2a5e69681f4b192172 Mon Sep 17 00:00:00 2001
From: Zalathar 
Date: Sun, 25 Jan 2026 13:48:39 +1100
Subject: [PATCH 119/131] Rename `DynamicQuery` to `QueryVTable`

---
 compiler/rustc_middle/src/query/mod.rs      |  2 +-
 compiler/rustc_middle/src/query/plumbing.rs | 16 ++++++---
 compiler/rustc_query_impl/src/lib.rs        | 39 ++++++++++-----------
 compiler/rustc_query_impl/src/plumbing.rs   | 14 ++++----
 4 files changed, 38 insertions(+), 33 deletions(-)

diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs
index 9d17c998a8f29..901a023c4f308 100644
--- a/compiler/rustc_middle/src/query/mod.rs
+++ b/compiler/rustc_middle/src/query/mod.rs
@@ -122,7 +122,7 @@ use crate::mir::mono::{
     CodegenUnit, CollectionMode, MonoItem, MonoItemPartitions, NormalizationErrorInMono,
 };
 use crate::query::erase::{Erase, erase, restore};
-use crate::query::plumbing::{CyclePlaceholder, DynamicQuery};
+use crate::query::plumbing::CyclePlaceholder;
 use crate::traits::query::{
     CanonicalAliasGoal, CanonicalDropckOutlivesGoal, CanonicalImpliedOutlivesBoundsGoal,
     CanonicalMethodAutoderefStepsGoal, CanonicalPredicateGoal, CanonicalTypeOpAscribeUserTypeGoal,
diff --git a/compiler/rustc_middle/src/query/plumbing.rs b/compiler/rustc_middle/src/query/plumbing.rs
index 9ee8d743e64a4..0e536352563f4 100644
--- a/compiler/rustc_middle/src/query/plumbing.rs
+++ b/compiler/rustc_middle/src/query/plumbing.rs
@@ -14,11 +14,14 @@ use crate::dep_graph;
 use crate::dep_graph::DepKind;
 use crate::query::on_disk_cache::{CacheEncoder, EncodedDepNodeIndex, OnDiskCache};
 use crate::query::{
-    DynamicQueries, ExternProviders, Providers, QueryArenas, QueryCaches, QueryEngine, QueryStates,
+    ExternProviders, PerQueryVTables, Providers, QueryArenas, QueryCaches, QueryEngine, QueryStates,
 };
 use crate::ty::TyCtxt;
 
-pub struct DynamicQuery<'tcx, C: QueryCache> {
+/// Stores function pointers and other metadata for a particular query.
+///
+/// Used indirectly by query plumbing in `rustc_query_system`, via a trait.
+pub struct QueryVTable<'tcx, C: QueryCache> {
     pub name: &'static str,
     pub eval_always: bool,
     pub dep_kind: DepKind,
@@ -62,7 +65,7 @@ pub struct QuerySystem<'tcx> {
     pub states: QueryStates<'tcx>,
     pub arenas: WorkerLocal<QueryArenas<'tcx>>,
     pub caches: QueryCaches<'tcx>,
-    pub dynamic_queries: DynamicQueries<'tcx>,
+    pub query_vtables: PerQueryVTables<'tcx>,
 
     /// This provides access to the incremental compilation on-disk cache for query results.
     /// Do not access this directly. It is only meant to be used by
@@ -418,9 +421,12 @@ macro_rules! define_callbacks {
             })*
         }
 
-        pub struct DynamicQueries<'tcx> {
+        /// Holds a `QueryVTable` for each query.
+        ///
+        /// ("Per" just makes this pluralized name more visually distinct.)
+        pub struct PerQueryVTables<'tcx> {
             $(
-                pub $name: DynamicQuery<'tcx, queries::$name::Storage<'tcx>>,
+                pub $name: ::rustc_middle::query::plumbing::QueryVTable<'tcx, queries::$name::Storage<'tcx>>,
             )*
         }
 
diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs
index e8983bfa1ddba..a8dcd3eb153b1 100644
--- a/compiler/rustc_query_impl/src/lib.rs
+++ b/compiler/rustc_query_impl/src/lib.rs
@@ -12,10 +12,9 @@ use rustc_middle::arena::Arena;
 use rustc_middle::dep_graph::{self, DepKind, DepKindVTable, DepNodeIndex};
 use rustc_middle::query::erase::{Erase, erase, restore};
 use rustc_middle::query::on_disk_cache::{CacheEncoder, EncodedDepNodeIndex, OnDiskCache};
-use rustc_middle::query::plumbing::{DynamicQuery, QuerySystem, QuerySystemFns};
+use rustc_middle::query::plumbing::{QuerySystem, QuerySystemFns, QueryVTable};
 use rustc_middle::query::{
-    AsLocalKey, DynamicQueries, ExternProviders, Providers, QueryCaches, QueryEngine, QueryStates,
-    queries,
+    AsLocalKey, ExternProviders, Providers, QueryCaches, QueryEngine, QueryStates, queries,
 };
 use rustc_middle::ty::TyCtxt;
 use rustc_query_system::Value;
@@ -44,7 +43,7 @@ struct DynamicConfig<
     const DEPTH_LIMIT: bool,
     const FEEDABLE: bool,
 > {
-    dynamic: &'tcx DynamicQuery<'tcx, C>,
+    vtable: &'tcx QueryVTable<'tcx, C>,
 }
 
 impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool> Copy
@@ -70,12 +69,12 @@ where
 
     #[inline(always)]
     fn name(self) -> &'static str {
-        self.dynamic.name
+        self.vtable.name
     }
 
     #[inline(always)]
     fn cache_on_disk(self, tcx: TyCtxt<'tcx>, key: &Self::Key) -> bool {
-        (self.dynamic.cache_on_disk)(tcx, key)
+        (self.vtable.cache_on_disk)(tcx, key)
     }
 
     #[inline(always)]
@@ -90,7 +89,7 @@ where
         // This is just manually doing the subfield referencing through pointer math.
         unsafe {
             &*(&qcx.tcx.query_system.states as *const QueryStates<'tcx>)
-                .byte_add(self.dynamic.query_state)
+                .byte_add(self.vtable.query_state)
                 .cast::<QueryState<Self::Key, QueryStackDeferred<'tcx>>>()
         }
     }
@@ -104,19 +103,19 @@ where
         // This is just manually doing the subfield referencing through pointer math.
         unsafe {
             &*(&qcx.tcx.query_system.caches as *const QueryCaches<'tcx>)
-                .byte_add(self.dynamic.query_cache)
+                .byte_add(self.vtable.query_cache)
                 .cast::<Self::Cache>()
         }
     }
 
     #[inline(always)]
     fn execute_query(self, tcx: TyCtxt<'tcx>, key: Self::Key) -> Self::Value {
-        (self.dynamic.execute_query)(tcx, key)
+        (self.vtable.execute_query)(tcx, key)
     }
 
     #[inline(always)]
     fn compute(self, qcx: QueryCtxt<'tcx>, key: Self::Key) -> Self::Value {
-        (self.dynamic.compute)(qcx.tcx, key)
+        (self.vtable.compute)(qcx.tcx, key)
     }
 
     #[inline(always)]
@@ -127,8 +126,8 @@ where
         prev_index: SerializedDepNodeIndex,
         index: DepNodeIndex,
     ) -> Option<Self::Value> {
-        if self.dynamic.can_load_from_disk {
-            (self.dynamic.try_load_from_disk)(qcx.tcx, key, prev_index, index)
+        if self.vtable.can_load_from_disk {
+            (self.vtable.try_load_from_disk)(qcx.tcx, key, prev_index, index)
         } else {
             None
         }
@@ -141,7 +140,7 @@ where
         key: &Self::Key,
         index: SerializedDepNodeIndex,
     ) -> bool {
-        (self.dynamic.loadable_from_disk)(qcx.tcx, key, index)
+        (self.vtable.loadable_from_disk)(qcx.tcx, key, index)
     }
 
     fn value_from_cycle_error(
@@ -150,12 +149,12 @@ where
         cycle_error: &CycleError,
         guar: ErrorGuaranteed,
     ) -> Self::Value {
-        (self.dynamic.value_from_cycle_error)(tcx, cycle_error, guar)
+        (self.vtable.value_from_cycle_error)(tcx, cycle_error, guar)
     }
 
     #[inline(always)]
     fn format_value(self) -> fn(&Self::Value) -> String {
-        self.dynamic.format_value
+        self.vtable.format_value
     }
 
     #[inline(always)]
@@ -165,7 +164,7 @@ where
 
     #[inline(always)]
     fn eval_always(self) -> bool {
-        self.dynamic.eval_always
+        self.vtable.eval_always
     }
 
     #[inline(always)]
@@ -180,17 +179,17 @@ where
 
     #[inline(always)]
     fn dep_kind(self) -> DepKind {
-        self.dynamic.dep_kind
+        self.vtable.dep_kind
     }
 
     #[inline(always)]
     fn cycle_error_handling(self) -> CycleErrorHandling {
-        self.dynamic.cycle_error_handling
+        self.vtable.cycle_error_handling
     }
 
     #[inline(always)]
     fn hash_result(self) -> HashResult<Self::Value> {
-        self.dynamic.hash_result
+        self.vtable.hash_result
     }
 }
 
@@ -217,7 +216,7 @@ pub fn query_system<'a>(
         states: Default::default(),
         arenas: Default::default(),
         caches: Default::default(),
-        dynamic_queries: dynamic_queries(),
+        query_vtables: make_query_vtables(),
         on_disk_cache,
         fns: QuerySystemFns {
             engine: engine(incremental),
diff --git a/compiler/rustc_query_impl/src/plumbing.rs b/compiler/rustc_query_impl/src/plumbing.rs
index 0c3fcf25c6550..0104998a1f0d4 100644
--- a/compiler/rustc_query_impl/src/plumbing.rs
+++ b/compiler/rustc_query_impl/src/plumbing.rs
@@ -641,10 +641,10 @@ macro_rules! define_queries {
                 }
             }
 
-            pub(crate) fn dynamic_query<'tcx>()
-                -> DynamicQuery<'tcx, queries::$name::Storage<'tcx>>
+            pub(crate) fn make_query_vtable<'tcx>()
+                -> QueryVTable<'tcx, queries::$name::Storage<'tcx>>
             {
-                DynamicQuery {
+                QueryVTable {
                     name: stringify!($name),
                     eval_always: is_eval_always!([$($modifiers)*]),
                     dep_kind: dep_graph::dep_kinds::$name,
@@ -725,7 +725,7 @@ macro_rules! define_queries {
                 #[inline(always)]
                 fn config(tcx: TyCtxt<'tcx>) -> Self::Config {
                     DynamicConfig {
-                        dynamic: &tcx.query_system.dynamic_queries.$name,
+                        vtable: &tcx.query_system.query_vtables.$name,
                     }
                 }
 
@@ -810,10 +810,10 @@ macro_rules! define_queries {
             }
         }
 
-        pub fn dynamic_queries<'tcx>() -> DynamicQueries<'tcx> {
-            DynamicQueries {
+        pub fn make_query_vtables<'tcx>() -> ::rustc_middle::query::PerQueryVTables<'tcx> {
+            ::rustc_middle::query::PerQueryVTables {
                 $(
-                    $name: query_impl::$name::dynamic_query(),
+                    $name: query_impl::$name::make_query_vtable(),
                 )*
             }
         }

From 2c9175d73db97065ef306758dbbc684e2a17536a Mon Sep 17 00:00:00 2001
From: Zalathar 
Date: Sun, 25 Jan 2026 13:55:02 +1100
Subject: [PATCH 120/131] Rename trait `QueryConfig` to `QueryDispatcher`

---
 compiler/rustc_query_impl/src/lib.rs          | 46 +++++++++++++-----
 compiler/rustc_query_impl/src/plumbing.rs     | 47 ++++++++++---------
 .../src/query/{config.rs => dispatcher.rs}    | 12 +++--
 compiler/rustc_query_system/src/query/mod.rs  |  4 +-
 .../rustc_query_system/src/query/plumbing.rs  | 28 +++++------
 5 files changed, 82 insertions(+), 55 deletions(-)
 rename compiler/rustc_query_system/src/query/{config.rs => dispatcher.rs} (85%)

diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs
index a8dcd3eb153b1..b628224db5369 100644
--- a/compiler/rustc_query_impl/src/lib.rs
+++ b/compiler/rustc_query_impl/src/lib.rs
@@ -21,7 +21,7 @@ use rustc_query_system::Value;
 use rustc_query_system::dep_graph::SerializedDepNodeIndex;
 use rustc_query_system::ich::StableHashingContext;
 use rustc_query_system::query::{
-    CycleError, CycleErrorHandling, HashResult, QueryCache, QueryConfig, QueryMap, QueryMode,
+    CycleError, CycleErrorHandling, HashResult, QueryCache, QueryDispatcher, QueryMap, QueryMode,
     QueryStackDeferred, QueryState, get_query_incr, get_query_non_incr,
 };
 use rustc_span::{ErrorGuaranteed, Span};
@@ -36,7 +36,13 @@ pub use crate::plumbing::{QueryCtxt, query_key_hash_verify_all};
 mod profiling_support;
 pub use self::profiling_support::alloc_self_profile_query_strings;
 
-struct DynamicConfig<
+/// Combines a [`QueryVTable`] with some additional compile-time booleans
+/// to implement [`QueryDispatcher`], for use by code in [`rustc_query_system`].
+///
+/// Baking these boolean flags into the type gives a modest but measurable
+/// improvement to compiler perf and compiler code size; see
+/// .
+struct SemiDynamicQueryDispatcher<
     'tcx,
     C: QueryCache,
     const ANON: bool,
@@ -46,20 +52,23 @@ struct DynamicConfig<
     vtable: &'tcx QueryVTable<'tcx, C>,
 }
 
+// Manually implement Copy/Clone, because deriving would put trait bounds on the cache type.
 impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool> Copy
-    for DynamicConfig<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
+    for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
 {
 }
 impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool> Clone
-    for DynamicConfig<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
+    for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
 {
     fn clone(&self) -> Self {
         *self
     }
 }
 
+// This is `impl QueryDispatcher for SemiDynamicQueryDispatcher`.
 impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDABLE: bool>
-    QueryConfig<QueryCtxt<'tcx>> for DynamicConfig<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
+    QueryDispatcher<QueryCtxt<'tcx>>
+    for SemiDynamicQueryDispatcher<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
 where
     for<'a> C::Key: HashStable<StableHashingContext<'a>>,
 {
@@ -193,17 +202,28 @@ where
     }
 }
 
-/// This is implemented per query. It allows restoring query values from their erased state
-/// and constructing a QueryConfig.
-trait QueryConfigRestored<'tcx> {
-    type RestoredValue;
-    type Config: QueryConfig<QueryCtxt<'tcx>>;
+/// Provides access to vtable-like operations for a query
+/// (by creating a [`QueryDispatcher`]),
+/// but also keeps track of the "unerased" value type of the query
+/// (i.e. the actual result type in the query declaration).
+///
+/// This trait allows some per-query code to be defined in generic functions
+/// with a trait bound, instead of having to be defined inline within a macro
+/// expansion.
+///
+/// There is one macro-generated implementation of this trait for each query,
+/// on the type `rustc_query_impl::query_impl::$name::QueryType`.
+trait QueryDispatcherUnerased<'tcx> {
+    type UnerasedValue;
+    type Dispatcher: QueryDispatcher<QueryCtxt<'tcx>>;
 
     const NAME: &'static &'static str;
 
-    fn config(tcx: TyCtxt<'tcx>) -> Self::Config;
-    fn restore(value: <Self::Config as QueryConfig<QueryCtxt<'tcx>>>::Value)
-    -> Self::RestoredValue;
+    fn query_dispatcher(tcx: TyCtxt<'tcx>) -> Self::Dispatcher;
+
+    fn restore_val(
+        value: <Self::Dispatcher as QueryDispatcher<QueryCtxt<'tcx>>>::Value,
+    ) -> Self::UnerasedValue;
 }
 
 pub fn query_system<'a>(
diff --git a/compiler/rustc_query_impl/src/plumbing.rs b/compiler/rustc_query_impl/src/plumbing.rs
index 0104998a1f0d4..6ead03a527a7a 100644
--- a/compiler/rustc_query_impl/src/plumbing.rs
+++ b/compiler/rustc_query_impl/src/plumbing.rs
@@ -27,14 +27,14 @@ use rustc_middle::ty::{self, TyCtxt};
 use rustc_query_system::dep_graph::{DepNodeParams, HasDepContext};
 use rustc_query_system::ich::StableHashingContext;
 use rustc_query_system::query::{
-    QueryCache, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffect,
+    QueryCache, QueryContext, QueryDispatcher, QueryJobId, QueryMap, QuerySideEffect,
     QueryStackDeferred, QueryStackFrame, QueryStackFrameExtra, force_query,
 };
 use rustc_query_system::{QueryOverflow, QueryOverflowNote};
 use rustc_serialize::{Decodable, Encodable};
 use rustc_span::def_id::LOCAL_CRATE;
 
-use crate::QueryConfigRestored;
+use crate::QueryDispatcherUnerased;
 
 /// Implements [`QueryContext`] for use by [`rustc_query_system`], since that
 /// crate does not have direct access to [`TyCtxt`].
@@ -387,13 +387,13 @@ pub(crate) fn create_query_frame<
 }
 
 pub(crate) fn encode_query_results<'a, 'tcx, Q>(
-    query: Q::Config,
+    query: Q::Dispatcher,
     qcx: QueryCtxt<'tcx>,
     encoder: &mut CacheEncoder<'a, 'tcx>,
     query_result_index: &mut EncodedDepNodeIndex,
 ) where
-    Q: super::QueryConfigRestored<'tcx>,
-    Q::RestoredValue: Encodable<CacheEncoder<'a, 'tcx>>,
+    Q: QueryDispatcherUnerased<'tcx>,
+    Q::UnerasedValue: Encodable<CacheEncoder<'a, 'tcx>>,
 {
     let _timer = qcx.tcx.prof.generic_activity_with_arg("encode_query_results_for", query.name());
 
@@ -408,13 +408,13 @@ pub(crate) fn encode_query_results<'a, 'tcx, Q>(
 
             // Encode the type check tables with the `SerializedDepNodeIndex`
             // as tag.
-            encoder.encode_tagged(dep_node, &Q::restore(*value));
+            encoder.encode_tagged(dep_node, &Q::restore_val(*value));
         }
     });
 }
 
 pub(crate) fn query_key_hash_verify<'tcx>(
-    query: impl QueryConfig<QueryCtxt<'tcx>>,
+    query: impl QueryDispatcher<QueryCtxt<'tcx>>,
     qcx: QueryCtxt<'tcx>,
 ) {
     let _timer = qcx.tcx.prof.generic_activity_with_arg("query_key_hash_verify_for", query.name());
@@ -442,7 +442,7 @@ pub(crate) fn query_key_hash_verify<'tcx>(
 
 fn try_load_from_on_disk_cache<'tcx, Q>(query: Q, tcx: TyCtxt<'tcx>, dep_node: DepNode)
 where
-    Q: QueryConfig<QueryCtxt<'tcx>>,
+    Q: QueryDispatcher<QueryCtxt<'tcx>>,
 {
     debug_assert!(tcx.dep_graph.is_green(&dep_node));
 
@@ -488,7 +488,7 @@ where
 
 fn force_from_dep_node<'tcx, Q>(query: Q, tcx: TyCtxt<'tcx>, dep_node: DepNode) -> bool
 where
-    Q: QueryConfig<QueryCtxt<'tcx>>,
+    Q: QueryDispatcher<QueryCtxt<'tcx>>,
 {
     // We must avoid ever having to call `force_from_dep_node()` for a
     // `DepNode::codegen_unit`:
@@ -521,9 +521,10 @@ pub(crate) fn make_dep_kind_vtable_for_query<'tcx, Q>(
     is_eval_always: bool,
 ) -> DepKindVTable<'tcx>
 where
-    Q: QueryConfigRestored<'tcx>,
+    Q: QueryDispatcherUnerased<'tcx>,
 {
-    let fingerprint_style = <Q::Config as QueryConfig<QueryCtxt<'tcx>>>::Key::fingerprint_style();
+    let fingerprint_style =
+        <Q::Dispatcher as QueryDispatcher<QueryCtxt<'tcx>>>::Key::fingerprint_style();
 
     if is_anon || !fingerprint_style.reconstructible() {
         return DepKindVTable {
@@ -541,10 +542,10 @@ where
         is_eval_always,
         fingerprint_style,
         force_from_dep_node: Some(|tcx, dep_node, _| {
-            force_from_dep_node(Q::config(tcx), tcx, dep_node)
+            force_from_dep_node(Q::query_dispatcher(tcx), tcx, dep_node)
         }),
         try_load_from_on_disk_cache: Some(|tcx, dep_node| {
-            try_load_from_on_disk_cache(Q::config(tcx), tcx, dep_node)
+            try_load_from_on_disk_cache(Q::query_dispatcher(tcx), tcx, dep_node)
         }),
         name: Q::NAME,
     }
@@ -613,7 +614,7 @@ macro_rules! define_queries {
                     #[cfg(debug_assertions)]
                     let _guard = tracing::span!(tracing::Level::TRACE, stringify!($name), ?key).entered();
                     get_query_incr(
-                        QueryType::config(tcx),
+                        QueryType::query_dispatcher(tcx),
                         QueryCtxt::new(tcx),
                         span,
                         key,
@@ -633,7 +634,7 @@ macro_rules! define_queries {
                     __mode: QueryMode,
                 ) -> Option<Erase<queries::$name::Value<'tcx>>> {
                     Some(get_query_non_incr(
-                        QueryType::config(tcx),
+                        QueryType::query_dispatcher(tcx),
                         QueryCtxt::new(tcx),
                         span,
                         key,
@@ -710,9 +711,9 @@ macro_rules! define_queries {
                 data: PhantomData<&'tcx ()>
             }
 
-            impl<'tcx> QueryConfigRestored<'tcx> for QueryType<'tcx> {
-                type RestoredValue = queries::$name::Value<'tcx>;
-                type Config = DynamicConfig<
+            impl<'tcx> QueryDispatcherUnerased<'tcx> for QueryType<'tcx> {
+                type UnerasedValue = queries::$name::Value<'tcx>;
+                type Dispatcher = SemiDynamicQueryDispatcher<
                     'tcx,
                     queries::$name::Storage<'tcx>,
                     { is_anon!([$($modifiers)*]) },
@@ -723,14 +724,14 @@ macro_rules! define_queries {
                 const NAME: &'static &'static str = &stringify!($name);
 
                 #[inline(always)]
-                fn config(tcx: TyCtxt<'tcx>) -> Self::Config {
-                    DynamicConfig {
+                fn query_dispatcher(tcx: TyCtxt<'tcx>) -> Self::Dispatcher {
+                    SemiDynamicQueryDispatcher {
                         vtable: &tcx.query_system.query_vtables.$name,
                     }
                 }
 
                 #[inline(always)]
-                fn restore(value: <Self::Config as QueryConfig<QueryCtxt<'tcx>>>::Value) -> Self::RestoredValue {
+                fn restore_val(value: <Self::Dispatcher as QueryDispatcher<QueryCtxt<'tcx>>>::Value) -> Self::UnerasedValue {
                     restore::<queries::$name::Value<'tcx>>(value)
                 }
             }
@@ -782,7 +783,7 @@ macro_rules! define_queries {
                     query_result_index: &mut EncodedDepNodeIndex
                 ) {
                     $crate::plumbing::encode_query_results::<query_impl::$name::QueryType<'tcx>>(
-                        query_impl::$name::QueryType::config(tcx),
+                        query_impl::$name::QueryType::query_dispatcher(tcx),
                         QueryCtxt::new(tcx),
                         encoder,
                         query_result_index,
@@ -792,7 +793,7 @@ macro_rules! define_queries {
 
             pub(crate) fn query_key_hash_verify<'tcx>(tcx: TyCtxt<'tcx>) {
                 $crate::plumbing::query_key_hash_verify(
-                    query_impl::$name::QueryType::config(tcx),
+                    query_impl::$name::QueryType::query_dispatcher(tcx),
                     QueryCtxt::new(tcx),
                 )
             }
diff --git a/compiler/rustc_query_system/src/query/config.rs b/compiler/rustc_query_system/src/query/dispatcher.rs
similarity index 85%
rename from compiler/rustc_query_system/src/query/config.rs
rename to compiler/rustc_query_system/src/query/dispatcher.rs
index 66b04aa7b467b..bba1703dfbb6b 100644
--- a/compiler/rustc_query_system/src/query/config.rs
+++ b/compiler/rustc_query_system/src/query/dispatcher.rs
@@ -1,5 +1,3 @@
-//! Query configuration and description traits.
-
 use std::fmt::Debug;
 use std::hash::Hash;
 
@@ -14,7 +12,15 @@ use crate::query::{CycleError, CycleErrorHandling, DepNodeIndex, QueryContext, Q
 
 pub type HashResult<V> = Option<fn(&mut StableHashingContext<'_>, &V) -> Fingerprint>;
 
-pub trait QueryConfig<Qcx: QueryContext>: Copy {
+/// Trait that can be used as a vtable for a single query, providing operations
+/// and metadata for that query.
+///
+/// Implemented by `rustc_query_impl::SemiDynamicQueryDispatcher`, which
+/// mostly delegates to `rustc_middle::query::plumbing::QueryVTable`.
+/// Those types are not visible from this `rustc_query_system` crate.
+///
+/// "Dispatcher" should be understood as a near-synonym of "vtable".
+pub trait QueryDispatcher<Qcx: QueryContext>: Copy {
     fn name(self) -> &'static str;
 
     // `Key` and `Value` are `Copy` instead of `Clone` to ensure copying them stays cheap,
diff --git a/compiler/rustc_query_system/src/query/mod.rs b/compiler/rustc_query_system/src/query/mod.rs
index 3ff980fa9bc5c..701253d50fcca 100644
--- a/compiler/rustc_query_system/src/query/mod.rs
+++ b/compiler/rustc_query_system/src/query/mod.rs
@@ -13,7 +13,7 @@ use rustc_span::Span;
 use rustc_span::def_id::DefId;
 
 pub use self::caches::{DefIdCache, DefaultCache, QueryCache, SingleCache, VecCache};
-pub use self::config::{HashResult, QueryConfig};
+pub use self::dispatcher::{HashResult, QueryDispatcher};
 pub use self::job::{
     QueryInfo, QueryJob, QueryJobId, QueryJobInfo, QueryMap, break_query_cycles, print_query_stack,
     report_cycle,
@@ -22,7 +22,7 @@ pub use self::plumbing::*;
 use crate::dep_graph::{DepKind, DepNodeIndex, HasDepContext, SerializedDepNodeIndex};
 
 mod caches;
-mod config;
+mod dispatcher;
 mod job;
 mod plumbing;
 
diff --git a/compiler/rustc_query_system/src/query/plumbing.rs b/compiler/rustc_query_system/src/query/plumbing.rs
index 5be4ee1452082..9afad1546e9eb 100644
--- a/compiler/rustc_query_system/src/query/plumbing.rs
+++ b/compiler/rustc_query_system/src/query/plumbing.rs
@@ -18,7 +18,7 @@ use rustc_errors::{Diag, FatalError, StashKey};
 use rustc_span::{DUMMY_SP, Span};
 use tracing::instrument;
 
-use super::{QueryConfig, QueryStackFrameExtra};
+use super::{QueryDispatcher, QueryStackFrameExtra};
 use crate::dep_graph::{DepContext, DepGraphData, DepNode, DepNodeIndex, DepNodeParams};
 use crate::ich::StableHashingContext;
 use crate::query::caches::QueryCache;
@@ -126,7 +126,7 @@ where
 #[inline(never)]
 fn mk_cycle<Q, Qcx>(query: Q, qcx: Qcx, cycle_error: CycleError) -> Q::Value
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     let error = report_cycle(qcx.dep_context().sess(), &cycle_error);
@@ -140,7 +140,7 @@ fn handle_cycle_error<Q, Qcx>(
     error: Diag<'_>,
 ) -> Q::Value
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     match query.cycle_error_handling() {
@@ -279,7 +279,7 @@ fn cycle_error<Q, Qcx>(
     span: Span,
 ) -> (Q::Value, Option<DepNodeIndex>)
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     // Ensure there was no errors collecting all active jobs.
@@ -300,7 +300,7 @@ fn wait_for_query<Q, Qcx>(
     current: Option<QueryJobId>,
 ) -> (Q::Value, Option<DepNodeIndex>)
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     // For parallel queries, we'll block and wait until the query running
@@ -349,7 +349,7 @@ fn try_execute_query<Q, Qcx>(
     dep_node: Option<DepNode>,
 ) -> (Q::Value, Option<DepNodeIndex>)
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     let state = query.query_state(qcx);
@@ -421,7 +421,7 @@ fn execute_job<Q, Qcx>(
     dep_node: Option<DepNode>,
 ) -> (Q::Value, Option<DepNodeIndex>)
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     // Use `JobOwner` so the query will be poisoned if executing it panics.
@@ -491,7 +491,7 @@ fn execute_job_non_incr<Q, Qcx>(
     job_id: QueryJobId,
 ) -> (Q::Value, DepNodeIndex)
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     debug_assert!(!qcx.dep_context().dep_graph().is_fully_enabled());
@@ -530,7 +530,7 @@ fn execute_job_incr<Q, Qcx>(
     job_id: QueryJobId,
 ) -> (Q::Value, DepNodeIndex)
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     if !query.anon() && !query.eval_always() {
@@ -585,7 +585,7 @@ fn try_load_from_disk_and_cache_in_memory<Q, Qcx>(
     dep_node: &DepNode,
 ) -> Option<(Q::Value, DepNodeIndex)>
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     // Note this function can be called concurrently from the same query
@@ -771,7 +771,7 @@ fn ensure_must_run<Q, Qcx>(
     check_cache: bool,
 ) -> (bool, Option<DepNode>)
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     if query.eval_always() {
@@ -819,7 +819,7 @@ pub enum QueryMode {
 #[inline(always)]
 pub fn get_query_non_incr<Q, Qcx>(query: Q, qcx: Qcx, span: Span, key: Q::Key) -> Q::Value
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     debug_assert!(!qcx.dep_context().dep_graph().is_fully_enabled());
@@ -836,7 +836,7 @@ pub fn get_query_incr<Q, Qcx>(
     mode: QueryMode,
 ) -> Option<Q::Value>
 where
-    Q: QueryConfig<Qcx>,
+    Q: QueryDispatcher<Qcx>,
     Qcx: QueryContext,
 {
     debug_assert!(qcx.dep_context().dep_graph().is_fully_enabled());
@@ -862,7 +862,7 @@ where
 
 pub fn force_query(query: Q, qcx: Qcx, key: Q::Key, dep_node: DepNode)
 where
-    Q: QueryConfig,
+    Q: QueryDispatcher,
     Qcx: QueryContext,
 {
     // We may be concurrently trying both execute and force a query.

From c8975a2468eef8fab7572f5a50ff89531ec3ec1b Mon Sep 17 00:00:00 2001
From: Jonathan Brouwer 
Date: Tue, 27 Jan 2026 13:05:27 +0100
Subject: [PATCH 121/131] Add `extern core` to diagnostic tests

---
 .../diagnostic-derive-doc-comment-field.rs    |  1 +
 .../session-diagnostic/diagnostic-derive.rs   |  8 +++---
 .../subdiagnostic-derive.rs                   | 25 +++++++------------
 3 files changed, 15 insertions(+), 19 deletions(-)

diff --git a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.rs b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.rs
index 37f78a7777c46..4f08c5327c153 100644
--- a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.rs
+++ b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.rs
@@ -18,6 +18,7 @@ extern crate rustc_fluent_macro;
 extern crate rustc_macros;
 extern crate rustc_session;
 extern crate rustc_span;
+extern crate core;
 
 use rustc_errors::{Applicability, DiagMessage, SubdiagMessage};
 use rustc_macros::{Diagnostic, Subdiagnostic};
diff --git a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.rs b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.rs
index fa2d037064d29..fcae379d982fc 100644
--- a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.rs
+++ b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.rs
@@ -29,6 +29,8 @@ use rustc_errors::{Applicability, DiagMessage, ErrCode, MultiSpan, SubdiagMessag
 
 extern crate rustc_session;
 
+extern crate core;
+
 rustc_fluent_macro::fluent_messages! { "./example.ftl" }
 
 // E0123 and E0456 are no longer used, so we define our own constants here just for this test.
@@ -56,7 +58,7 @@ enum DiagnosticOnEnum {
 #[derive(Diagnostic)]
 #[diag(no_crate_example, code = E0123)]
 #[diag = "E0123"]
-//~^ ERROR failed to resolve: you might be missing crate `core`
+//~^ ERROR expected parentheses: #[diag(...)]
 struct WrongStructAttrStyle {}
 
 #[derive(Diagnostic)]
@@ -801,7 +803,7 @@ struct SuggestionsNoItem {
 struct SuggestionsInvalidItem {
     #[suggestion(code(foo))]
     //~^ ERROR `code(...)` must contain only string literals
-    //~| ERROR failed to resolve: you might be missing crate `core`
+    //~| ERROR unexpected token, expected `)`
     sub: Span,
 }
 
@@ -809,7 +811,7 @@ struct SuggestionsInvalidItem {
 #[diag(no_crate_example)]
 struct SuggestionsInvalidLiteral {
     #[suggestion(code = 3)]
-    //~^ ERROR failed to resolve: you might be missing crate `core`
+    //~^ ERROR expected string literal
     sub: Span,
 }
 
diff --git a/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.rs b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.rs
index c837372a7a7a7..6481e8a3307c3 100644
--- a/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.rs
+++ b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.rs
@@ -16,6 +16,7 @@ extern crate rustc_fluent_macro;
 extern crate rustc_macros;
 extern crate rustc_session;
 extern crate rustc_span;
+extern crate core;
 
 use rustc_errors::{Applicability, DiagMessage, SubdiagMessage};
 use rustc_macros::Subdiagnostic;
@@ -94,8 +95,7 @@ struct G {
 
 #[derive(Subdiagnostic)]
 #[label("...")]
-//~^ ERROR failed to resolve: you might be missing crate `core`
-//~| NOTE you might be missing crate `core`
+//~^ ERROR unexpected literal in nested attribute, expected ident
 struct H {
     #[primary_span]
     span: Span,
@@ -310,8 +310,7 @@ struct AB {
 
 #[derive(Subdiagnostic)]
 union AC {
-    //~^ ERROR failed to resolve: you might be missing crate `core`
-    //~| NOTE you might be missing crate `core`
+    //~^ ERROR unexpected unsupported untagged union
     span: u32,
     b: u64,
 }
@@ -581,8 +580,7 @@ struct BD {
     span2: Span,
     #[suggestion_part(foo = "bar")]
     //~^ ERROR `code` is the only valid nested attribute
-    //~| ERROR failed to resolve: you might be missing crate `core`
-    //~| NOTE you might be missing crate `core`
+    //~| ERROR expected `,`
     span4: Span,
     #[suggestion_part(code = "...")]
     //~^ ERROR the `#[suggestion_part(...)]` attribute can only be applied to fields of type `Span` or `MultiSpan`
@@ -674,8 +672,7 @@ enum BL {
 struct BM {
     #[suggestion_part(code("foo"))]
     //~^ ERROR expected exactly one string literal for `code = ...`
-    //~| ERROR failed to resolve: you might be missing crate `core`
-    //~| NOTE you might be missing crate `core`
+    //~| ERROR unexpected token, expected `)`
     span: Span,
     r#type: String,
 }
@@ -685,8 +682,7 @@ struct BM {
 struct BN {
     #[suggestion_part(code("foo", "bar"))]
     //~^ ERROR expected exactly one string literal for `code = ...`
-    //~| ERROR failed to resolve: you might be missing crate `core`
-    //~| NOTE you might be missing crate `core`
+    //~| ERROR unexpected token, expected `)`
     span: Span,
     r#type: String,
 }
@@ -696,8 +692,7 @@ struct BN {
 struct BO {
     #[suggestion_part(code(3))]
     //~^ ERROR expected exactly one string literal for `code = ...`
-    //~| ERROR failed to resolve: you might be missing crate `core`
-    //~| NOTE you might be missing crate `core`
+    //~| ERROR unexpected token, expected `)`
     span: Span,
     r#type: String,
 }
@@ -718,8 +713,7 @@ struct BP {
 #[multipart_suggestion(no_crate_example)]
 struct BQ {
     #[suggestion_part(code = 3)]
-    //~^ ERROR failed to resolve: you might be missing crate `core`
-    //~| NOTE you might be missing crate `core`
+    //~^ ERROR expected string literal
     span: Span,
     r#type: String,
 }
@@ -811,8 +805,7 @@ struct SuggestionStyleInvalid3 {
 #[derive(Subdiagnostic)]
 #[suggestion(no_crate_example, code = "", style("foo"))]
 //~^ ERROR expected `= "xxx"`
-//~| ERROR failed to resolve: you might be missing crate `core`
-//~| NOTE you might be missing crate `core`
+//~| ERROR expected `,`
 struct SuggestionStyleInvalid4 {
     #[primary_span]
     sub: Span,

From c420ed5632d90b793a5ab75f47129af7cf312923 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Tue, 27 Jan 2026 14:03:16 +0100
Subject: [PATCH 122/131] fix: Do not panic if rust-analyzer fails to spawn the
 discover command

---
 .../crates/rust-analyzer/src/command.rs       |  8 +++--
 .../crates/rust-analyzer/src/discover.rs      |  4 +--
 .../crates/rust-analyzer/src/main_loop.rs     | 34 ++++++++-----------
 .../crates/rust-analyzer/src/test_runner.rs   |  2 +-
 4 files changed, 24 insertions(+), 24 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs
index 2f052618cdfab..49ce6db4ea9a3 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs
@@ -10,6 +10,7 @@ use std::{
     process::{ChildStderr, ChildStdout, Command, Stdio},
 };
 
+use anyhow::Context;
 use crossbeam_channel::Sender;
 use paths::Utf8PathBuf;
 use process_wrap::std::{StdChildWrapper, StdCommandWrap};
@@ -156,7 +157,7 @@ impl CommandHandle {
         parser: impl JsonLinesParser,
         sender: Sender,
         out_file: Option,
-    ) -> std::io::Result {
+    ) -> anyhow::Result {
         command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
 
         let program = command.get_program().into();
@@ -168,7 +169,10 @@ impl CommandHandle {
         child.wrap(process_wrap::std::ProcessSession);
         #[cfg(windows)]
         child.wrap(process_wrap::std::JobObject);
-        let mut child = child.spawn().map(JodGroupChild)?;
+        let mut child = child
+            .spawn()
+            .map(JodGroupChild)
+            .with_context(|| "Failed to spawn command: {child:?}")?;
 
         let stdout = child.0.stdout().take().unwrap();
         let stderr = child.0.stderr().take().unwrap();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
index f129f156a030a..098b6a4d986d7 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
@@ -1,6 +1,6 @@
 //! Infrastructure for lazy project discovery. Currently only support rust-project.json discovery
 //! via a custom discover command.
-use std::{io, path::Path};
+use std::path::Path;
 
 use crossbeam_channel::Sender;
 use ide_db::FxHashMap;
@@ -47,7 +47,7 @@ impl DiscoverCommand {
         &self,
         discover_arg: DiscoverArgument,
         current_dir: &Path,
-    ) -> io::Result {
+    ) -> anyhow::Result {
         let command = &self.command[0];
         let args = &self.command[1..];
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index 71c4b2accce98..f5cead5d8f9d3 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -825,33 +825,29 @@ impl GlobalState {
             }
             Task::DiscoverLinkedProjects(arg) => {
                 if let Some(cfg) = self.config.discover_workspace_config() {
-                    // the clone is unfortunately necessary to avoid a borrowck error when
-                    // `self.report_progress` is called later
-                    let title = &cfg.progress_label.clone();
                     let command = cfg.command.clone();
                     let discover = DiscoverCommand::new(self.discover_sender.clone(), command);
 
-                    if self.discover_jobs_active == 0 {
-                        self.report_progress(title, Progress::Begin, None, None, None);
-                    }
-                    self.discover_jobs_active += 1;
-
                     let arg = match arg {
                         DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it),
                         DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it),
                     };
 
-                    let handle = discover
-                        .spawn(
-                            arg,
-                            &std::env::current_dir()
-                                .expect("Failed to get cwd during project discovery"),
-                        )
-                        .unwrap_or_else(|e| {
-                            panic!("Failed to spawn project discovery command: {e}")
-                        });
-
-                    self.discover_handles.push(handle);
+                    match discover.spawn(arg, self.config.root_path().as_ref()) {
+                        Ok(handle) => {
+                            if self.discover_jobs_active == 0 {
+                                let title = &cfg.progress_label.clone();
+                                self.report_progress(title, Progress::Begin, None, None, None);
+                            }
+                            self.discover_jobs_active += 1;
+                            self.discover_handles.push(handle)
+                        }
+                        Err(e) => self.show_message(
+                            lsp_types::MessageType::ERROR,
+                            format!("Failed to spawn project discovery command: {e:#}"),
+                            false,
+                        ),
+                    }
                 }
             }
             Task::FetchBuildData(progress) => {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs
index f0020f9088e3f..0d9c8310d8587 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs
@@ -101,7 +101,7 @@ impl CargoTestHandle {
         ws_target_dir: Option<&Utf8Path>,
         test_target: TestTarget,
         sender: Sender,
-    ) -> std::io::Result {
+    ) -> anyhow::Result {
         let mut cmd = toolchain::command(Tool::Cargo.path(), root, &options.extra_env);
         cmd.env("RUSTC_BOOTSTRAP", "1");
         cmd.arg("--color=always");

From bf12b8bca5e4e6c313f043de0301e68fdb7cec7d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= 
Date: Tue, 27 Jan 2026 15:19:00 +0200
Subject: [PATCH 123/131] Fix linking of proc-macro-srv-cli

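When the `in-rust-tree` feature is enabled, the CLI crate and its integration
tests use `rustc_private`, so they must link `rustc_driver` to make the
compiler's dylibs available at link time. A minimal sketch of the gating
pattern the diff below adds at the crate roots (names taken from the diff;
nothing else in the crates changes):

```rust
// Only enable rustc_private when building inside the rust-lang/rust tree.
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

// Linking rustc_driver (unused, hence `as _`) pulls in the compiler's shared
// libraries so the rustc_private crates resolve at link time.
#[cfg(feature = "in-rust-tree")]
extern crate rustc_driver as _;
```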
---
 src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/lib.rs | 5 +++++
 .../crates/proc-macro-srv-cli/tests/legacy_json.rs           | 4 ++++
 2 files changed, 9 insertions(+)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/lib.rs
index 9e6f03bf46046..8475c05ae8a1e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/lib.rs
@@ -2,5 +2,10 @@
 //!
 //! This module exposes the server main loop and protocol format for integration testing.
 
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_driver as _;
+
 #[cfg(feature = "sysroot-abi")]
 pub mod main_loop;
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs
index c0dbfd1679f7b..562cf0c2516f4 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/legacy_json.rs
@@ -4,6 +4,10 @@
 //! channels without needing to spawn the actual server and client processes.
 
 #![cfg(feature = "sysroot-abi")]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_driver as _;
 
 mod common {
     pub(crate) mod utils;

From 3491099f88c40da8dcd7d19fde0e45ccde6b2d4e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= 
Date: Tue, 27 Jan 2026 15:56:27 +0200
Subject: [PATCH 124/131] Prepare for merging from rust-lang/rust

This updates the rust-version file to 94a0cd15f5976fa35e5e6784e621c04e9f958e57.
---
 src/tools/rust-analyzer/rust-version | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version
index 1fe86330b4a8e..17b678eed936b 100644
--- a/src/tools/rust-analyzer/rust-version
+++ b/src/tools/rust-analyzer/rust-version
@@ -1 +1 @@
-004d710faff53f8764a1cf69d87a5a5963850b60
+94a0cd15f5976fa35e5e6784e621c04e9f958e57

From c5faba78eedbb5e065f2b3c5ecbd494b1c1b1f4c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= 
Date: Tue, 27 Jan 2026 16:40:49 +0200
Subject: [PATCH 125/131] Fix sysroot-abi build

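With `sysroot-abi` enabled but `in-rust-tree` disabled, there is no
`rustc_proc_macro` crate to link, so the toolchain's own `proc_macro` is
aliased to that name; in-tree builds keep using the real `rustc_proc_macro`
from `rustc_private`. A sketch of the cfg split the diff below introduces
(taken directly from the diff; the rest of lib.rs is unchanged):

```rust
// Out-of-tree builds: alias the stable proc_macro crate so the rest of the
// code can refer to `rustc_proc_macro` uniformly.
#[cfg(not(feature = "in-rust-tree"))]
extern crate proc_macro as rustc_proc_macro;

// In-tree builds: link rustc_driver for the compiler dylibs and use the
// actual rustc_proc_macro crate.
#[cfg(feature = "in-rust-tree")]
extern crate rustc_driver as _;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_proc_macro;
```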
---
 src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
index 77c3809e5d143..e04f744ae2b01 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
@@ -22,8 +22,11 @@
 )]
 #![deny(deprecated_safe, clippy::undocumented_unsafe_blocks)]
 
+#[cfg(not(feature = "in-rust-tree"))]
+extern crate proc_macro as rustc_proc_macro;
 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_driver as _;
+#[cfg(feature = "in-rust-tree")]
 extern crate rustc_proc_macro;
 
 #[cfg(not(feature = "in-rust-tree"))]

From 4a0c044c63bde5ba242772bc8c712444c29e4176 Mon Sep 17 00:00:00 2001
From: Jonathan Brouwer 
Date: Tue, 27 Jan 2026 13:05:38 +0100
Subject: [PATCH 126/131] Update stderrs

---
 ...diagnostic-derive-doc-comment-field.stderr |   8 +-
 .../diagnostic-derive.stderr                  | 218 +++++++-------
 .../subdiagnostic-derive-2.rs                 |  37 +++
 .../subdiagnostic-derive-2.stderr             |   9 +
 .../subdiagnostic-derive.rs                   |  13 -
 .../subdiagnostic-derive.stderr               | 272 +++++++++---------
 6 files changed, 289 insertions(+), 268 deletions(-)
 create mode 100644 tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive-2.rs
 create mode 100644 tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive-2.stderr

diff --git a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.stderr b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.stderr
index 316f23888bc1f..0b00e098f6d4a 100644
--- a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.stderr
+++ b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive-doc-comment-field.stderr
@@ -1,5 +1,5 @@
 error[E0277]: the trait bound `NotIntoDiagArg: IntoDiagArg` is not satisfied
-  --> $DIR/diagnostic-derive-doc-comment-field.rs:36:10
+  --> $DIR/diagnostic-derive-doc-comment-field.rs:37:10
    |
 LL | #[derive(Diagnostic)]
    |          ---------- required by a bound introduced by this call
@@ -8,7 +8,7 @@ LL |     arg: NotIntoDiagArg,
    |          ^^^^^^^^^^^^^^ unsatisfied trait bound
    |
 help: the nightly-only, unstable trait `IntoDiagArg` is not implemented for `NotIntoDiagArg`
-  --> $DIR/diagnostic-derive-doc-comment-field.rs:28:1
+  --> $DIR/diagnostic-derive-doc-comment-field.rs:29:1
    |
 LL | struct NotIntoDiagArg;
    | ^^^^^^^^^^^^^^^^^^^^^
@@ -21,7 +21,7 @@ note: required by a bound in `Diag::<'a, G>::arg`
    = note: this error originates in the macro `with_fn` (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0277]: the trait bound `NotIntoDiagArg: IntoDiagArg` is not satisfied
-  --> $DIR/diagnostic-derive-doc-comment-field.rs:46:10
+  --> $DIR/diagnostic-derive-doc-comment-field.rs:47:10
    |
 LL | #[derive(Subdiagnostic)]
    |          ------------- required by a bound introduced by this call
@@ -30,7 +30,7 @@ LL |     arg: NotIntoDiagArg,
    |          ^^^^^^^^^^^^^^ unsatisfied trait bound
    |
 help: the nightly-only, unstable trait `IntoDiagArg` is not implemented for `NotIntoDiagArg`
-  --> $DIR/diagnostic-derive-doc-comment-field.rs:28:1
+  --> $DIR/diagnostic-derive-doc-comment-field.rs:29:1
    |
 LL | struct NotIntoDiagArg;
    | ^^^^^^^^^^^^^^^^^^^^^
diff --git a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.stderr b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.stderr
index 77c48aceca8ec..cf5c0c2e6491f 100644
--- a/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.stderr
+++ b/tests/ui-fulldeps/session-diagnostic/diagnostic-derive.stderr
@@ -1,11 +1,11 @@
 error: derive(Diagnostic): unsupported type attribute for diagnostic derive enum
-  --> $DIR/diagnostic-derive.rs:47:1
+  --> $DIR/diagnostic-derive.rs:49:1
    |
 LL | #[diag(no_crate_example, code = E0123)]
    | ^
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:50:5
+  --> $DIR/diagnostic-derive.rs:52:5
    |
 LL |     Foo,
    |     ^^^
@@ -13,21 +13,27 @@ LL |     Foo,
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:52:5
+  --> $DIR/diagnostic-derive.rs:54:5
    |
 LL |     Bar,
    |     ^^^
    |
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
+error: expected parentheses: #[diag(...)]
+  --> $DIR/diagnostic-derive.rs:60:8
+   |
+LL | #[diag = "E0123"]
+   |        ^
+
 error: derive(Diagnostic): `#[nonsense(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:63:1
+  --> $DIR/diagnostic-derive.rs:65:1
    |
 LL | #[nonsense(no_crate_example, code = E0123)]
    | ^
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:63:1
+  --> $DIR/diagnostic-derive.rs:65:1
    |
 LL | #[nonsense(no_crate_example, code = E0123)]
    | ^
@@ -35,7 +41,7 @@ LL | #[nonsense(no_crate_example, code = E0123)]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:70:1
+  --> $DIR/diagnostic-derive.rs:72:1
    |
 LL | #[diag(code = E0123)]
    | ^
@@ -43,13 +49,13 @@ LL | #[diag(code = E0123)]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): diagnostic slug must be the first argument
-  --> $DIR/diagnostic-derive.rs:80:16
+  --> $DIR/diagnostic-derive.rs:82:16
    |
 LL | #[diag(nonsense("foo"), code = E0123, slug = "foo")]
    |                ^
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:80:1
+  --> $DIR/diagnostic-derive.rs:82:1
    |
 LL | #[diag(nonsense("foo"), code = E0123, slug = "foo")]
    | ^
@@ -57,7 +63,7 @@ LL | #[diag(nonsense("foo"), code = E0123, slug = "foo")]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): unknown argument
-  --> $DIR/diagnostic-derive.rs:86:8
+  --> $DIR/diagnostic-derive.rs:88:8
    |
 LL | #[diag(nonsense = "...", code = E0123, slug = "foo")]
    |        ^^^^^^^^
@@ -65,7 +71,7 @@ LL | #[diag(nonsense = "...", code = E0123, slug = "foo")]
    = note: only the `code` parameter is valid after the slug
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:86:1
+  --> $DIR/diagnostic-derive.rs:88:1
    |
 LL | #[diag(nonsense = "...", code = E0123, slug = "foo")]
    | ^
@@ -73,7 +79,7 @@ LL | #[diag(nonsense = "...", code = E0123, slug = "foo")]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): unknown argument
-  --> $DIR/diagnostic-derive.rs:92:8
+  --> $DIR/diagnostic-derive.rs:94:8
    |
 LL | #[diag(nonsense = 4, code = E0123, slug = "foo")]
    |        ^^^^^^^^
@@ -81,7 +87,7 @@ LL | #[diag(nonsense = 4, code = E0123, slug = "foo")]
    = note: only the `code` parameter is valid after the slug
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:92:1
+  --> $DIR/diagnostic-derive.rs:94:1
    |
 LL | #[diag(nonsense = 4, code = E0123, slug = "foo")]
    | ^
@@ -89,7 +95,7 @@ LL | #[diag(nonsense = 4, code = E0123, slug = "foo")]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): unknown argument
-  --> $DIR/diagnostic-derive.rs:98:40
+  --> $DIR/diagnostic-derive.rs:100:40
    |
 LL | #[diag(no_crate_example, code = E0123, slug = "foo")]
    |                                        ^^^^
@@ -97,55 +103,55 @@ LL | #[diag(no_crate_example, code = E0123, slug = "foo")]
    = note: only the `code` parameter is valid after the slug
 
 error: derive(Diagnostic): `#[suggestion = ...]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:105:5
+  --> $DIR/diagnostic-derive.rs:107:5
    |
 LL |     #[suggestion = "bar"]
    |     ^
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/diagnostic-derive.rs:112:8
+  --> $DIR/diagnostic-derive.rs:114:8
    |
 LL | #[diag(no_crate_example, code = E0456)]
    |        ^^^^^^^^^^^^^^^^
    |
 note: previously specified here
-  --> $DIR/diagnostic-derive.rs:111:8
+  --> $DIR/diagnostic-derive.rs:113:8
    |
 LL | #[diag(no_crate_example, code = E0123)]
    |        ^^^^^^^^^^^^^^^^
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/diagnostic-derive.rs:112:26
+  --> $DIR/diagnostic-derive.rs:114:26
    |
 LL | #[diag(no_crate_example, code = E0456)]
    |                          ^^^^
    |
 note: previously specified here
-  --> $DIR/diagnostic-derive.rs:111:26
+  --> $DIR/diagnostic-derive.rs:113:26
    |
 LL | #[diag(no_crate_example, code = E0123)]
    |                          ^^^^
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/diagnostic-derive.rs:118:40
+  --> $DIR/diagnostic-derive.rs:120:40
    |
 LL | #[diag(no_crate_example, code = E0123, code = E0456)]
    |                                        ^^^^
    |
 note: previously specified here
-  --> $DIR/diagnostic-derive.rs:118:26
+  --> $DIR/diagnostic-derive.rs:120:26
    |
 LL | #[diag(no_crate_example, code = E0123, code = E0456)]
    |                          ^^^^
 
 error: derive(Diagnostic): diagnostic slug must be the first argument
-  --> $DIR/diagnostic-derive.rs:123:43
+  --> $DIR/diagnostic-derive.rs:125:43
    |
 LL | #[diag(no_crate_example, no_crate::example, code = E0123)]
    |                                           ^
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:128:1
+  --> $DIR/diagnostic-derive.rs:130:1
    |
 LL | struct KindNotProvided {}
    | ^^^^^^
@@ -153,7 +159,7 @@ LL | struct KindNotProvided {}
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:131:1
+  --> $DIR/diagnostic-derive.rs:133:1
    |
 LL | #[diag(code = E0123)]
    | ^
@@ -161,31 +167,31 @@ LL | #[diag(code = E0123)]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): the `#[primary_span]` attribute can only be applied to fields of type `Span` or `MultiSpan`
-  --> $DIR/diagnostic-derive.rs:142:5
+  --> $DIR/diagnostic-derive.rs:144:5
    |
 LL |     #[primary_span]
    |     ^
 
 error: derive(Diagnostic): `#[nonsense]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:150:5
+  --> $DIR/diagnostic-derive.rs:152:5
    |
 LL |     #[nonsense]
    |     ^
 
 error: derive(Diagnostic): the `#[label(...)]` attribute can only be applied to fields of type `Span` or `MultiSpan`
-  --> $DIR/diagnostic-derive.rs:167:5
+  --> $DIR/diagnostic-derive.rs:169:5
    |
 LL |     #[label(no_crate_label)]
    |     ^
 
 error: derive(Diagnostic): `name` doesn't refer to a field on this type
-  --> $DIR/diagnostic-derive.rs:175:46
+  --> $DIR/diagnostic-derive.rs:177:46
    |
 LL |     #[suggestion(no_crate_suggestion, code = "{name}")]
    |                                              ^^^^^^^^
 
 error: invalid format string: expected `}` but string was terminated
-  --> $DIR/diagnostic-derive.rs:180:10
+  --> $DIR/diagnostic-derive.rs:182:10
    |
 LL | #[derive(Diagnostic)]
    |          ^^^^^^^^^^ expected `}` in format string
@@ -194,7 +200,7 @@ LL | #[derive(Diagnostic)]
    = note: this error originates in the derive macro `Diagnostic` (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: invalid format string: unmatched `}` found
-  --> $DIR/diagnostic-derive.rs:190:10
+  --> $DIR/diagnostic-derive.rs:192:10
    |
 LL | #[derive(Diagnostic)]
    |          ^^^^^^^^^^ unmatched `}` in format string
@@ -203,19 +209,19 @@ LL | #[derive(Diagnostic)]
    = note: this error originates in the derive macro `Diagnostic` (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: derive(Diagnostic): the `#[label(...)]` attribute can only be applied to fields of type `Span` or `MultiSpan`
-  --> $DIR/diagnostic-derive.rs:210:5
+  --> $DIR/diagnostic-derive.rs:212:5
    |
 LL |     #[label(no_crate_label)]
    |     ^
 
 error: derive(Diagnostic): suggestion without `code = "..."`
-  --> $DIR/diagnostic-derive.rs:229:5
+  --> $DIR/diagnostic-derive.rs:231:5
    |
 LL |     #[suggestion(no_crate_suggestion)]
    |     ^
 
 error: derive(Diagnostic): invalid nested attribute
-  --> $DIR/diagnostic-derive.rs:237:18
+  --> $DIR/diagnostic-derive.rs:239:18
    |
 LL |     #[suggestion(nonsense = "bar")]
    |                  ^^^^^^^^
@@ -223,13 +229,13 @@ LL |     #[suggestion(nonsense = "bar")]
    = help: only `no_span`, `style`, `code` and `applicability` are valid nested attributes
 
 error: derive(Diagnostic): suggestion without `code = "..."`
-  --> $DIR/diagnostic-derive.rs:237:5
+  --> $DIR/diagnostic-derive.rs:239:5
    |
 LL |     #[suggestion(nonsense = "bar")]
    |     ^
 
 error: derive(Diagnostic): invalid nested attribute
-  --> $DIR/diagnostic-derive.rs:246:18
+  --> $DIR/diagnostic-derive.rs:248:18
    |
 LL |     #[suggestion(msg = "bar")]
    |                  ^^^
@@ -237,13 +243,13 @@ LL |     #[suggestion(msg = "bar")]
    = help: only `no_span`, `style`, `code` and `applicability` are valid nested attributes
 
 error: derive(Diagnostic): suggestion without `code = "..."`
-  --> $DIR/diagnostic-derive.rs:246:5
+  --> $DIR/diagnostic-derive.rs:248:5
    |
 LL |     #[suggestion(msg = "bar")]
    |     ^
 
 error: derive(Diagnostic): wrong field type for suggestion
-  --> $DIR/diagnostic-derive.rs:269:5
+  --> $DIR/diagnostic-derive.rs:271:5
    |
 LL |     #[suggestion(no_crate_suggestion, code = "This is suggested code")]
    |     ^
@@ -251,79 +257,79 @@ LL |     #[suggestion(no_crate_suggestion, code = "This is suggested code")]
    = help: `#[suggestion(...)]` should be applied to fields of type `Span` or `(Span, Applicability)`
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/diagnostic-derive.rs:285:24
+  --> $DIR/diagnostic-derive.rs:287:24
    |
 LL |     suggestion: (Span, Span, Applicability),
    |                        ^^^^
    |
 note: previously specified here
-  --> $DIR/diagnostic-derive.rs:285:18
+  --> $DIR/diagnostic-derive.rs:287:18
    |
 LL |     suggestion: (Span, Span, Applicability),
    |                  ^^^^
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/diagnostic-derive.rs:293:33
+  --> $DIR/diagnostic-derive.rs:295:33
    |
 LL |     suggestion: (Applicability, Applicability, Span),
    |                                 ^^^^^^^^^^^^^
    |
 note: previously specified here
-  --> $DIR/diagnostic-derive.rs:293:18
+  --> $DIR/diagnostic-derive.rs:295:18
    |
 LL |     suggestion: (Applicability, Applicability, Span),
    |                  ^^^^^^^^^^^^^
 
 error: derive(Diagnostic): `#[label = ...]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:300:5
+  --> $DIR/diagnostic-derive.rs:302:5
    |
 LL |     #[label = "bar"]
    |     ^
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/diagnostic-derive.rs:451:5
+  --> $DIR/diagnostic-derive.rs:453:5
    |
 LL |     #[suggestion(no_crate_suggestion, code = "...", applicability = "maybe-incorrect")]
    |     ^
    |
 note: previously specified here
-  --> $DIR/diagnostic-derive.rs:453:24
+  --> $DIR/diagnostic-derive.rs:455:24
    |
 LL |     suggestion: (Span, Applicability),
    |                        ^^^^^^^^^^^^^
 
 error: derive(Diagnostic): invalid applicability
-  --> $DIR/diagnostic-derive.rs:459:69
+  --> $DIR/diagnostic-derive.rs:461:69
    |
 LL |     #[suggestion(no_crate_suggestion, code = "...", applicability = "batman")]
    |                                                                     ^^^^^^^^
 
 error: derive(Diagnostic): the `#[help(...)]` attribute can only be applied to fields of type `Span`, `MultiSpan`, `bool` or `()`
-  --> $DIR/diagnostic-derive.rs:526:5
+  --> $DIR/diagnostic-derive.rs:528:5
    |
 LL |     #[help(no_crate_help)]
    |     ^
 
 error: derive(Diagnostic): a diagnostic slug must be the first argument to the attribute
-  --> $DIR/diagnostic-derive.rs:535:32
+  --> $DIR/diagnostic-derive.rs:537:32
    |
 LL |     #[label(no_crate_label, foo)]
    |                                ^
 
 error: derive(Diagnostic): only `no_span` is a valid nested attribute
-  --> $DIR/diagnostic-derive.rs:543:29
+  --> $DIR/diagnostic-derive.rs:545:29
    |
 LL |     #[label(no_crate_label, foo = "...")]
    |                             ^^^
 
 error: derive(Diagnostic): only `no_span` is a valid nested attribute
-  --> $DIR/diagnostic-derive.rs:551:29
+  --> $DIR/diagnostic-derive.rs:553:29
    |
 LL |     #[label(no_crate_label, foo("..."))]
    |                             ^^^
 
 error: derive(Diagnostic): `#[primary_span]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:563:5
+  --> $DIR/diagnostic-derive.rs:565:5
    |
 LL |     #[primary_span]
    |     ^
@@ -331,13 +337,13 @@ LL |     #[primary_span]
    = help: the `primary_span` field attribute is not valid for lint diagnostics
 
 error: derive(Diagnostic): `#[error(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:583:1
+  --> $DIR/diagnostic-derive.rs:585:1
    |
 LL | #[error(no_crate_example, code = E0123)]
    | ^
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:583:1
+  --> $DIR/diagnostic-derive.rs:585:1
    |
 LL | #[error(no_crate_example, code = E0123)]
    | ^
@@ -345,13 +351,13 @@ LL | #[error(no_crate_example, code = E0123)]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): `#[warn_(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:590:1
+  --> $DIR/diagnostic-derive.rs:592:1
    |
 LL | #[warn_(no_crate_example, code = E0123)]
    | ^
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:590:1
+  --> $DIR/diagnostic-derive.rs:592:1
    |
 LL | #[warn_(no_crate_example, code = E0123)]
    | ^
@@ -359,13 +365,13 @@ LL | #[warn_(no_crate_example, code = E0123)]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): `#[lint(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:597:1
+  --> $DIR/diagnostic-derive.rs:599:1
    |
 LL | #[lint(no_crate_example, code = E0123)]
    | ^
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:597:1
+  --> $DIR/diagnostic-derive.rs:599:1
    |
 LL | #[lint(no_crate_example, code = E0123)]
    | ^
@@ -373,13 +379,13 @@ LL | #[lint(no_crate_example, code = E0123)]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): `#[lint(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:604:1
+  --> $DIR/diagnostic-derive.rs:606:1
    |
 LL | #[lint(no_crate_example, code = E0123)]
    | ^
 
 error: derive(Diagnostic): diagnostic slug not specified
-  --> $DIR/diagnostic-derive.rs:604:1
+  --> $DIR/diagnostic-derive.rs:606:1
    |
 LL | #[lint(no_crate_example, code = E0123)]
    | ^
@@ -387,19 +393,19 @@ LL | #[lint(no_crate_example, code = E0123)]
    = help: specify the slug as the first argument to the `#[diag(...)]` attribute, such as `#[diag(hir_analysis_example_error)]`
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/diagnostic-derive.rs:613:53
+  --> $DIR/diagnostic-derive.rs:615:53
    |
 LL |     #[suggestion(no_crate_suggestion, code = "...", code = ",,,")]
    |                                                     ^^^^
    |
 note: previously specified here
-  --> $DIR/diagnostic-derive.rs:613:39
+  --> $DIR/diagnostic-derive.rs:615:39
    |
 LL |     #[suggestion(no_crate_suggestion, code = "...", code = ",,,")]
    |                                       ^^^^
 
 error: derive(Diagnostic): wrong types for suggestion
-  --> $DIR/diagnostic-derive.rs:622:24
+  --> $DIR/diagnostic-derive.rs:624:24
    |
 LL |     suggestion: (Span, usize),
    |                        ^^^^^
@@ -407,7 +413,7 @@ LL |     suggestion: (Span, usize),
    = help: `#[suggestion(...)]` on a tuple field must be applied to fields of type `(Span, Applicability)`
 
 error: derive(Diagnostic): wrong types for suggestion
-  --> $DIR/diagnostic-derive.rs:630:17
+  --> $DIR/diagnostic-derive.rs:632:17
    |
 LL |     suggestion: (Span,),
    |                 ^^^^^^^
@@ -415,13 +421,13 @@ LL |     suggestion: (Span,),
    = help: `#[suggestion(...)]` on a tuple field must be applied to fields of type `(Span, Applicability)`
 
 error: derive(Diagnostic): suggestion without `code = "..."`
-  --> $DIR/diagnostic-derive.rs:637:5
+  --> $DIR/diagnostic-derive.rs:639:5
    |
 LL |     #[suggestion(no_crate_suggestion)]
    |     ^
 
 error: derive(Diagnostic): `#[multipart_suggestion(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:644:1
+  --> $DIR/diagnostic-derive.rs:646:1
    |
 LL | #[multipart_suggestion(no_crate_suggestion)]
    | ^
@@ -429,7 +435,7 @@ LL | #[multipart_suggestion(no_crate_suggestion)]
    = help: consider creating a `Subdiagnostic` instead
 
 error: derive(Diagnostic): `#[multipart_suggestion(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:647:1
+  --> $DIR/diagnostic-derive.rs:649:1
    |
 LL | #[multipart_suggestion()]
    | ^
@@ -437,7 +443,7 @@ LL | #[multipart_suggestion()]
    = help: consider creating a `Subdiagnostic` instead
 
 error: derive(Diagnostic): `#[multipart_suggestion(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:651:5
+  --> $DIR/diagnostic-derive.rs:653:5
    |
 LL |     #[multipart_suggestion(no_crate_suggestion)]
    |     ^
@@ -445,7 +451,7 @@ LL |     #[multipart_suggestion(no_crate_suggestion)]
    = help: consider creating a `Subdiagnostic` instead
 
 error: derive(Diagnostic): `#[suggestion(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:659:1
+  --> $DIR/diagnostic-derive.rs:661:1
    |
 LL | #[suggestion(no_crate_suggestion, code = "...")]
    | ^
@@ -453,7 +459,7 @@ LL | #[suggestion(no_crate_suggestion, code = "...")]
    = help: `#[label]` and `#[suggestion]` can only be applied to fields
 
 error: derive(Diagnostic): `#[label]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:668:1
+  --> $DIR/diagnostic-derive.rs:670:1
    |
 LL | #[label]
    | ^
@@ -461,61 +467,73 @@ LL | #[label]
    = help: `#[label]` and `#[suggestion]` can only be applied to fields
 
 error: derive(Diagnostic): `#[subdiagnostic(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:702:5
+  --> $DIR/diagnostic-derive.rs:704:5
    |
 LL |     #[subdiagnostic(bad)]
    |     ^
 
 error: derive(Diagnostic): `#[subdiagnostic = ...]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:710:5
+  --> $DIR/diagnostic-derive.rs:712:5
    |
 LL |     #[subdiagnostic = "bad"]
    |     ^
 
 error: derive(Diagnostic): `#[subdiagnostic(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:718:5
+  --> $DIR/diagnostic-derive.rs:720:5
    |
 LL |     #[subdiagnostic(bad, bad)]
    |     ^
 
 error: derive(Diagnostic): `#[subdiagnostic(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:726:5
+  --> $DIR/diagnostic-derive.rs:728:5
    |
 LL |     #[subdiagnostic("bad")]
    |     ^
 
 error: derive(Diagnostic): `#[subdiagnostic(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:734:5
+  --> $DIR/diagnostic-derive.rs:736:5
    |
 LL |     #[subdiagnostic(eager)]
    |     ^
 
 error: derive(Diagnostic): `#[subdiagnostic(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:742:5
+  --> $DIR/diagnostic-derive.rs:744:5
    |
 LL |     #[subdiagnostic(eager)]
    |     ^
 
 error: derive(Diagnostic): `#[subdiagnostic(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:763:5
+  --> $DIR/diagnostic-derive.rs:765:5
    |
 LL |     #[subdiagnostic(eager)]
    |     ^
 
 error: derive(Diagnostic): expected at least one string literal for `code(...)`
-  --> $DIR/diagnostic-derive.rs:794:23
+  --> $DIR/diagnostic-derive.rs:796:23
    |
 LL |     #[suggestion(code())]
    |                       ^
 
 error: derive(Diagnostic): `code(...)` must contain only string literals
-  --> $DIR/diagnostic-derive.rs:802:23
+  --> $DIR/diagnostic-derive.rs:804:23
    |
 LL |     #[suggestion(code(foo))]
    |                       ^^^
 
+error: unexpected token, expected `)`
+  --> $DIR/diagnostic-derive.rs:804:23
+   |
+LL |     #[suggestion(code(foo))]
+   |                       ^^^
+
+error: expected string literal
+  --> $DIR/diagnostic-derive.rs:813:25
+   |
+LL |     #[suggestion(code = 3)]
+   |                         ^
+
 error: derive(Diagnostic): `#[suggestion(...)]` is not a valid attribute
-  --> $DIR/diagnostic-derive.rs:826:5
+  --> $DIR/diagnostic-derive.rs:828:5
    |
 LL |     #[suggestion(no_crate_suggestion, code = "")]
    |     ^
@@ -524,38 +542,20 @@ LL |     #[suggestion(no_crate_suggestion, code = "")]
    = help: to show a suggestion consisting of multiple parts, use a `Subdiagnostic` annotated with `#[multipart_suggestion(...)]`
    = help: to show a variable set of suggestions, use a `Vec` of `Subdiagnostic`s annotated with `#[suggestion(...)]`
 
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/diagnostic-derive.rs:58:8
-   |
-LL | #[diag = "E0123"]
-   |        ^ you might be missing crate `core`
-
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/diagnostic-derive.rs:802:23
-   |
-LL |     #[suggestion(code(foo))]
-   |                       ^^^ you might be missing crate `core`
-
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/diagnostic-derive.rs:811:25
-   |
-LL |     #[suggestion(code = 3)]
-   |                         ^ you might be missing crate `core`
-
 error: cannot find attribute `nonsense` in this scope
-  --> $DIR/diagnostic-derive.rs:63:3
+  --> $DIR/diagnostic-derive.rs:65:3
    |
 LL | #[nonsense(no_crate_example, code = E0123)]
    |   ^^^^^^^^
 
 error: cannot find attribute `nonsense` in this scope
-  --> $DIR/diagnostic-derive.rs:150:7
+  --> $DIR/diagnostic-derive.rs:152:7
    |
 LL |     #[nonsense]
    |       ^^^^^^^^
 
 error: cannot find attribute `error` in this scope
-  --> $DIR/diagnostic-derive.rs:583:3
+  --> $DIR/diagnostic-derive.rs:585:3
    |
 LL | #[error(no_crate_example, code = E0123)]
    |   ^^^^^
@@ -567,7 +567,7 @@ LL | struct ErrorAttribute {}
    |
 
 error: cannot find attribute `warn_` in this scope
-  --> $DIR/diagnostic-derive.rs:590:3
+  --> $DIR/diagnostic-derive.rs:592:3
    |
 LL | #[warn_(no_crate_example, code = E0123)]
    |   ^^^^^
@@ -579,7 +579,7 @@ LL + #[warn(no_crate_example, code = E0123)]
    |
 
 error: cannot find attribute `lint` in this scope
-  --> $DIR/diagnostic-derive.rs:597:3
+  --> $DIR/diagnostic-derive.rs:599:3
    |
 LL | #[lint(no_crate_example, code = E0123)]
    |   ^^^^
@@ -591,7 +591,7 @@ LL + #[link(no_crate_example, code = E0123)]
    |
 
 error: cannot find attribute `lint` in this scope
-  --> $DIR/diagnostic-derive.rs:604:3
+  --> $DIR/diagnostic-derive.rs:606:3
    |
 LL | #[lint(no_crate_example, code = E0123)]
    |   ^^^^
@@ -603,7 +603,7 @@ LL + #[link(no_crate_example, code = E0123)]
    |
 
 error: cannot find attribute `multipart_suggestion` in this scope
-  --> $DIR/diagnostic-derive.rs:644:3
+  --> $DIR/diagnostic-derive.rs:646:3
    |
 LL | #[multipart_suggestion(no_crate_suggestion)]
    |   ^^^^^^^^^^^^^^^^^^^^
@@ -615,7 +615,7 @@ LL | struct MultipartSuggestion {
    |
 
 error: cannot find attribute `multipart_suggestion` in this scope
-  --> $DIR/diagnostic-derive.rs:647:3
+  --> $DIR/diagnostic-derive.rs:649:3
    |
 LL | #[multipart_suggestion()]
    |   ^^^^^^^^^^^^^^^^^^^^
@@ -627,7 +627,7 @@ LL | struct MultipartSuggestion {
    |
 
 error: cannot find attribute `multipart_suggestion` in this scope
-  --> $DIR/diagnostic-derive.rs:651:7
+  --> $DIR/diagnostic-derive.rs:653:7
    |
 LL |     #[multipart_suggestion(no_crate_suggestion)]
    |       ^^^^^^^^^^^^^^^^^^^^
@@ -635,13 +635,13 @@ LL |     #[multipart_suggestion(no_crate_suggestion)]
    = note: `multipart_suggestion` is an attribute that can be used by the derive macro `Subdiagnostic`, you might be missing a `derive` attribute
 
 error[E0425]: cannot find value `nonsense` in module `crate::fluent_generated`
-  --> $DIR/diagnostic-derive.rs:75:8
+  --> $DIR/diagnostic-derive.rs:77:8
    |
 LL | #[diag(nonsense, code = E0123)]
    |        ^^^^^^^^ not found in `crate::fluent_generated`
 
 error[E0425]: cannot find value `__code_34` in this scope
-  --> $DIR/diagnostic-derive.rs:808:10
+  --> $DIR/diagnostic-derive.rs:810:10
    |
 LL | #[derive(Diagnostic)]
    |          ^^^^^^^^^^ not found in this scope
@@ -649,7 +649,7 @@ LL | #[derive(Diagnostic)]
    = note: this error originates in the derive macro `Diagnostic` (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0277]: the trait bound `Hello: IntoDiagArg` is not satisfied
-  --> $DIR/diagnostic-derive.rs:349:12
+  --> $DIR/diagnostic-derive.rs:351:12
    |
 LL | #[derive(Diagnostic)]
    |          ---------- required by a bound introduced by this call
@@ -658,7 +658,7 @@ LL |     other: Hello,
    |            ^^^^^ unsatisfied trait bound
    |
 help: the nightly-only, unstable trait `IntoDiagArg` is not implemented for `Hello`
-  --> $DIR/diagnostic-derive.rs:40:1
+  --> $DIR/diagnostic-derive.rs:42:1
    |
 LL | struct Hello {}
    | ^^^^^^^^^^^^
@@ -672,5 +672,5 @@ note: required by a bound in `Diag::<'a, G>::arg`
 
 error: aborting due to 85 previous errors
 
-Some errors have detailed explanations: E0277, E0425, E0433.
+Some errors have detailed explanations: E0277, E0425.
 For more information about an error, try `rustc --explain E0277`.
diff --git a/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive-2.rs b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive-2.rs
new file mode 100644
index 0000000000000..f42fe16898428
--- /dev/null
+++ b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive-2.rs
@@ -0,0 +1,37 @@
+//@ check-fail
+// Tests error conditions for specifying subdiagnostics using #[derive(Subdiagnostic)].
+// This test is split off from the main `subdiagnostic-derive` test
+// because this error is generated post-expansion.
+
+// The proc_macro2 crate handles spans differently on beta/stable releases than on nightly,
+// changing the output of this test. Since Subdiagnostic is strictly internal to the compiler,
+// the test is simply ignored on stable and beta:
+//@ ignore-stage1
+//@ ignore-beta
+//@ ignore-stable
+
+#![feature(rustc_private)]
+#![crate_type = "lib"]
+
+extern crate rustc_errors;
+extern crate rustc_fluent_macro;
+extern crate rustc_macros;
+extern crate rustc_session;
+extern crate rustc_span;
+extern crate core;
+
+use rustc_errors::{Applicability, DiagMessage, SubdiagMessage};
+use rustc_macros::Subdiagnostic;
+use rustc_span::Span;
+
+rustc_fluent_macro::fluent_messages! { "./example.ftl" }
+
+#[derive(Subdiagnostic)]
+#[label(slug)]
+//~^ ERROR cannot find value `slug` in module `crate::fluent_generated`
+//~^^ NOTE not found in `crate::fluent_generated`
+struct L {
+    #[primary_span]
+    span: Span,
+    var: String,
+}
diff --git a/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive-2.stderr b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive-2.stderr
new file mode 100644
index 0000000000000..37566e39fcd67
--- /dev/null
+++ b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive-2.stderr
@@ -0,0 +1,9 @@
+error[E0425]: cannot find value `slug` in module `crate::fluent_generated`
+  --> $DIR/subdiagnostic-derive-2.rs:30:9
+   |
+LL | #[label(slug)]
+   |         ^^^^ not found in `crate::fluent_generated`
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0425`.
diff --git a/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.rs b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.rs
index 6481e8a3307c3..941668ad602e4 100644
--- a/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.rs
+++ b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.rs
@@ -122,16 +122,6 @@ struct K {
     var: String,
 }
 
-#[derive(Subdiagnostic)]
-#[label(slug)]
-//~^ ERROR cannot find value `slug` in module `crate::fluent_generated`
-//~^^ NOTE not found in `crate::fluent_generated`
-struct L {
-    #[primary_span]
-    span: Span,
-    var: String,
-}
-
 #[derive(Subdiagnostic)]
 #[label()]
 //~^ ERROR diagnostic slug must be first argument of a `#[label(...)]` attribute
@@ -707,9 +697,6 @@ struct BP {
 }
 
 #[derive(Subdiagnostic)]
-//~^ ERROR cannot find value `__code_29` in this scope
-//~| NOTE in this expansion
-//~| NOTE not found in this scope
 #[multipart_suggestion(no_crate_example)]
 struct BQ {
     #[suggestion_part(code = 3)]
diff --git a/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.stderr b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.stderr
index 0ae7ba4c4973d..c31da4421d255 100644
--- a/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.stderr
+++ b/tests/ui-fulldeps/session-diagnostic/subdiagnostic-derive.stderr
@@ -1,39 +1,45 @@
 error: derive(Diagnostic): label without `#[primary_span]` field
-  --> $DIR/subdiagnostic-derive.rs:51:1
+  --> $DIR/subdiagnostic-derive.rs:52:1
    |
 LL | #[label(no_crate_example)]
    | ^
 
 error: derive(Diagnostic): diagnostic slug must be first argument of a `#[label(...)]` attribute
-  --> $DIR/subdiagnostic-derive.rs:58:1
+  --> $DIR/subdiagnostic-derive.rs:59:1
    |
 LL | #[label]
    | ^
 
 error: derive(Diagnostic): `#[foo]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:67:1
+  --> $DIR/subdiagnostic-derive.rs:68:1
    |
 LL | #[foo]
    | ^
 
 error: derive(Diagnostic): `#[label = ...]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:77:1
+  --> $DIR/subdiagnostic-derive.rs:78:1
    |
 LL | #[label = "..."]
    | ^
 
 error: derive(Diagnostic): only `no_span` is a valid nested attribute
-  --> $DIR/subdiagnostic-derive.rs:86:9
+  --> $DIR/subdiagnostic-derive.rs:87:9
    |
 LL | #[label(bug = "...")]
    |         ^^^
 
 error: derive(Diagnostic): diagnostic slug must be first argument of a `#[label(...)]` attribute
-  --> $DIR/subdiagnostic-derive.rs:86:1
+  --> $DIR/subdiagnostic-derive.rs:87:1
    |
 LL | #[label(bug = "...")]
    | ^
 
+error: unexpected literal in nested attribute, expected ident
+  --> $DIR/subdiagnostic-derive.rs:97:9
+   |
+LL | #[label("...")]
+   |         ^^^^^
+
 error: derive(Diagnostic): only `no_span` is a valid nested attribute
   --> $DIR/subdiagnostic-derive.rs:106:9
    |
@@ -59,85 +65,85 @@ LL | #[label(slug("..."))]
    | ^
 
 error: derive(Diagnostic): diagnostic slug must be first argument of a `#[label(...)]` attribute
-  --> $DIR/subdiagnostic-derive.rs:136:1
+  --> $DIR/subdiagnostic-derive.rs:126:1
    |
 LL | #[label()]
    | ^
 
 error: derive(Diagnostic): only `no_span` is a valid nested attribute
-  --> $DIR/subdiagnostic-derive.rs:145:27
+  --> $DIR/subdiagnostic-derive.rs:135:27
    |
 LL | #[label(no_crate_example, code = "...")]
    |                           ^^^^
 
 error: derive(Diagnostic): only `no_span` is a valid nested attribute
-  --> $DIR/subdiagnostic-derive.rs:154:27
+  --> $DIR/subdiagnostic-derive.rs:144:27
    |
 LL | #[label(no_crate_example, applicability = "machine-applicable")]
    |                           ^^^^^^^^^^^^^
 
 error: derive(Diagnostic): unsupported type attribute for subdiagnostic enum
-  --> $DIR/subdiagnostic-derive.rs:163:1
+  --> $DIR/subdiagnostic-derive.rs:153:1
    |
 LL | #[foo]
    | ^
 
 error: derive(Diagnostic): `#[bar]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:177:5
+  --> $DIR/subdiagnostic-derive.rs:167:5
    |
 LL |     #[bar]
    |     ^
 
 error: derive(Diagnostic): `#[bar = ...]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:189:5
+  --> $DIR/subdiagnostic-derive.rs:179:5
    |
 LL |     #[bar = "..."]
    |     ^
 
 error: derive(Diagnostic): `#[bar = ...]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:201:5
+  --> $DIR/subdiagnostic-derive.rs:191:5
    |
 LL |     #[bar = 4]
    |     ^
 
 error: derive(Diagnostic): `#[bar(...)]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:213:5
+  --> $DIR/subdiagnostic-derive.rs:203:5
    |
 LL |     #[bar("...")]
    |     ^
 
 error: derive(Diagnostic): only `no_span` is a valid nested attribute
-  --> $DIR/subdiagnostic-derive.rs:225:13
+  --> $DIR/subdiagnostic-derive.rs:215:13
    |
 LL |     #[label(code = "...")]
    |             ^^^^
 
 error: derive(Diagnostic): diagnostic slug must be first argument of a `#[label(...)]` attribute
-  --> $DIR/subdiagnostic-derive.rs:225:5
+  --> $DIR/subdiagnostic-derive.rs:215:5
    |
 LL |     #[label(code = "...")]
    |     ^
 
 error: derive(Diagnostic): the `#[primary_span]` attribute can only be applied to fields of type `Span` or `MultiSpan`
-  --> $DIR/subdiagnostic-derive.rs:254:5
+  --> $DIR/subdiagnostic-derive.rs:244:5
    |
 LL |     #[primary_span]
    |     ^
 
 error: derive(Diagnostic): label without `#[primary_span]` field
-  --> $DIR/subdiagnostic-derive.rs:251:1
+  --> $DIR/subdiagnostic-derive.rs:241:1
    |
 LL | #[label(no_crate_example)]
    | ^
 
 error: derive(Diagnostic): `#[applicability]` is only valid on suggestions
-  --> $DIR/subdiagnostic-derive.rs:264:5
+  --> $DIR/subdiagnostic-derive.rs:254:5
    |
 LL |     #[applicability]
    |     ^
 
 error: derive(Diagnostic): `#[bar]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:274:5
+  --> $DIR/subdiagnostic-derive.rs:264:5
    |
 LL |     #[bar]
    |     ^
@@ -145,111 +151,121 @@ LL |     #[bar]
    = help: only `primary_span`, `applicability` and `skip_arg` are valid field attributes
 
 error: derive(Diagnostic): `#[bar = ...]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:285:5
+  --> $DIR/subdiagnostic-derive.rs:275:5
    |
 LL |     #[bar = "..."]
    |     ^
 
 error: derive(Diagnostic): `#[bar(...)]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:296:5
+  --> $DIR/subdiagnostic-derive.rs:286:5
    |
 LL |     #[bar("...")]
    |     ^
    |
    = help: only `primary_span`, `applicability` and `skip_arg` are valid field attributes
 
+error: unexpected unsupported untagged union
+  --> $DIR/subdiagnostic-derive.rs:302:1
+   |
+LL | / union AC {
+LL | |
+LL | |     span: u32,
+LL | |     b: u64,
+LL | | }
+   | |_^
+
 error: derive(Diagnostic): a diagnostic slug must be the first argument to the attribute
-  --> $DIR/subdiagnostic-derive.rs:328:44
+  --> $DIR/subdiagnostic-derive.rs:317:44
    |
 LL | #[label(no_crate_example, no_crate::example)]
    |                                            ^
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/subdiagnostic-derive.rs:341:5
+  --> $DIR/subdiagnostic-derive.rs:330:5
    |
 LL |     #[primary_span]
    |     ^
    |
 note: previously specified here
-  --> $DIR/subdiagnostic-derive.rs:338:5
+  --> $DIR/subdiagnostic-derive.rs:327:5
    |
 LL |     #[primary_span]
    |     ^
 
 error: derive(Diagnostic): subdiagnostic kind not specified
-  --> $DIR/subdiagnostic-derive.rs:347:8
+  --> $DIR/subdiagnostic-derive.rs:336:8
    |
 LL | struct AG {
    |        ^^
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/subdiagnostic-derive.rs:384:46
+  --> $DIR/subdiagnostic-derive.rs:373:46
    |
 LL | #[suggestion(no_crate_example, code = "...", code = "...")]
    |                                              ^^^^
    |
 note: previously specified here
-  --> $DIR/subdiagnostic-derive.rs:384:32
+  --> $DIR/subdiagnostic-derive.rs:373:32
    |
 LL | #[suggestion(no_crate_example, code = "...", code = "...")]
    |                                ^^^^
 
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/subdiagnostic-derive.rs:402:5
+  --> $DIR/subdiagnostic-derive.rs:391:5
    |
 LL |     #[applicability]
    |     ^
    |
 note: previously specified here
-  --> $DIR/subdiagnostic-derive.rs:399:5
+  --> $DIR/subdiagnostic-derive.rs:388:5
    |
 LL |     #[applicability]
    |     ^
 
 error: derive(Diagnostic): the `#[applicability]` attribute can only be applied to fields of type `Applicability`
-  --> $DIR/subdiagnostic-derive.rs:412:5
+  --> $DIR/subdiagnostic-derive.rs:401:5
    |
 LL |     #[applicability]
    |     ^
 
 error: derive(Diagnostic): suggestion without `code = "..."`
-  --> $DIR/subdiagnostic-derive.rs:425:1
+  --> $DIR/subdiagnostic-derive.rs:414:1
    |
 LL | #[suggestion(no_crate_example)]
    | ^
 
 error: derive(Diagnostic): invalid applicability
-  --> $DIR/subdiagnostic-derive.rs:435:62
+  --> $DIR/subdiagnostic-derive.rs:424:62
    |
 LL | #[suggestion(no_crate_example, code = "...", applicability = "foo")]
    |                                                              ^^^^^
 
 error: derive(Diagnostic): suggestion without `#[primary_span]` field
-  --> $DIR/subdiagnostic-derive.rs:453:1
+  --> $DIR/subdiagnostic-derive.rs:442:1
    |
 LL | #[suggestion(no_crate_example, code = "...")]
    | ^
 
 error: derive(Diagnostic): unsupported type attribute for subdiagnostic enum
-  --> $DIR/subdiagnostic-derive.rs:467:1
+  --> $DIR/subdiagnostic-derive.rs:456:1
    |
 LL | #[label]
    | ^
 
 error: derive(Diagnostic): `var` doesn't refer to a field on this type
-  --> $DIR/subdiagnostic-derive.rs:487:39
+  --> $DIR/subdiagnostic-derive.rs:476:39
    |
 LL | #[suggestion(no_crate_example, code = "{var}", applicability = "machine-applicable")]
    |                                       ^^^^^^^
 
 error: derive(Diagnostic): `var` doesn't refer to a field on this type
-  --> $DIR/subdiagnostic-derive.rs:506:43
+  --> $DIR/subdiagnostic-derive.rs:495:43
    |
 LL |     #[suggestion(no_crate_example, code = "{var}", applicability = "machine-applicable")]
    |                                           ^^^^^^^
 
 error: derive(Diagnostic): `#[suggestion_part]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:529:5
+  --> $DIR/subdiagnostic-derive.rs:518:5
    |
 LL |     #[suggestion_part]
    |     ^
@@ -257,7 +273,7 @@ LL |     #[suggestion_part]
    = help: `#[suggestion_part(...)]` is only valid in multipart suggestions, use `#[primary_span]` instead
 
 error: derive(Diagnostic): `#[suggestion_part(...)]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:532:5
+  --> $DIR/subdiagnostic-derive.rs:521:5
    |
 LL |     #[suggestion_part(code = "...")]
    |     ^
@@ -265,13 +281,13 @@ LL |     #[suggestion_part(code = "...")]
    = help: `#[suggestion_part(...)]` is only valid in multipart suggestions
 
 error: derive(Diagnostic): suggestion without `#[primary_span]` field
-  --> $DIR/subdiagnostic-derive.rs:526:1
+  --> $DIR/subdiagnostic-derive.rs:515:1
    |
 LL | #[suggestion(no_crate_example, code = "...")]
    | ^
 
 error: derive(Diagnostic): invalid nested attribute
-  --> $DIR/subdiagnostic-derive.rs:541:42
+  --> $DIR/subdiagnostic-derive.rs:530:42
    |
 LL | #[multipart_suggestion(no_crate_example, code = "...", applicability = "machine-applicable")]
    |                                          ^^^^
@@ -279,25 +295,25 @@ LL | #[multipart_suggestion(no_crate_example, code = "...", applicability = "mac
    = help: only `no_span`, `style` and `applicability` are valid nested attributes
 
 error: derive(Diagnostic): multipart suggestion without any `#[suggestion_part(...)]` fields
-  --> $DIR/subdiagnostic-derive.rs:541:1
+  --> $DIR/subdiagnostic-derive.rs:530:1
    |
 LL | #[multipart_suggestion(no_crate_example, code = "...", applicability = "machine-applicable")]
    | ^
 
 error: derive(Diagnostic): `#[suggestion_part(...)]` attribute without `code = "..."`
-  --> $DIR/subdiagnostic-derive.rs:551:5
+  --> $DIR/subdiagnostic-derive.rs:540:5
    |
 LL |     #[suggestion_part]
    |     ^
 
 error: derive(Diagnostic): `#[suggestion_part(...)]` attribute without `code = "..."`
-  --> $DIR/subdiagnostic-derive.rs:559:5
+  --> $DIR/subdiagnostic-derive.rs:548:5
    |
 LL |     #[suggestion_part()]
    |     ^
 
 error: derive(Diagnostic): `#[primary_span]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:568:5
+  --> $DIR/subdiagnostic-derive.rs:557:5
    |
 LL |     #[primary_span]
    |     ^
@@ -305,97 +321,127 @@ LL |     #[primary_span]
    = help: multipart suggestions use one or more `#[suggestion_part]`s rather than one `#[primary_span]`
 
 error: derive(Diagnostic): multipart suggestion without any `#[suggestion_part(...)]` fields
-  --> $DIR/subdiagnostic-derive.rs:565:1
+  --> $DIR/subdiagnostic-derive.rs:554:1
    |
 LL | #[multipart_suggestion(no_crate_example)]
    | ^
 
 error: derive(Diagnostic): `#[suggestion_part(...)]` attribute without `code = "..."`
-  --> $DIR/subdiagnostic-derive.rs:576:5
+  --> $DIR/subdiagnostic-derive.rs:565:5
    |
 LL |     #[suggestion_part]
    |     ^
 
 error: derive(Diagnostic): `#[suggestion_part(...)]` attribute without `code = "..."`
-  --> $DIR/subdiagnostic-derive.rs:579:5
+  --> $DIR/subdiagnostic-derive.rs:568:5
    |
 LL |     #[suggestion_part()]
    |     ^
 
 error: derive(Diagnostic): `code` is the only valid nested attribute
-  --> $DIR/subdiagnostic-derive.rs:582:23
+  --> $DIR/subdiagnostic-derive.rs:571:23
    |
 LL |     #[suggestion_part(foo = "bar")]
    |                       ^^^
 
 error: derive(Diagnostic): the `#[suggestion_part(...)]` attribute can only be applied to fields of type `Span` or `MultiSpan`
-  --> $DIR/subdiagnostic-derive.rs:587:5
+  --> $DIR/subdiagnostic-derive.rs:575:5
    |
 LL |     #[suggestion_part(code = "...")]
    |     ^
 
 error: derive(Diagnostic): the `#[suggestion_part(...)]` attribute can only be applied to fields of type `Span` or `MultiSpan`
-  --> $DIR/subdiagnostic-derive.rs:590:5
+  --> $DIR/subdiagnostic-derive.rs:578:5
    |
 LL |     #[suggestion_part()]
    |     ^
 
+error: expected `,`
+  --> $DIR/subdiagnostic-derive.rs:571:27
+   |
+LL |     #[suggestion_part(foo = "bar")]
+   |                           ^
+
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/subdiagnostic-derive.rs:598:37
+  --> $DIR/subdiagnostic-derive.rs:586:37
    |
 LL |     #[suggestion_part(code = "...", code = ",,,")]
    |                                     ^^^^
    |
 note: previously specified here
-  --> $DIR/subdiagnostic-derive.rs:598:23
+  --> $DIR/subdiagnostic-derive.rs:586:23
    |
 LL |     #[suggestion_part(code = "...", code = ",,,")]
    |                       ^^^^
 
 error: derive(Diagnostic): `#[applicability]` has no effect if all `#[suggestion]`/`#[multipart_suggestion]` attributes have a static `applicability = "..."`
-  --> $DIR/subdiagnostic-derive.rs:627:5
+  --> $DIR/subdiagnostic-derive.rs:615:5
    |
 LL |     #[applicability]
    |     ^
 
 error: derive(Diagnostic): expected exactly one string literal for `code = ...`
-  --> $DIR/subdiagnostic-derive.rs:675:34
+  --> $DIR/subdiagnostic-derive.rs:663:34
    |
 LL |     #[suggestion_part(code("foo"))]
    |                                  ^
 
+error: unexpected token, expected `)`
+  --> $DIR/subdiagnostic-derive.rs:663:28
+   |
+LL |     #[suggestion_part(code("foo"))]
+   |                            ^^^^^
+
 error: derive(Diagnostic): expected exactly one string literal for `code = ...`
-  --> $DIR/subdiagnostic-derive.rs:686:41
+  --> $DIR/subdiagnostic-derive.rs:673:41
    |
 LL |     #[suggestion_part(code("foo", "bar"))]
    |                                         ^
 
+error: unexpected token, expected `)`
+  --> $DIR/subdiagnostic-derive.rs:673:28
+   |
+LL |     #[suggestion_part(code("foo", "bar"))]
+   |                            ^^^^^
+
 error: derive(Diagnostic): expected exactly one string literal for `code = ...`
-  --> $DIR/subdiagnostic-derive.rs:697:30
+  --> $DIR/subdiagnostic-derive.rs:683:30
    |
 LL |     #[suggestion_part(code(3))]
    |                              ^
 
+error: unexpected token, expected `)`
+  --> $DIR/subdiagnostic-derive.rs:683:28
+   |
+LL |     #[suggestion_part(code(3))]
+   |                            ^
+
 error: derive(Diagnostic): expected exactly one string literal for `code = ...`
-  --> $DIR/subdiagnostic-derive.rs:708:29
+  --> $DIR/subdiagnostic-derive.rs:693:29
    |
 LL |     #[suggestion_part(code())]
    |                             ^
 
+error: expected string literal
+  --> $DIR/subdiagnostic-derive.rs:702:30
+   |
+LL |     #[suggestion_part(code = 3)]
+   |                              ^
+
 error: derive(Diagnostic): attribute specified multiple times
-  --> $DIR/subdiagnostic-derive.rs:763:1
+  --> $DIR/subdiagnostic-derive.rs:744:1
    |
 LL | #[suggestion(no_crate_example, code = "", style = "hidden", style = "normal")]
    | ^
    |
 note: previously specified here
-  --> $DIR/subdiagnostic-derive.rs:763:1
+  --> $DIR/subdiagnostic-derive.rs:744:1
    |
 LL | #[suggestion(no_crate_example, code = "", style = "hidden", style = "normal")]
    | ^
 
 error: derive(Diagnostic): `#[suggestion_hidden(...)]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:772:1
+  --> $DIR/subdiagnostic-derive.rs:753:1
    |
 LL | #[suggestion_hidden(no_crate_example, code = "")]
    | ^
@@ -403,7 +449,7 @@ LL | #[suggestion_hidden(no_crate_example, code = "")]
    = help: Use `#[suggestion(..., style = "hidden")]` instead
 
 error: derive(Diagnostic): `#[suggestion_hidden(...)]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:780:1
+  --> $DIR/subdiagnostic-derive.rs:761:1
    |
 LL | #[suggestion_hidden(no_crate_example, code = "", style = "normal")]
    | ^
@@ -411,7 +457,7 @@ LL | #[suggestion_hidden(no_crate_example, code = "", style = "normal")]
    = help: Use `#[suggestion(..., style = "hidden")]` instead
 
 error: derive(Diagnostic): invalid suggestion style
-  --> $DIR/subdiagnostic-derive.rs:788:51
+  --> $DIR/subdiagnostic-derive.rs:769:51
    |
 LL | #[suggestion(no_crate_example, code = "", style = "foo")]
    |                                                   ^^^^^
@@ -419,25 +465,31 @@ LL | #[suggestion(no_crate_example, code = "", style = "foo")]
    = help: valid styles are `normal`, `short`, `hidden`, `verbose` and `tool-only`
 
 error: derive(Diagnostic): expected `= "xxx"`
-  --> $DIR/subdiagnostic-derive.rs:796:49
+  --> $DIR/subdiagnostic-derive.rs:777:49
    |
 LL | #[suggestion(no_crate_example, code = "", style = 42)]
    |                                                 ^
 
 error: derive(Diagnostic): a diagnostic slug must be the first argument to the attribute
-  --> $DIR/subdiagnostic-derive.rs:804:48
+  --> $DIR/subdiagnostic-derive.rs:785:48
    |
 LL | #[suggestion(no_crate_example, code = "", style)]
    |                                                ^
 
 error: derive(Diagnostic): expected `= "xxx"`
-  --> $DIR/subdiagnostic-derive.rs:812:48
+  --> $DIR/subdiagnostic-derive.rs:793:48
+   |
+LL | #[suggestion(no_crate_example, code = "", style("foo"))]
+   |                                                ^
+
+error: expected `,`
+  --> $DIR/subdiagnostic-derive.rs:793:48
    |
 LL | #[suggestion(no_crate_example, code = "", style("foo"))]
    |                                                ^
 
 error: derive(Diagnostic): `#[primary_span]` is not a valid attribute
-  --> $DIR/subdiagnostic-derive.rs:825:5
+  --> $DIR/subdiagnostic-derive.rs:805:5
    |
 LL |     #[primary_span]
    |     ^
@@ -446,128 +498,64 @@ LL |     #[primary_span]
    = help: to create a suggestion with multiple spans, use `#[multipart_suggestion]` instead
 
 error: derive(Diagnostic): suggestion without `#[primary_span]` field
-  --> $DIR/subdiagnostic-derive.rs:822:1
+  --> $DIR/subdiagnostic-derive.rs:802:1
    |
 LL | #[suggestion(no_crate_example, code = "")]
    | ^
 
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/subdiagnostic-derive.rs:96:9
-   |
-LL | #[label("...")]
-   |         ^^^^^ you might be missing crate `core`
-
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/subdiagnostic-derive.rs:312:1
-   |
-LL | union AC {
-   | ^^^^^ you might be missing crate `core`
-
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/subdiagnostic-derive.rs:582:27
-   |
-LL |     #[suggestion_part(foo = "bar")]
-   |                           ^ you might be missing crate `core`
-
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/subdiagnostic-derive.rs:675:28
-   |
-LL |     #[suggestion_part(code("foo"))]
-   |                            ^^^^^ you might be missing crate `core`
-
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/subdiagnostic-derive.rs:686:28
-   |
-LL |     #[suggestion_part(code("foo", "bar"))]
-   |                            ^^^^^ you might be missing crate `core`
-
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/subdiagnostic-derive.rs:697:28
-   |
-LL |     #[suggestion_part(code(3))]
-   |                            ^ you might be missing crate `core`
-
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/subdiagnostic-derive.rs:720:30
-   |
-LL |     #[suggestion_part(code = 3)]
-   |                              ^ you might be missing crate `core`
-
-error[E0433]: failed to resolve: you might be missing crate `core`
-  --> $DIR/subdiagnostic-derive.rs:812:48
-   |
-LL | #[suggestion(no_crate_example, code = "", style("foo"))]
-   |                                                ^ you might be missing crate `core`
-
 error: cannot find attribute `foo` in this scope
-  --> $DIR/subdiagnostic-derive.rs:67:3
+  --> $DIR/subdiagnostic-derive.rs:68:3
    |
 LL | #[foo]
    |   ^^^
 
 error: cannot find attribute `foo` in this scope
-  --> $DIR/subdiagnostic-derive.rs:163:3
+  --> $DIR/subdiagnostic-derive.rs:153:3
    |
 LL | #[foo]
    |   ^^^
 
 error: cannot find attribute `bar` in this scope
-  --> $DIR/subdiagnostic-derive.rs:177:7
+  --> $DIR/subdiagnostic-derive.rs:167:7
    |
 LL |     #[bar]
    |       ^^^
 
 error: cannot find attribute `bar` in this scope
-  --> $DIR/subdiagnostic-derive.rs:189:7
+  --> $DIR/subdiagnostic-derive.rs:179:7
    |
 LL |     #[bar = "..."]
    |       ^^^
 
 error: cannot find attribute `bar` in this scope
-  --> $DIR/subdiagnostic-derive.rs:201:7
+  --> $DIR/subdiagnostic-derive.rs:191:7
    |
 LL |     #[bar = 4]
    |       ^^^
 
 error: cannot find attribute `bar` in this scope
-  --> $DIR/subdiagnostic-derive.rs:213:7
+  --> $DIR/subdiagnostic-derive.rs:203:7
    |
 LL |     #[bar("...")]
    |       ^^^
 
 error: cannot find attribute `bar` in this scope
-  --> $DIR/subdiagnostic-derive.rs:274:7
+  --> $DIR/subdiagnostic-derive.rs:264:7
    |
 LL |     #[bar]
    |       ^^^
 
 error: cannot find attribute `bar` in this scope
-  --> $DIR/subdiagnostic-derive.rs:285:7
+  --> $DIR/subdiagnostic-derive.rs:275:7
    |
 LL |     #[bar = "..."]
    |       ^^^
 
 error: cannot find attribute `bar` in this scope
-  --> $DIR/subdiagnostic-derive.rs:296:7
+  --> $DIR/subdiagnostic-derive.rs:286:7
    |
 LL |     #[bar("...")]
    |       ^^^
 
-error[E0425]: cannot find value `slug` in module `crate::fluent_generated`
-  --> $DIR/subdiagnostic-derive.rs:126:9
-   |
-LL | #[label(slug)]
-   |         ^^^^ not found in `crate::fluent_generated`
-
-error[E0425]: cannot find value `__code_29` in this scope
-  --> $DIR/subdiagnostic-derive.rs:714:10
-   |
-LL | #[derive(Subdiagnostic)]
-   |          ^^^^^^^^^^^^^ not found in this scope
-   |
-   = note: this error originates in the derive macro `Subdiagnostic` (in Nightly builds, run with -Z macro-backtrace for more info)
-
-error: aborting due to 86 previous errors
+error: aborting due to 84 previous errors
 
-Some errors have detailed explanations: E0425, E0433.
-For more information about an error, try `rustc --explain E0425`.

From d400dca751671b531c0826e30d9f7eba24cff8f2 Mon Sep 17 00:00:00 2001
From: Laurențiu Nicola 
Date: Tue, 27 Jan 2026 19:00:26 +0200
Subject: [PATCH 127/131] Fix postcard test too

---
 .../crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
index 33ca1d791de7e..ba9657a9bb45e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
@@ -1,4 +1,8 @@
 #![cfg(feature = "sysroot-abi")]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_driver as _;
 
 mod common {
     pub(crate) mod utils;

From 7eae36f01734bfcb842b98e361012ed626ce489f Mon Sep 17 00:00:00 2001
From: Manuel Drehwald 
Date: Fri, 9 Jan 2026 14:10:12 -0800
Subject: [PATCH 128/131] Add an early return if handling multiple offload
 calls

---
 compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs b/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
index 0cf4c1d4f8c78..0cf5ac01d1a77 100644
--- a/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
+++ b/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
@@ -68,8 +68,15 @@ impl<'ll> OffloadGlobals<'ll> {
 // which we copy from clang, is to just have those two calls once, in the global ctor/dtor section
 // of the final binary.
 pub(crate) fn register_offload<'ll>(cx: &CodegenCx<'ll, '_>) {
+    // First, quickly check whether we have already done our setup, in which case we return early.
+    // This shouldn't be needed for correctness.
+    let register_lib_name = "__tgt_register_lib";
+    if cx.get_function(register_lib_name).is_some() {
+        return;
+    }
+
     let reg_lib_decl = cx.type_func(&[cx.type_ptr()], cx.type_void());
-    let register_lib = declare_offload_fn(&cx, "__tgt_register_lib", reg_lib_decl);
+    let register_lib = declare_offload_fn(&cx, register_lib_name, reg_lib_decl);
     let unregister_lib = declare_offload_fn(&cx, "__tgt_unregister_lib", reg_lib_decl);
 
     let ptr_null = cx.const_null(cx.type_ptr());
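For readers skimming the patch, the guard above is simply an idempotency check: `register_offload` bails out if the `__tgt_register_lib` declaration already exists, so repeated calls (one per offload site) emit the registration scaffolding only once. A minimal sketch of that pattern, using a hypothetical `Module` symbol table rather than the real `CodegenCx`/LLVM declaration APIs:

```rust
use std::collections::HashMap;

/// Hypothetical stand-in for the codegen context's symbol table.
#[derive(Default)]
struct Module {
    functions: HashMap<String, usize>, // name -> opaque function handle
}

impl Module {
    fn get_function(&self, name: &str) -> Option<usize> {
        self.functions.get(name).copied()
    }

    fn declare_fn(&mut self, name: &str) -> usize {
        let handle = self.functions.len();
        self.functions.insert(name.to_owned(), handle);
        handle
    }
}

/// Emits the offload registration scaffolding at most once per module.
fn register_offload(module: &mut Module) {
    let register_lib_name = "__tgt_register_lib";
    // Early return: if the declaration already exists, setup already ran.
    if module.get_function(register_lib_name).is_some() {
        return;
    }
    module.declare_fn(register_lib_name);
    module.declare_fn("__tgt_unregister_lib");
    // ...the ctor/dtor registration globals would be emitted here in the real code.
}

fn main() {
    let mut module = Module::default();
    register_offload(&mut module);
    register_offload(&mut module); // second call is a no-op
    assert_eq!(module.functions.len(), 2);
}
```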

From 35ce8ab1204d1fe860d073c20c0f6c4e8012ccb8 Mon Sep 17 00:00:00 2001
From: Manuel Drehwald 
Date: Fri, 9 Jan 2026 15:41:07 -0800
Subject: [PATCH 129/131] Adjust test case for new logic

---
 .../codegen-llvm/gpu_offload/control_flow.rs  |   3 +-
 tests/codegen-llvm/gpu_offload/gpu_host.rs    | 117 +++++++++---------
 2 files changed, 59 insertions(+), 61 deletions(-)

diff --git a/tests/codegen-llvm/gpu_offload/control_flow.rs b/tests/codegen-llvm/gpu_offload/control_flow.rs
index 28ee9c85b0edc..1a3d3cd7a7789 100644
--- a/tests/codegen-llvm/gpu_offload/control_flow.rs
+++ b/tests/codegen-llvm/gpu_offload/control_flow.rs
@@ -12,8 +12,7 @@
 
 // CHECK: define{{( dso_local)?}} void @main()
 // CHECK-NOT: define
-// CHECK: %EmptyDesc = alloca %struct.__tgt_bin_desc, align 8
-// CHECK-NEXT: %.offload_baseptrs = alloca [1 x ptr], align 8
+// CHECK: %.offload_baseptrs = alloca [1 x ptr], align 8
 // CHECK-NEXT: %.offload_ptrs = alloca [1 x ptr], align 8
 // CHECK-NEXT: %.offload_sizes = alloca [1 x i64], align 8
 // CHECK-NEXT: %kernel_args = alloca %struct.__tgt_kernel_arguments, align 8
diff --git a/tests/codegen-llvm/gpu_offload/gpu_host.rs b/tests/codegen-llvm/gpu_offload/gpu_host.rs
index 27ff6f325aa0f..d0bc34ec66b20 100644
--- a/tests/codegen-llvm/gpu_offload/gpu_host.rs
+++ b/tests/codegen-llvm/gpu_offload/gpu_host.rs
@@ -2,9 +2,10 @@
 //@ no-prefer-dynamic
 //@ needs-offload
 
-// This test is verifying that we generate __tgt_target_data_*_mapper before and after a call to the
-// kernel_1. Better documentation to what each global or variable means is available in the gpu
-// offload code, or the LLVM offload documentation.
+// This test verifies that we generate __tgt_target_data_*_mapper calls before and after a call to
+// __tgt_target_kernel, and that we initialize all needed variables. It also verifies some related
+// globals. Better documentation of what each global or variable means is available in the gpu
+// offload code or the LLVM offload documentation.
 
 #![feature(rustc_attrs)]
 #![feature(core_intrinsics)]
@@ -17,10 +18,8 @@ fn main() {
     core::hint::black_box(&x);
 }
 
-#[unsafe(no_mangle)]
-#[inline(never)]
 pub fn kernel_1(x: &mut [f32; 256]) {
-    core::intrinsics::offload(_kernel_1, [256, 1, 1], [32, 1, 1], (x,))
+    core::intrinsics::offload(kernel_1, [256, 1, 1], [32, 1, 1], (x,))
 }
 
 #[unsafe(no_mangle)]
@@ -33,75 +32,75 @@ pub fn _kernel_1(x: &mut [f32; 256]) {
 
 // CHECK: %struct.ident_t = type { i32, i32, i32, i32, ptr }
 // CHECK: %struct.__tgt_offload_entry = type { i64, i16, i16, i32, ptr, ptr, i64, i64, ptr }
-// CHECK: %struct.__tgt_bin_desc = type { i32, ptr, ptr, ptr }
 // CHECK: %struct.__tgt_kernel_arguments = type { i32, i32, ptr, ptr, ptr, ptr, ptr, ptr, i64, i64, [3 x i32], [3 x i32], i32 }
 
-// CHECK: @anon.{{.*}}.0 = private unnamed_addr constant [23 x i8] c";unknown;unknown;0;0;;\00", align 1
-// CHECK: @anon.{{.*}}.1 = private unnamed_addr constant %struct.ident_t { i32 0, i32 2, i32 0, i32 22, ptr @anon.{{.*}}.0 }, align 8
+// CHECK: @anon.[[ID:.*]].0 = private unnamed_addr constant [23 x i8] c";unknown;unknown;0;0;;\00", align 1
+// CHECK: @anon.{{.*}}.1 = private unnamed_addr constant %struct.ident_t { i32 0, i32 2, i32 0, i32 22, ptr @anon.[[ID]].0 }, align 8
 
-// CHECK: @.offload_sizes._kernel_1 = private unnamed_addr constant [1 x i64] [i64 1024]
-// CHECK: @.offload_maptypes._kernel_1 = private unnamed_addr constant [1 x i64] [i64 35]
-// CHECK: @._kernel_1.region_id = internal constant i8 0
-// CHECK: @.offloading.entry_name._kernel_1 = internal unnamed_addr constant [10 x i8] c"_kernel_1\00", section ".llvm.rodata.offloading", align 1
-// CHECK: @.offloading.entry._kernel_1 = internal constant %struct.__tgt_offload_entry { i64 0, i16 1, i16 1, i32 0, ptr @._kernel_1.region_id, ptr @.offloading.entry_name._kernel_1, i64 0, i64 0, ptr null }, section "llvm_offload_entries", align 8
+// CHECK-DAG: @.omp_offloading.descriptor = internal constant { i32, ptr, ptr, ptr } zeroinitializer
+// CHECK-DAG: @llvm.global_ctors = appending constant [1 x { i32, ptr, ptr }] [{ i32, ptr, ptr } { i32 101, ptr @.omp_offloading.descriptor_reg, ptr null }]
+// CHECK-DAG: @.offload_sizes.[[K:[^ ]*kernel_1]] = private unnamed_addr constant [1 x i64] [i64 1024]
+// CHECK-DAG: @.offload_maptypes.[[K]] = private unnamed_addr constant [1 x i64] [i64 35]
+// CHECK-DAG: @.[[K]].region_id = internal constant i8 0
+// CHECK-DAG: @.offloading.entry_name.[[K]] = internal unnamed_addr constant [{{[0-9]+}} x i8] c"[[K]]{{\\00}}", section ".llvm.rodata.offloading", align 1
+// CHECK-DAG: @.offloading.entry.[[K]] = internal constant %struct.__tgt_offload_entry { i64 0, i16 1, i16 1, i32 0, ptr @.[[K]].region_id, ptr @.offloading.entry_name.[[K]], i64 0, i64 0, ptr null }, section "llvm_offload_entries", align 8
 
 // CHECK: declare i32 @__tgt_target_kernel(ptr, i64, i32, i32, ptr, ptr)
-// CHECK: declare void @__tgt_register_lib(ptr) local_unnamed_addr
-// CHECK: declare void @__tgt_unregister_lib(ptr) local_unnamed_addr
-
-// CHECK: define{{( dso_local)?}} void @main()
-// CHECK-NEXT: start:
-// CHECK-NEXT:   %0 = alloca [8 x i8], align 8
-// CHECK-NEXT:   %x = alloca [1024 x i8], align 16
-// CHECK:        call void @kernel_1(ptr noalias noundef nonnull align 4 dereferenceable(1024) %x)
-// CHECK-NEXT:   call void @llvm.lifetime.start.p0(i64 8, ptr nonnull %0)
-// CHECK-NEXT:   store ptr %x, ptr %0, align 8
-// CHECK-NEXT:   call void asm sideeffect "", "r,~{memory}"(ptr nonnull %0)
-// CHECK-NEXT:   call void @llvm.lifetime.end.p0(i64 8, ptr nonnull %0)
-// CHECK-NEXT:   call void @llvm.lifetime.end.p0(i64 1024, ptr nonnull %x)
-// CHECK-NEXT:   ret void
-// CHECK-NEXT: }
 
-// CHECK:      define{{( dso_local)?}} void @kernel_1(ptr noalias noundef align 4 dereferenceable(1024) %x)
+// CHECK-LABEL: define{{( dso_local)?}} void @main()
 // CHECK-NEXT: start:
-// CHECK-NEXT:   %EmptyDesc = alloca %struct.__tgt_bin_desc, align 8
+// CHECK-NEXT:  %0 = alloca [8 x i8], align 8
+// CHECK-NEXT:  %x = alloca [1024 x i8], align 16
 // CHECK-NEXT:   %.offload_baseptrs = alloca [1 x ptr], align 8
 // CHECK-NEXT:   %.offload_ptrs = alloca [1 x ptr], align 8
 // CHECK-NEXT:   %.offload_sizes = alloca [1 x i64], align 8
 // CHECK-NEXT:   %kernel_args = alloca %struct.__tgt_kernel_arguments, align 8
-// CHECK-NEXT:   %dummy = load volatile ptr, ptr @.offload_sizes._kernel_1, align 8
-// CHECK-NEXT:   %dummy1 = load volatile ptr, ptr @.offloading.entry._kernel_1, align 8
-// CHECK-NEXT:   call void @llvm.memset.p0.i64(ptr noundef nonnull align 8 dereferenceable(32) %EmptyDesc, i8 0, i64 32, i1 false)
-// CHECK-NEXT:   call void @__tgt_register_lib(ptr nonnull %EmptyDesc)
+// CHECK:   %dummy = load volatile ptr, ptr @.offload_sizes.[[K]], align 8
+// CHECK-NEXT:   %dummy1 = load volatile ptr, ptr @.offloading.entry.[[K]], align 8
 // CHECK-NEXT:   call void @__tgt_init_all_rtls()
 // CHECK-NEXT:   store ptr %x, ptr %.offload_baseptrs, align 8
 // CHECK-NEXT:   store ptr %x, ptr %.offload_ptrs, align 8
 // CHECK-NEXT:   store i64 1024, ptr %.offload_sizes, align 8
-// CHECK-NEXT:   call void @__tgt_target_data_begin_mapper(ptr nonnull @anon.{{.*}}.1, i64 -1, i32 1, ptr nonnull %.offload_baseptrs, ptr nonnull %.offload_ptrs, ptr nonnull %.offload_sizes, ptr nonnull @.offload_maptypes._kernel_1, ptr null, ptr null)
+// CHECK-NEXT:   call void @__tgt_target_data_begin_mapper(ptr nonnull @anon.{{.*}}.1, i64 -1, i32 1, ptr nonnull %.offload_baseptrs, ptr nonnull %.offload_ptrs, ptr nonnull %.offload_sizes, ptr nonnull @.offload_maptypes.[[K]], ptr null, ptr null)
 // CHECK-NEXT:   store i32 3, ptr %kernel_args, align 8
-// CHECK-NEXT:   %0 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 4
-// CHECK-NEXT:   store i32 1, ptr %0, align 4
-// CHECK-NEXT:   %1 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 8
-// CHECK-NEXT:   store ptr %.offload_baseptrs, ptr %1, align 8
-// CHECK-NEXT:   %2 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 16
-// CHECK-NEXT:   store ptr %.offload_ptrs, ptr %2, align 8
-// CHECK-NEXT:   %3 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 24
-// CHECK-NEXT:   store ptr %.offload_sizes, ptr %3, align 8
-// CHECK-NEXT:   %4 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 32
-// CHECK-NEXT:   store ptr @.offload_maptypes._kernel_1, ptr %4, align 8
-// CHECK-NEXT:   %5 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 40
-// CHECK-NEXT:   %6 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 72
-// CHECK-NEXT:   call void @llvm.memset.p0.i64(ptr noundef nonnull align 8 dereferenceable(32) %5, i8 0, i64 32, i1 false)
-// CHECK-NEXT:   store <4 x i32> , ptr %6, align 8
-// CHECK-NEXT:   %.fca.1.gep5 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 88
-// CHECK-NEXT:   store i32 1, ptr %.fca.1.gep5, align 8
-// CHECK-NEXT:   %.fca.2.gep7 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 92
-// CHECK-NEXT:   store i32 1, ptr %.fca.2.gep7, align 4
-// CHECK-NEXT:   %7 = getelementptr inbounds nuw i8, ptr %kernel_args, i64 96
-// CHECK-NEXT:   store i32 0, ptr %7, align 8
-// CHECK-NEXT:   %8 = call i32 @__tgt_target_kernel(ptr nonnull @anon.{{.*}}.1, i64 -1, i32 256, i32 32, ptr nonnull @._kernel_1.region_id, ptr nonnull %kernel_args)
-// CHECK-NEXT:   call void @__tgt_target_data_end_mapper(ptr nonnull @anon.{{.*}}.1, i64 -1, i32 1, ptr nonnull %.offload_baseptrs, ptr nonnull %.offload_ptrs, ptr nonnull %.offload_sizes, ptr nonnull @.offload_maptypes._kernel_1, ptr null, ptr null)
-// CHECK-NEXT:   call void @__tgt_unregister_lib(ptr nonnull %EmptyDesc)
+// CHECK-NEXT:   [[P4:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 4
+// CHECK-NEXT:   store i32 1, ptr [[P4]], align 4
+// CHECK-NEXT:   [[P8:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 8
+// CHECK-NEXT:   store ptr %.offload_baseptrs, ptr [[P8]], align 8
+// CHECK-NEXT:   [[P16:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 16
+// CHECK-NEXT:   store ptr %.offload_ptrs, ptr [[P16]], align 8
+// CHECK-NEXT:   [[P24:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 24
+// CHECK-NEXT:   store ptr %.offload_sizes, ptr [[P24]], align 8
+// CHECK-NEXT:   [[P32:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 32
+// CHECK-NEXT:   store ptr @.offload_maptypes.[[K]], ptr [[P32]], align 8
+// CHECK-NEXT:   [[P40:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 40
+// CHECK-NEXT:   [[P72:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 72
+// CHECK-NEXT:   call void @llvm.memset.p0.i64(ptr noundef nonnull align 8 dereferenceable(32) [[P40]], i8 0, i64 32, i1 false)
+// CHECK-NEXT:   store <4 x i32> , ptr [[P72]], align 8
+// CHECK-NEXT:   [[P88:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 88
+// CHECK-NEXT:   store i32 1, ptr [[P88]], align 8
+// CHECK-NEXT:   [[P92:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 92
+// CHECK-NEXT:   store i32 1, ptr [[P92]], align 4
+// CHECK-NEXT:   [[P96:%[^ ]+]] = getelementptr inbounds nuw i8, ptr %kernel_args, i64 96
+// CHECK-NEXT:   store i32 0, ptr [[P96]], align 8
+// CHECK-NEXT:   {{%[^ ]+}} = call i32 @__tgt_target_kernel(ptr nonnull @anon.{{.*}}.1, i64 -1, i32 256, i32 32, ptr nonnull @.[[K]].region_id, ptr nonnull %kernel_args)
+// CHECK-NEXT:   call void @__tgt_target_data_end_mapper(ptr nonnull @anon.{{.*}}.1, i64 -1, i32 1, ptr nonnull %.offload_baseptrs, ptr nonnull %.offload_ptrs, ptr nonnull %.offload_sizes, ptr nonnull @.offload_maptypes.[[K]], ptr null, ptr null)
+// CHECK:   ret void
+// CHECK-NEXT: }
+
+// CHECK: declare void @__tgt_register_lib(ptr) local_unnamed_addr
+// CHECK: declare void @__tgt_unregister_lib(ptr) local_unnamed_addr
+
+// CHECK-LABEL: define internal void @.omp_offloading.descriptor_reg() section ".text.startup" {
+// CHECK-NEXT: entry:
+// CHECK-NEXT:   call void @__tgt_register_lib(ptr nonnull @.omp_offloading.descriptor)
+// CHECK-NEXT:   %0 = {{tail }}call i32 @atexit(ptr nonnull @.omp_offloading.descriptor_unreg)
+// CHECK-NEXT:   ret void
+// CHECK-NEXT: }
+
+// CHECK-LABEL: define internal void @.omp_offloading.descriptor_unreg() section ".text.startup" {
+// CHECK-NEXT: entry:
+// CHECK-NEXT:   call void @__tgt_unregister_lib(ptr nonnull @.omp_offloading.descriptor)
 // CHECK-NEXT:   ret void
 // CHECK-NEXT: }
 

From 1f11bf66498150d930df2abef04552c174ab18aa Mon Sep 17 00:00:00 2001
From: Manuel Drehwald 
Date: Tue, 13 Jan 2026 15:18:49 -0800
Subject: [PATCH 130/131] Leave note to drop tgt_init_all_rtls in the future

---
 compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs b/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
index 0cf5ac01d1a77..402861eda8707 100644
--- a/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
+++ b/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
@@ -29,6 +29,8 @@ pub(crate) struct OffloadGlobals<'ll> {
 
     pub ident_t_global: &'ll llvm::Value,
 
+    // FIXME(offload): Drop this once we have fully automated our offload compilation pipeline,
+    // since LLVM will initialize them for us if it sees gpu kernels being registered.
     pub init_rtls: &'ll llvm::Value,
 }
 

From 83dcfc8803225f2666ace25d8448be17351af9fd Mon Sep 17 00:00:00 2001
From: Guillaume Gomez 
Date: Tue, 27 Jan 2026 21:14:25 +0100
Subject: [PATCH 131/131] Update `browser-ui-test` version to `0.23.3`

---
 package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/package.json b/package.json
index 1fe87b1816691..66596cab42682 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "dependencies": {
-    "browser-ui-test": "^0.23.2",
+    "browser-ui-test": "^0.23.3",
     "es-check": "^9.4.4",
     "eslint": "^8.57.1",
     "typescript": "^5.8.3"