diff --git a/Cargo.lock b/Cargo.lock
index 0ff2d53d11b8..efb313cd3c80 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -328,6 +328,15 @@ dependencies = [
  "dirs-sys",
 ]
 
+[[package]]
+name = "dirs"
+version = "5.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+dependencies = [
+ "dirs-sys",
+]
+
 [[package]]
 name = "dirs-sys"
 version = "0.4.1"
@@ -1665,6 +1674,7 @@ dependencies = [
  "anyhow",
  "cfg",
  "crossbeam-channel",
+ "dirs",
  "dissimilar",
  "expect-test",
  "flycheck",
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 431aa30e56f2..e9408bf89766 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -273,10 +273,17 @@ impl Analysis {
         self.with_db(|db| status::status(db, file_id))
     }
 
-    pub fn source_root(&self, file_id: FileId) -> Cancellable<SourceRootId> {
+    pub fn source_root_id(&self, file_id: FileId) -> Cancellable<SourceRootId> {
         self.with_db(|db| db.file_source_root(file_id))
     }
 
+    pub fn is_local_source_root(&self, source_root_id: SourceRootId) -> Cancellable<bool> {
+        self.with_db(|db| {
+            let sr = db.source_root(source_root_id);
+            !sr.is_library
+        })
+    }
+
     pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
     where
         F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 8adadddde611..128e86f1d684 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -272,7 +272,7 @@ impl SourceRootConfig {
     /// If a `SourceRoot` doesn't have a parent and is local then it is not contained in this mapping but it can be asserted that it is a root `SourceRoot`.
     pub fn source_root_parent_map(&self) -> FxHashMap<SourceRootId, SourceRootId> {
         let roots = self.fsc.roots();
-        let mut map = FxHashMap::<SourceRootId, SourceRootId>::default();
+        let mut i = 0;
         roots
             .iter()
             .enumerate()
@@ -280,17 +280,16 @@ impl SourceRootConfig {
             .filter_map(|(idx, (root, root_id))| {
                 // We are interested in parents if they are also local source roots.
                 // So instead of a non-local parent we may take a local ancestor as a parent to a node.
-                roots.iter().take(idx).find_map(|(root2, root2_id)| {
+                roots[..idx].iter().find_map(|(root2, root2_id)| {
+                    i += 1;
                     if self.local_filesets.contains(root2_id) && root.starts_with(root2) {
                         return Some((root_id, root2_id));
                     }
                     None
                 })
             })
-            .for_each(|(child, parent)| {
-                map.insert(SourceRootId(*child as u32), SourceRootId(*parent as u32));
-            });
-        map
+            .map(|(&child, &parent)| (SourceRootId(child as u32), SourceRootId(parent as u32)))
+            .collect()
     }
 }
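// A minimal, self-contained sketch of the "nearest local ancestor" mapping computed by
// `SourceRootConfig::source_root_parent_map` above. The `u32` ids, `PathBuf` roots and the
// plain `HashSet`/`HashMap` are simplified stand-ins for the real `SourceRootId`,
// `FileSetConfig` and `FxHashMap` types; it illustrates the idea, not the exact implementation.
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;

fn source_root_parent_map(
    roots: &[(PathBuf, u32)],      // (root path, source root id), in file-set order
    local_filesets: &HashSet<u32>, // ids of local (non-library) source roots
) -> HashMap<u32, u32> {
    roots
        .iter()
        .enumerate()
        .filter(|(_, (_, id))| local_filesets.contains(id))
        .filter_map(|(idx, (root, root_id))| {
            // Parent = a local root listed earlier whose path contains this root.
            roots[..idx].iter().find_map(|(root2, root2_id)| {
                (local_filesets.contains(root2_id) && root.starts_with(root2))
                    .then_some((*root_id, *root2_id))
            })
        })
        .collect()
}

fn main() {
    let roots = vec![
        (PathBuf::from("/ws"), 0),
        (PathBuf::from("/ws/crates/foo"), 1),
        (PathBuf::from("/registry/lib"), 2), // library root: never a parent, never a child
    ];
    let local = HashSet::from([0, 1]);
    // `/ws/crates/foo` is parented to `/ws`; roots without a local ancestor get no entry.
    assert_eq!(source_root_parent_map(&roots, &local), HashMap::from([(1, 0)]));
}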
diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs
index 1dda02e3f10f..33c3f83db50f 100644
--- a/crates/paths/src/lib.rs
+++ b/crates/paths/src/lib.rs
@@ -135,6 +135,24 @@ impl AbsPathBuf {
     pub fn pop(&mut self) -> bool {
         self.0.pop()
     }
+
+    /// Equivalent of [`PathBuf::push`] for `AbsPathBuf`.
+    ///
+    /// Extends `self` with `path`.
+    ///
+    /// If `path` is absolute, it replaces the current path.
+    ///
+    /// On Windows:
+    ///
+    /// * if `path` has a root but no prefix (e.g., `\windows`), it
+    ///   replaces everything except for the prefix (if any) of `self`.
+    /// * if `path` has a prefix but no root, it replaces `self`.
+    /// * if `self` has a verbatim prefix (e.g. `\\?\C:\windows`)
+    ///   and `path` is not empty, the new path is normalized: all references
+    ///   to `.` and `..` are removed.
+    pub fn push<P: AsRef<Utf8Path>>(&mut self, suffix: P) {
+        self.0.push(suffix)
+    }
 }
 
 impl fmt::Display for AbsPathBuf {
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 34b3e4931403..8ff7235b8fa5 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -22,6 +22,7 @@ path = "src/bin/main.rs"
 [dependencies]
 anyhow.workspace = true
 crossbeam-channel = "0.5.5"
+dirs = "5.0.1"
 dissimilar.workspace = true
 itertools.workspace = true
 scip = "0.3.3"
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 9daae914d79c..774784f37b00 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -15,7 +15,11 @@ use std::{env, fs, path::PathBuf, process::ExitCode, sync::Arc};
 
 use anyhow::Context;
 use lsp_server::Connection;
-use rust_analyzer::{cli::flags, config::Config, from_json};
+use rust_analyzer::{
+    cli::flags,
+    config::{Config, ConfigChange, ConfigErrors},
+    from_json,
+};
 use semver::Version;
 use tracing_subscriber::fmt::writer::BoxMakeWriter;
 use vfs::AbsPathBuf;
@@ -220,16 +224,22 @@ fn run_server() -> anyhow::Result<()> {
         .filter(|workspaces| !workspaces.is_empty())
         .unwrap_or_else(|| vec![root_path.clone()]);
     let mut config =
-        Config::new(root_path, capabilities, workspace_roots, visual_studio_code_version);
+        Config::new(root_path, capabilities, workspace_roots, visual_studio_code_version, None);
     if let Some(json) = initialization_options {
-        if let Err(e) = config.update(json) {
+        let mut change = ConfigChange::default();
+        change.change_client_config(json);
+
+        let error_sink: ConfigErrors;
+        (config, error_sink, _) = config.apply_change(change);
+
+        if !error_sink.is_empty() {
             use lsp_types::{
                 notification::{Notification, ShowMessage},
                 MessageType, ShowMessageParams,
             };
             let not = lsp_server::Notification::new(
                 ShowMessage::METHOD.to_owned(),
-                ShowMessageParams { typ: MessageType::WARNING, message: e.to_string() },
+                ShowMessageParams { typ: MessageType::WARNING, message: error_sink.to_string() },
             );
             connection.sender.send(lsp_server::Message::Notification(not)).unwrap();
         }
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index aef2c1be2249..8f60b17b5943 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -10,9 +10,11 @@ use ide_db::LineIndexDatabase;
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
 use rustc_hash::{FxHashMap, FxHashSet};
 use scip::types as scip_types;
+use tracing::error;
 
 use crate::{
     cli::flags,
+    config::ConfigChange,
     line_index::{LineEndings, LineIndex, PositionEncoding},
 };
 
@@ -35,12 +37,20 @@ impl flags::Scip {
             lsp_types::ClientCapabilities::default(),
             vec![],
             None,
+            None,
         );
 
         if let Some(p) = self.config_path {
             let mut file = std::io::BufReader::new(std::fs::File::open(p)?);
             let json = serde_json::from_reader(&mut file)?;
-            config.update(json)?;
+            let mut change = ConfigChange::default();
+            change.change_client_config(json);
+
+            let error_sink;
+            (config, error_sink, _) = config.apply_change(change);
+
+            // FIXME @alibektas : What happens to errors without logging?
+            error!(?error_sink, "Config Error(s)");
         }
         let cargo_config = config.cargo();
         let (db, vfs, _) = load_workspace_at(
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 2574cf059de0..64054669df0e 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -1,14 +1,12 @@
 //!
Config used by the language server. //! -//! We currently get this config from `initialize` LSP request, which is not the -//! best way to do it, but was the simplest thing we could implement. -//! //! Of particular interest is the `feature_flags` hash map: while other fields //! configure the server itself, feature flags are passed into analysis, and //! tweak things like automatic insertion of `()` in completions. -use std::{fmt, iter, ops::Not}; +use std::{fmt, iter, ops::Not, sync::OnceLock}; use cfg::{CfgAtom, CfgDiff}; +use dirs::config_dir; use flycheck::{CargoOptions, FlycheckConfig}; use ide::{ AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode, @@ -29,9 +27,13 @@ use project_model::{ }; use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde::{ + de::{DeserializeOwned, Error}, + Deserialize, Serialize, +}; use stdx::format_to_acc; -use vfs::{AbsPath, AbsPathBuf}; +use triomphe::Arc; +use vfs::{AbsPath, AbsPathBuf, VfsPath}; use crate::{ caps::completion_item_edit_resolve, @@ -60,6 +62,7 @@ mod patch_old_style; // parsing the old name. config_data! { /// Configs that apply on a workspace-wide scope. There are 3 levels on which a global configuration can be configured + // FIXME: 1. and 3. should be split, some configs do not make sense per project /// /// 1. `rust-analyzer.toml` file under user's config directory (e.g ~/.config/rust-analyzer.toml) /// 2. Client's own configurations (e.g `settings.json` on VS Code) @@ -67,12 +70,6 @@ config_data! { /// /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle. global: struct GlobalDefaultConfigData <- GlobalConfigInput -> { - /// Whether to insert #[must_use] when generating `as_` methods - /// for enum variants. - assist_emitMustUse: bool = false, - /// Placeholder expression to use for missing expressions in assists. - assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo, - /// Warm up caches on project load. cachePriming_enable: bool = true, /// How many worker threads to handle priming caches. The default `0` means to pick automatically. @@ -272,87 +269,12 @@ config_data! { /// The warnings will be indicated by a blue squiggly underline in code /// and a blue icon in the `Problems Panel`. diagnostics_warningsAsInfo: Vec = vec![], + /// These directories will be ignored by rust-analyzer. They are /// relative to the workspace root, and globs are not supported. You may /// also need to add the folders to Code's `files.watcherExclude`. files_excludeDirs: Vec = vec![], - /// Controls file watching implementation. - files_watcher: FilesWatcherDef = FilesWatcherDef::Client, - /// Whether to show `Debug` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_debug_enable: bool = true, - /// Whether to show HoverActions in Rust files. - hover_actions_enable: bool = true, - /// Whether to show `Go to Type Definition` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_gotoTypeDef_enable: bool = true, - /// Whether to show `Implementations` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_implementations_enable: bool = true, - /// Whether to show `References` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. 
- hover_actions_references_enable: bool = false, - /// Whether to show `Run` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_run_enable: bool = true, - - /// Whether to show documentation on hover. - hover_documentation_enable: bool = true, - /// Whether to show keyword hover popups. Only applies when - /// `#rust-analyzer.hover.documentation.enable#` is set. - hover_documentation_keywords_enable: bool = true, - /// Use markdown syntax for links on hover. - hover_links_enable: bool = true, - /// How to render the align information in a memory layout hover. - hover_memoryLayout_alignment: Option = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), - /// Whether to show memory layout data on hover. - hover_memoryLayout_enable: bool = true, - /// How to render the niche information in a memory layout hover. - hover_memoryLayout_niches: Option = Some(false), - /// How to render the offset information in a memory layout hover. - hover_memoryLayout_offset: Option = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), - /// How to render the size information in a memory layout hover. - hover_memoryLayout_size: Option = Some(MemoryLayoutHoverRenderKindDef::Both), - - /// How many variants of an enum to display when hovering on. Show none if empty. - hover_show_enumVariants: Option = Some(5), - /// How many fields of a struct, variant or union to display when hovering on. Show none if empty. - hover_show_fields: Option = Some(5), - /// How many associated items of a trait to display when hovering a trait. - hover_show_traitAssocItems: Option = None, - - /// Enables the experimental support for interpreting tests. - interpret_tests: bool = false, - - /// Whether to show `Debug` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_debug_enable: bool = true, - /// Whether to show CodeLens in Rust files. - lens_enable: bool = true, - /// Internal config: use custom client-side commands even when the - /// client doesn't set the corresponding capability. - lens_forceCustomCommands: bool = true, - /// Whether to show `Implementations` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_implementations_enable: bool = true, - /// Where to render annotations. - lens_location: AnnotationLocation = AnnotationLocation::AboveName, - /// Whether to show `References` lens for Struct, Enum, and Union. - /// Only applies when `#rust-analyzer.lens.enable#` is set. - lens_references_adt_enable: bool = false, - /// Whether to show `References` lens for Enum Variants. - /// Only applies when `#rust-analyzer.lens.enable#` is set. - lens_references_enumVariant_enable: bool = false, - /// Whether to show `Method References` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_references_method_enable: bool = false, - /// Whether to show `References` lens for Trait. - /// Only applies when `#rust-analyzer.lens.enable#` is set. - lens_references_trait_enable: bool = false, - /// Whether to show `Run` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_run_enable: bool = true, /// Disable project auto-discovery in favor of explicitly specified set /// of projects. @@ -367,31 +289,10 @@ config_data! { /// Sets the LRU capacity of the specified queries. lru_query_capacities: FxHashMap, usize> = FxHashMap::default(), - /// Whether to show `can't find Cargo.toml` error message. - notifications_cargoTomlNotFound: bool = true, - - /// Whether to send an UnindexedProject notification to the client. 
- notifications_unindexedProject: bool = false, - - /// How many worker threads in the main loop. The default `null` means to pick automatically. - numThreads: Option = None, - - /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set. - procMacro_attributes_enable: bool = true, - /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`. - procMacro_enable: bool = true, /// These proc-macros will be ignored when trying to expand them. /// /// This config takes a map of crate names with the exported proc-macro names to ignore as values. procMacro_ignored: FxHashMap, Box<[Box]>> = FxHashMap::default(), - /// Internal config, path to proc-macro server executable. - procMacro_server: Option = None, - - /// Exclude imports from find-all-references. - references_excludeImports: bool = false, - - /// Exclude tests from find-all-references. - references_excludeTests: bool = false, /// Command to be executed instead of 'cargo' for runnables. runnables_command: Option = None, @@ -429,34 +330,41 @@ config_data! { /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only /// available on a nightly build. rustfmt_rangeFormatting_enable: bool = false, - - - /// Show full signature of the callable. Only shows parameters if disabled. - signatureInfo_detail: SignatureDetail = SignatureDetail::Full, - /// Show documentation. - signatureInfo_documentation_enable: bool = true, - - /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list. - typing_autoClosingAngleBrackets_enable: bool = false, - - /// Workspace symbol search kind. - workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = WorkspaceSymbolSearchKindDef::OnlyTypes, - /// Limits the number of items returned from a workspace symbol search (Defaults to 128). - /// Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search. - /// Other clients requires all results upfront and might require a higher limit. - workspace_symbol_search_limit: usize = 128, - /// Workspace symbol search scope. - workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = WorkspaceSymbolSearchScopeDef::Workspace, } } config_data! { - /// Local configurations can be overridden for every crate by placing a `rust-analyzer.toml` on crate root. - /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle. + /// Local configurations can be defined per `SourceRoot`. This almost always corresponds to a `Crate`. local: struct LocalDefaultConfigData <- LocalConfigInput -> { + /// Whether to insert #[must_use] when generating `as_` methods + /// for enum variants. + assist_emitMustUse: bool = false, + /// Placeholder expression to use for missing expressions in assists. + assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo, /// Term search fuel in "units of work" for assists (Defaults to 400). assist_termSearch_fuel: usize = 400, + /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file. + imports_granularity_enforce: bool = false, + /// How imports should be grouped into use statements. + imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate, + /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. 
Groups are separated by newlines. + imports_group_enable: bool = true, + /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`. + imports_merge_glob: bool = true, + /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate. + imports_preferNoStd | imports_prefer_no_std: bool = false, + /// Whether to prefer import paths containing a `prelude` module. + imports_preferPrelude: bool = false, + /// The path structure for newly inserted paths to use. + imports_prefix: ImportPrefixDef = ImportPrefixDef::Plain, + } +} + +config_data! { + /// Configs that only make sense when they are set by a client. As such they can only be defined + /// by setting them using client's settings (e.g `settings.json` on VS Code). + client: struct ClientDefaultConfigData <- ClientConfigInput -> { /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. completion_autoimport_enable: bool = true, @@ -521,6 +429,9 @@ config_data! { /// Term search fuel in "units of work" for autocompletion (Defaults to 200). completion_termSearch_fuel: usize = 200, + /// Controls file watching implementation. + files_watcher: FilesWatcherDef = FilesWatcherDef::Client, + /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords. highlightRelated_breakPoints_enable: bool = true, /// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure. @@ -532,21 +443,48 @@ config_data! { /// Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords. highlightRelated_yieldPoints_enable: bool = true, - /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file. - imports_granularity_enforce: bool = false, - /// How imports should be grouped into use statements. - imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate, - /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines. - imports_group_enable: bool = true, - /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`. - imports_merge_glob: bool = true, - /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate. - imports_preferNoStd | imports_prefer_no_std: bool = false, - /// Whether to prefer import paths containing a `prelude` module. - imports_preferPrelude: bool = false, - /// The path structure for newly inserted paths to use. - imports_prefix: ImportPrefixDef = ImportPrefixDef::Plain, + /// Whether to show `Debug` action. Only applies when + /// `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_debug_enable: bool = true, + /// Whether to show HoverActions in Rust files. + hover_actions_enable: bool = true, + /// Whether to show `Go to Type Definition` action. Only applies when + /// `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_gotoTypeDef_enable: bool = true, + /// Whether to show `Implementations` action. Only applies when + /// `#rust-analyzer.hover.actions.enable#` is set. 
+ hover_actions_implementations_enable: bool = true, + /// Whether to show `References` action. Only applies when + /// `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_references_enable: bool = false, + /// Whether to show `Run` action. Only applies when + /// `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_run_enable: bool = true, + + /// Whether to show documentation on hover. + hover_documentation_enable: bool = true, + /// Whether to show keyword hover popups. Only applies when + /// `#rust-analyzer.hover.documentation.enable#` is set. + hover_documentation_keywords_enable: bool = true, + /// Use markdown syntax for links on hover. + hover_links_enable: bool = true, + /// How to render the align information in a memory layout hover. + hover_memoryLayout_alignment: Option = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), + /// Whether to show memory layout data on hover. + hover_memoryLayout_enable: bool = true, + /// How to render the niche information in a memory layout hover. + hover_memoryLayout_niches: Option = Some(false), + /// How to render the offset information in a memory layout hover. + hover_memoryLayout_offset: Option = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), + /// How to render the size information in a memory layout hover. + hover_memoryLayout_size: Option = Some(MemoryLayoutHoverRenderKindDef::Both), + /// How many variants of an enum to display when hovering on. Show none if empty. + hover_show_enumVariants: Option = Some(5), + /// How many fields of a struct, variant or union to display when hovering on. Show none if empty. + hover_show_fields: Option = Some(5), + /// How many associated items of a trait to display when hovering a trait. + hover_show_traitAssocItems: Option = None, /// Whether to show inlay type hints for binding modes. inlayHints_bindingModeHints_enable: bool = false, @@ -597,6 +535,8 @@ config_data! { /// Whether to hide inlay type hints for constructors. inlayHints_typeHints_hideNamedConstructor: bool = false, + /// Enables the experimental support for interpreting tests. + interpret_tests: bool = false, /// Join lines merges consecutive declaration and initialization of an assignment. joinLines_joinAssignments: bool = true, @@ -607,75 +547,408 @@ config_data! { /// Join lines unwraps trivial blocks. joinLines_unwrapTrivialBlock: bool = true, - /// Inject additional highlighting into doc comments. - /// - /// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra - /// doc links. - semanticHighlighting_doc_comment_inject_enable: bool = true, - /// Whether the server is allowed to emit non-standard tokens and modifiers. - semanticHighlighting_nonStandardTokens: bool = true, - /// Use semantic tokens for operators. - /// - /// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when - /// they are tagged with modifiers. - semanticHighlighting_operator_enable: bool = true, - /// Use specialized semantic tokens for operators. - /// - /// When enabled, rust-analyzer will emit special token types for operator tokens instead - /// of the generic `operator` token type. - semanticHighlighting_operator_specialization_enable: bool = false, - /// Use semantic tokens for punctuation. - /// - /// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when - /// they are tagged with modifiers or have a special role. 
- semanticHighlighting_punctuation_enable: bool = false, - /// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro - /// calls. - semanticHighlighting_punctuation_separate_macro_bang: bool = false, - /// Use specialized semantic tokens for punctuation. - /// - /// When enabled, rust-analyzer will emit special token types for punctuation tokens instead - /// of the generic `punctuation` token type. - semanticHighlighting_punctuation_specialization_enable: bool = false, - /// Use semantic tokens for strings. - /// - /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars. - /// By disabling semantic tokens for strings, other grammars can be used to highlight - /// their contents. - semanticHighlighting_strings_enable: bool = true, + /// Whether to show `Debug` lens. Only applies when + /// `#rust-analyzer.lens.enable#` is set. + lens_debug_enable: bool = true, + /// Whether to show CodeLens in Rust files. + lens_enable: bool = true, + /// Internal config: use custom client-side commands even when the + /// client doesn't set the corresponding capability. + lens_forceCustomCommands: bool = true, + /// Whether to show `Implementations` lens. Only applies when + /// `#rust-analyzer.lens.enable#` is set. + lens_implementations_enable: bool = true, + /// Where to render annotations. + lens_location: AnnotationLocation = AnnotationLocation::AboveName, + /// Whether to show `References` lens for Struct, Enum, and Union. + /// Only applies when `#rust-analyzer.lens.enable#` is set. + lens_references_adt_enable: bool = false, + /// Whether to show `References` lens for Enum Variants. + /// Only applies when `#rust-analyzer.lens.enable#` is set. + lens_references_enumVariant_enable: bool = false, + /// Whether to show `Method References` lens. Only applies when + /// `#rust-analyzer.lens.enable#` is set. + lens_references_method_enable: bool = false, + /// Whether to show `References` lens for Trait. + /// Only applies when `#rust-analyzer.lens.enable#` is set. + lens_references_trait_enable: bool = false, + /// Whether to show `Run` lens. Only applies when + /// `#rust-analyzer.lens.enable#` is set. + lens_run_enable: bool = true, + + /// Whether to show `can't find Cargo.toml` error message. + notifications_cargoTomlNotFound: bool = true, + + /// Whether to send an UnindexedProject notification to the client. + notifications_unindexedProject: bool = false, + + /// How many worker threads in the main loop. The default `null` means to pick automatically. + numThreads: Option = None, + + /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set. + procMacro_attributes_enable: bool = true, + /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`. + procMacro_enable: bool = true, + /// Internal config, path to proc-macro server executable. + procMacro_server: Option = None, + + /// Exclude imports from find-all-references. + references_excludeImports: bool = false, + + /// Exclude tests from find-all-references. + references_excludeTests: bool = false, + + /// Inject additional highlighting into doc comments. + /// + /// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra + /// doc links. + semanticHighlighting_doc_comment_inject_enable: bool = true, + /// Whether the server is allowed to emit non-standard tokens and modifiers. + semanticHighlighting_nonStandardTokens: bool = true, + /// Use semantic tokens for operators. 
+ /// + /// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when + /// they are tagged with modifiers. + semanticHighlighting_operator_enable: bool = true, + /// Use specialized semantic tokens for operators. + /// + /// When enabled, rust-analyzer will emit special token types for operator tokens instead + /// of the generic `operator` token type. + semanticHighlighting_operator_specialization_enable: bool = false, + /// Use semantic tokens for punctuation. + /// + /// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when + /// they are tagged with modifiers or have a special role. + semanticHighlighting_punctuation_enable: bool = false, + /// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro + /// calls. + semanticHighlighting_punctuation_separate_macro_bang: bool = false, + /// Use specialized semantic tokens for punctuation. + /// + /// When enabled, rust-analyzer will emit special token types for punctuation tokens instead + /// of the generic `punctuation` token type. + semanticHighlighting_punctuation_specialization_enable: bool = false, + /// Use semantic tokens for strings. + /// + /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars. + /// By disabling semantic tokens for strings, other grammars can be used to highlight + /// their contents. + semanticHighlighting_strings_enable: bool = true, + + /// Show full signature of the callable. Only shows parameters if disabled. + signatureInfo_detail: SignatureDetail = SignatureDetail::Full, + /// Show documentation. + signatureInfo_documentation_enable: bool = true, + + /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list. + typing_autoClosingAngleBrackets_enable: bool = false, + + /// Workspace symbol search kind. + workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = WorkspaceSymbolSearchKindDef::OnlyTypes, + /// Limits the number of items returned from a workspace symbol search (Defaults to 128). + /// Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search. + /// Other clients requires all results upfront and might require a higher limit. + workspace_symbol_search_limit: usize = 128, + /// Workspace symbol search scope. + workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = WorkspaceSymbolSearchScopeDef::Workspace, + } +} + +#[derive(Debug, Clone)] +pub struct Config { + discovered_projects: Vec, + /// The workspace roots as registered by the LSP client + workspace_roots: Vec, + caps: lsp_types::ClientCapabilities, + root_path: AbsPathBuf, + snippets: Vec, + visual_studio_code_version: Option, + + default_config: &'static DefaultConfigData, + /// Config node that obtains its initial value during the server initialization and + /// by receiving a `lsp_types::notification::DidChangeConfiguration`. + client_config: (FullConfigInput, ConfigErrors), + + /// Path to the root configuration file. This can be seen as a generic way to define what would be `$XDG_CONFIG_HOME/rust-analyzer/rust-analyzer.toml` in Linux. 
+ /// If not specified by init of a `Config` object this value defaults to : + /// + /// |Platform | Value | Example | + /// | ------- | ------------------------------------- | ---------------------------------------- | + /// | Linux | `$XDG_CONFIG_HOME` or `$HOME`/.config | /home/alice/.config | + /// | macOS | `$HOME`/Library/Application Support | /Users/Alice/Library/Application Support | + /// | Windows | `{FOLDERID_RoamingAppData}` | C:\Users\Alice\AppData\Roaming | + user_config_path: VfsPath, + + /// FIXME @alibektas : Change this to sth better. + /// Config node whose values apply to **every** Rust project. + user_config: Option<(GlobalLocalConfigInput, ConfigErrors)>, + + /// A special file for this session whose path is set to `self.root_path.join("rust-analyzer.toml")` + root_ratoml_path: VfsPath, + + /// This file can be used to make global changes while having only a workspace-wide scope. + root_ratoml: Option<(GlobalLocalConfigInput, ConfigErrors)>, + + /// For every `SourceRoot` there can be at most one RATOML file. + ratoml_files: FxHashMap, + + /// Clone of the value that is stored inside a `GlobalState`. + source_root_parent_map: Arc>, + + detached_files: Vec, +} + +impl Config { + pub fn user_config_path(&self) -> &VfsPath { + &self.user_config_path + } + + pub fn same_source_root_parent_map( + &self, + other: &Arc>, + ) -> bool { + Arc::ptr_eq(&self.source_root_parent_map, other) + } + + // FIXME @alibektas : Server's health uses error sink but in other places it is not used atm. + /// Changes made to client and global configurations will partially not be reflected even after `.apply_change()` was called. + /// The return tuple's bool component signals whether the `GlobalState` should call its `update_configuration()` method. + fn apply_change_with_sink(&self, change: ConfigChange) -> (Config, bool) { + let mut config = self.clone(); + + let mut should_update = false; + + if let Some(change) = change.user_config_change { + if let Ok(table) = toml::from_str(&change) { + let mut toml_errors = vec![]; + validate_toml_table( + GlobalLocalConfigInput::FIELDS, + &table, + &mut String::new(), + &mut toml_errors, + ); + config.user_config = Some(( + GlobalLocalConfigInput::from_toml(table, &mut toml_errors), + ConfigErrors( + toml_errors + .into_iter() + .map(|(a, b)| ConfigErrorInner::Toml { config_key: a, error: b }) + .map(Arc::new) + .collect(), + ), + )); + should_update = true; + } + } + + if let Some(mut json) = change.client_config_change { + tracing::info!("updating config from JSON: {:#}", json); + if !(json.is_null() || json.as_object().map_or(false, |it| it.is_empty())) { + let mut json_errors = vec![]; + let detached_files = get_field::>( + &mut json, + &mut json_errors, + "detachedFiles", + None, + ) + .unwrap_or_default() + .into_iter() + .map(AbsPathBuf::assert) + .collect(); + + patch_old_style::patch_json_for_outdated_configs(&mut json); + + config.client_config = ( + FullConfigInput::from_json(json, &mut json_errors), + ConfigErrors( + json_errors + .into_iter() + .map(|(a, b)| ConfigErrorInner::Json { config_key: a, error: b }) + .map(Arc::new) + .collect(), + ), + ); + config.detached_files = detached_files; + } + should_update = true; + } + + if let Some(change) = change.root_ratoml_change { + tracing::info!("updating root ra-toml config: {:#}", change); + #[allow(clippy::single_match)] + match toml::from_str(&change) { + Ok(table) => { + let mut toml_errors = vec![]; + validate_toml_table( + GlobalLocalConfigInput::FIELDS, + &table, + &mut String::new(), 
+ &mut toml_errors, + ); + config.root_ratoml = Some(( + GlobalLocalConfigInput::from_toml(table, &mut toml_errors), + ConfigErrors( + toml_errors + .into_iter() + .map(|(a, b)| ConfigErrorInner::Toml { config_key: a, error: b }) + .map(Arc::new) + .collect(), + ), + )); + should_update = true; + } + // FIXME + Err(_) => (), + } + } + + if let Some(change) = change.ratoml_file_change { + for (source_root_id, (_, text)) in change { + if let Some(text) = text { + let mut toml_errors = vec![]; + tracing::info!("updating ra-toml config: {:#}", text); + #[allow(clippy::single_match)] + match toml::from_str(&text) { + Ok(table) => { + validate_toml_table( + &[LocalConfigInput::FIELDS], + &table, + &mut String::new(), + &mut toml_errors, + ); + config.ratoml_files.insert( + source_root_id, + ( + LocalConfigInput::from_toml(&table, &mut toml_errors), + ConfigErrors( + toml_errors + .into_iter() + .map(|(a, b)| ConfigErrorInner::Toml { + config_key: a, + error: b, + }) + .map(Arc::new) + .collect(), + ), + ), + ); + } + // FIXME + Err(_) => (), + } + } + } + } + + if let Some(source_root_map) = change.source_map_change { + config.source_root_parent_map = source_root_map; + } + + let snips = self.completion_snippets_custom().to_owned(); + + for (name, def) in snips.iter() { + if def.prefix.is_empty() && def.postfix.is_empty() { + continue; + } + let scope = match def.scope { + SnippetScopeDef::Expr => SnippetScope::Expr, + SnippetScopeDef::Type => SnippetScope::Type, + SnippetScopeDef::Item => SnippetScope::Item, + }; + #[allow(clippy::single_match)] + match Snippet::new( + &def.prefix, + &def.postfix, + &def.body, + def.description.as_ref().unwrap_or(name), + &def.requires, + scope, + ) { + Some(snippet) => config.snippets.push(snippet), + // FIXME + // None => error_sink.0.push(ConfigErrorInner::Json { + // config_key: "".to_owned(), + // error: ::custom(format!( + // "snippet {name} is invalid or triggers are missing", + // )), + // }), + None => (), + } + } + + // FIXME: bring this back + // if config.check_command().is_empty() { + // error_sink.0.push(ConfigErrorInner::Json { + // config_key: "/check/command".to_owned(), + // error: serde_json::Error::custom("expected a non-empty string"), + // }); + // } + (config, should_update) + } + + /// Given `change` this generates a new `Config`, thereby collecting errors of type `ConfigError`. + /// If there are changes that have global/client level effect, the last component of the return type + /// will be set to `true`, which should be used by the `GlobalState` to update itself. + pub fn apply_change(&self, change: ConfigChange) -> (Config, ConfigErrors, bool) { + let (config, should_update) = self.apply_change_with_sink(change); + let e = ConfigErrors( + config + .client_config + .1 + .0 + .iter() + .chain(config.root_ratoml.as_ref().into_iter().flat_map(|it| it.1 .0.iter())) + .chain(config.user_config.as_ref().into_iter().flat_map(|it| it.1 .0.iter())) + .chain(config.ratoml_files.values().flat_map(|it| it.1 .0.iter())) + .cloned() + .collect(), + ); + (config, e, should_update) } } -config_data! { - /// Configs that only make sense when they are set by a client. As such they can only be defined - /// by setting them using client's settings (e.g `settings.json` on VS Code). 
- client: struct ClientDefaultConfigData <- ClientConfigInput -> {} +#[derive(Default, Debug)] +pub struct ConfigChange { + user_config_change: Option>, + root_ratoml_change: Option>, + client_config_change: Option, + ratoml_file_change: Option>)>>, + source_map_change: Option>>, } -#[derive(Debug, Clone)] -pub struct Config { - discovered_projects: Vec, - /// The workspace roots as registered by the LSP client - workspace_roots: Vec, - caps: lsp_types::ClientCapabilities, - root_path: AbsPathBuf, - detached_files: Vec, - snippets: Vec, - visual_studio_code_version: Option, +impl ConfigChange { + pub fn change_ratoml( + &mut self, + source_root: SourceRootId, + vfs_path: VfsPath, + content: Option>, + ) -> Option<(VfsPath, Option>)> { + self.ratoml_file_change + .get_or_insert_with(Default::default) + .insert(source_root, (vfs_path, content)) + } - default_config: DefaultConfigData, - client_config: FullConfigInput, - user_config: GlobalLocalConfigInput, - #[allow(dead_code)] - ratoml_files: FxHashMap, -} + pub fn change_user_config(&mut self, content: Option>) { + assert!(self.user_config_change.is_none()); // Otherwise it is a double write. + self.user_config_change = content; + } -#[derive(Clone, Debug)] -struct RatomlNode { - #[allow(dead_code)] - node: GlobalLocalConfigInput, - #[allow(dead_code)] - parent: Option, + pub fn change_root_ratoml(&mut self, content: Option>) { + assert!(self.root_ratoml_change.is_none()); // Otherwise it is a double write. + self.root_ratoml_change = content; + } + + pub fn change_client_config(&mut self, change: serde_json::Value) { + self.client_config_change = Some(change); + } + + pub fn change_source_root_parent_map( + &mut self, + source_root_map: Arc>, + ) { + assert!(self.source_map_change.is_none()); + self.source_map_change = Some(source_root_map.clone()); + } } macro_rules! 
try_ { @@ -866,27 +1139,39 @@ pub struct ClientCommandsConfig { } #[derive(Debug)] -pub struct ConfigError { - errors: Vec<(String, serde_json::Error)>, +pub enum ConfigErrorInner { + Json { config_key: String, error: serde_json::Error }, + Toml { config_key: String, error: toml::de::Error }, +} + +#[derive(Clone, Debug)] +pub struct ConfigErrors(Vec>); + +impl ConfigErrors { + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } } -impl fmt::Display for ConfigError { +impl fmt::Display for ConfigErrors { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let errors = self.errors.iter().format_with("\n", |(key, e), f| { - f(key)?; - f(&": ")?; - f(e) + let errors = self.0.iter().format_with("\n", |inner, f| match &**inner { + ConfigErrorInner::Json { config_key: key, error: e } => { + f(key)?; + f(&": ")?; + f(e) + } + ConfigErrorInner::Toml { config_key: key, error: e } => { + f(key)?; + f(&": ")?; + f(e) + } }); - write!( - f, - "invalid config value{}:\n{}", - if self.errors.len() == 1 { "" } else { "s" }, - errors - ) + write!(f, "invalid config value{}:\n{}", if self.0.len() == 1 { "" } else { "s" }, errors) } } -impl std::error::Error for ConfigError {} +impl std::error::Error for ConfigErrors {} impl Config { pub fn new( @@ -894,19 +1179,46 @@ impl Config { caps: ClientCapabilities, workspace_roots: Vec, visual_studio_code_version: Option, + user_config_path: Option, ) -> Self { + static DEFAULT_CONFIG_DATA: OnceLock<&'static DefaultConfigData> = OnceLock::new(); + let user_config_path = if let Some(user_config_path) = user_config_path { + user_config_path.join("rust-analyzer").join("rust-analyzer.toml") + } else { + let p = config_dir() + .expect("A config dir is expected to existed on all platforms ra supports.") + .join("rust-analyzer") + .join("rust-analyzer.toml"); + Utf8PathBuf::from_path_buf(p).expect("Config dir expected to be abs.") + }; + + // A user config cannot be a virtual path as rust-analyzer cannot support watching changes in virtual paths. + // See `GlobalState::process_changes` to get more info. + // FIXME @alibektas : Temporary solution. I don't think this is right as at some point we may allow users to specify + // custom USER_CONFIG_PATHs which may also be relative. 
+ let user_config_path = VfsPath::from(AbsPathBuf::assert(user_config_path)); + let root_ratoml_path = { + let mut p = root_path.clone(); + p.push("rust-analyzer.toml"); + VfsPath::new_real_path(p.to_string()) + }; + Config { caps, - detached_files: Vec::new(), discovered_projects: Vec::new(), root_path, snippets: Default::default(), workspace_roots, visual_studio_code_version, - client_config: FullConfigInput::default(), - user_config: GlobalLocalConfigInput::default(), + client_config: (FullConfigInput::default(), ConfigErrors(vec![])), ratoml_files: FxHashMap::default(), - default_config: DefaultConfigData::default(), + default_config: DEFAULT_CONFIG_DATA.get_or_init(|| Box::leak(Box::default())), + source_root_parent_map: Arc::new(FxHashMap::default()), + user_config: None, + user_config_path, + root_ratoml: None, + root_ratoml_path, + detached_files: Default::default(), } } @@ -929,71 +1241,6 @@ impl Config { self.workspace_roots.extend(paths); } - pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigError> { - tracing::info!("updating config from JSON: {:#}", json); - if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) { - return Ok(()); - } - let mut errors = Vec::new(); - self.detached_files = - get_field::>(&mut json, &mut errors, "detachedFiles", None) - .unwrap_or_default() - .into_iter() - .map(AbsPathBuf::assert) - .collect(); - patch_old_style::patch_json_for_outdated_configs(&mut json); - self.client_config = FullConfigInput::from_json(json, &mut errors); - tracing::debug!(?self.client_config, "deserialized config data"); - self.snippets.clear(); - - let snips = self.completion_snippets_custom(None).to_owned(); - - for (name, def) in snips.iter() { - if def.prefix.is_empty() && def.postfix.is_empty() { - continue; - } - let scope = match def.scope { - SnippetScopeDef::Expr => SnippetScope::Expr, - SnippetScopeDef::Type => SnippetScope::Type, - SnippetScopeDef::Item => SnippetScope::Item, - }; - match Snippet::new( - &def.prefix, - &def.postfix, - &def.body, - def.description.as_ref().unwrap_or(name), - &def.requires, - scope, - ) { - Some(snippet) => self.snippets.push(snippet), - None => errors.push(( - format!("snippet {name} is invalid"), - ::custom( - "snippet path is invalid or triggers are missing", - ), - )), - } - } - - self.validate(&mut errors); - - if errors.is_empty() { - Ok(()) - } else { - Err(ConfigError { errors }) - } - } - - fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) { - use serde::de::Error; - if self.check_command().is_empty() { - error_sink.push(( - "/check/command".to_owned(), - serde_json::Error::custom("expected a non-empty string"), - )); - } - } - pub fn json_schema() -> serde_json::Value { FullConfigInput::json_schema() } @@ -1002,12 +1249,12 @@ impl Config { &self.root_path } - pub fn caps(&self) -> &lsp_types::ClientCapabilities { - &self.caps + pub fn root_ratoml_path(&self) -> &VfsPath { + &self.root_ratoml_path } - pub fn detached_files(&self) -> &[AbsPathBuf] { - &self.detached_files + pub fn caps(&self) -> &lsp_types::ClientCapabilities { + &self.caps } } @@ -1018,7 +1265,7 @@ impl Config { allowed: None, insert_use: self.insert_use_config(source_root), prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), - assist_emit_must_use: self.assist_emitMustUse().to_owned(), + assist_emit_must_use: self.assist_emitMustUse(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), term_search_fuel: 
self.assist_termSearch_fuel(source_root).to_owned() as u64, } @@ -1026,17 +1273,13 @@ impl Config { pub fn completion(&self, source_root: Option) -> CompletionConfig { CompletionConfig { - enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(), - enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned() + enable_postfix_completions: self.completion_postfix_enable().to_owned(), + enable_imports_on_the_fly: self.completion_autoimport_enable().to_owned() && completion_item_edit_resolve(&self.caps), - enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(), - enable_private_editable: self.completion_privateEditable_enable(source_root).to_owned(), - enable_term_search: self.completion_termSearch_enable(source_root).to_owned(), - term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64, - full_function_signatures: self - .completion_fullFunctionSignatures_enable(source_root) - .to_owned(), - callable: match self.completion_callable_snippets(source_root) { + enable_self_on_the_fly: self.completion_autoself_enable().to_owned(), + enable_private_editable: self.completion_privateEditable_enable().to_owned(), + full_function_signatures: self.completion_fullFunctionSignatures_enable().to_owned(), + callable: match self.completion_callable_snippets() { CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments), CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses), CallableCompletionDef::None => None, @@ -1055,10 +1298,18 @@ impl Config { prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), snippets: self.snippets.clone().to_vec(), - limit: self.completion_limit(source_root).to_owned(), + limit: self.completion_limit().to_owned(), + enable_term_search: self.completion_termSearch_enable().to_owned(), + term_search_fuel: self.completion_termSearch_fuel().to_owned() as u64, } } + pub fn detached_files(&self) -> &Vec { + // FIXME @alibektas : This is the only config that is confusing. If it's a proper configuration + // why is it not among the others? 
If it's client only which I doubt it is current state should be alright + &self.detached_files + } + pub fn diagnostics(&self, source_root: Option) -> DiagnosticsConfig { DiagnosticsConfig { enabled: *self.diagnostics_enable(), @@ -1066,7 +1317,7 @@ impl Config { proc_macros_enabled: *self.procMacro_enable(), disable_experimental: !self.diagnostics_experimental_enable(), disabled: self.diagnostics_disabled().clone(), - expr_fill_default: match self.assist_expressionFillDefault() { + expr_fill_default: match self.assist_expressionFillDefault(source_root) { ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo, ExprFillDefaultDef::Default => ExprFillDefaultMode::Default, }, @@ -1081,13 +1332,13 @@ impl Config { self.procMacro_enable().to_owned() && self.procMacro_attributes_enable().to_owned() } - pub fn highlight_related(&self, source_root: Option) -> HighlightRelatedConfig { + pub fn highlight_related(&self, _source_root: Option) -> HighlightRelatedConfig { HighlightRelatedConfig { - references: self.highlightRelated_references_enable(source_root).to_owned(), - break_points: self.highlightRelated_breakPoints_enable(source_root).to_owned(), - exit_points: self.highlightRelated_exitPoints_enable(source_root).to_owned(), - yield_points: self.highlightRelated_yieldPoints_enable(source_root).to_owned(), - closure_captures: self.highlightRelated_closureCaptures_enable(source_root).to_owned(), + references: self.highlightRelated_references_enable().to_owned(), + break_points: self.highlightRelated_breakPoints_enable().to_owned(), + exit_points: self.highlightRelated_exitPoints_enable().to_owned(), + yield_points: self.highlightRelated_yieldPoints_enable().to_owned(), + closure_captures: self.highlightRelated_closureCaptures_enable().to_owned(), } } @@ -1141,7 +1392,7 @@ impl Config { } } - pub fn inlay_hints(&self, source_root: Option) -> InlayHintsConfig { + pub fn inlay_hints(&self) -> InlayHintsConfig { let client_capability_fields = self .caps .text_document @@ -1155,74 +1406,65 @@ impl Config { .collect::>(); InlayHintsConfig { - render_colons: self.inlayHints_renderColons(source_root).to_owned(), - type_hints: self.inlayHints_typeHints_enable(source_root).to_owned(), - parameter_hints: self.inlayHints_parameterHints_enable(source_root).to_owned(), - chaining_hints: self.inlayHints_chainingHints_enable(source_root).to_owned(), - discriminant_hints: match self.inlayHints_discriminantHints_enable(source_root) { + render_colons: self.inlayHints_renderColons().to_owned(), + type_hints: self.inlayHints_typeHints_enable().to_owned(), + parameter_hints: self.inlayHints_parameterHints_enable().to_owned(), + chaining_hints: self.inlayHints_chainingHints_enable().to_owned(), + discriminant_hints: match self.inlayHints_discriminantHints_enable() { DiscriminantHintsDef::Always => ide::DiscriminantHints::Always, DiscriminantHintsDef::Never => ide::DiscriminantHints::Never, DiscriminantHintsDef::Fieldless => ide::DiscriminantHints::Fieldless, }, - closure_return_type_hints: match self - .inlayHints_closureReturnTypeHints_enable(source_root) - { + closure_return_type_hints: match self.inlayHints_closureReturnTypeHints_enable() { ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always, ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never, ClosureReturnTypeHintsDef::WithBlock => ide::ClosureReturnTypeHints::WithBlock, }, - lifetime_elision_hints: match self.inlayHints_lifetimeElisionHints_enable(source_root) { + lifetime_elision_hints: match 
self.inlayHints_lifetimeElisionHints_enable() { LifetimeElisionDef::Always => ide::LifetimeElisionHints::Always, LifetimeElisionDef::Never => ide::LifetimeElisionHints::Never, LifetimeElisionDef::SkipTrivial => ide::LifetimeElisionHints::SkipTrivial, }, hide_named_constructor_hints: self - .inlayHints_typeHints_hideNamedConstructor(source_root) + .inlayHints_typeHints_hideNamedConstructor() .to_owned(), hide_closure_initialization_hints: self - .inlayHints_typeHints_hideClosureInitialization(source_root) + .inlayHints_typeHints_hideClosureInitialization() .to_owned(), - closure_style: match self.inlayHints_closureStyle(source_root) { + closure_style: match self.inlayHints_closureStyle() { ClosureStyle::ImplFn => hir::ClosureStyle::ImplFn, ClosureStyle::RustAnalyzer => hir::ClosureStyle::RANotation, ClosureStyle::WithId => hir::ClosureStyle::ClosureWithId, ClosureStyle::Hide => hir::ClosureStyle::Hide, }, - closure_capture_hints: self - .inlayHints_closureCaptureHints_enable(source_root) - .to_owned(), - adjustment_hints: match self.inlayHints_expressionAdjustmentHints_enable(source_root) { + closure_capture_hints: self.inlayHints_closureCaptureHints_enable().to_owned(), + adjustment_hints: match self.inlayHints_expressionAdjustmentHints_enable() { AdjustmentHintsDef::Always => ide::AdjustmentHints::Always, - AdjustmentHintsDef::Never => { - match self.inlayHints_reborrowHints_enable(source_root) { - ReborrowHintsDef::Always | ReborrowHintsDef::Mutable => { - ide::AdjustmentHints::ReborrowOnly - } - ReborrowHintsDef::Never => ide::AdjustmentHints::Never, + AdjustmentHintsDef::Never => match self.inlayHints_reborrowHints_enable() { + ReborrowHintsDef::Always | ReborrowHintsDef::Mutable => { + ide::AdjustmentHints::ReborrowOnly } - } + ReborrowHintsDef::Never => ide::AdjustmentHints::Never, + }, AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly, }, - adjustment_hints_mode: match self.inlayHints_expressionAdjustmentHints_mode(source_root) - { + adjustment_hints_mode: match self.inlayHints_expressionAdjustmentHints_mode() { AdjustmentHintsModeDef::Prefix => ide::AdjustmentHintsMode::Prefix, AdjustmentHintsModeDef::Postfix => ide::AdjustmentHintsMode::Postfix, AdjustmentHintsModeDef::PreferPrefix => ide::AdjustmentHintsMode::PreferPrefix, AdjustmentHintsModeDef::PreferPostfix => ide::AdjustmentHintsMode::PreferPostfix, }, adjustment_hints_hide_outside_unsafe: self - .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe(source_root) + .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe() .to_owned(), - binding_mode_hints: self.inlayHints_bindingModeHints_enable(source_root).to_owned(), + binding_mode_hints: self.inlayHints_bindingModeHints_enable().to_owned(), param_names_for_lifetime_elision_hints: self - .inlayHints_lifetimeElisionHints_useParameterNames(source_root) + .inlayHints_lifetimeElisionHints_useParameterNames() .to_owned(), - max_length: self.inlayHints_maxLength(source_root).to_owned(), - closing_brace_hints_min_lines: if self - .inlayHints_closingBraceHints_enable(source_root) - .to_owned() + max_length: self.inlayHints_maxLength().to_owned(), + closing_brace_hints_min_lines: if self.inlayHints_closingBraceHints_enable().to_owned() { - Some(self.inlayHints_closingBraceHints_minLines(source_root).to_owned()) + Some(self.inlayHints_closingBraceHints_minLines().to_owned()) } else { None }, @@ -1233,10 +1475,8 @@ impl Config { resolve_label_location: client_capability_fields.contains("label.location"), resolve_label_command: 
client_capability_fields.contains("label.command"), }, - implicit_drop_hints: self.inlayHints_implicitDrops_enable(source_root).to_owned(), - range_exclusive_hints: self - .inlayHints_rangeExclusiveHints_enable(source_root) - .to_owned(), + implicit_drop_hints: self.inlayHints_implicitDrops_enable().to_owned(), + range_exclusive_hints: self.inlayHints_rangeExclusiveHints_enable().to_owned(), } } @@ -1260,36 +1500,32 @@ impl Config { } } - pub fn join_lines(&self, source_root: Option) -> JoinLinesConfig { + pub fn join_lines(&self) -> JoinLinesConfig { JoinLinesConfig { - join_else_if: self.joinLines_joinElseIf(source_root).to_owned(), - remove_trailing_comma: self.joinLines_removeTrailingComma(source_root).to_owned(), - unwrap_trivial_blocks: self.joinLines_unwrapTrivialBlock(source_root).to_owned(), - join_assignments: self.joinLines_joinAssignments(source_root).to_owned(), + join_else_if: self.joinLines_joinElseIf().to_owned(), + remove_trailing_comma: self.joinLines_removeTrailingComma().to_owned(), + unwrap_trivial_blocks: self.joinLines_unwrapTrivialBlock().to_owned(), + join_assignments: self.joinLines_joinAssignments().to_owned(), } } - pub fn highlighting_non_standard_tokens(&self, source_root: Option) -> bool { - self.semanticHighlighting_nonStandardTokens(source_root).to_owned() + pub fn highlighting_non_standard_tokens(&self) -> bool { + self.semanticHighlighting_nonStandardTokens().to_owned() } - pub fn highlighting_config(&self, source_root: Option) -> HighlightConfig { + pub fn highlighting_config(&self) -> HighlightConfig { HighlightConfig { - strings: self.semanticHighlighting_strings_enable(source_root).to_owned(), - punctuation: self.semanticHighlighting_punctuation_enable(source_root).to_owned(), + strings: self.semanticHighlighting_strings_enable().to_owned(), + punctuation: self.semanticHighlighting_punctuation_enable().to_owned(), specialize_punctuation: self - .semanticHighlighting_punctuation_specialization_enable(source_root) - .to_owned(), - macro_bang: self - .semanticHighlighting_punctuation_separate_macro_bang(source_root) + .semanticHighlighting_punctuation_specialization_enable() .to_owned(), - operator: self.semanticHighlighting_operator_enable(source_root).to_owned(), + macro_bang: self.semanticHighlighting_punctuation_separate_macro_bang().to_owned(), + operator: self.semanticHighlighting_operator_enable().to_owned(), specialize_operator: self - .semanticHighlighting_operator_specialization_enable(source_root) - .to_owned(), - inject_doc_comment: self - .semanticHighlighting_doc_comment_inject_enable(source_root) + .semanticHighlighting_operator_specialization_enable() .to_owned(), + inject_doc_comment: self.semanticHighlighting_doc_comment_inject_enable().to_owned(), syntactic_name_ref_highlighting: false, } } @@ -2016,7 +2252,7 @@ enum SnippetScopeDef { #[derive(Serialize, Deserialize, Debug, Clone, Default)] #[serde(default)] -struct SnippetDef { +pub(crate) struct SnippetDef { #[serde(with = "single_or_array")] #[serde(skip_serializing_if = "Vec::is_empty")] prefix: Vec, @@ -2111,7 +2347,7 @@ enum ImportGranularityDef { #[derive(Serialize, Deserialize, Debug, Copy, Clone)] #[serde(rename_all = "snake_case")] -enum CallableCompletionDef { +pub(crate) enum CallableCompletionDef { FillArguments, AddParentheses, None, @@ -2318,54 +2554,81 @@ macro_rules! 
_impl_for_config_data { $( $($doc)* #[allow(non_snake_case)] - $vis fn $field(&self, _source_root: Option) -> &$ty { - if let Some(v) = self.client_config.local.$field.as_ref() { - return &v; + $vis fn $field(&self, source_root: Option) -> &$ty { + let mut par: Option = source_root; + while let Some(source_root_id) = par { + par = self.source_root_parent_map.get(&source_root_id).copied(); + if let Some((config, _)) = self.ratoml_files.get(&source_root_id) { + if let Some(value) = config.$field.as_ref() { + return value; + } + } + } + + if let Some((root_path_ratoml, _)) = self.root_ratoml.as_ref() { + if let Some(v) = root_path_ratoml.local.$field.as_ref() { + return &v; + } } - if let Some(v) = self.user_config.local.$field.as_ref() { + if let Some(v) = self.client_config.0.local.$field.as_ref() { return &v; } + if let Some((user_config, _)) = self.user_config.as_ref() { + if let Some(v) = user_config.local.$field.as_ref() { + return &v; + } + } + &self.default_config.local.$field } )* } }; (global, $( - $(#[doc=$doc:literal])* - $vis:vis $field:ident : $ty:ty = $default:expr, - )* - ) => { + $(#[doc=$doc:literal])* + $vis:vis $field:ident : $ty:ty = $default:expr, + )* + ) => { impl Config { $( $($doc)* #[allow(non_snake_case)] $vis fn $field(&self) -> &$ty { - if let Some(v) = self.client_config.global.$field.as_ref() { - return &v; + + if let Some((root_path_ratoml, _)) = self.root_ratoml.as_ref() { + if let Some(v) = root_path_ratoml.global.$field.as_ref() { + return &v; + } } - if let Some(v) = self.user_config.global.$field.as_ref() { + if let Some(v) = self.client_config.0.global.$field.as_ref() { return &v; } + if let Some((user_config, _)) = self.user_config.as_ref() { + if let Some(v) = user_config.global.$field.as_ref() { + return &v; + } + } + &self.default_config.global.$field } )* } }; (client, $( - $(#[doc=$doc:literal])* - $vis:vis $field:ident : $ty:ty = $default:expr, - )* + $(#[doc=$doc:literal])* + $vis:vis $field:ident : $ty:ty = $default:expr, + )* ) => { impl Config { $( $($doc)* #[allow(non_snake_case)] $vis fn $field(&self) -> &$ty { - if let Some(v) = self.client_config.global.$field.as_ref() { + if let Some(v) = self.client_config.0.client.$field.as_ref() { return &v; } @@ -2387,7 +2650,7 @@ macro_rules! _config_data { }) => { /// Default config values for this grouping. #[allow(non_snake_case)] - #[derive(Debug, Clone, Serialize)] + #[derive(Debug, Clone )] struct $name { $($field: $ty,)* } impl_for_config_data!{ @@ -2425,26 +2688,10 @@ macro_rules! _config_data { } } - #[allow(unused)] - impl $name { - /// Applies overrides from some more local config blob, to self. - fn apply_input(&mut self, input: $input) { - $( - if let Some(value) = input.$field { - self.$field = value; - } - )* - } - - fn clone_with_overrides(&self, input: $input) -> Self { - Self {$( - $field: input.$field.unwrap_or_else(|| self.$field.clone()), - )*} - } - } - #[allow(unused, clippy::ptr_arg)] impl $input { + const FIELDS: &'static [&'static str] = &[$(stringify!($field)),*]; + fn from_json(json: &mut serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>) -> Self { Self {$( $field: get_field( @@ -2456,7 +2703,7 @@ macro_rules! _config_data { )*} } - fn from_toml(toml: &mut toml::Table, error_sink: &mut Vec<(String, toml::de::Error)>) -> Self { + fn from_toml(toml: &toml::Table, error_sink: &mut Vec<(String, toml::de::Error)>) -> Self { Self {$( $field: get_field_toml::<$ty>( toml, @@ -2483,8 +2730,7 @@ macro_rules! 
_config_data { mod $modname { #[test] fn fields_are_sorted() { - let field_names: &'static [&'static str] = &[$(stringify!($field)),*]; - field_names.windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1])); + super::$input::FIELDS.windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1])); } } }; @@ -2495,18 +2741,16 @@ use _config_data as config_data; struct DefaultConfigData { global: GlobalDefaultConfigData, local: LocalDefaultConfigData, - #[allow(dead_code)] client: ClientDefaultConfigData, } /// All of the config levels, all fields `Option`, to describe fields that are actually set by /// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to /// all fields being None. -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Serialize)] struct FullConfigInput { global: GlobalConfigInput, local: LocalConfigInput, - #[allow(dead_code)] client: ClientConfigInput, } @@ -2527,7 +2771,6 @@ impl FullConfigInput { GlobalConfigInput::schema_fields(&mut fields); LocalConfigInput::schema_fields(&mut fields); ClientConfigInput::schema_fields(&mut fields); - // HACK: sort the fields, so the diffs on the generated docs/schema are smaller fields.sort_by_key(|&(x, ..)| x); fields } @@ -2545,63 +2788,57 @@ impl FullConfigInput { /// All of the config levels, all fields `Option`, to describe fields that are actually set by /// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to /// all fields being None. -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Serialize)] struct GlobalLocalConfigInput { global: GlobalConfigInput, local: LocalConfigInput, } impl GlobalLocalConfigInput { - #[allow(dead_code)] + const FIELDS: &'static [&'static [&'static str]] = + &[GlobalConfigInput::FIELDS, LocalConfigInput::FIELDS]; fn from_toml( - mut toml: toml::Table, + toml: toml::Table, error_sink: &mut Vec<(String, toml::de::Error)>, ) -> GlobalLocalConfigInput { GlobalLocalConfigInput { - global: GlobalConfigInput::from_toml(&mut toml, error_sink), - local: LocalConfigInput::from_toml(&mut toml, error_sink), + global: GlobalConfigInput::from_toml(&toml, error_sink), + local: LocalConfigInput::from_toml(&toml, error_sink), } } } -fn get_field_toml( - val: &toml::Table, - error_sink: &mut Vec<(String, toml::de::Error)>, +fn get_field( + json: &mut serde_json::Value, + error_sink: &mut Vec<(String, serde_json::Error)>, field: &'static str, alias: Option<&'static str>, ) -> Option { + // XXX: check alias first, to work around the VS Code where it pre-fills the + // defaults instead of sending an empty object. 
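// ---------------------------------------------------------------------------
// Editor's sketches (not part of the diff): two minimal, standalone
// illustrations of the config machinery in this area of the patch. The
// function names are invented; `ide::SourceRootId`, `rustc_hash` and
// `serde_json` are crates the surrounding code already uses.
// ---------------------------------------------------------------------------

// 1. The lookup order the `_impl_for_config_data` macro implements for a
//    "local" field: nearest rust-analyzer.toml up the source-root parent
//    chain, then the workspace-root rust-analyzer.toml, then the client (LSP)
//    config, then the user-level rust-analyzer.toml, then the built-in
//    default. The maps stand in for `ratoml_files` / `source_root_parent_map`.
fn sketch_local_lookup<'a, T>(
    mut source_root: Option<ide::SourceRootId>,
    ratoml_files: &'a rustc_hash::FxHashMap<ide::SourceRootId, T>,
    parent_map: &rustc_hash::FxHashMap<ide::SourceRootId, ide::SourceRootId>,
    root_ratoml: Option<&'a T>,
    client: Option<&'a T>,
    user: Option<&'a T>,
    default: &'a T,
) -> &'a T {
    while let Some(sr) = source_root {
        if let Some(value) = ratoml_files.get(&sr) {
            return value;
        }
        source_root = parent_map.get(&sr).copied();
    }
    root_ratoml.or(client).or(user).unwrap_or(default)
}

// 2. How `get_field` below turns a flattened field name into a JSON pointer
//    and pulls the value out of the client blob; the sample JSON is invented,
//    `pointer_mut` and `Value::take` are real serde_json APIs.
#[test]
fn sketch_json_pointer_lookup() {
    let mut json = serde_json::json!({ "cargo": { "targetDir": "other_folder" } });
    let mut pointer = "cargo_targetDir".replace('_', "/");
    pointer.insert(0, '/'); // "/cargo/targetDir"
    let raw = json.pointer_mut(&pointer).map(serde_json::Value::take);
    assert_eq!(raw, Some(serde_json::json!("other_folder")));
}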
alias .into_iter() .chain(iter::once(field)) .filter_map(move |field| { - let subkeys = field.split('_'); - let mut v = val; - for subkey in subkeys { - if let Some(val) = v.get(subkey) { - if let Some(map) = val.as_table() { - v = map; - } else { - return Some(toml::Value::try_into(val.clone()).map_err(|e| (e, v))); - } - } else { - return None; - } - } - None + let mut pointer = field.replace('_', "/"); + pointer.insert(0, '/'); + json.pointer_mut(&pointer) + .map(|it| serde_json::from_value(it.take()).map_err(|e| (e, pointer))) }) .find(Result::is_ok) .and_then(|res| match res { Ok(it) => Some(it), Err((e, pointer)) => { - error_sink.push((pointer.to_string(), e)); + tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e); + error_sink.push((pointer, e)); None } }) } -fn get_field( - json: &mut serde_json::Value, - error_sink: &mut Vec<(String, serde_json::Error)>, +fn get_field_toml( + toml: &toml::Table, + error_sink: &mut Vec<(String, toml::de::Error)>, field: &'static str, alias: Option<&'static str>, ) -> Option { @@ -2613,8 +2850,8 @@ fn get_field( .filter_map(move |field| { let mut pointer = field.replace('_', "/"); pointer.insert(0, '/'); - json.pointer_mut(&pointer) - .map(|it| serde_json::from_value(it.take()).map_err(|e| (e, pointer))) + toml_pointer(toml, &pointer) + .map(|it| <_>::deserialize(it.clone()).map_err(|e| (e, pointer))) }) .find(Result::is_ok) .and_then(|res| match res { @@ -2627,6 +2864,32 @@ fn get_field( }) } +fn toml_pointer<'a>(toml: &'a toml::Table, pointer: &str) -> Option<&'a toml::Value> { + fn parse_index(s: &str) -> Option { + if s.starts_with('+') || (s.starts_with('0') && s.len() != 1) { + return None; + } + s.parse().ok() + } + + if pointer.is_empty() { + return None; + } + if !pointer.starts_with('/') { + return None; + } + let mut parts = pointer.split('/').skip(1); + let first = parts.next()?; + let init = toml.get(first)?; + parts.map(|x| x.replace("~1", "/").replace("~0", "~")).try_fold(init, |target, token| { + match target { + toml::Value::Table(table) => table.get(&token), + toml::Value::Array(list) => parse_index(&token).and_then(move |x| list.get(x)), + _ => None, + } + }) +} + type SchemaField = (&'static str, &'static str, &'static [&'static str], String); fn schema(fields: &[SchemaField]) -> serde_json::Value { @@ -3000,6 +3263,34 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json map.into() } +fn validate_toml_table( + known_ptrs: &[&[&'static str]], + toml: &toml::Table, + ptr: &mut String, + error_sink: &mut Vec<(String, toml::de::Error)>, +) { + let verify = |ptr: &String| known_ptrs.iter().any(|ptrs| ptrs.contains(&ptr.as_str())); + + let l = ptr.len(); + for (k, v) in toml { + if !ptr.is_empty() { + ptr.push('_'); + } + ptr.push_str(k); + + match v { + // This is a table config, any entry in it is therefor valid + toml::Value::Table(_) if verify(ptr) => (), + toml::Value::Table(table) => validate_toml_table(known_ptrs, table, ptr, error_sink), + _ if !verify(ptr) => error_sink + .push((ptr.replace('_', "/"), toml::de::Error::custom("unexpected field"))), + _ => (), + } + + ptr.truncate(l); + } +} + #[cfg(test)] fn manual(fields: &[SchemaField]) -> String { fields.iter().fold(String::new(), |mut acc, (field, _ty, doc, default)| { @@ -3113,12 +3404,16 @@ mod tests { Default::default(), vec![], None, + None, ); - config - .update(serde_json::json!({ - "procMacro_server": null, - })) - .unwrap(); + + let mut change = ConfigChange::default(); + 
change.change_client_config(serde_json::json!({ + "procMacro" : { + "server": null, + }})); + + (config, _, _) = config.apply_change(change); assert_eq!(config.proc_macro_srv(), None); } @@ -3129,12 +3424,15 @@ mod tests { Default::default(), vec![], None, + None, ); - config - .update(serde_json::json!({ - "procMacro": {"server": project_root().display().to_string()} - })) - .unwrap(); + let mut change = ConfigChange::default(); + change.change_client_config(serde_json::json!({ + "procMacro" : { + "server": project_root().display().to_string(), + }})); + + (config, _, _) = config.apply_change(change); assert_eq!(config.proc_macro_srv(), Some(AbsPathBuf::try_from(project_root()).unwrap())); } @@ -3145,12 +3443,18 @@ mod tests { Default::default(), vec![], None, + None, ); - config - .update(serde_json::json!({ - "procMacro": {"server": "./server"} - })) - .unwrap(); + + let mut change = ConfigChange::default(); + + change.change_client_config(serde_json::json!({ + "procMacro" : { + "server": "./server" + }})); + + (config, _, _) = config.apply_change(change); + assert_eq!( config.proc_macro_srv(), Some(AbsPathBuf::try_from(project_root().join("./server")).unwrap()) @@ -3164,12 +3468,16 @@ mod tests { Default::default(), vec![], None, + None, ); - config - .update(serde_json::json!({ - "rust": { "analyzerTargetDir": null } - })) - .unwrap(); + + let mut change = ConfigChange::default(); + + change.change_client_config(serde_json::json!({ + "rust" : { "analyzerTargetDir" : null } + })); + + (config, _, _) = config.apply_change(change); assert_eq!(config.cargo_targetDir(), &None); assert!( matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir.is_none()) @@ -3183,12 +3491,16 @@ mod tests { Default::default(), vec![], None, + None, ); - config - .update(serde_json::json!({ - "rust": { "analyzerTargetDir": true } - })) - .unwrap(); + + let mut change = ConfigChange::default(); + change.change_client_config(serde_json::json!({ + "rust" : { "analyzerTargetDir" : true } + })); + + (config, _, _) = config.apply_change(change); + assert_eq!(config.cargo_targetDir(), &Some(TargetDirectory::UseSubdirectory(true))); assert!( matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("target/rust-analyzer"))) @@ -3202,12 +3514,16 @@ mod tests { Default::default(), vec![], None, + None, ); - config - .update(serde_json::json!({ - "rust": { "analyzerTargetDir": "other_folder" } - })) - .unwrap(); + + let mut change = ConfigChange::default(); + change.change_client_config(serde_json::json!({ + "rust" : { "analyzerTargetDir" : "other_folder" } + })); + + (config, _, _) = config.apply_change(change); + assert_eq!( config.cargo_targetDir(), &Some(TargetDirectory::Directory(Utf8PathBuf::from("other_folder"))) @@ -3216,4 +3532,95 @@ mod tests { matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("other_folder"))) ); } + + #[test] + fn toml_unknown_key() { + let config = Config::new( + AbsPathBuf::try_from(project_root()).unwrap(), + Default::default(), + vec![], + None, + None, + ); + + let mut change = ConfigChange::default(); + + change.change_root_ratoml(Some( + toml::toml! 
{ + [cargo.cfgs] + these = "these" + should = "should" + be = "be" + valid = "valid" + + [invalid.config] + err = "error" + + [cargo] + target = "ok" + + // FIXME: This should be an error + [cargo.sysroot] + non-table = "expected" + } + .to_string() + .into(), + )); + + let (config, e, _) = config.apply_change(change); + expect_test::expect![[r#" + ConfigErrors( + [ + Toml { + config_key: "invalid/config/err", + error: Error { + inner: Error { + inner: TomlError { + message: "unexpected field", + raw: None, + keys: [], + span: None, + }, + }, + }, + }, + ], + ) + "#]] + .assert_debug_eq(&e); + let mut change = ConfigChange::default(); + + change.change_user_config(Some( + toml::toml! { + [cargo.cfgs] + these = "these" + should = "should" + be = "be" + valid = "valid" + } + .to_string() + .into(), + )); + let (_, e, _) = config.apply_change(change); + expect_test::expect![[r#" + ConfigErrors( + [ + Toml { + config_key: "invalid/config/err", + error: Error { + inner: Error { + inner: TomlError { + message: "unexpected field", + raw: None, + keys: [], + span: None, + }, + }, + }, + }, + ], + ) + "#]] + .assert_debug_eq(&e); + } } diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index 9f1893ff0eba..6798e058dbf5 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs @@ -154,7 +154,7 @@ pub(crate) fn fetch_native_diagnostics( .copied() .filter_map(|file_id| { let line_index = snapshot.file_line_index(file_id).ok()?; - let source_root = snapshot.analysis.source_root(file_id).ok()?; + let source_root = snapshot.analysis.source_root_id(file_id).ok()?; let diagnostics = snapshot .analysis diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index 3d3f94401991..4832e8cab43f 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -547,6 +547,7 @@ mod tests { ClientCapabilities::default(), Vec::new(), None, + None, ), ); let snap = state.snapshot(); diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index f64e66183d1b..59431d7d4208 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -3,13 +3,13 @@ //! //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`. 
-use std::time::Instant; +use std::{ops::Not as _, time::Instant}; use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; use hir::ChangeWithProcMacros; use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId}; -use ide_db::base_db::{CrateId, ProcMacroPaths}; +use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabaseExt}; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; use nohash_hasher::IntMap; @@ -25,13 +25,16 @@ use project_model::{ use rustc_hash::{FxHashMap, FxHashSet}; use tracing::{span, Level}; use triomphe::Arc; -use vfs::{AnchoredPathBuf, Vfs}; +use vfs::{AnchoredPathBuf, ChangeKind, Vfs}; use crate::{ - config::{Config, ConfigError}, + config::{Config, ConfigChange, ConfigErrors}, diagnostics::{CheckFixes, DiagnosticCollection}, line_index::{LineEndings, LineIndex}, - lsp::{from_proto, to_proto::url_from_abs_path}, + lsp::{ + from_proto::{self}, + to_proto::url_from_abs_path, + }, lsp_ext, main_loop::Task, mem_docs::MemDocs, @@ -65,13 +68,13 @@ pub(crate) struct GlobalState { pub(crate) fmt_pool: Handle, Receiver>, pub(crate) config: Arc, - pub(crate) config_errors: Option, + pub(crate) config_errors: Option, pub(crate) analysis_host: AnalysisHost, pub(crate) diagnostics: DiagnosticCollection, pub(crate) mem_docs: MemDocs, pub(crate) source_root_config: SourceRootConfig, /// A mapping that maps a local source root's `SourceRootId` to it parent's `SourceRootId`, if it has one. - pub(crate) local_roots_parent_map: FxHashMap, + pub(crate) local_roots_parent_map: Arc>, pub(crate) semantic_tokens_cache: Arc>>, // status @@ -213,7 +216,7 @@ impl GlobalState { shutdown_requested: false, last_reported_status: None, source_root_config: SourceRootConfig::default(), - local_roots_parent_map: FxHashMap::default(), + local_roots_parent_map: Arc::new(FxHashMap::default()), config_errors: Default::default(), proc_macro_clients: Arc::from_iter([]), @@ -254,6 +257,14 @@ impl GlobalState { pub(crate) fn process_changes(&mut self) -> bool { let _p = span!(Level::INFO, "GlobalState::process_changes").entered(); + + // We cannot directly resolve a change in a ratoml file to a format + // that can be used by the config module because config talks + // in `SourceRootId`s instead of `FileId`s and `FileId` -> `SourceRootId` + // mapping is not ready until `AnalysisHost::apply_changes` has been called. 
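// Editor's sketch (not part of the diff): the filename test used just below to
// spot `rust-analyzer.toml` files in the VFS change stream.
// `VfsPath::new_virtual_path` and `name_and_extension` are existing `vfs`
// APIs; the concrete paths are example values.
fn sketch_is_ratoml(path: &vfs::VfsPath) -> bool {
    matches!(path.name_and_extension(), Some(("rust-analyzer", Some("toml"))))
}

#[test]
fn sketch_is_ratoml_matches() {
    let ratoml = vfs::VfsPath::new_virtual_path("/p1/rust-analyzer.toml".to_owned());
    assert!(sketch_is_ratoml(&ratoml));
    let other = vfs::VfsPath::new_virtual_path("/p1/Cargo.toml".to_owned());
    assert!(!sketch_is_ratoml(&other));
}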
+ let mut modified_ratoml_files: FxHashMap = + FxHashMap::default(); + let (change, modified_rust_files, workspace_structure_change) = { let mut change = ChangeWithProcMacros::new(); let mut guard = self.vfs.write(); @@ -273,6 +284,11 @@ impl GlobalState { let mut modified_rust_files = vec![]; for file in changed_files.into_values() { let vfs_path = vfs.file_path(file.file_id); + if let Some(("rust-analyzer", Some("toml"))) = vfs_path.name_and_extension() { + // Remember ids to use them after `apply_changes` + modified_ratoml_files.insert(file.file_id, (file.kind(), vfs_path.clone())); + } + if let Some(path) = vfs_path.as_path() { has_structure_changes |= file.is_created_or_deleted(); @@ -310,12 +326,15 @@ impl GlobalState { bytes.push((file.file_id, text)); } let (vfs, line_endings_map) = &mut *RwLockUpgradableReadGuard::upgrade(guard); - bytes.into_iter().for_each(|(file_id, text)| match text { - None => change.change_file(file_id, None), - Some((text, line_endings)) => { - line_endings_map.insert(file_id, line_endings); - change.change_file(file_id, Some(text)); - } + bytes.into_iter().for_each(|(file_id, text)| { + let text = match text { + None => None, + Some((text, line_endings)) => { + line_endings_map.insert(file_id, line_endings); + Some(text) + } + }; + change.change_file(file_id, text); }); if has_structure_changes { let roots = self.source_root_config.partition(vfs); @@ -326,6 +345,63 @@ impl GlobalState { let _p = span!(Level::INFO, "GlobalState::process_changes/apply_change").entered(); self.analysis_host.apply_change(change); + if !modified_ratoml_files.is_empty() + || !self.config.same_source_root_parent_map(&self.local_roots_parent_map) + { + let config_change = { + let user_config_path = self.config.user_config_path(); + let root_ratoml_path = self.config.root_ratoml_path(); + let mut change = ConfigChange::default(); + let db = self.analysis_host.raw_database(); + + for (file_id, (_change_kind, vfs_path)) in modified_ratoml_files { + if vfs_path == *user_config_path { + change.change_user_config(Some(db.file_text(file_id))); + continue; + } + + if vfs_path == *root_ratoml_path { + change.change_root_ratoml(Some(db.file_text(file_id))); + continue; + } + + // If change has been made to a ratoml file that + // belongs to a non-local source root, we will ignore it. + // As it doesn't make sense a users to use external config files. + let sr_id = db.file_source_root(file_id); + let sr = db.source_root(sr_id); + if !sr.is_library { + if let Some((old_path, old_text)) = change.change_ratoml( + sr_id, + vfs_path.clone(), + Some(db.file_text(file_id)), + ) { + // SourceRoot has more than 1 RATOML files. In this case lexicographically smaller wins. + if old_path < vfs_path { + span!(Level::ERROR, "Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect."); + // Put the old one back in. + change.change_ratoml(sr_id, old_path, old_text); + } + } + } else { + // Mapping to a SourceRoot should always end up in `Ok` + span!(Level::ERROR, "Mapping to SourceRootId failed."); + } + } + change.change_source_root_parent_map(self.local_roots_parent_map.clone()); + change + }; + + let (config, e, should_update) = self.config.apply_change(config_change); + self.config_errors = e.is_empty().not().then_some(e); + + if should_update { + self.update_configuration(config); + } else { + // No global or client level config was changed. So we can just naively replace config. 
+ self.config = Arc::new(config); + } } { if !matches!(&workspace_structure_change, Some((.., true))) { diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs index 2d5fc9bfacc9..2dbc297ea6c2 100644 --- a/crates/rust-analyzer/src/handlers/notification.rs +++ b/crates/rust-analyzer/src/handlers/notification.rs @@ -1,7 +1,7 @@ //! This module is responsible for implementing handlers for Language Server //! Protocol. This module specifically handles notifications. -use std::ops::Deref; +use std::ops::{Deref, Not as _}; use itertools::Itertools; use lsp_types::{ @@ -13,7 +13,7 @@ use triomphe::Arc; use vfs::{AbsPathBuf, ChangeKind, VfsPath}; use crate::{ - config::Config, + config::{Config, ConfigChange}, global_state::GlobalState, lsp::{from_proto, utils::apply_document_changes}, lsp_ext::{self, RunFlycheckParams}, @@ -71,6 +71,7 @@ pub(crate) fn handle_did_open_text_document( tracing::error!("duplicate DidOpenTextDocument: {}", path); } + tracing::info!("New file content set {:?}", params.text_document.text); state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes())); if state.config.notifications().unindexed_project { tracing::debug!("queuing task"); @@ -196,10 +197,14 @@ pub(crate) fn handle_did_change_configuration( } (None, Some(mut configs)) => { if let Some(json) = configs.get_mut(0) { - // Note that json can be null according to the spec if the client can't - // provide a configuration. This is handled in Config::update below. - let mut config = Config::clone(&*this.config); - this.config_errors = config.update(json.take()).err(); + let config = Config::clone(&*this.config); + let mut change = ConfigChange::default(); + change.change_client_config(json.take()); + + let (config, e, _) = config.apply_change(change); + this.config_errors = e.is_empty().not().then_some(e); + + // A client config change necessitates calling `update_configuration`.
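// Editor's sketch (not part of the diff): the general shape of the update path
// used here and in `main.rs`, `scip.rs` and the test support code.
// `ConfigChange`, `change_client_config`, `apply_change` and
// `ConfigErrors::is_empty` are the APIs introduced by this change; the JSON
// blob and the function name are example values.
fn sketch_apply_client_change(
    config: rust_analyzer::config::Config,
) -> rust_analyzer::config::Config {
    use rust_analyzer::config::ConfigChange;

    let mut change = ConfigChange::default();
    change.change_client_config(serde_json::json!({ "assist": { "emitMustUse": true } }));

    let (new_config, errors, _should_update) = config.apply_change(change);
    assert!(errors.is_empty(), "{errors:?}");
    new_config
}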
this.update_configuration(config); } } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 50d405c9521c..0789dd646239 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -42,6 +42,7 @@ use crate::{ hack_recover_crate_name, line_index::LineEndings, lsp::{ + ext::InternalTestingFetchConfigParams, from_proto, to_proto, utils::{all_edits_are_disjoint, invalid_params_error}, LspError, @@ -367,8 +368,7 @@ pub(crate) fn handle_join_lines( let _p = tracing::info_span!("handle_join_lines").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; - let source_root = snap.analysis.source_root(file_id)?; - let config = snap.config.join_lines(Some(source_root)); + let config = snap.config.join_lines(); let line_index = snap.file_line_index(file_id)?; let mut res = TextEdit::default(); @@ -949,7 +949,7 @@ pub(crate) fn handle_completion( let completion_trigger_character = context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next()); - let source_root = snap.analysis.source_root(position.file_id)?; + let source_root = snap.analysis.source_root_id(position.file_id)?; let completion_config = &snap.config.completion(Some(source_root)); // FIXME: We should fix up the position when retrying the cancelled request instead position.offset = position.offset.min(line_index.index.len()); @@ -997,7 +997,7 @@ pub(crate) fn handle_completion_resolve( let Ok(offset) = from_proto::offset(&line_index, resolve_data.position.position) else { return Ok(original_completion); }; - let source_root = snap.analysis.source_root(file_id)?; + let source_root = snap.analysis.source_root_id(file_id)?; let additional_edits = snap .analysis @@ -1229,7 +1229,7 @@ pub(crate) fn handle_code_action( let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; let frange = from_proto::file_range(&snap, ¶ms.text_document, params.range)?; - let source_root = snap.analysis.source_root(file_id)?; + let source_root = snap.analysis.source_root_id(file_id)?; let mut assists_config = snap.config.assist(Some(source_root)); assists_config.allowed = params @@ -1307,7 +1307,7 @@ pub(crate) fn handle_code_action_resolve( let line_index = snap.file_line_index(file_id)?; let range = from_proto::text_range(&line_index, params.code_action_params.range)?; let frange = FileRange { file_id, range }; - let source_root = snap.analysis.source_root(file_id)?; + let source_root = snap.analysis.source_root_id(file_id)?; let mut assists_config = snap.config.assist(Some(source_root)); assists_config.allowed = params @@ -1460,7 +1460,7 @@ pub(crate) fn handle_document_highlight( let _p = tracing::info_span!("handle_document_highlight").entered(); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let line_index = snap.file_line_index(position.file_id)?; - let source_root = snap.analysis.source_root(position.file_id)?; + let source_root = snap.analysis.source_root_id(position.file_id)?; let refs = match snap .analysis @@ -1511,13 +1511,12 @@ pub(crate) fn handle_inlay_hints( params.range, )?; let line_index = snap.file_line_index(file_id)?; - let source_root = snap.analysis.source_root(file_id)?; let range = TextRange::new( range.start().min(line_index.index.len()), range.end().min(line_index.index.len()), ); - let inlay_hints_config = snap.config.inlay_hints(Some(source_root)); + let inlay_hints_config = 
snap.config.inlay_hints(); Ok(Some( snap.analysis .inlay_hints(&inlay_hints_config, file_id, Some(range))? @@ -1553,9 +1552,8 @@ pub(crate) fn handle_inlay_hints_resolve( let line_index = snap.file_line_index(file_id)?; let hint_position = from_proto::offset(&line_index, original_hint.position)?; - let source_root = snap.analysis.source_root(file_id)?; - let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(Some(source_root)); + let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(); forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty(); let resolve_hints = snap.analysis.inlay_hints_resolve( &forced_resolve_inlay_hints_config, @@ -1687,9 +1685,8 @@ pub(crate) fn handle_semantic_tokens_full( let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let text = snap.analysis.file_text(file_id)?; let line_index = snap.file_line_index(file_id)?; - let source_root = snap.analysis.source_root(file_id)?; - let mut highlight_config = snap.config.highlighting_config(Some(source_root)); + let mut highlight_config = snap.config.highlighting_config(); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. highlight_config.syntactic_name_ref_highlighting = snap.workspaces.is_empty() || !snap.proc_macros_loaded; @@ -1700,7 +1697,7 @@ pub(crate) fn handle_semantic_tokens_full( &line_index, highlights, snap.config.semantics_tokens_augments_syntax_tokens(), - snap.config.highlighting_non_standard_tokens(Some(source_root)), + snap.config.highlighting_non_standard_tokens(), ); // Unconditionally cache the tokens @@ -1718,9 +1715,8 @@ pub(crate) fn handle_semantic_tokens_full_delta( let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let text = snap.analysis.file_text(file_id)?; let line_index = snap.file_line_index(file_id)?; - let source_root = snap.analysis.source_root(file_id)?; - let mut highlight_config = snap.config.highlighting_config(Some(source_root)); + let mut highlight_config = snap.config.highlighting_config(); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. highlight_config.syntactic_name_ref_highlighting = snap.workspaces.is_empty() || !snap.proc_macros_loaded; @@ -1731,7 +1727,7 @@ pub(crate) fn handle_semantic_tokens_full_delta( &line_index, highlights, snap.config.semantics_tokens_augments_syntax_tokens(), - snap.config.highlighting_non_standard_tokens(Some(source_root)), + snap.config.highlighting_non_standard_tokens(), ); let cached_tokens = snap.semantic_tokens_cache.lock().remove(¶ms.text_document.uri); @@ -1762,9 +1758,8 @@ pub(crate) fn handle_semantic_tokens_range( let frange = from_proto::file_range(&snap, ¶ms.text_document, params.range)?; let text = snap.analysis.file_text(frange.file_id)?; let line_index = snap.file_line_index(frange.file_id)?; - let source_root = snap.analysis.source_root(frange.file_id)?; - let mut highlight_config = snap.config.highlighting_config(Some(source_root)); + let mut highlight_config = snap.config.highlighting_config(); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. 
highlight_config.syntactic_name_ref_highlighting = snap.workspaces.is_empty() || !snap.proc_macros_loaded; @@ -1775,7 +1770,7 @@ pub(crate) fn handle_semantic_tokens_range( &line_index, highlights, snap.config.semantics_tokens_augments_syntax_tokens(), - snap.config.highlighting_non_standard_tokens(Some(source_root)), + snap.config.highlighting_non_standard_tokens(), ); Ok(Some(semantic_tokens.into())) } @@ -1991,8 +1986,8 @@ fn goto_type_action_links( snap: &GlobalStateSnapshot, nav_targets: &[HoverGotoTypeData], ) -> Option { - if nav_targets.is_empty() - || !snap.config.hover_actions().goto_type_def + if !snap.config.hover_actions().goto_type_def + || nav_targets.is_empty() || !snap.config.client_commands().goto_location { return None; @@ -2237,6 +2232,30 @@ pub(crate) fn fetch_dependency_list( Ok(FetchDependencyListResult { crates: crate_infos }) } +pub(crate) fn internal_testing_fetch_config( + state: GlobalStateSnapshot, + params: InternalTestingFetchConfigParams, +) -> anyhow::Result { + let source_root = params + .text_document + .map(|it| { + state + .analysis + .source_root_id(from_proto::file_id(&state, &it.uri)?) + .map_err(anyhow::Error::from) + }) + .transpose()?; + serde_json::to_value(match &*params.config { + "local" => state.config.assist(source_root).assist_emit_must_use, + "global" => matches!( + state.config.rustfmt(), + RustfmtConfig::Rustfmt { enable_range_formatting: true, .. } + ), + _ => return Err(anyhow::anyhow!("Unknown test config key: {}", params.config)), + }) + .map_err(Into::into) +} + /// Searches for the directory of a Rust crate given this crate's root file path. /// /// # Arguments diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index 175ffa622ff7..b3c11d0156ed 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -18,7 +18,6 @@ mod cargo_target_spec; mod diagnostics; mod diff; mod dispatch; -mod global_state; mod hack_recover_crate_name; mod line_index; mod main_loop; @@ -40,6 +39,7 @@ pub mod tracing { } pub mod config; +mod global_state; pub mod lsp; use self::lsp::ext as lsp_ext; diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs index aa75633ac356..4da9054d13de 100644 --- a/crates/rust-analyzer/src/lsp/ext.rs +++ b/crates/rust-analyzer/src/lsp/ext.rs @@ -17,6 +17,20 @@ use serde::{Deserialize, Serialize}; use crate::line_index::PositionEncoding; +pub enum InternalTestingFetchConfig {} + +impl Request for InternalTestingFetchConfig { + type Params = InternalTestingFetchConfigParams; + type Result = serde_json::Value; + const METHOD: &'static str = "rust-analyzer-internal/internalTestingFetchConfig"; +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct InternalTestingFetchConfigParams { + pub text_document: Option, + pub config: String, +} pub enum AnalyzerStatus {} impl Request for AnalyzerStatus { diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 864dc9623fd5..9b19e58eaa6f 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -186,6 +186,11 @@ impl GlobalState { scheme: None, pattern: Some("**/Cargo.lock".into()), }, + lsp_types::DocumentFilter { + language: None, + scheme: None, + pattern: Some("**/rust-analyzer.toml".into()), + }, ]), }, }; @@ -474,6 +479,7 @@ impl GlobalState { fn update_diagnostics(&mut self) { let db = self.analysis_host.raw_database(); + // spawn a task per subscription? 
let subscriptions = { let vfs = &self.vfs.read().0; self.mem_docs @@ -971,6 +977,8 @@ impl GlobalState { .on::(handlers::handle_open_docs) .on::(handlers::handle_open_cargo_toml) .on::(handlers::handle_move_item) + // + .on::(handlers::internal_testing_fetch_config) .finish(); } diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index c07adb3c5adc..9d8f2b5fcc9d 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -24,6 +24,7 @@ use ide_db::{ }; use itertools::Itertools; use load_cargo::{load_proc_macro, ProjectFolders}; +use lsp_types::FileSystemWatcher; use proc_macro_api::ProcMacroServer; use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts}; use stdx::{format_to, thread::ThreadIntent}; @@ -442,40 +443,59 @@ impl GlobalState { let filter = self.workspaces.iter().flat_map(|ws| ws.to_roots()).filter(|it| it.is_local); - let watchers = if self.config.did_change_watched_files_relative_pattern_support() { - // When relative patterns are supported by the client, prefer using them - filter - .flat_map(|root| { - root.include.into_iter().flat_map(|base| { - [(base.clone(), "**/*.rs"), (base, "**/Cargo.{lock,toml}")] + let mut watchers: Vec = + if self.config.did_change_watched_files_relative_pattern_support() { + // When relative patterns are supported by the client, prefer using them + filter + .flat_map(|root| { + root.include.into_iter().flat_map(|base| { + [ + (base.clone(), "**/*.rs"), + (base.clone(), "**/Cargo.{lock,toml}"), + (base, "**/rust-analyzer.toml"), + ] + }) }) - }) - .map(|(base, pat)| lsp_types::FileSystemWatcher { - glob_pattern: lsp_types::GlobPattern::Relative( - lsp_types::RelativePattern { - base_uri: lsp_types::OneOf::Right( - lsp_types::Url::from_file_path(base).unwrap(), - ), - pattern: pat.to_owned(), - }, - ), - kind: None, - }) - .collect() - } else { - // When they're not, integrate the base to make them into absolute patterns - filter - .flat_map(|root| { - root.include.into_iter().flat_map(|base| { - [format!("{base}/**/*.rs"), format!("{base}/**/Cargo.{{lock,toml}}")] + .map(|(base, pat)| lsp_types::FileSystemWatcher { + glob_pattern: lsp_types::GlobPattern::Relative( + lsp_types::RelativePattern { + base_uri: lsp_types::OneOf::Right( + lsp_types::Url::from_file_path(base).unwrap(), + ), + pattern: pat.to_owned(), + }, + ), + kind: None, }) - }) + .collect() + } else { + // When they're not, integrate the base to make them into absolute patterns + filter + .flat_map(|root| { + root.include.into_iter().flat_map(|base| { + [ + format!("{base}/**/*.rs"), + format!("{base}/**/Cargo.{{toml,lock}}"), + format!("{base}/**/rust-analyzer.toml"), + ] + }) + }) + .map(|glob_pattern| lsp_types::FileSystemWatcher { + glob_pattern: lsp_types::GlobPattern::String(glob_pattern), + kind: None, + }) + .collect() + }; + + watchers.extend( + iter::once(self.config.user_config_path().to_string()) + .chain(iter::once(self.config.root_ratoml_path().to_string())) .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern: lsp_types::GlobPattern::String(glob_pattern), kind: None, }) - .collect() - }; + .collect::>(), + ); let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { watchers }; @@ -547,7 +567,7 @@ impl GlobalState { version: self.vfs_config_version, }); self.source_root_config = project_folders.source_root_config; - self.local_roots_parent_map = self.source_root_config.source_root_parent_map(); + self.local_roots_parent_map = 
Arc::new(self.source_root_config.source_root_parent_map()); self.recreate_crate_graph(cause); diff --git a/crates/rust-analyzer/src/tracing/config.rs b/crates/rust-analyzer/src/tracing/config.rs index f77d98933044..fcdbf6c69497 100644 --- a/crates/rust-analyzer/src/tracing/config.rs +++ b/crates/rust-analyzer/src/tracing/config.rs @@ -13,6 +13,7 @@ use tracing_tree::HierarchicalLayer; use crate::tracing::hprof; +#[derive(Debug)] pub struct Config { pub writer: T, pub filter: String, diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 43a830501058..f886df60e681 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -11,6 +11,7 @@ #![warn(rust_2018_idioms, unused_lifetimes)] #![allow(clippy::disallowed_types)] +mod ratoml; #[cfg(not(feature = "in-rust-tree"))] mod sourcegen; mod support; @@ -30,15 +31,15 @@ use lsp_types::{ InlayHint, InlayHintLabel, InlayHintParams, PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, }; + use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams, UnindexedProject}; use serde_json::json; use stdx::format_to_acc; + use test_utils::skip_slow_tests; +use testdir::TestDir; -use crate::{ - support::{project, Project}, - testdir::TestDir, -}; +use crate::support::{project, Project}; #[test] fn completes_items_from_standard_library() { diff --git a/crates/rust-analyzer/tests/slow-tests/ratoml.rs b/crates/rust-analyzer/tests/slow-tests/ratoml.rs new file mode 100644 index 000000000000..218a9a32adba --- /dev/null +++ b/crates/rust-analyzer/tests/slow-tests/ratoml.rs @@ -0,0 +1,947 @@ +use crate::support::{Project, Server}; +use crate::testdir::TestDir; +use lsp_types::{ + notification::{DidChangeTextDocument, DidOpenTextDocument, DidSaveTextDocument}, + DidChangeTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, + TextDocumentContentChangeEvent, TextDocumentIdentifier, TextDocumentItem, Url, + VersionedTextDocumentIdentifier, +}; +use paths::Utf8PathBuf; + +use rust_analyzer::lsp::ext::{InternalTestingFetchConfig, InternalTestingFetchConfigParams}; +use serde_json::json; + +enum QueryType { + Local, + /// A query whose config key is a part of the global configs, so that + /// testing for changes to this config means testing if global changes + /// take affect. 
+ Global, +} + +struct RatomlTest { + urls: Vec, + server: Server, + tmp_path: Utf8PathBuf, + user_config_dir: Utf8PathBuf, +} + +impl RatomlTest { + const EMIT_MUST_USE: &'static str = r#"assist.emitMustUse = true"#; + const EMIT_MUST_NOT_USE: &'static str = r#"assist.emitMustUse = false"#; + + const GLOBAL_TRAIT_ASSOC_ITEMS_ZERO: &'static str = r#"hover.show.traitAssocItems = 0"#; + + fn new( + fixtures: Vec<&str>, + roots: Vec<&str>, + client_config: Option, + ) -> Self { + let tmp_dir = TestDir::new(); + let tmp_path = tmp_dir.path().to_owned(); + + let full_fixture = fixtures.join("\n"); + + let user_cnf_dir = TestDir::new(); + let user_config_dir = user_cnf_dir.path().to_owned(); + + let mut project = + Project::with_fixture(&full_fixture).tmp_dir(tmp_dir).user_config_dir(user_cnf_dir); + + for root in roots { + project = project.root(root); + } + + if let Some(client_config) = client_config { + project = project.with_config(client_config); + } + + let server = project.server().wait_until_workspace_is_loaded(); + + let mut case = Self { urls: vec![], server, tmp_path, user_config_dir }; + let urls = fixtures.iter().map(|fixture| case.fixture_path(fixture)).collect::>(); + case.urls = urls; + case + } + + fn fixture_path(&self, fixture: &str) -> Url { + let mut lines = fixture.trim().split('\n'); + + let mut path = + lines.next().expect("All files in a fixture are expected to have at least one line."); + + if path.starts_with("//- minicore") { + path = lines.next().expect("A minicore line must be followed by a path.") + } + + path = path.strip_prefix("//- ").expect("Path must be preceded by a //- prefix "); + + let spl = path[1..].split('/'); + let mut path = self.tmp_path.clone(); + + let mut spl = spl.into_iter(); + if let Some(first) = spl.next() { + if first == "$$CONFIG_DIR$$" { + path = self.user_config_dir.clone(); + } else { + path = path.join(first); + } + } + for piece in spl { + path = path.join(piece); + } + + Url::parse( + format!( + "file://{}", + path.into_string().to_owned().replace("C:\\", "/c:/").replace('\\', "/") + ) + .as_str(), + ) + .unwrap() + } + + fn create(&mut self, fixture_path: &str, text: String) { + let url = self.fixture_path(fixture_path); + + self.server.notification::(DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: url.clone(), + language_id: "rust".to_owned(), + version: 0, + text: String::new(), + }, + }); + + self.server.notification::(DidChangeTextDocumentParams { + text_document: VersionedTextDocumentIdentifier { uri: url, version: 0 }, + content_changes: vec![TextDocumentContentChangeEvent { + range: None, + range_length: None, + text, + }], + }); + } + + fn delete(&mut self, file_idx: usize) { + self.server.notification::(DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: self.urls[file_idx].clone(), + language_id: "rust".to_owned(), + version: 0, + text: "".to_owned(), + }, + }); + + // See if deleting ratoml file will make the config of interest to return to its default value. 
+ self.server.notification::(DidSaveTextDocumentParams { + text_document: TextDocumentIdentifier { uri: self.urls[file_idx].clone() }, + text: Some("".to_owned()), + }); + } + + fn edit(&mut self, file_idx: usize, text: String) { + self.server.notification::(DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: self.urls[file_idx].clone(), + language_id: "rust".to_owned(), + version: 0, + text: String::new(), + }, + }); + + self.server.notification::(DidChangeTextDocumentParams { + text_document: VersionedTextDocumentIdentifier { + uri: self.urls[file_idx].clone(), + version: 0, + }, + content_changes: vec![TextDocumentContentChangeEvent { + range: None, + range_length: None, + text, + }], + }); + } + + fn query(&self, query: QueryType, source_file_idx: usize) -> bool { + let config = match query { + QueryType::Local => "local".to_owned(), + QueryType::Global => "global".to_owned(), + }; + let res = self.server.send_request::( + InternalTestingFetchConfigParams { + text_document: Some(TextDocumentIdentifier { + uri: self.urls[source_file_idx].clone(), + }), + config, + }, + ); + res.as_bool().unwrap() + } +} + +// /// Check if we are listening for changes in user's config file ( e.g on Linux `~/.config/rust-analyzer/.rust-analyzer.toml`) +// #[test] +// #[cfg(target_os = "windows")] +// fn listen_to_user_config_scenario_windows() { +// todo!() +// } + +// #[test] +// #[cfg(target_os = "linux")] +// fn listen_to_user_config_scenario_linux() { +// todo!() +// } + +// #[test] +// #[cfg(target_os = "macos")] +// fn listen_to_user_config_scenario_macos() { +// todo!() +// } + +/// Check if made changes have had any effect on +/// the client config. +#[test] +fn ratoml_client_config_basic() { + let server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#"//- /p1/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + ], + vec!["p1"], + Some(json!({ + "assist" : { + "emitMustUse" : true + } + })), + ); + + assert!(server.query(QueryType::Local, 1)); +} + +/// Checks if client config can be modified. +/// FIXME @alibektas : This test is atm not valid. +/// Asking for client config from the client is a 2 way communication +/// which we cannot imitate with the current slow-tests infrastructure. 
+/// See rust-analyzer::handlers::notifications#197 +// #[test] +// fn client_config_update() { +// setup(); + +// let server = RatomlTest::new( +// vec![ +// r#" +// //- /p1/Cargo.toml +// [package] +// name = "p1" +// version = "0.1.0" +// edition = "2021" +// "#, +// r#" +// //- /p1/src/lib.rs +// enum Value { +// Number(i32), +// Text(String), +// }"#, +// ], +// vec!["p1"], +// None, +// ); + +// assert!(!server.query(QueryType::AssistEmitMustUse, 1)); + +// // a.notification::(DidChangeConfigurationParams { +// // settings: json!({ +// // "assists" : { +// // "emitMustUse" : true +// // } +// // }), +// // }); + +// assert!(server.query(QueryType::AssistEmitMustUse, 1)); +// } + +// #[test] +// fn ratoml_create_ratoml_basic() { +// let server = RatomlTest::new( +// vec![ +// r#" +// //- /p1/Cargo.toml +// [package] +// name = "p1" +// version = "0.1.0" +// edition = "2021" +// "#, +// r#" +// //- /p1/rust-analyzer.toml +// assist.emitMustUse = true +// "#, +// r#" +// //- /p1/src/lib.rs +// enum Value { +// Number(i32), +// Text(String), +// } +// "#, +// ], +// vec!["p1"], +// None, +// ); + +// assert!(server.query(QueryType::AssistEmitMustUse, 2)); +// } + +#[test] +#[ignore = "the user config is currently not being watched on startup, fix this"] +fn ratoml_user_config_detected() { + let server = RatomlTest::new( + vec![ + r#" +//- /$$CONFIG_DIR$$/rust-analyzer/rust-analyzer.toml +assist.emitMustUse = true +"#, + r#" +//- /p1/Cargo.toml +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#"//- /p1/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + ], + vec!["p1"], + None, + ); + + assert!(server.query(QueryType::Local, 2)); +} + +#[test] +#[ignore = "the user config is currently not being watched on startup, fix this"] +fn ratoml_create_user_config() { + let mut server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + ], + vec!["p1"], + None, + ); + + assert!(!server.query(QueryType::Local, 1)); + server.create( + "//- /$$CONFIG_DIR$$/rust-analyzer/rust-analyzer.toml", + RatomlTest::EMIT_MUST_USE.to_owned(), + ); + assert!(server.query(QueryType::Local, 1)); +} + +#[test] +#[ignore = "the user config is currently not being watched on startup, fix this"] +fn ratoml_modify_user_config() { + let mut server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +[package] +name = "p1" +version = "0.1.0" +edition = "2021""#, + r#" +//- /p1/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + r#" +//- /$$CONFIG_DIR$$/rust-analyzer/rust-analyzer.toml +assist.emitMustUse = true"#, + ], + vec!["p1"], + None, + ); + + assert!(server.query(QueryType::Local, 1)); + server.edit(2, String::new()); + assert!(!server.query(QueryType::Local, 1)); +} + +#[test] +#[ignore = "the user config is currently not being watched on startup, fix this"] +fn ratoml_delete_user_config() { + let mut server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +[package] +name = "p1" +version = "0.1.0" +edition = "2021""#, + r#" +//- /p1/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + r#" +//- /$$CONFIG_DIR$$/rust-analyzer/rust-analyzer.toml +assist.emitMustUse = true"#, + ], + vec!["p1"], + None, + ); + + assert!(server.query(QueryType::Local, 1)); + server.delete(2); + assert!(!server.query(QueryType::Local, 1)); +} +// #[test] +// fn delete_user_config() { +// todo!() +// } + +// #[test] +// fn 
modify_client_config() { +// todo!() +// } + +#[test] +fn ratoml_inherit_config_from_ws_root() { + let server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +workspace = { members = ["p2"] } +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/rust-analyzer.toml +assist.emitMustUse = true +"#, + r#" +//- /p1/p2/Cargo.toml +[package] +name = "p2" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/p2/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + r#" +//- /p1/src/lib.rs +pub fn add(left: usize, right: usize) -> usize { + left + right +} +"#, + ], + vec!["p1"], + None, + ); + + assert!(server.query(QueryType::Local, 3)); +} + +#[test] +fn ratoml_modify_ratoml_at_ws_root() { + let mut server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +workspace = { members = ["p2"] } +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/rust-analyzer.toml +assist.emitMustUse = false +"#, + r#" +//- /p1/p2/Cargo.toml +[package] +name = "p2" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/p2/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + r#" +//- /p1/src/lib.rs +pub fn add(left: usize, right: usize) -> usize { + left + right +} +"#, + ], + vec!["p1"], + None, + ); + + assert!(!server.query(QueryType::Local, 3)); + server.edit(1, "assist.emitMustUse = true".to_owned()); + assert!(server.query(QueryType::Local, 3)); +} + +#[test] +fn ratoml_delete_ratoml_at_ws_root() { + let mut server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +workspace = { members = ["p2"] } +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/rust-analyzer.toml +assist.emitMustUse = true +"#, + r#" +//- /p1/p2/Cargo.toml +[package] +name = "p2" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/p2/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + r#" +//- /p1/src/lib.rs +pub fn add(left: usize, right: usize) -> usize { + left + right +} +"#, + ], + vec!["p1"], + None, + ); + + assert!(server.query(QueryType::Local, 3)); + server.delete(1); + assert!(!server.query(QueryType::Local, 3)); +} + +#[test] +fn ratoml_add_immediate_child_to_ws_root() { + let mut server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +workspace = { members = ["p2"] } +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/rust-analyzer.toml +assist.emitMustUse = true +"#, + r#" +//- /p1/p2/Cargo.toml +[package] +name = "p2" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/p2/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + r#" +//- /p1/src/lib.rs +pub fn add(left: usize, right: usize) -> usize { + left + right +} +"#, + ], + vec!["p1"], + None, + ); + + assert!(server.query(QueryType::Local, 3)); + server.create("//- /p1/p2/rust-analyzer.toml", RatomlTest::EMIT_MUST_NOT_USE.to_owned()); + assert!(!server.query(QueryType::Local, 3)); +} + +#[test] +fn ratoml_rm_ws_root_ratoml_child_has_client_as_parent_now() { + let mut server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +workspace = { members = ["p2"] } +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/rust-analyzer.toml +assist.emitMustUse = true +"#, + r#" +//- /p1/p2/Cargo.toml +[package] +name = "p2" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/p2/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + r#" +//- /p1/src/lib.rs +pub fn add(left: usize, right: usize) -> usize { + left + right +} +"#, + ], + vec!["p1"], + None, + ); + 
+ assert!(server.query(QueryType::Local, 3)); + server.delete(1); + assert!(!server.query(QueryType::Local, 3)); +} + +#[test] +fn ratoml_crates_both_roots() { + let server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +workspace = { members = ["p2"] } +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/rust-analyzer.toml +assist.emitMustUse = true +"#, + r#" +//- /p1/p2/Cargo.toml +[package] +name = "p2" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/p2/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + r#" +//- /p1/src/lib.rs +enum Value { + Number(i32), + Text(String), +}"#, + ], + vec!["p1", "p2"], + None, + ); + + assert!(server.query(QueryType::Local, 3)); + assert!(server.query(QueryType::Local, 4)); +} + +#[test] +fn ratoml_multiple_ratoml_in_single_source_root() { + let server = RatomlTest::new( + vec![ + r#" + //- /p1/Cargo.toml + [package] + name = "p1" + version = "0.1.0" + edition = "2021" + "#, + r#" + //- /p1/rust-analyzer.toml + assist.emitMustUse = true + "#, + r#" + //- /p1/src/rust-analyzer.toml + assist.emitMustUse = false + "#, + r#" + //- /p1/src/lib.rs + enum Value { + Number(i32), + Text(String), + } + "#, + ], + vec!["p1"], + None, + ); + + assert!(server.query(QueryType::Local, 3)); + + let server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +[package] +name = "p1" +version = "0.1.0" +edition = "2021" +"#, + r#" +//- /p1/src/rust-analyzer.toml +assist.emitMustUse = false +"#, + r#" +//- /p1/rust-analyzer.toml +assist.emitMustUse = true +"#, + r#" +//- /p1/src/lib.rs +enum Value { + Number(i32), + Text(String), +} +"#, + ], + vec!["p1"], + None, + ); + + assert!(server.query(QueryType::Local, 3)); +} + +/// If a root is non-local, so we cannot find what its parent is +/// in our `config.local_root_parent_map`. So if any config should +/// apply, it must be looked for starting from the client level. +/// FIXME @alibektas : "locality" is according to ra that, which is simply in the file system. +/// This doesn't really help us with what we want to achieve here. +// #[test] +// fn ratoml_non_local_crates_start_inheriting_from_client() { +// let server = RatomlTest::new( +// vec![ +// r#" +// //- /p1/Cargo.toml +// [package] +// name = "p1" +// version = "0.1.0" +// edition = "2021" + +// [dependencies] +// p2 = { path = "../p2" } +// #, +// r#" +// //- /p1/src/lib.rs +// enum Value { +// Number(i32), +// Text(String), +// } + +// use p2; + +// pub fn add(left: usize, right: usize) -> usize { +// p2::add(left, right) +// } + +// #[cfg(test)] +// mod tests { +// use super::*; + +// #[test] +// fn it_works() { +// let result = add(2, 2); +// assert_eq!(result, 4); +// } +// }"#, +// r#" +// //- /p2/Cargo.toml +// [package] +// name = "p2" +// version = "0.1.0" +// edition = "2021" + +// # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +// [dependencies] +// "#, +// r#" +// //- /p2/rust-analyzer.toml +// # DEF +// assist.emitMustUse = true +// "#, +// r#" +// //- /p2/src/lib.rs +// enum Value { +// Number(i32), +// Text(String), +// }"#, +// ], +// vec!["p1", "p2"], +// None, +// ); + +// assert!(!server.query(QueryType::AssistEmitMustUse, 5)); +// } + +/// Having a ratoml file at the root of a project enables +/// configuring global level configurations as well. 
+#[test] +fn ratoml_in_root_is_global() { + let server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +[package] +name = "p1" +version = "0.1.0" +edition = "2021" + "#, + r#" +//- /rust-analyzer.toml +hover.show.traitAssocItems = 4 + "#, + r#" +//- /p1/src/lib.rs +trait RandomTrait { + type B; + fn abc() -> i32; + fn def() -> i64; +} + +fn main() { + let a = RandomTrait; +}"#, + ], + vec![], + None, + ); + + server.query(QueryType::Global, 2); +} + +#[allow(unused)] +// #[test] +// FIXME: Re-enable this test when we have a global config we can check again +fn ratoml_root_is_updateable() { + let mut server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +[package] +name = "p1" +version = "0.1.0" +edition = "2021" + "#, + r#" +//- /rust-analyzer.toml +hover.show.traitAssocItems = 4 + "#, + r#" +//- /p1/src/lib.rs +trait RandomTrait { + type B; + fn abc() -> i32; + fn def() -> i64; +} + +fn main() { + let a = RandomTrait; +}"#, + ], + vec![], + None, + ); + + assert!(server.query(QueryType::Global, 2)); + server.edit(1, RatomlTest::GLOBAL_TRAIT_ASSOC_ITEMS_ZERO.to_owned()); + assert!(!server.query(QueryType::Global, 2)); +} + +#[allow(unused)] +// #[test] +// FIXME: Re-enable this test when we have a global config we can check again +fn ratoml_root_is_deletable() { + let mut server = RatomlTest::new( + vec![ + r#" +//- /p1/Cargo.toml +[package] +name = "p1" +version = "0.1.0" +edition = "2021" + "#, + r#" +//- /rust-analyzer.toml +hover.show.traitAssocItems = 4 + "#, + r#" +//- /p1/src/lib.rs +trait RandomTrait { + type B; + fn abc() -> i32; + fn def() -> i64; +} + +fn main() { + let a = RandomTrait; +}"#, + ], + vec![], + None, + ); + + assert!(server.query(QueryType::Global, 2)); + server.delete(1); + assert!(!server.query(QueryType::Global, 2)); +} diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index cf27cc7eeff6..c43832553230 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -9,7 +9,10 @@ use crossbeam_channel::{after, select, Receiver}; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url}; use paths::{Utf8Path, Utf8PathBuf}; -use rust_analyzer::{config::Config, lsp, main_loop}; +use rust_analyzer::{ + config::{Config, ConfigChange, ConfigErrors}, + lsp, main_loop, +}; use serde::Serialize; use serde_json::{json, to_string_pretty, Value}; use test_utils::FixtureWithProjectMeta; @@ -24,6 +27,7 @@ pub(crate) struct Project<'a> { roots: Vec, config: serde_json::Value, root_dir_contains_symlink: bool, + user_config_path: Option, } impl Project<'_> { @@ -47,9 +51,15 @@ impl Project<'_> { } }), root_dir_contains_symlink: false, + user_config_path: None, } } + pub(crate) fn user_config_dir(mut self, config_path_dir: TestDir) -> Self { + self.user_config_path = Some(config_path_dir.path().to_owned()); + self + } + pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Self { self.tmp_dir = Some(tmp_dir); self @@ -111,10 +121,17 @@ impl Project<'_> { assert!(proc_macro_names.is_empty()); assert!(mini_core.is_none()); assert!(toolchain.is_none()); + for entry in fixture { - let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]); - fs::create_dir_all(path.parent().unwrap()).unwrap(); - fs::write(path.as_path(), entry.text.as_bytes()).unwrap(); + if let Some(pth) = entry.path.strip_prefix("/$$CONFIG_DIR$$") { + let path = 
self.user_config_path.clone().unwrap().join(&pth['/'.len_utf8()..]); + fs::create_dir_all(path.parent().unwrap()).unwrap(); + fs::write(path.as_path(), entry.text.as_bytes()).unwrap(); + } else { + let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]); + fs::create_dir_all(path.parent().unwrap()).unwrap(); + fs::write(path.as_path(), entry.text.as_bytes()).unwrap(); + } } let tmp_dir_path = AbsPathBuf::assert(tmp_dir.path().to_path_buf()); @@ -184,8 +201,16 @@ impl Project<'_> { }, roots, None, + self.user_config_path, ); - config.update(self.config).expect("invalid config"); + let mut change = ConfigChange::default(); + + change.change_client_config(self.config); + + let error_sink: ConfigErrors; + (config, error_sink, _) = config.apply_change(change); + assert!(error_sink.is_empty(), "{error_sink:?}"); + config.rediscover_workspaces(); Server::new(tmp_dir.keep(), config) diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 4a7415b016da..7dd6382cfac6 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -185,27 +185,6 @@ Zlib OR Apache-2.0 OR MIT } fn check_test_attrs(path: &Path, text: &str) { - let ignore_rule = - "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore"; - let need_ignore: &[&str] = &[ - // This file. - "slow-tests/tidy.rs", - // Special case to run `#[ignore]` tests. - "ide/src/runnables.rs", - // A legit test which needs to be ignored, as it takes too long to run - // :( - "hir-def/src/nameres/collector.rs", - // Long sourcegen test to generate lint completions. - "ide-db/src/tests/sourcegen_lints.rs", - // Obviously needs ignore. - "ide-assists/src/handlers/toggle_ignore.rs", - // See above. - "ide-assists/src/tests/generated.rs", - ]; - if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) { - panic!("\ndon't `#[ignore]` tests, see:\n\n {ignore_rule}\n\n {}\n", path.display(),) - } - let panic_rule = "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic"; let need_panic: &[&str] = &[ diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index 1c91e856e72e..100662f4cebd 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@
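// Editor's sketch (not part of the diff): the wire shape implied by
// `InternalTestingFetchConfigParams` above, for the new
// `rust-analyzer-internal/internalTestingFetchConfig` request. The URI and the
// "local" key are example values; the test name is invented.
#[test]
fn sketch_internal_testing_fetch_config_params_shape() {
    let params = rust_analyzer::lsp::ext::InternalTestingFetchConfigParams {
        text_document: Some(lsp_types::TextDocumentIdentifier {
            uri: lsp_types::Url::parse("file:///p1/src/lib.rs").unwrap(),
        }),
        config: "local".to_owned(),
    };
    assert_eq!(
        serde_json::to_value(&params).unwrap(),
        serde_json::json!({
            "textDocument": { "uri": "file:///p1/src/lib.rs" },
            "config": "local"
        })
    );
}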