diff --git a/Cargo.lock b/Cargo.lock index 898b5599561211..615abf0f009799 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1312,9 +1312,9 @@ dependencies = [ [[package]] name = "deno_config" -version = "0.23.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90684d387a893a3318569b8bb548e2f9291f86f2909f5349dd9d2b97c83fdb18" +checksum = "d6b72d6f849f5640ed80dc458b529aec74a79bdabdebe291448d345f865faa99" dependencies = [ "anyhow", "deno_semver", diff --git a/Cargo.toml b/Cargo.toml index 6c4ca95671d009..492ddc47f5a499 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -101,7 +101,7 @@ console_static_text = "=0.8.1" data-encoding = "2.3.3" data-url = "=0.3.0" deno_cache_dir = "=0.10.0" -deno_config = { version = "=0.23.0", default-features = false } +deno_config = { version = "=0.24.0", default-features = false } dlopen2 = "0.6.1" ecb = "=0.1.2" elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] } diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 8b1b8e0c3b4429..2c87be03949128 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -500,6 +500,22 @@ fn resolve_lint_rules_options( } } +pub fn discover_npmrc_from_workspace( + workspace: &Workspace, +) -> Result<(Arc, Option), AnyError> { + let root_folder = workspace.root_folder().1; + discover_npmrc( + root_folder.pkg_json.as_ref().map(|p| p.path.clone()), + root_folder.deno_json.as_ref().and_then(|cf| { + if cf.specifier.scheme() == "file" { + Some(cf.specifier.to_file_path().unwrap()) + } else { + None + } + }), + ) +} + /// Discover `.npmrc` file - currently we only support it next to `package.json` /// or next to `deno.json`. /// @@ -846,6 +862,7 @@ impl CliOptions { } WorkspaceDiscoverOptions { fs: &config_fs_adapter, + deno_json_cache: None, pkg_json_cache: Some( &deno_runtime::deno_node::PackageJsonThreadLocalCache, ), @@ -890,17 +907,7 @@ impl CliOptions { log::warn!("{} {}", colors::yellow("Warning"), diagnostic); } - let root_folder = workspace.root_folder().1; - let (npmrc, _) = discover_npmrc( - root_folder.pkg_json.as_ref().map(|p| p.path.clone()), - root_folder.deno_json.as_ref().and_then(|cf| { - if cf.specifier.scheme() == "file" { - Some(cf.specifier.to_file_path().unwrap()) - } else { - None - } - }), - )?; + let (npmrc, _) = discover_npmrc_from_workspace(&workspace)?; let maybe_lock_file = CliLockfile::discover(&flags, &workspace)?; diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs index f02ba6409415a4..17fb0f719c8427 100644 --- a/cli/lsp/completions.rs +++ b/cli/lsp/completions.rs @@ -804,7 +804,7 @@ mod tests { fs_sources: &[(&str, &str)], ) -> Documents { let temp_dir = TempDir::new(); - let cache = LspCache::new(Some(temp_dir.uri())); + let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap())); let mut documents = Documents::default(); documents.update_config( &Default::default(), diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index 861a63d0c7675f..5b549fc39b1989 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -1,22 +1,27 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
-use super::logging::lsp_log; -use crate::args::discover_npmrc; -use crate::args::CliLockfile; -use crate::args::ConfigFile; -use crate::args::FmtOptions; -use crate::args::LintOptions; -use crate::args::DENO_FUTURE; -use crate::cache::FastInsecureHasher; -use crate::file_fetcher::FileFetcher; -use crate::lsp::logging::lsp_warn; -use crate::tools::lint::get_configured_rules; -use crate::tools::lint::ConfiguredRules; -use crate::util::fs::canonicalize_path_maybe_not_exists; use deno_ast::MediaType; +use deno_config::fs::DenoConfigFs; +use deno_config::fs::RealDenoConfigFs; +use deno_config::glob::FilePatterns; +use deno_config::glob::PathOrPatternSet; +use deno_config::package_json::PackageJsonCache; +use deno_config::workspace::CreateResolverOptions; +use deno_config::workspace::PackageJsonDepResolution; +use deno_config::workspace::SpecifiedImportMap; +use deno_config::workspace::VendorEnablement; +use deno_config::workspace::Workspace; +use deno_config::workspace::WorkspaceDiscoverOptions; +use deno_config::workspace::WorkspaceEmptyOptions; +use deno_config::workspace::WorkspaceMemberContext; +use deno_config::workspace::WorkspaceResolver; +use deno_config::DenoJsonCache; +use deno_config::FmtConfig; use deno_config::FmtOptionsConfig; use deno_config::TsConfig; -use deno_core::normalize_path; +use deno_core::anyhow::anyhow; +use deno_core::error::AnyError; +use deno_core::parking_lot::Mutex; use deno_core::serde::de::DeserializeOwned; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; @@ -24,25 +29,33 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::ModuleSpecifier; -use deno_lint::linter::LintConfig; use deno_npm::npm_rc::ResolvedNpmRc; -use deno_runtime::deno_fs::DenoConfigFsAdapter; -use deno_runtime::deno_fs::RealFs; use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::fs_util::specifier_to_file_path; -use deno_semver::package::PackageNv; -use deno_semver::Version; -use import_map::ImportMap; use indexmap::IndexSet; use lsp::Url; use lsp_types::ClientCapabilities; use std::collections::BTreeMap; use std::collections::HashMap; +use std::path::Path; use std::path::PathBuf; use std::sync::Arc; use tower_lsp::lsp_types as lsp; +use super::logging::lsp_log; +use crate::args::discover_npmrc_from_workspace; +use crate::args::has_flag_env_var; +use crate::args::CliLockfile; +use crate::args::ConfigFile; +use crate::args::DENO_FUTURE; +use crate::cache::FastInsecureHasher; +use crate::file_fetcher::FileFetcher; +use crate::lsp::logging::lsp_warn; +use crate::tools::lint::get_configured_rules; +use crate::tools::lint::ConfiguredRules; +use crate::util::fs::canonicalize_path_maybe_not_exists; + pub const SETTINGS_SECTION: &str = "deno"; fn is_true() -> bool { @@ -934,10 +947,10 @@ impl Config { } pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> bool { - let config_file = self.tree.config_file_for_specifier(specifier); - if let Some(cf) = config_file { - if let Ok(files) = cf.to_exclude_files_config() { - if !files.matches_specifier(specifier) { + let data = self.tree.data_for_specifier(specifier); + if let Some(data) = &data { + if let Ok(path) = specifier.to_file_path() { + if data.exclude_files.matches_path(&path) { return false; } } @@ -945,18 +958,16 @@ impl Config { self .settings .specifier_enabled(specifier) - .unwrap_or_else(|| config_file.is_some()) + .unwrap_or_else(|| data.and_then(|d| d.maybe_deno_json()).is_some()) } pub fn 
specifier_enabled_for_test(
     &self,
     specifier: &ModuleSpecifier,
   ) -> bool {
-    if let Some(cf) = self.tree.config_file_for_specifier(specifier) {
-      if let Ok(options) = cf.to_test_config() {
-        if !options.files.matches_specifier(specifier) {
-          return false;
-        }
+    if let Some(data) = self.tree.data_for_specifier(specifier) {
+      if !data.test_config.files.matches_specifier(specifier) {
+        return false;
       }
     }
     self.specifier_enabled(specifier)
   }
@@ -1083,16 +1094,11 @@ impl LspTsConfig {
   }
 }
 
-#[derive(Debug, Clone)]
-pub struct LspPackageConfig {
-  pub nv: PackageNv,
-  pub exports: Value,
-}
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum ConfigWatchedFileType {
   DenoJson,
   Lockfile,
+  NpmRc,
   PackageJson,
   ImportMap,
 }
@@ -1100,495 +1106,455 @@ pub enum ConfigWatchedFileType {
 /// Contains the config file and dependent information.
 #[derive(Debug, Clone)]
 pub struct ConfigData {
-  pub scope: ModuleSpecifier,
-  pub config_file: Option<Arc<ConfigFile>>,
-  pub fmt_options: Arc<FmtOptions>,
-  pub lint_options: Arc<LintOptions>,
-  pub lint_config: LintConfig,
+  pub scope: Arc<ModuleSpecifier>,
+  pub workspace: Arc<Workspace>,
+  pub member_ctx: Arc<WorkspaceMemberContext>,
+  pub fmt_config: Arc<FmtConfig>,
+  pub lint_config: Arc<deno_config::LintConfig>,
+  pub test_config: Arc<deno_config::TestConfig>,
+  pub exclude_files: Arc<PathOrPatternSet>,
+  pub deno_lint_config: deno_lint::linter::LintConfig,
   pub lint_rules: Arc<ConfiguredRules>,
   pub ts_config: Arc<LspTsConfig>,
   pub byonm: bool,
   pub node_modules_dir: Option<PathBuf>,
   pub vendor_dir: Option<PathBuf>,
   pub lockfile: Option<Arc<CliLockfile>>,
-  pub package_json: Option<Arc<PackageJson>>,
   pub npmrc: Option<Arc<ResolvedNpmRc>>,
-  pub import_map: Option<Arc<ImportMap>>,
-  pub import_map_from_settings: bool,
-  pub package_config: Option<Arc<LspPackageConfig>>,
-  pub is_workspace_root: bool,
-  pub workspace_root_dir: ModuleSpecifier,
-  /// Workspace member directories. For a workspace root this will be a list of
-  /// members. For a member this will be the same list, representing self and
-  /// siblings. For a solitary package this will be `vec![self.scope]`. These
-  /// are the list of packages to override with local resolutions for this
-  /// config scope.
- pub workspace_members: Arc>, + pub resolver: Arc, + pub import_map_from_settings: Option, watched_files: HashMap, } impl ConfigData { async fn load( - config_file_specifier: Option<&ModuleSpecifier>, + specified_config: Option<&Path>, scope: &ModuleSpecifier, - workspace_root: Option<&ConfigData>, settings: &Settings, - file_fetcher: Option<&Arc>, + file_fetcher: &Arc, + // sync requirement is because the lsp requires sync + cached_deno_config_fs: &(dyn DenoConfigFs + Sync), + deno_json_cache: &(dyn DenoJsonCache + Sync), + pkg_json_cache: &(dyn PackageJsonCache + Sync), ) -> Self { - if let Some(specifier) = config_file_specifier { - match ConfigFile::from_specifier( - &DenoConfigFsAdapter::new(&RealFs), - specifier.clone(), - &deno_config::ConfigParseOptions::default(), - ) { - Ok(config_file) => { - lsp_log!( - " Resolved Deno configuration file: \"{}\"", - config_file.specifier.as_str() - ); - Self::load_inner( - Some(config_file), - scope, - workspace_root, - settings, - file_fetcher, - ) - .await - } - Err(err) => { - lsp_warn!( - " Couldn't read Deno configuration file \"{}\": {}", - specifier.as_str(), - err - ); - let mut data = Self::load_inner( - None, - scope, - workspace_root, - settings, - file_fetcher, - ) - .await; - data - .watched_files - .insert(specifier.clone(), ConfigWatchedFileType::DenoJson); - let canonicalized_specifier = specifier - .to_file_path() - .ok() - .and_then(|p| canonicalize_path_maybe_not_exists(&p).ok()) - .and_then(|p| ModuleSpecifier::from_file_path(p).ok()); - if let Some(specifier) = canonicalized_specifier { - data - .watched_files - .insert(specifier, ConfigWatchedFileType::DenoJson); + let scope = Arc::new(scope.clone()); + let discover_result = match scope.to_file_path() { + Ok(scope_dir_path) => { + let paths = [scope_dir_path]; + Workspace::discover( + match specified_config { + Some(config_path) => { + deno_config::workspace::WorkspaceDiscoverStart::ConfigFile( + config_path, + ) + } + None => { + deno_config::workspace::WorkspaceDiscoverStart::Paths(&paths) + } + }, + &WorkspaceDiscoverOptions { + fs: cached_deno_config_fs, + additional_config_file_names: &[], + deno_json_cache: Some(deno_json_cache), + pkg_json_cache: Some(pkg_json_cache), + discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"), + config_parse_options: Default::default(), + maybe_vendor_override: None, + }, + ) + .map(Arc::new) + .map_err(AnyError::from) + } + Err(()) => Err(anyhow!("Scope '{}' was not a directory path.", scope)), + }; + match discover_result { + Ok(workspace) => { + Self::load_inner(workspace, scope, settings, Some(file_fetcher)).await + } + Err(err) => { + lsp_warn!(" Couldn't open workspace \"{}\": {}", scope.as_str(), err); + let workspace = Arc::new(Workspace::empty(WorkspaceEmptyOptions { + root_dir: scope.clone(), + use_vendor_dir: VendorEnablement::Disable, + })); + let mut data = Self::load_inner( + workspace, + scope.clone(), + settings, + Some(file_fetcher), + ) + .await; + // check if any of these need to be added to the workspace + let files = [ + ( + scope.join("deno.json").unwrap(), + ConfigWatchedFileType::DenoJson, + ), + ( + scope.join("deno.jsonc").unwrap(), + ConfigWatchedFileType::DenoJson, + ), + ( + scope.join("package.json").unwrap(), + ConfigWatchedFileType::PackageJson, + ), + ]; + for (url, file_type) in files { + let Some(file_path) = url.to_file_path().ok() else { + continue; + }; + if file_path.exists() { + data.watched_files.insert(url.clone(), file_type); + let canonicalized_specifier = + 
canonicalize_path_maybe_not_exists(&file_path) + .ok() + .and_then(|p| ModuleSpecifier::from_file_path(p).ok()); + if let Some(specifier) = canonicalized_specifier { + data.watched_files.insert(specifier, file_type); + } } - data } + data } - } else { - Self::load_inner(None, scope, workspace_root, settings, file_fetcher) - .await } } async fn load_inner( - config_file: Option, - scope: &ModuleSpecifier, - workspace_root: Option<&ConfigData>, + workspace: Arc, + scope: Arc, settings: &Settings, file_fetcher: Option<&Arc>, ) -> Self { - let (settings, workspace_folder) = settings.get_for_specifier(scope); - let mut watched_files = HashMap::with_capacity(6); - if let Some(config_file) = &config_file { - watched_files - .entry(config_file.specifier.clone()) - .or_insert(ConfigWatchedFileType::DenoJson); - } - let config_file_path = config_file - .as_ref() - .and_then(|c| specifier_to_file_path(&c.specifier).ok()); - let config_file_canonicalized_specifier = config_file_path - .as_ref() - .and_then(|p| canonicalize_path_maybe_not_exists(p).ok()) - .and_then(|p| ModuleSpecifier::from_file_path(p).ok()); - if let Some(specifier) = config_file_canonicalized_specifier { - watched_files - .entry(specifier) - .or_insert(ConfigWatchedFileType::DenoJson); - } + let (settings, workspace_folder) = settings.get_for_specifier(&scope); + let mut watched_files = HashMap::with_capacity(10); + let mut add_watched_file = + |specifier: ModuleSpecifier, file_type: ConfigWatchedFileType| { + let maybe_canonicalized = specifier + .to_file_path() + .ok() + .and_then(|p| canonicalize_path_maybe_not_exists(&p).ok()) + .and_then(|p| ModuleSpecifier::from_file_path(p).ok()); + if let Some(canonicalized) = maybe_canonicalized { + if canonicalized != specifier { + watched_files.entry(canonicalized).or_insert(file_type); + } + } + watched_files.entry(specifier).or_insert(file_type); + }; - let mut fmt_options = None; - if let Some(workspace_data) = workspace_root { - let has_own_fmt_options = config_file - .as_ref() - .is_some_and(|config_file| config_file.json.fmt.is_some()); - if !has_own_fmt_options { - fmt_options = Some(workspace_data.fmt_options.clone()) - } - } - let fmt_options = fmt_options.unwrap_or_else(|| { - config_file - .as_ref() - .and_then(|config_file| { - config_file - .to_fmt_config() - .map(|o| FmtOptions::resolve(o, &Default::default())) - .inspect_err(|err| { - lsp_warn!(" Couldn't read formatter configuration: {}", err) - }) - .ok() - }) - .map(Arc::new) - .unwrap_or_default() - }); + let member_ctx = workspace.resolve_start_ctx(); - let mut lint_options_rules = None; - if let Some(workspace_data) = workspace_root { - let has_own_lint_options = config_file - .as_ref() - .is_some_and(|config_file| config_file.json.lint.is_some()); - if !has_own_lint_options { - lint_options_rules = Some(( - workspace_data.lint_options.clone(), - workspace_data.lint_rules.clone(), - )) - } + if let Some(deno_json) = member_ctx.maybe_deno_json() { + lsp_log!( + " Resolved Deno configuration file: \"{}\"", + deno_json.specifier + ); + + add_watched_file( + deno_json.specifier.clone(), + ConfigWatchedFileType::DenoJson, + ); } - let (lint_options, lint_rules) = lint_options_rules.unwrap_or_else(|| { - let lint_options = config_file - .as_ref() - .and_then(|config_file| { - config_file - .to_lint_config() - .map(|o| LintOptions::resolve(o, &Default::default())) - .inspect_err(|err| { - lsp_warn!(" Couldn't read lint configuration: {}", err) - }) - .ok() - }) - .map(Arc::new) - .unwrap_or_default(); - let lint_rules = 
Arc::new(get_configured_rules( - lint_options.rules.clone(), - config_file.as_ref(), - )); - (lint_options, lint_rules) - }); - let ts_config = LspTsConfig::new(config_file.as_ref()); + if let Some(pkg_json) = member_ctx.maybe_pkg_json() { + lsp_log!(" Resolved package.json: \"{}\"", pkg_json.specifier()); - let lint_config = if ts_config.inner.0.get("jsx").and_then(|v| v.as_str()) - == Some("react") - { - let default_jsx_factory = - ts_config.inner.0.get("jsxFactory").and_then(|v| v.as_str()); - let default_jsx_fragment_factory = ts_config - .inner - .0 - .get("jsxFragmentFactory") - .and_then(|v| v.as_str()); - deno_lint::linter::LintConfig { - default_jsx_factory: default_jsx_factory.map(String::from), - default_jsx_fragment_factory: default_jsx_fragment_factory - .map(String::from), - } - } else { - deno_lint::linter::LintConfig { - default_jsx_factory: None, - default_jsx_fragment_factory: None, - } - }; + add_watched_file( + pkg_json.specifier(), + ConfigWatchedFileType::PackageJson, + ); + } - let vendor_dir = if let Some(workspace_root) = workspace_root { - workspace_root.vendor_dir.clone() - } else { - config_file.as_ref().and_then(|c| { - if c.vendor() == Some(true) { - Some(c.specifier.to_file_path().ok()?.parent()?.join("vendor")) - } else { - None + // todo(dsherret): cache this so we don't load this so many times + let npmrc = discover_npmrc_from_workspace(&workspace) + .inspect(|(_, path)| { + if let Some(path) = path { + lsp_log!(" Resolved .npmrc: \"{}\"", path.display()); + + if let Ok(specifier) = ModuleSpecifier::from_file_path(path) { + add_watched_file(specifier, ConfigWatchedFileType::NpmRc); + } } }) - }; + .inspect_err(|err| { + lsp_warn!(" Couldn't read .npmrc for \"{scope}\": {err}"); + }) + .map(|(r, _)| r) + .ok(); + let default_file_patterns = scope + .to_file_path() + .map(FilePatterns::new_with_base) + .unwrap_or_else(|_| FilePatterns::new_with_base(PathBuf::from("/"))); + let fmt_config = Arc::new( + member_ctx + .to_fmt_config(FilePatterns::new_with_base(member_ctx.dir_path())) + .inspect_err(|err| { + lsp_warn!(" Couldn't read formatter configuration: {}", err) + }) + .ok() + .unwrap_or_else(|| deno_config::FmtConfig { + options: Default::default(), + files: default_file_patterns.clone(), + }), + ); + let lint_config = Arc::new( + member_ctx + .to_lint_config(FilePatterns::new_with_base(member_ctx.dir_path())) + .inspect_err(|err| { + lsp_warn!(" Couldn't read lint configuration: {}", err) + }) + .ok() + .unwrap_or_else(|| deno_config::LintConfig { + options: Default::default(), + files: default_file_patterns.clone(), + }), + ); + let lint_rules = Arc::new(get_configured_rules( + lint_config.options.rules.clone(), + member_ctx.maybe_deno_json().map(|c| c.as_ref()), + )); + let test_config = Arc::new( + member_ctx + .to_test_config(FilePatterns::new_with_base(member_ctx.dir_path())) + .inspect_err(|err| { + lsp_warn!(" Couldn't read test configuration: {}", err) + }) + .ok() + .unwrap_or_else(|| deno_config::TestConfig { + files: default_file_patterns.clone(), + }), + ); + let exclude_files = Arc::new( + workspace + .resolve_config_excludes() + .inspect_err(|err| { + lsp_warn!(" Couldn't read config excludes: {}", err) + }) + .ok() + .unwrap_or_default(), + ); - // Load lockfile - let lockfile = if let Some(workspace_root) = workspace_root { - workspace_root.lockfile.clone() - } else { - config_file - .as_ref() - .and_then(resolve_lockfile_from_config) - .map(Arc::new) - }; + let ts_config = + LspTsConfig::new(workspace.root_deno_json().map(|c| 
c.as_ref())); + + let deno_lint_config = + if ts_config.inner.0.get("jsx").and_then(|v| v.as_str()) == Some("react") + { + let default_jsx_factory = + ts_config.inner.0.get("jsxFactory").and_then(|v| v.as_str()); + let default_jsx_fragment_factory = ts_config + .inner + .0 + .get("jsxFragmentFactory") + .and_then(|v| v.as_str()); + deno_lint::linter::LintConfig { + default_jsx_factory: default_jsx_factory.map(String::from), + default_jsx_fragment_factory: default_jsx_fragment_factory + .map(String::from), + } + } else { + deno_lint::linter::LintConfig { + default_jsx_factory: None, + default_jsx_fragment_factory: None, + } + }; + + let vendor_dir = workspace.vendor_dir_path().cloned(); + // todo(dsherret): add caching so we don't load this so many times + let lockfile = resolve_lockfile_from_workspace(&workspace).map(Arc::new); if let Some(lockfile) = &lockfile { if let Ok(specifier) = ModuleSpecifier::from_file_path(&lockfile.filename) { - watched_files - .entry(specifier) - .or_insert(ConfigWatchedFileType::Lockfile); + add_watched_file(specifier, ConfigWatchedFileType::Lockfile); } } - let lockfile_canonicalized_specifier = lockfile - .as_ref() - .and_then(|lockfile| { - canonicalize_path_maybe_not_exists(&lockfile.filename).ok() - }) - .and_then(|p| ModuleSpecifier::from_file_path(p).ok()); - if let Some(specifier) = lockfile_canonicalized_specifier { - watched_files - .entry(specifier) - .or_insert(ConfigWatchedFileType::Lockfile); + + let byonm = std::env::var("DENO_UNSTABLE_BYONM").is_ok() + || workspace.has_unstable("byonm") + || (*DENO_FUTURE + && workspace.package_jsons().next().is_some() + && workspace.node_modules_dir().is_none()); + if byonm { + lsp_log!(" Enabled 'bring your own node_modules'."); } + let node_modules_dir = resolve_node_modules_dir(&workspace, byonm); - // Load package.json - let mut package_json = None; - let package_json_path = specifier_to_file_path(scope) - .ok() - .map(|p| p.join("package.json")); - if let Some(path) = &package_json_path { - if let Ok(specifier) = ModuleSpecifier::from_file_path(path) { - watched_files - .entry(specifier) - .or_insert(ConfigWatchedFileType::PackageJson); + // Mark the import map as a watched file + if let Some(import_map_specifier) = + workspace.to_import_map_specifier().ok().flatten() + { + add_watched_file( + import_map_specifier.clone(), + ConfigWatchedFileType::ImportMap, + ); + } + // attempt to create a resolver for the workspace + let pkg_json_dep_resolution = if byonm { + PackageJsonDepResolution::Disabled + } else { + // todo(dsherret): this should be false for nodeModulesDir: true + PackageJsonDepResolution::Enabled + }; + let mut import_map_from_settings = { + let is_config_import_map = member_ctx + .maybe_deno_json() + .map(|c| c.is_an_import_map() || c.json.import_map.is_some()) + .or_else(|| { + workspace + .root_deno_json() + .map(|c| c.is_an_import_map() || c.json.import_map.is_some()) + }) + .unwrap_or(false); + if is_config_import_map { + None + } else { + settings.import_map.as_ref().and_then(|import_map_str| { + Url::parse(import_map_str) + .ok() + .or_else(|| workspace_folder?.join(import_map_str).ok()) + }) } - let package_json_canonicalized_specifier = - canonicalize_path_maybe_not_exists(path) - .ok() - .and_then(|p| ModuleSpecifier::from_file_path(p).ok()); - if let Some(specifier) = package_json_canonicalized_specifier { - watched_files - .entry(specifier) - .or_insert(ConfigWatchedFileType::PackageJson); + }; + + let specified_import_map = { + let is_config_import_map = member_ctx + 
.maybe_deno_json() + .map(|c| c.is_an_import_map() || c.json.import_map.is_some()) + .or_else(|| { + workspace + .root_deno_json() + .map(|c| c.is_an_import_map() || c.json.import_map.is_some()) + }) + .unwrap_or(false); + if is_config_import_map { + import_map_from_settings = None; } - if let Ok(source) = std::fs::read_to_string(path) { - match PackageJson::load_from_string(path.clone(), source) { - Ok(result) => { - lsp_log!(" Resolved package.json: \"{}\"", path.display()); - package_json = Some(result); + if let Some(import_map_url) = &import_map_from_settings { + add_watched_file( + import_map_url.clone(), + ConfigWatchedFileType::ImportMap, + ); + // spawn due to the lsp's `Send` requirement + let fetch_result = deno_core::unsync::spawn({ + let file_fetcher = file_fetcher.cloned().unwrap(); + let import_map_url = import_map_url.clone(); + async move { + file_fetcher + .fetch(&import_map_url, &PermissionsContainer::allow_all()) + .await } + }) + .await + .unwrap(); + + let value_result = fetch_result.and_then(|f| { + serde_json::from_slice::(&f.source).map_err(|e| e.into()) + }); + match value_result { + Ok(value) => Some(SpecifiedImportMap { + base_url: import_map_url.clone(), + value, + }), Err(err) => { lsp_warn!( - " Couldn't read package.json \"{}\": {}", - path.display(), + " Couldn't read import map \"{}\": {}", + import_map_url.as_str(), err ); + import_map_from_settings = None; + None } } + } else { + None } - } - let npmrc = discover_npmrc(package_json_path, config_file_path) - .inspect(|(_, path)| { - if let Some(path) = path { - lsp_log!(" Resolved .npmrc: \"{}\"", path.display()); - } - }) - .inspect_err(|err| { - lsp_warn!(" Couldn't read .npmrc for \"{scope}\": {err}"); - }) - .map(|(r, _)| r) - .ok(); - let byonm = if let Some(workspace_root) = workspace_root { - workspace_root.byonm - } else { - std::env::var("DENO_UNSTABLE_BYONM").is_ok() - || config_file - .as_ref() - .map(|c| c.has_unstable("byonm")) - .unwrap_or(false) - || (*DENO_FUTURE - && package_json.is_some() - && config_file - .as_ref() - .map(|c| c.json.node_modules_dir.is_none()) - .unwrap_or(true)) }; - if byonm { - lsp_log!(" Enabled 'bring your own node_modules'."); - } - let node_modules_dir = if let Some(workspace_root) = workspace_root { - workspace_root.node_modules_dir.clone() - } else { - config_file - .as_ref() - .and_then(|c| resolve_node_modules_dir(c, byonm)) - }; - - // Load import map - let mut import_map = None; - let mut import_map_value = None; - let mut import_map_specifier = None; - let mut import_map_from_settings = false; - if let Some(workspace_data) = workspace_root { - import_map.clone_from(&workspace_data.import_map); - import_map_from_settings = workspace_data.import_map_from_settings; - } else { - if let Some(config_file) = &config_file { - if config_file.is_an_import_map() { - import_map_value = - Some(config_file.to_import_map_value_from_imports()); - import_map_specifier = Some(config_file.specifier.clone()); - } else if let Ok(Some(specifier)) = - config_file.to_import_map_specifier() - { - import_map_specifier = Some(specifier); - } - } - import_map_specifier = import_map_specifier.or_else(|| { - let import_map_str = settings.import_map.as_ref()?; - let specifier = Url::parse(import_map_str) - .ok() - .or_else(|| workspace_folder?.join(import_map_str).ok())?; - import_map_from_settings = true; - Some(specifier) - }); - if let Some(specifier) = &import_map_specifier { - if let Ok(path) = specifier_to_file_path(specifier) { - watched_files - .entry(specifier.clone()) - 
.or_insert(ConfigWatchedFileType::ImportMap); - let import_map_canonicalized_specifier = - canonicalize_path_maybe_not_exists(&path) - .ok() - .and_then(|p| ModuleSpecifier::from_file_path(p).ok()); - if let Some(specifier) = import_map_canonicalized_specifier { - watched_files - .entry(specifier) - .or_insert(ConfigWatchedFileType::ImportMap); - } - } - if import_map_value.is_none() { - if let Some(file_fetcher) = file_fetcher { - // spawn due to the lsp's `Send` requirement - let fetch_result = deno_core::unsync::spawn({ - let file_fetcher = file_fetcher.clone(); + let resolver = deno_core::unsync::spawn({ + let workspace = workspace.clone(); + let file_fetcher = file_fetcher.cloned(); + async move { + workspace + .create_resolver( + CreateResolverOptions { + pkg_json_dep_resolution, + specified_import_map, + }, + move |specifier| { let specifier = specifier.clone(); + let file_fetcher = file_fetcher.clone().unwrap(); async move { - file_fetcher + let file = file_fetcher .fetch(&specifier, &PermissionsContainer::allow_all()) - .await - } - }) - .await - .unwrap(); - let value_result = fetch_result.and_then(|f| { - serde_json::from_slice::(&f.source).map_err(|e| e.into()) - }); - match value_result { - Ok(value) => { - import_map_value = Some(value); + .await? + .into_text_decoded()?; + Ok(file.source.to_string()) } - Err(err) => { - lsp_warn!( - " Couldn't read import map \"{}\": {}", - specifier.as_str(), - err - ); - } - } - } - } - } - if let (Some(value), Some(specifier)) = - (import_map_value, import_map_specifier) - { - match import_map::parse_from_value(specifier.clone(), value) { - Ok(result) => { - if config_file.as_ref().map(|c| &c.specifier) == Some(&specifier) { - lsp_log!(" Resolved import map from configuration file"); - } else { - lsp_log!(" Resolved import map: \"{}\"", specifier.as_str()); - } - if !result.diagnostics.is_empty() { - lsp_warn!( - " Import map diagnostics:\n{}", - result - .diagnostics - .iter() - .map(|d| format!(" - {d}")) - .collect::>() - .join("\n") - ); - } - import_map = Some(Arc::new(result.import_map)); - } - Err(err) => { + }, + ) + .await + .inspect_err(|err| { lsp_warn!( - "Couldn't read import map \"{}\": {}", - specifier.as_str(), - err + " Failed to load resolver: {}", + err // will contain the specifier ); - } - } + }) + .ok() } - } - - let package_config = config_file.as_ref().and_then(|c| { - Some(LspPackageConfig { - nv: PackageNv { - name: c.json.name.clone()?, - version: Version::parse_standard(c.json.version.as_ref()?).ok()?, - }, - exports: c.json.exports.clone()?, - }) + }) + .await + .unwrap() + .unwrap_or_else(|| { + // create a dummy resolver + WorkspaceResolver::new_raw( + scope.clone(), + None, + workspace.package_jsons().cloned().collect(), + pkg_json_dep_resolution, + ) }); - - let workspace_config = config_file - .as_ref() - .and_then(|c| c.to_workspace_config().ok().flatten().map(|w| (c, w))); - let is_workspace_root = workspace_config.is_some(); - let workspace_members = - if let Some((config, workspace_config)) = workspace_config { - Arc::new( - workspace_config - .members - .iter() - .flat_map(|p| { - let dir_specifier = config.specifier.join(p).ok()?; - let dir_path = specifier_to_file_path(&dir_specifier).ok()?; - Url::from_directory_path(normalize_path(dir_path)).ok() - }) - .collect(), - ) - } else if let Some(workspace_data) = workspace_root { - workspace_data.workspace_members.clone() - } else if config_file.as_ref().is_some_and(|c| c.json.name.is_some()) { - Arc::new(vec![scope.clone()]) - } else { - 
Arc::new(vec![])
-    };
-    let workspace_root_dir = if is_workspace_root {
-      scope.clone()
-    } else {
-      workspace_root
-        .as_ref()
-        .map(|r| r.scope.clone())
-        .unwrap_or_else(|| scope.clone())
-    };
+    if !resolver.diagnostics().is_empty() {
+      lsp_warn!(
+        " Import map diagnostics:\n{}",
+        resolver
+          .diagnostics()
+          .iter()
+          .map(|d| format!(" - {d}"))
+          .collect::<Vec<_>>()
+          .join("\n")
+      );
+    }
 
     ConfigData {
-      scope: scope.clone(),
-      config_file: config_file.map(Arc::new),
-      fmt_options,
-      lint_options,
+      scope,
+      workspace,
+      member_ctx: Arc::new(member_ctx),
+      resolver: Arc::new(resolver),
+      fmt_config,
       lint_config,
+      test_config,
+      deno_lint_config,
       lint_rules,
+      exclude_files,
       ts_config: Arc::new(ts_config),
       byonm,
       node_modules_dir,
       vendor_dir,
       lockfile,
-      package_json: package_json.map(Arc::new),
       npmrc,
-      import_map,
       import_map_from_settings,
-      package_config: package_config.map(Arc::new),
-      is_workspace_root,
-      workspace_root_dir,
-      workspace_members,
       watched_files,
     }
   }
+
+  pub fn maybe_deno_json(&self) -> Option<&Arc<ConfigFile>> {
+    self.member_ctx.maybe_deno_json()
+  }
+
+  pub fn maybe_pkg_json(
+    &self,
+  ) -> Option<&Arc<deno_config::package_json::PackageJson>> {
+    self.member_ctx.maybe_pkg_json()
+  }
 }
 
 #[derive(Clone, Debug, Default)]
@@ -1622,20 +1588,18 @@ impl ConfigTree {
     &self.scopes
   }
 
-  pub fn config_file_for_specifier(
+  pub fn workspace_member_ctx_for_specifier(
     &self,
     specifier: &ModuleSpecifier,
-  ) -> Option<&Arc<ConfigFile>> {
-    self
-      .data_for_specifier(specifier)
-      .and_then(|d| d.config_file.as_ref())
+  ) -> Option<&Arc<WorkspaceMemberContext>> {
+    self.data_for_specifier(specifier).map(|d| &d.member_ctx)
   }
 
   pub fn config_files(&self) -> Vec<&Arc<ConfigFile>> {
     self
       .scopes
       .iter()
-      .filter_map(|(_, d)| d.config_file.as_ref())
+      .filter_map(|(_, d)| d.maybe_deno_json())
       .collect()
   }
 
@@ -1643,18 +1607,23 @@ impl ConfigTree {
     self
       .scopes
       .iter()
-      .filter_map(|(_, d)| d.package_json.as_ref())
+      .filter_map(|(_, d)| d.maybe_pkg_json())
      .collect()
   }
 
-  pub fn fmt_options_for_specifier(
+  pub fn fmt_config_for_specifier(
     &self,
     specifier: &ModuleSpecifier,
-  ) -> Arc<FmtOptions> {
+  ) -> Arc<FmtConfig> {
     self
       .data_for_specifier(specifier)
-      .map(|d| d.fmt_options.clone())
-      .unwrap_or_default()
+      .map(|d| d.fmt_config.clone())
+      .unwrap_or_else(|| {
+        Arc::new(FmtConfig {
+          options: Default::default(),
+          files: FilePatterns::new_with_base(PathBuf::from("/")),
+        })
+      })
   }
 
   /// Returns (scope_uri, type).
@@ -1690,6 +1659,12 @@ impl ConfigTree { file_fetcher: &Arc, ) { lsp_log!("Refreshing configuration tree..."); + // since we're resolving a workspace multiple times in different + // folders, we want to cache all the lookups and config files across + // ConfigData::load calls + let cached_fs = CachedDenoConfigFs::default(); + let deno_json_cache = DenoJsonMemCache::default(); + let pkg_json_cache = PackageJsonMemCache::default(); let mut scopes = BTreeMap::new(); for (folder_uri, ws_settings) in &settings.by_workspace_folder { let mut ws_settings = ws_settings.as_ref(); @@ -1699,19 +1674,23 @@ impl ConfigTree { if let Some(ws_settings) = ws_settings { if let Some(config_path) = &ws_settings.config { if let Ok(config_uri) = folder_uri.join(config_path) { - scopes.insert( - folder_uri.clone(), - Arc::new( - ConfigData::load( - Some(&config_uri), - folder_uri, - None, - settings, - Some(file_fetcher), - ) - .await, - ), - ); + if let Ok(config_file_path) = config_uri.to_file_path() { + scopes.insert( + folder_uri.clone(), + Arc::new( + ConfigData::load( + Some(&config_file_path), + folder_uri, + settings, + file_fetcher, + &cached_fs, + &deno_json_cache, + &pkg_json_cache, + ) + .await, + ), + ); + } } } } @@ -1719,7 +1698,8 @@ impl ConfigTree { for specifier in workspace_files { if !(specifier.path().ends_with("/deno.json") - || specifier.path().ends_with("/deno.jsonc")) + || specifier.path().ends_with("/deno.jsonc") + || specifier.path().ends_with("/package.json")) { continue; } @@ -1729,46 +1709,35 @@ impl ConfigTree { if scopes.contains_key(&scope) { continue; } - let data = ConfigData::load( - Some(specifier), - &scope, - None, - settings, - Some(file_fetcher), - ) - .await; - if data.is_workspace_root { - for member_scope in data.workspace_members.iter() { - if scopes.contains_key(member_scope) { - continue; - } - let Ok(member_path) = specifier_to_file_path(member_scope) else { - continue; - }; - let Some(config_file_path) = Some(member_path.join("deno.json")) - .filter(|p| p.exists()) - .or_else(|| { - Some(member_path.join("deno.jsonc")).filter(|p| p.exists()) - }) - else { - continue; - }; - let Ok(config_file_specifier) = Url::from_file_path(config_file_path) - else { - continue; - }; - let member_data = ConfigData::load( - Some(&config_file_specifier), - member_scope, - Some(&data), - settings, - Some(file_fetcher), - ) - .await; - scopes.insert(member_scope.clone(), Arc::new(member_data)); + let data = Arc::new( + ConfigData::load( + None, + &scope, + settings, + file_fetcher, + &cached_fs, + &deno_json_cache, + &pkg_json_cache, + ) + .await, + ); + scopes.insert(scope, data.clone()); + for (member_scope, _) in data.workspace.config_folders() { + if scopes.contains_key(member_scope) { + continue; } + let member_data = ConfigData::load( + None, + member_scope, + settings, + file_fetcher, + &cached_fs, + &deno_json_cache, + &pkg_json_cache, + ) + .await; + scopes.insert(member_scope.as_ref().clone(), Arc::new(member_data)); } - scopes.insert(scope, Arc::new(data)); } for folder_uri in settings.by_workspace_folder.keys() { @@ -1782,9 +1751,11 @@ impl ConfigTree { ConfigData::load( None, folder_uri, - None, settings, - Some(file_fetcher), + file_fetcher, + &cached_fs, + &deno_json_cache, + &pkg_json_cache, ) .await, ), @@ -1797,24 +1768,43 @@ impl ConfigTree { #[cfg(test)] pub async fn inject_config_file(&mut self, config_file: ConfigFile) { let scope = config_file.specifier.join(".").unwrap(); + let json_text = serde_json::to_string(&config_file.json).unwrap(); + let test_fs = 
deno_runtime::deno_fs::InMemoryFs::default(); + let config_path = specifier_to_file_path(&config_file.specifier).unwrap(); + test_fs.setup_text_files(vec![( + config_path.to_string_lossy().to_string(), + json_text, + )]); + let workspace = Arc::new( + Workspace::discover( + deno_config::workspace::WorkspaceDiscoverStart::ConfigFile( + &config_path, + ), + &deno_config::workspace::WorkspaceDiscoverOptions { + fs: &deno_runtime::deno_fs::DenoConfigFsAdapter::new(&test_fs), + ..Default::default() + }, + ) + .unwrap(), + ); let data = Arc::new( ConfigData::load_inner( - Some(config_file), - &scope, - None, + workspace, + Arc::new(scope.clone()), &Default::default(), None, ) .await, ); + assert!(data.maybe_deno_json().is_some()); self.scopes = Arc::new([(scope, data)].into_iter().collect()); } } -fn resolve_lockfile_from_config( - config_file: &ConfigFile, +fn resolve_lockfile_from_workspace( + workspace: &Workspace, ) -> Option { - let lockfile_path = match config_file.resolve_lockfile_path() { + let lockfile_path = match workspace.resolve_lockfile_path() { Ok(Some(value)) => value, Ok(None) => return None, Err(err) => { @@ -1826,28 +1816,29 @@ fn resolve_lockfile_from_config( } fn resolve_node_modules_dir( - config_file: &ConfigFile, + workspace: &Workspace, byonm: bool, ) -> Option { // For the language server, require an explicit opt-in via the // `nodeModulesDir: true` setting in the deno.json file. This is to // reduce the chance of modifying someone's node_modules directory // without them having asked us to do so. - let explicitly_disabled = config_file.json.node_modules_dir == Some(false); + let explicitly_disabled = workspace.node_modules_dir() == Some(false); if explicitly_disabled { return None; } let enabled = byonm - || config_file.json.node_modules_dir == Some(true) - || config_file.json.vendor == Some(true); + || workspace.node_modules_dir() == Some(true) + || workspace.vendor_dir_path().is_some(); if !enabled { return None; } - if config_file.specifier.scheme() != "file" { - return None; - } - let file_path = config_file.specifier.to_file_path().ok()?; - let node_modules_dir = file_path.parent()?.join("node_modules"); + let node_modules_dir = workspace + .root_folder() + .0 + .to_file_path() + .ok()? 
+    .join("node_modules");
   canonicalize_path_maybe_not_exists(&node_modules_dir).ok()
 }
 
@@ -1869,6 +1860,107 @@ fn resolve_lockfile_from_path(lockfile_path: PathBuf) -> Option<CliLockfile> {
   }
 }
 
+// todo(dsherret): switch to RefCell once the lsp no longer requires Sync
+#[derive(Default)]
+struct DenoJsonMemCache(Mutex<HashMap<PathBuf, Arc<ConfigFile>>>);
+
+impl deno_config::DenoJsonCache for DenoJsonMemCache {
+  fn get(&self, path: &Path) -> Option<Arc<ConfigFile>> {
+    self.0.lock().get(path).cloned()
+  }
+
+  fn set(&self, path: PathBuf, data: Arc<ConfigFile>) {
+    self.0.lock().insert(path, data);
+  }
+}
+
+#[derive(Default)]
+struct PackageJsonMemCache(Mutex<HashMap<PathBuf, Arc<PackageJson>>>);
+
+impl deno_config::package_json::PackageJsonCache for PackageJsonMemCache {
+  fn get(&self, path: &Path) -> Option<Arc<PackageJson>> {
+    self.0.lock().get(path).cloned()
+  }
+
+  fn set(&self, path: PathBuf, data: Arc<PackageJson>) {
+    self.0.lock().insert(path, data);
+  }
+}
+
+#[derive(Default)]
+struct CachedFsItems<T: Clone> {
+  items: HashMap<PathBuf, Result<T, std::io::Error>>,
+}
+
+impl<T: Clone> CachedFsItems<T> {
+  pub fn get(
+    &mut self,
+    path: &Path,
+    action: impl FnOnce(&Path) -> Result<T, std::io::Error>,
+  ) -> Result<T, std::io::Error> {
+    let value = if let Some(value) = self.items.get(path) {
+      value
+    } else {
+      let value = action(path);
+      // just in case this gets really large for some reason
+      if self.items.len() == 16_384 {
+        return value;
+      }
+      self.items.insert(path.to_owned(), value);
+      self.items.get(path).unwrap()
+    };
+    value
+      .as_ref()
+      .map(|v| (*v).clone())
+      .map_err(|e| std::io::Error::new(e.kind(), e.to_string()))
+  }
+}
+
+#[derive(Default)]
+struct InnerData {
+  stat_calls: CachedFsItems<deno_config::fs::FsMetadata>,
+  read_to_string_calls: CachedFsItems<String>,
+  read_dir_calls: CachedFsItems<Vec<deno_config::fs::FsDirEntry>>,
+}
+
+#[derive(Default)]
+struct CachedDenoConfigFs(Mutex<InnerData>);
+
+impl DenoConfigFs for CachedDenoConfigFs {
+  fn stat_sync(
+    &self,
+    path: &Path,
+  ) -> Result<deno_config::fs::FsMetadata, std::io::Error> {
+    self
+      .0
+      .lock()
+      .stat_calls
+      .get(path, |path| RealDenoConfigFs.stat_sync(path))
+  }
+
+  fn read_to_string_lossy(
+    &self,
+    path: &Path,
+  ) -> Result<String, std::io::Error> {
+    self
+      .0
+      .lock()
+      .read_to_string_calls
+      .get(path, |path| RealDenoConfigFs.read_to_string_lossy(path))
+  }
+
+  fn read_dir(
+    &self,
+    path: &Path,
+  ) -> Result<Vec<deno_config::fs::FsDirEntry>, std::io::Error> {
+    self
+      .0
+      .lock()
+      .read_dir_calls
+      .get(path, |path| RealDenoConfigFs.read_dir(path))
+  }
+}
+
 #[cfg(test)]
 mod tests {
   use super::*;
@@ -2132,7 +2224,7 @@ mod tests {
 
   #[tokio::test]
   async fn config_enable_via_config_file_detection() {
-    let root_uri = resolve_url("file:///root/").unwrap();
+    let root_uri = root_dir();
     let mut config = Config::new_with_roots(vec![root_uri.clone()]);
     assert!(!config.specifier_enabled(&root_uri));
 
@@ -2153,7 +2245,7 @@
 
   // Regression test for https://github.com/denoland/vscode_deno/issues/917.
#[test] fn config_specifier_enabled_matches_by_path_component() { - let root_uri = resolve_url("file:///root/").unwrap(); + let root_uri = root_dir(); let mut config = Config::new_with_roots(vec![root_uri.clone()]); config.set_workspace_settings( WorkspaceSettings { @@ -2167,7 +2259,7 @@ mod tests { #[tokio::test] async fn config_specifier_enabled_for_test() { - let root_uri = resolve_url("file:///root/").unwrap(); + let root_uri = root_dir(); let mut config = Config::new_with_roots(vec![root_uri.clone()]); let mut settings = WorkspaceSettings { enable: Some(true), @@ -2256,4 +2348,12 @@ mod tests { !config.specifier_enabled_for_test(&root_uri.join("mod2.ts").unwrap()) ); } + + fn root_dir() -> Url { + if cfg!(windows) { + Url::parse("file://C:/root/").unwrap() + } else { + Url::parse("file:///root/").unwrap() + } + } } diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 9b500567d4a8c6..02d51a2eb92732 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -15,7 +15,6 @@ use super::tsc::TsServer; use super::urls::LspClientUrl; use super::urls::LspUrlMap; -use crate::args::LintOptions; use crate::graph_util; use crate::graph_util::enhanced_resolution_error_message; use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams; @@ -24,6 +23,7 @@ use crate::resolver::SloppyImportsResolver; use crate::util::path::to_percent_decoded_str; use deno_ast::MediaType; +use deno_config::glob::FilePatterns; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_core::parking_lot::RwLock; @@ -39,7 +39,6 @@ use deno_graph::source::ResolutionMode; use deno_graph::Resolution; use deno_graph::ResolutionError; use deno_graph::SpecifierError; -use deno_lint::linter::LintConfig; use deno_lint::rules::LintRule; use deno_runtime::deno_fs; use deno_runtime::deno_node; @@ -51,6 +50,7 @@ use import_map::ImportMap; use log::error; use std::collections::HashMap; use std::collections::HashSet; +use std::path::PathBuf; use std::sync::atomic::AtomicUsize; use std::sync::Arc; use std::thread; @@ -814,21 +814,24 @@ fn generate_lint_diagnostics( continue; } let version = document.maybe_lsp_version(); - let (lint_options, lint_config, lint_rules) = config + let (lint_config, deno_lint_config, lint_rules) = config .tree .scope_for_specifier(specifier) .and_then(|s| config_data_by_scope.get(s)) .map(|d| { ( - d.lint_options.clone(), d.lint_config.clone(), + d.deno_lint_config.clone(), d.lint_rules.clone(), ) }) .unwrap_or_else(|| { ( - Arc::default(), - LintConfig { + Arc::new(deno_config::LintConfig { + options: Default::default(), + files: FilePatterns::new_with_base(PathBuf::from("/")), + }), + deno_lint::linter::LintConfig { default_jsx_factory: None, default_jsx_fragment_factory: None, }, @@ -841,8 +844,8 @@ fn generate_lint_diagnostics( version, diagnostics: generate_document_lint_diagnostics( &document, - &lint_options, - lint_config, + &lint_config, + deno_lint_config, lint_rules.rules.clone(), ), }, @@ -853,18 +856,20 @@ fn generate_lint_diagnostics( fn generate_document_lint_diagnostics( document: &Document, - lint_options: &LintOptions, - lint_config: LintConfig, + lint_config: &deno_config::LintConfig, + deno_lint_config: deno_lint::linter::LintConfig, lint_rules: Vec<&'static dyn LintRule>, ) -> Vec { - if !lint_options.files.matches_specifier(document.specifier()) { + if !lint_config.files.matches_specifier(document.specifier()) { return Vec::new(); } match document.maybe_parsed_source() { Some(Ok(parsed_source)) => { - if let Ok(references) = - 
analysis::get_lint_references(parsed_source, lint_rules, lint_config) - { + if let Ok(references) = analysis::get_lint_references( + parsed_source, + lint_rules, + deno_lint_config, + ) { references .into_iter() .map(|r| r.to_diagnostic()) @@ -1479,7 +1484,7 @@ fn diagnose_dependency( .config .tree .data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer)) - .and_then(|d| d.import_map.as_ref()); + .and_then(|d| d.resolver.maybe_import_map()); if let Some(import_map) = import_map { if let Resolution::Ok(resolved) = &dependency.maybe_code { if let Some(to) = import_map.lookup(&resolved.specifier, referrer) { @@ -1530,7 +1535,7 @@ fn diagnose_dependency( dependency.is_dynamic, dependency.maybe_attribute_type.as_deref(), referrer_doc, - import_map.map(|i| i.as_ref()), + import_map, ) .iter() .flat_map(|diag| { @@ -1554,7 +1559,7 @@ fn diagnose_dependency( dependency.is_dynamic, dependency.maybe_attribute_type.as_deref(), referrer_doc, - import_map.map(|i| i.as_ref()), + import_map, ) .iter() .map(|diag| diag.to_lsp_diagnostic(&range)), @@ -1646,12 +1651,13 @@ mod tests { async fn setup( sources: &[(&str, &str, i32, LanguageId)], maybe_import_map: Option<(&str, &str)>, - ) -> StateSnapshot { + ) -> (TempDir, StateSnapshot) { let temp_dir = TempDir::new(); - let cache = LspCache::new(Some(temp_dir.uri())); - let mut config = Config::new_with_roots([resolve_url("file:///").unwrap()]); - if let Some((base_url, json_string)) = maybe_import_map { - let base_url = resolve_url(base_url).unwrap(); + let root_uri = temp_dir.uri(); + let cache = LspCache::new(Some(root_uri.join(".deno_dir").unwrap())); + let mut config = Config::new_with_roots([root_uri.clone()]); + if let Some((relative_path, json_string)) = maybe_import_map { + let base_url = root_uri.join(relative_path).unwrap(); let config_file = ConfigFile::new( json_string, base_url, @@ -1664,9 +1670,8 @@ mod tests { Arc::new(LspResolver::from_config(&config, &cache, None).await); let mut documents = Documents::default(); documents.update_config(&config, &resolver, &cache, &Default::default()); - for (specifier, source, version, language_id) in sources { - let specifier = - resolve_url(specifier).expect("failed to create specifier"); + for (relative_path, source, version, language_id) in sources { + let specifier = root_uri.join(relative_path).unwrap(); documents.open( specifier.clone(), *version, @@ -1675,20 +1680,23 @@ mod tests { None, ); } - StateSnapshot { - project_version: 0, - documents: Arc::new(documents), - assets: Default::default(), - config: Arc::new(config), - resolver, - } + ( + temp_dir, + StateSnapshot { + project_version: 0, + documents: Arc::new(documents), + assets: Default::default(), + config: Arc::new(config), + resolver, + }, + ) } #[tokio::test] async fn test_enabled_then_disabled_specifier() { - let snapshot = setup( + let (_, snapshot) = setup( &[( - "file:///a.ts", + "a.ts", r#"import * as b from "./b.ts"; let a: any = "a"; let c: number = "a"; @@ -1781,23 +1789,23 @@ let c: number = "a"; #[tokio::test] async fn test_deno_diagnostics_with_import_map() { - let snapshot = setup( + let (temp_dir, snapshot) = setup( &[ ( - "file:///std/assert/mod.ts", + "std/assert/mod.ts", "export function assert() {}", 1, LanguageId::TypeScript, ), ( - "file:///a/file.ts", + "a/file.ts", "import { assert } from \"../std/assert/mod.ts\";\n\nassert();\n", 1, LanguageId::TypeScript, ), ], Some(( - "file:///a/import-map.json", + "a/deno.json", r#"{ "imports": { "/~/std/": "../std/" @@ -1811,11 +1819,13 @@ let c: number = "a"; let 
actual = generate_deno_diagnostics(&snapshot, &config, token); assert_eq!(actual.len(), 2); for record in actual { - match record.specifier.as_str() { - "file:///std/assert/mod.ts" => { + let relative_specifier = + temp_dir.uri().make_relative(&record.specifier).unwrap(); + match relative_specifier.as_str() { + "std/assert/mod.ts" => { assert_eq!(json!(record.versioned.diagnostics), json!([])) } - "file:///a/file.ts" => assert_eq!( + "a/file.ts" => assert_eq!( json!(record.versioned.diagnostics), json!([ { @@ -1917,9 +1927,9 @@ let c: number = "a"; #[tokio::test] async fn duplicate_diagnostics_for_duplicate_imports() { - let snapshot = setup( + let (_, snapshot) = setup( &[( - "file:///a.ts", + "a.ts", r#" // @deno-types="bad.d.ts" import "bad.js"; @@ -1993,9 +2003,9 @@ let c: number = "a"; #[tokio::test] async fn unable_to_load_a_local_module() { - let snapshot = setup( + let (temp_dir, snapshot) = setup( &[( - "file:///a.ts", + "a.ts", r#" import { 東京 } from "./πŸ¦•.ts"; "#, @@ -2027,7 +2037,10 @@ let c: number = "a"; "severity": 1, "code": "no-local", "source": "deno", - "message": "Unable to load a local module: file:///πŸ¦•.ts\nPlease check the file path.", + "message": format!( + "Unable to load a local module: {}πŸ¦•.ts\nPlease check the file path.", + temp_dir.uri(), + ), } ]) ); diff --git a/cli/lsp/jsr.rs b/cli/lsp/jsr.rs index 52d48c1156e50e..05eb76599a006e 100644 --- a/cli/lsp/jsr.rs +++ b/cli/lsp/jsr.rs @@ -24,7 +24,6 @@ use std::borrow::Cow; use std::collections::HashMap; use std::sync::Arc; -use super::config::Config; use super::config::ConfigData; use super::search::PackageSearchApi; @@ -44,26 +43,31 @@ impl JsrCacheResolver { pub fn new( cache: Arc, config_data: Option<&ConfigData>, - config: &Config, ) -> Self { let nv_by_req = DashMap::new(); let info_by_nv = DashMap::new(); let info_by_name = DashMap::new(); let mut workspace_scope_by_name = HashMap::new(); if let Some(config_data) = config_data { - let config_data_by_scope = config.tree.data_by_scope(); - for member_scope in config_data.workspace_members.as_ref() { - let Some(member_data) = config_data_by_scope.get(member_scope) else { + for jsr_pkg_config in config_data.workspace.jsr_packages() { + let Some(exports) = &jsr_pkg_config.config_file.json.exports else { continue; }; - let Some(package_config) = member_data.package_config.as_ref() else { + let Some(version) = &jsr_pkg_config.config_file.json.version else { continue; }; + let Ok(version) = Version::parse_standard(version) else { + continue; + }; + let nv = PackageNv { + name: jsr_pkg_config.name.clone(), + version: version.clone(), + }; info_by_name.insert( - package_config.nv.name.clone(), + nv.name.clone(), Some(Arc::new(JsrPackageInfo { versions: [( - package_config.nv.version.clone(), + nv.version.clone(), JsrPackageInfoVersion { yanked: false }, )] .into_iter() @@ -71,16 +75,21 @@ impl JsrCacheResolver { })), ); info_by_nv.insert( - package_config.nv.clone(), + nv.clone(), Some(Arc::new(JsrPackageVersionInfo { - exports: package_config.exports.clone(), + exports: exports.clone(), module_graph_1: None, module_graph_2: None, manifest: Default::default(), })), ); - workspace_scope_by_name - .insert(package_config.nv.name.clone(), member_scope.clone()); + workspace_scope_by_name.insert( + nv.name.clone(), + ModuleSpecifier::from_directory_path( + jsr_pkg_config.config_file.dir_path(), + ) + .unwrap(), + ); } } if let Some(lockfile) = config_data.and_then(|d| d.lockfile.as_ref()) { diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 
093ea1dab28fce..5b6ff79f210411 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -89,6 +89,7 @@ use super::tsc::TsServer; use super::urls; use crate::args::create_default_npmrc; use crate::args::get_root_cert_store; +use crate::args::has_flag_env_var; use crate::args::CaData; use crate::args::CacheSetting; use crate::args::CliOptions; @@ -1322,7 +1323,7 @@ impl Inner { if !self .config .tree - .fmt_options_for_specifier(&specifier) + .fmt_config_for_specifier(&specifier) .files .matches_specifier(&specifier) { @@ -1352,7 +1353,7 @@ impl Inner { let mut fmt_options = self .config .tree - .fmt_options_for_specifier(&specifier) + .fmt_config_for_specifier(&specifier) .options .clone(); fmt_options.use_tabs = Some(!params.options.insert_spaces); @@ -1606,7 +1607,7 @@ impl Inner { (&self .config .tree - .fmt_options_for_specifier(&specifier) + .fmt_config_for_specifier(&specifier) .options) .into(), tsc::UserPreferences::from_config_for_specifier( @@ -1771,7 +1772,7 @@ impl Inner { (&self .config .tree - .fmt_options_for_specifier(&code_action_data.specifier) + .fmt_config_for_specifier(&code_action_data.specifier) .options) .into(), tsc::UserPreferences::from_config_for_specifier( @@ -1822,7 +1823,7 @@ impl Inner { (&self .config .tree - .fmt_options_for_specifier(&action_data.specifier) + .fmt_config_for_specifier(&action_data.specifier) .options) .into(), line_index.offset_tsc(action_data.range.start)? @@ -1857,7 +1858,9 @@ impl Inner { .config .tree .data_for_specifier(file_referrer) - .and_then(|d| d.import_map.as_ref().map(|i| i.as_ref())), + // todo(dsherret): this should probably just take the resolver itself + // as the import map is an implementation detail + .and_then(|d| d.resolver.maybe_import_map()), self.resolver.as_ref(), ) } @@ -2178,7 +2181,9 @@ impl Inner { .config .tree .data_for_specifier(file_referrer) - .and_then(|d| d.import_map.as_ref().map(|i| i.as_ref())), + // todo(dsherret): this should probably just take the resolver itself + // as the import map is an implementation detail + .and_then(|d| d.resolver.maybe_import_map()), ) .await; } @@ -2213,7 +2218,7 @@ impl Inner { (&self .config .tree - .fmt_options_for_specifier(&specifier) + .fmt_config_for_specifier(&specifier) .options) .into(), scope.cloned(), @@ -2268,11 +2273,7 @@ impl Inner { self.snapshot(), GetCompletionDetailsArgs { format_code_settings: Some( - (&self - .config - .tree - .fmt_options_for_specifier(specifier) - .options) + (&self.config.tree.fmt_config_for_specifier(specifier).options) .into(), ), preferences: Some( @@ -2846,7 +2847,7 @@ impl Inner { let format_code_settings = (&self .config .tree - .fmt_options_for_specifier(&old_specifier) + .fmt_config_for_specifier(&old_specifier) .options) .into(); changes.extend( @@ -3056,7 +3057,7 @@ impl tower_lsp::LanguageServer for LanguageServer { let mut config_events = vec![]; for (scope_uri, config_data) in inner.config.tree.data_by_scope().iter() { - if let Some(config_file) = &config_data.config_file { + if let Some(config_file) = config_data.maybe_deno_json() { config_events.push(lsp_custom::DenoConfigurationChangeEvent { scope_uri: scope_uri.clone(), file_uri: config_file.specifier.clone(), @@ -3064,7 +3065,7 @@ impl tower_lsp::LanguageServer for LanguageServer { configuration_type: lsp_custom::DenoConfigurationType::DenoJson, }); } - if let Some(package_json) = &config_data.package_json { + if let Some(package_json) = config_data.maybe_pkg_json() { config_events.push(lsp_custom::DenoConfigurationChangeEvent { scope_uri: 
scope_uri.clone(), file_uri: package_json.specifier(), @@ -3542,7 +3543,7 @@ impl Inner { if let Some(npm_reqs) = self .documents .npm_reqs_by_scope() - .get(&config_data.map(|d| d.scope.clone())) + .get(&config_data.map(|d| d.scope.as_ref().clone())) { roots.extend( npm_reqs @@ -3562,12 +3563,13 @@ impl Inner { ]), &WorkspaceDiscoverOptions { fs: &DenoConfigFsAdapter::new(&deno_runtime::deno_fs::RealFs), + deno_json_cache: None, pkg_json_cache: None, config_parse_options: deno_config::ConfigParseOptions { include_task_comments: false, }, additional_config_file_names: &[], - discover_pkg_json: true, + discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"), maybe_vendor_override: if force_global_cache { Some(deno_config::workspace::VendorEnablement::Disable) } else { @@ -3584,10 +3586,9 @@ impl Inner { .unsafely_ignore_certificate_errors .clone(), import_map_path: config_data.and_then(|d| { - if d.import_map_from_settings { - return Some(d.import_map.as_ref()?.base_url().to_string()); - } - None + d.import_map_from_settings + .as_ref() + .map(|url| url.to_string()) }), node_modules_dir: Some( config_data diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index 2ca93114da5c77..2f03842b12a5b5 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -86,7 +86,6 @@ impl Default for LspScopeResolver { impl LspScopeResolver { async fn from_config_data( config_data: Option<&Arc>, - config: &Config, cache: &LspCache, http_client_provider: Option<&Arc>, ) -> Self { @@ -107,18 +106,16 @@ impl LspScopeResolver { node_resolver.as_ref(), ); let jsr_resolver = Some(Arc::new(JsrCacheResolver::new( - cache.for_specifier(config_data.map(|d| &d.scope)), + cache.for_specifier(config_data.map(|d| d.scope.as_ref())), config_data.map(|d| d.as_ref()), - config, ))); let redirect_resolver = Some(Arc::new(RedirectResolver::new( - cache.for_specifier(config_data.map(|d| &d.scope)), + cache.for_specifier(config_data.map(|d| d.scope.as_ref())), config_data.and_then(|d| d.lockfile.clone()), ))); let npm_graph_resolver = graph_resolver.create_graph_npm_resolver(); let graph_imports = config_data - .and_then(|d| d.config_file.as_ref()) - .and_then(|cf| cf.to_compiler_option_types().ok()) + .and_then(|d| d.workspace.to_compiler_option_types().ok()) .map(|imports| { Arc::new( imports @@ -188,7 +185,6 @@ impl LspResolver { Arc::new( LspScopeResolver::from_config_data( Some(config_data), - config, cache, http_client_provider, ) @@ -198,13 +194,8 @@ impl LspResolver { } Self { unscoped: Arc::new( - LspScopeResolver::from_config_data( - None, - config, - cache, - http_client_provider, - ) - .await, + LspScopeResolver::from_config_data(None, cache, http_client_provider) + .await, ), by_scope, } @@ -522,39 +513,28 @@ fn create_graph_resolver( npm_resolver: Option<&Arc>, node_resolver: Option<&Arc>, ) -> Arc { - let config_file = config_data.and_then(|d| d.config_file.as_deref()); + let workspace = config_data.map(|d| &d.workspace); let unstable_sloppy_imports = - config_file.is_some_and(|cf| cf.has_unstable("sloppy-imports")); + workspace.is_some_and(|w| w.has_unstable("sloppy-imports")); Arc::new(CliGraphResolver::new(CliGraphResolverOptions { node_resolver: node_resolver.cloned(), npm_resolver: npm_resolver.cloned(), - workspace_resolver: Arc::new(WorkspaceResolver::new_raw( - Arc::new( - config_data - .map(|d| d.workspace_root_dir.clone()) - // this is fine because this value is only used to filter bare - // specifier resolution to workspace npm packages when in a workspace - .unwrap_or_else(|| 
ModuleSpecifier::parse("file:///").unwrap()), - ), - config_data.and_then(|d| d.import_map.as_ref().map(|i| (**i).clone())), - config_data - .and_then(|d| d.package_json.clone()) - .into_iter() - .collect(), - if config_data.map(|d| d.byonm).unwrap_or(false) { - PackageJsonDepResolution::Disabled - } else { - // todo(dsherret): this should also be disabled for when using - // auto-install with a node_modules directory - PackageJsonDepResolution::Enabled + workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else( + || { + Arc::new(WorkspaceResolver::new_raw( + // this is fine because this is only used before initialization + Arc::new(ModuleSpecifier::parse("file:///").unwrap()), + None, + Vec::new(), + PackageJsonDepResolution::Disabled, + )) }, - )), - maybe_jsx_import_source_config: config_file + ), + maybe_jsx_import_source_config: workspace .and_then(|cf| cf.to_maybe_jsx_import_source_config().ok().flatten()), maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()), - bare_node_builtins_enabled: config_file - .map(|cf| cf.has_unstable("bare-node-builtins")) - .unwrap_or(false), + bare_node_builtins_enabled: workspace + .is_some_and(|cf| cf.has_unstable("bare-node-builtins")), sloppy_imports_resolver: unstable_sloppy_imports.then(|| { SloppyImportsResolver::new_without_stat_cache(Arc::new(deno_fs::RealFs)) }), diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 6759f75f4b26e0..9dabb6ca51af8c 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -4905,7 +4905,7 @@ impl UserPreferences { config: &config::Config, specifier: &ModuleSpecifier, ) -> Self { - let fmt_options = config.tree.fmt_options_for_specifier(specifier); + let fmt_options = config.tree.fmt_config_for_specifier(specifier); let fmt_config = &fmt_options.options; let base_preferences = Self { allow_incomplete_completions: Some(true), @@ -5012,8 +5012,8 @@ impl UserPreferences { // Only use workspace settings for quote style if there's no `deno.json`. 
quote_preference: if config .tree - .config_file_for_specifier(specifier) - .is_some() + .workspace_member_ctx_for_specifier(specifier) + .is_some_and(|ctx| ctx.maybe_deno_json().is_some()) { base_preferences.quote_preference } else { @@ -5400,9 +5400,9 @@ mod tests { async fn setup( ts_config: Value, sources: &[(&str, &str, i32, LanguageId)], - ) -> (TsServer, Arc, LspCache) { + ) -> (TempDir, TsServer, Arc, LspCache) { let temp_dir = TempDir::new(); - let cache = LspCache::new(Some(temp_dir.uri())); + let cache = LspCache::new(Some(temp_dir.uri().join(".deno_dir").unwrap())); let mut config = Config::default(); config .tree @@ -5412,7 +5412,7 @@ mod tests { "compilerOptions": ts_config, }) .to_string(), - resolve_url("file:///deno.json").unwrap(), + temp_dir.uri().join("deno.json").unwrap(), &deno_config::ConfigParseOptions::default(), ) .unwrap(), @@ -5422,16 +5422,9 @@ mod tests { Arc::new(LspResolver::from_config(&config, &cache, None).await); let mut documents = Documents::default(); documents.update_config(&config, &resolver, &cache, &Default::default()); - for (specifier, source, version, language_id) in sources { - let specifier = - resolve_url(specifier).expect("failed to create specifier"); - documents.open( - specifier.clone(), - *version, - *language_id, - (*source).into(), - None, - ); + for (relative_specifier, source, version, language_id) in sources { + let specifier = temp_dir.uri().join(relative_specifier).unwrap(); + documents.open(specifier, *version, *language_id, (*source).into(), None); } let snapshot = Arc::new(StateSnapshot { project_version: 0, @@ -5456,7 +5449,7 @@ mod tests { .collect(), ), ); - (ts_server, snapshot, cache) + (temp_dir, ts_server, snapshot, cache) } fn setup_op_state(state_snapshot: Arc) -> OpState { @@ -5485,7 +5478,7 @@ mod tests { #[tokio::test] async fn test_get_diagnostics() { - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", @@ -5493,22 +5486,22 @@ mod tests { "lib": [], }), &[( - "file:///a.ts", + "a.ts", r#"console.log("hello deno");"#, 1, LanguageId::TypeScript, )], ) .await; - let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let diagnostics = ts_server - .get_diagnostics(snapshot, vec![specifier], Default::default()) + .get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .await .unwrap(); assert_eq!( json!(diagnostics), json!({ - "file:///a.ts": [ + specifier.clone(): [ { "start": { "line": 0, @@ -5518,7 +5511,7 @@ mod tests { "line": 0, "character": 7 }, - "fileName": "file:///a.ts", + "fileName": specifier, "messageText": "Cannot find name 'console'. Do you need to change your target library? 
Try changing the \'lib\' compiler option to include 'dom'.", "sourceLine": "console.log(\"hello deno\");", "category": 1, @@ -5531,7 +5524,7 @@ mod tests { #[tokio::test] async fn test_get_diagnostics_lib() { - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", @@ -5540,24 +5533,24 @@ mod tests { "noEmit": true, }), &[( - "file:///a.ts", + "a.ts", r#"console.log(document.location);"#, 1, LanguageId::TypeScript, )], ) .await; - let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let diagnostics = ts_server - .get_diagnostics(snapshot, vec![specifier], Default::default()) + .get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .await .unwrap(); - assert_eq!(json!(diagnostics), json!({ "file:///a.ts": [] })); + assert_eq!(json!(diagnostics), json!({ specifier: [] })); } #[tokio::test] async fn test_module_resolution() { - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", @@ -5565,7 +5558,7 @@ mod tests { "noEmit": true, }), &[( - "file:///a.ts", + "a.ts", r#" import { B } from "https://deno.land/x/b/mod.ts"; @@ -5578,17 +5571,17 @@ mod tests { )], ) .await; - let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let diagnostics = ts_server - .get_diagnostics(snapshot, vec![specifier], Default::default()) + .get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .await .unwrap(); - assert_eq!(json!(diagnostics), json!({ "file:///a.ts": [] })); + assert_eq!(json!(diagnostics), json!({ specifier: [] })); } #[tokio::test] async fn test_bad_module_specifiers() { - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", @@ -5596,7 +5589,7 @@ mod tests { "noEmit": true, }), &[( - "file:///a.ts", + "a.ts", r#" import { A } from "."; "#, @@ -5605,15 +5598,15 @@ mod tests { )], ) .await; - let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let diagnostics = ts_server - .get_diagnostics(snapshot, vec![specifier], Default::default()) + .get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .await .unwrap(); assert_eq!( json!(diagnostics), json!({ - "file:///a.ts": [{ + specifier.clone(): [{ "start": { "line": 1, "character": 8 @@ -5622,7 +5615,7 @@ mod tests { "line": 1, "character": 30 }, - "fileName": "file:///a.ts", + "fileName": specifier, "messageText": "\'A\' is declared but its value is never read.", "sourceLine": " import { A } from \".\";", "category": 2, @@ -5634,7 +5627,7 @@ mod tests { #[tokio::test] async fn test_remote_modules() { - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", @@ -5642,7 +5635,7 @@ mod tests { "noEmit": true, }), &[( - "file:///a.ts", + "a.ts", r#" import { B } from "https://deno.land/x/b/mod.ts"; @@ -5655,17 +5648,17 @@ mod tests { )], ) .await; - let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let diagnostics = ts_server - .get_diagnostics(snapshot, vec![specifier], Default::default()) + .get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .await 
.unwrap(); - assert_eq!(json!(diagnostics), json!({ "file:///a.ts": [] })); + assert_eq!(json!(diagnostics), json!({ specifier: [] })); } #[tokio::test] async fn test_partial_modules() { - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", @@ -5673,7 +5666,7 @@ mod tests { "noEmit": true, }), &[( - "file:///a.ts", + "a.ts", r#" import { Application, @@ -5689,15 +5682,15 @@ mod tests { )], ) .await; - let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let diagnostics = ts_server - .get_diagnostics(snapshot, vec![specifier], Default::default()) + .get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .await .unwrap(); assert_eq!( json!(diagnostics), json!({ - "file:///a.ts": [{ + specifier.clone(): [{ "start": { "line": 1, "character": 8 @@ -5706,7 +5699,7 @@ mod tests { "line": 6, "character": 55, }, - "fileName": "file:///a.ts", + "fileName": specifier.clone(), "messageText": "All imports in import declaration are unused.", "sourceLine": " import {", "category": 2, @@ -5720,7 +5713,7 @@ mod tests { "line": 8, "character": 29 }, - "fileName": "file:///a.ts", + "fileName": specifier, "messageText": "Expression expected.", "sourceLine": " import * as test from", "category": 1, @@ -5732,7 +5725,7 @@ mod tests { #[tokio::test] async fn test_no_debug_failure() { - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", @@ -5740,22 +5733,22 @@ mod tests { "noEmit": true, }), &[( - "file:///a.ts", + "a.ts", r#"const url = new URL("b.js", import."#, 1, LanguageId::TypeScript, )], ) .await; - let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let diagnostics = ts_server - .get_diagnostics(snapshot, vec![specifier], Default::default()) + .get_diagnostics(snapshot, vec![specifier.clone()], Default::default()) .await .unwrap(); assert_eq!( json!(diagnostics), json!({ - "file:///a.ts": [ + specifier.clone(): [ { "start": { "line": 0, @@ -5765,7 +5758,7 @@ mod tests { "line": 0, "character": 35 }, - "fileName": "file:///a.ts", + "fileName": specifier, "messageText": "Identifier expected.", "sourceLine": "const url = new URL(\"b.js\", import.", "category": 1, @@ -5778,7 +5771,7 @@ mod tests { #[tokio::test] async fn test_request_assets() { - let (ts_server, snapshot, _) = setup(json!({}), &[]).await; + let (_, ts_server, snapshot, _) = setup(json!({}), &[]).await; let assets = get_isolate_assets(&ts_server, snapshot).await; let mut asset_names = assets .iter() @@ -5810,7 +5803,7 @@ mod tests { #[tokio::test] async fn test_modify_sources() { - let (ts_server, snapshot, cache) = setup( + let (temp_dir, ts_server, snapshot, cache) = setup( json!({ "target": "esnext", "module": "esnext", @@ -5818,7 +5811,7 @@ mod tests { "noEmit": true, }), &[( - "file:///a.ts", + "a.ts", r#" import * as a from "https://deno.land/x/example/a.ts"; if (a.a === "b") { @@ -5840,15 +5833,19 @@ mod tests { b"export const b = \"b\";\n", ) .unwrap(); - let specifier = resolve_url("file:///a.ts").unwrap(); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let diagnostics = ts_server - .get_diagnostics(snapshot.clone(), vec![specifier], Default::default()) + .get_diagnostics( + snapshot.clone(), + vec![specifier.clone()], + Default::default(), + ) .await .unwrap(); assert_eq!( 
json!(diagnostics), json!({ - "file:///a.ts": [ + specifier.clone(): [ { "start": { "line": 2, @@ -5858,7 +5855,7 @@ mod tests { "line": 2, "character": 17 }, - "fileName": "file:///a.ts", + "fileName": specifier, "messageText": "Property \'a\' does not exist on type \'typeof import(\"https://deno.land/x/example/a\")\'.", "sourceLine": " if (a.a === \"b\") {", "code": 2339, @@ -5886,15 +5883,19 @@ mod tests { [(&specifier_dep, ChangeKind::Opened)], None, ); - let specifier = resolve_url("file:///a.ts").unwrap(); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let diagnostics = ts_server - .get_diagnostics(snapshot.clone(), vec![specifier], Default::default()) + .get_diagnostics( + snapshot.clone(), + vec![specifier.clone()], + Default::default(), + ) .await .unwrap(); assert_eq!( json!(diagnostics), json!({ - "file:///a.ts": [] + specifier: [] }) ); } @@ -5944,17 +5945,17 @@ mod tests { character: 16, }) .unwrap(); - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", "lib": ["deno.ns", "deno.window"], "noEmit": true, }), - &[("file:///a.ts", fixture, 1, LanguageId::TypeScript)], + &[("a.ts", fixture, 1, LanguageId::TypeScript)], ) .await; - let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let info = ts_server .get_completions( snapshot.clone(), @@ -5969,7 +5970,7 @@ mod tests { trigger_kind: None, }, Default::default(), - Some(ModuleSpecifier::parse("file:///").unwrap()), + Some(temp_dir.uri()), ) .await .unwrap(); @@ -5986,7 +5987,7 @@ mod tests { preferences: None, data: None, }, - Some(ModuleSpecifier::parse("file:///").unwrap()), + Some(temp_dir.uri()), ) .await .unwrap() @@ -6095,7 +6096,7 @@ mod tests { character: 33, }) .unwrap(); - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", @@ -6103,12 +6104,12 @@ mod tests { "noEmit": true, }), &[ - ("file:///a.ts", fixture_a, 1, LanguageId::TypeScript), - ("file:///b.ts", fixture_b, 1, LanguageId::TypeScript), + ("a.ts", fixture_a, 1, LanguageId::TypeScript), + ("b.ts", fixture_b, 1, LanguageId::TypeScript), ], ) .await; - let specifier = resolve_url("file:///a.ts").expect("could not resolve url"); + let specifier = temp_dir.uri().join("a.ts").unwrap(); let fmt_options_config = FmtOptionsConfig { semi_colons: Some(false), single_quote: Some(true), @@ -6129,7 +6130,7 @@ mod tests { ..Default::default() }, FormatCodeSettings::from(&fmt_options_config), - Some(ModuleSpecifier::parse("file:///").unwrap()), + Some(temp_dir.uri()), ) .await .unwrap(); @@ -6155,7 +6156,7 @@ mod tests { }), data: entry.data.clone(), }, - Some(ModuleSpecifier::parse("file:///").unwrap()), + Some(temp_dir.uri()), ) .await .unwrap() @@ -6204,7 +6205,7 @@ mod tests { #[tokio::test] async fn test_get_edits_for_file_rename() { - let (ts_server, snapshot, _) = setup( + let (temp_dir, ts_server, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", @@ -6212,21 +6213,16 @@ mod tests { "noEmit": true, }), &[ - ( - "file:///a.ts", - r#"import "./b.ts";"#, - 1, - LanguageId::TypeScript, - ), - ("file:///b.ts", r#""#, 1, LanguageId::TypeScript), + ("a.ts", r#"import "./b.ts";"#, 1, LanguageId::TypeScript), + ("b.ts", r#""#, 1, LanguageId::TypeScript), ], ) .await; let changes = ts_server .get_edits_for_file_rename( snapshot, - resolve_url("file:///b.ts").unwrap(), - 
resolve_url("file:///πŸ¦•.ts").unwrap(), + temp_dir.uri().join("b.ts").unwrap(), + temp_dir.uri().join("πŸ¦•.ts").unwrap(), FormatCodeSettings::default(), UserPreferences::default(), ) @@ -6235,7 +6231,7 @@ mod tests { assert_eq!( changes, vec![FileTextChanges { - file_name: "file:///a.ts".to_string(), + file_name: temp_dir.uri().join("a.ts").unwrap().to_string(), text_changes: vec![TextChange { span: TextSpan { start: 8, @@ -6280,21 +6276,21 @@ mod tests { #[tokio::test] async fn resolve_unknown_dependency() { - let (_, snapshot, _) = setup( + let (temp_dir, _, snapshot, _) = setup( json!({ "target": "esnext", "module": "esnext", "lib": ["deno.ns", "deno.window"], "noEmit": true, }), - &[("file:///a.ts", "", 1, LanguageId::TypeScript)], + &[("a.ts", "", 1, LanguageId::TypeScript)], ) .await; let mut state = setup_op_state(snapshot); let resolved = op_resolve_inner( &mut state, ResolveArgs { - base: "file:///a.ts".to_string(), + base: temp_dir.uri().join("a.ts").unwrap().to_string(), is_base_cjs: false, specifiers: vec!["./b.ts".to_string()], }, @@ -6303,7 +6299,7 @@ mod tests { assert_eq!( resolved, vec![Some(( - "file:///b.ts".to_string(), + temp_dir.uri().join("b.ts").unwrap().to_string(), MediaType::TypeScript.as_ts_extension().to_string() ))] ); diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs index 2e2e00942ac511..ae859a650d82fd 100644 --- a/tests/integration/lsp_tests.rs +++ b/tests/integration/lsp_tests.rs @@ -11917,6 +11917,11 @@ fn lsp_node_modules_dir() { assert!(!temp_dir.path().join("node_modules").exists()); + // a lockfile will be created here because someone did an explicit cache + let lockfile_path = temp_dir.path().join("deno.lock"); + assert!(lockfile_path.exists()); + lockfile_path.remove_file(); + temp_dir.write( temp_dir.path().join("deno.json"), "{ \"nodeModulesDir\": true, \"lock\": false }\n", @@ -11950,7 +11955,7 @@ fn lsp_node_modules_dir() { assert!(temp_dir.path().join("node_modules/chalk").exists()); assert!(temp_dir.path().join("node_modules/@types/node").exists()); - assert!(!temp_dir.path().join("deno.lock").exists()); + assert!(!lockfile_path.exists()); // was disabled // now add a lockfile and cache temp_dir.write( @@ -11963,7 +11968,7 @@ fn lsp_node_modules_dir() { let diagnostics = client.read_diagnostics(); assert_eq!(diagnostics.all().len(), 0, "{:#?}", diagnostics); - assert!(temp_dir.path().join("deno.lock").exists()); + assert!(lockfile_path.exists()); // the declaration should be found in the node_modules directory let res = client.write_request( @@ -13315,9 +13320,9 @@ fn lsp_deno_json_workspace_fmt_config() { json!([{ "range": { "start": { "line": 0, "character": 12 }, - "end": { "line": 0, "character": 14 }, + "end": { "line": 0, "character": 16 }, }, - "newText": "''", + "newText": "'')", }]) ); // `project2/file.ts` should use the fmt settings from `deno.json`, since it @@ -13449,6 +13454,15 @@ fn lsp_deno_json_workspace_lint_config() { "code": "ban-untagged-todo", "source": "deno-lint", "message": "TODO should be tagged with (@username) or (#issue)\nAdd a user tag or issue reference to the TODO comment, e.g. 
TODO(@djones), TODO(djones), TODO(#123)", + }, { + "range": { + "start": { "line": 2, "character": 14 }, + "end": { "line": 2, "character": 28 }, + }, + "severity": 2, + "code": "camelcase", + "source": "deno-lint", + "message": "Identifier 'snake_case_var' is not in camel case.\nConsider renaming `snake_case_var` to `snakeCaseVar`", }], "version": 1, }) @@ -13513,8 +13527,8 @@ fn lsp_deno_json_workspace_import_map() { temp_dir.write("project1/foo1.ts", ""); temp_dir.write( "project1/project2/deno.json", - // Should ignore and inherit import map from `project1/deno.json`. json!({ + // should overwrite the "foo" entry in the parent for this scope "imports": { "foo": "./foo2.ts", }, @@ -13524,36 +13538,74 @@ fn lsp_deno_json_workspace_import_map() { temp_dir.write("project1/project2/foo2.ts", ""); let mut client = context.new_lsp_command().build(); client.initialize_default(); - client.did_open(json!({ - "textDocument": { - "uri": temp_dir.uri().join("project1/project2/file.ts").unwrap(), - "languageId": "typescript", - "version": 1, - "text": "import \"foo\";\n", - }, - })); - let res = client.write_request( - "textDocument/hover", - json!({ + + // project1 resolution + { + client.did_open(json!({ "textDocument": { - "uri": temp_dir.uri().join("project1/project2/file.ts").unwrap(), - }, - "position": { "line": 0, "character": 7 }, - }), - ); - assert_eq!( - res, - json!({ - "contents": { - "kind": "markdown", - "value": format!("**Resolved Dependency**\n\n**Code**: file​{}\n", temp_dir.uri().join("project1/foo1.ts").unwrap().as_str().trim_start_matches("file")), + "uri": temp_dir.uri().join("project1/file.ts").unwrap(), + "languageId": "typescript", + "version": 1, + "text": "import \"foo\";\n", }, - "range": { - "start": { "line": 0, "character": 7 }, - "end": { "line": 0, "character": 12 }, + })); + let res = client.write_request( + "textDocument/hover", + json!({ + "textDocument": { + "uri": temp_dir.uri().join("project1/file.ts").unwrap(), + }, + "position": { "line": 0, "character": 7 }, + }), + ); + assert_eq!( + res, + json!({ + "contents": { + "kind": "markdown", + "value": format!("**Resolved Dependency**\n\n**Code**: file​{}\n", temp_dir.uri().join("project1/foo1.ts").unwrap().as_str().trim_start_matches("file")), + }, + "range": { + "start": { "line": 0, "character": 7 }, + "end": { "line": 0, "character": 12 }, + }, + }) + ); + } + + // project1/project2 resolution + { + client.did_open(json!({ + "textDocument": { + "uri": temp_dir.uri().join("project1/project2/file.ts").unwrap(), + "languageId": "typescript", + "version": 1, + "text": "import \"foo\";\n", }, - }) - ); + })); + let res = client.write_request( + "textDocument/hover", + json!({ + "textDocument": { + "uri": temp_dir.uri().join("project1/project2/file.ts").unwrap(), + }, + "position": { "line": 0, "character": 7 }, + }), + ); + assert_eq!( + res, + json!({ + "contents": { + "kind": "markdown", + "value": format!("**Resolved Dependency**\n\n**Code**: file​{}\n", temp_dir.uri().join("project1/project2/foo2.ts").unwrap().as_str().trim_start_matches("file")), + }, + "range": { + "start": { "line": 0, "character": 7 }, + "end": { "line": 0, "character": 12 }, + }, + }) + ); + } client.shutdown(); } @@ -13827,6 +13879,101 @@ fn lsp_deno_json_workspace_jsr_resolution() { client.shutdown(); } +#[test] +fn lsp_npm_workspace() { + let context = TestContextBuilder::new() + .use_http_server() + .use_temp_cwd() + .build(); + let temp_dir = context.temp_dir(); + temp_dir.write( + "package.json", + json!({ + "workspaces": 
["packages/*"] + }) + .to_string(), + ); + { + temp_dir.create_dir_all("packages/add"); + temp_dir.write( + "packages/add/package.json", + json!({ + "name": "add", + "version": "1.0.0", + "exports": "./index.ts" + }) + .to_string(), + ); + temp_dir.write( + "packages/add/index.ts", + "export function add(a: number, b: number): number { return a + b; }", + ); + } + { + temp_dir.create_dir_all("packages/subtract"); + temp_dir.write( + "packages/subtract/package.json", + json!({ + "name": "add", + "version": "1.0.0", + "exports": "./index.ts", + "dependencies": { + "add": "^1.0.0" + } + }) + .to_string(), + ); + } + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + let diagnostics = client.did_open(json!({ + "textDocument": { + "uri": temp_dir.uri().join("packages/subtract/index.ts").unwrap(), + "languageId": "typescript", + "version": 1, + "text": "import { add } from 'add';\nexport function subtract(a: number, b: number): number { return add(a, -b); }", + }, + })); + assert_eq!(json!(diagnostics.all()), json!([])); + let res = client.write_request( + "textDocument/definition", + json!({ + "textDocument": { + "uri": temp_dir.uri().join("packages/subtract/index.ts").unwrap(), + }, + "position": { "line": 0, "character": 9 }, + }), + ); + // The temp dir is symlinked on the CI + assert_eq!( + res, + json!([{ + "targetUri": temp_dir.uri().join("packages/add/index.ts").unwrap(), + "targetRange": { + "start": { + "line": 0, + "character": 0, + }, + "end": { + "line": 0, + "character": 67, + }, + }, + "targetSelectionRange": { + "start": { + "line": 0, + "character": 16, + }, + "end": { + "line": 0, + "character": 19, + }, + }, + }]), + ); + client.shutdown(); +} + #[test] fn lsp_import_unstable_bare_node_builtins_auto_discovered() { let context = TestContextBuilder::new().use_temp_cwd().build();