Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(lsp): include all diagnosable documents on initialize #17979

Merged
merged 37 commits into from
Mar 30, 2023
Merged
Show file tree
Hide file tree
Changes from 9 commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
6125671
fix(lsp): add all documents to the lsp on initialize
dsherret Feb 28, 2023
f08a5bf
Fix
dsherret Feb 28, 2023
ef6475c
Take into account enabled_paths
dsherret Mar 1, 2023
17b1f0b
Another one.
dsherret Mar 1, 2023
67439dd
More work.
dsherret Mar 4, 2023
abedfff
Merge branch 'main' into fix_lsp_open_docs_root
dsherret Mar 15, 2023
459ce44
Fix inverted logic.
dsherret Mar 15, 2023
72bfa94
Compile
dsherret Mar 15, 2023
cdc4af0
Ignore minified files
dsherret Mar 15, 2023
0129cf9
Merge branch 'main' into fix_lsp_open_docs_root
dsherret Mar 29, 2023
8688718
Handle files in enabled paths
dsherret Mar 29, 2023
8b0ab70
Consolidate code
dsherret Mar 29, 2023
4c9e102
refactor(lsp): remove boolean parameters on `documents.documents(...)`
dsherret Mar 29, 2023
d9c5e91
Remove word "And"
dsherret Mar 29, 2023
fc4d027
Merge branch 'refactor_lsp_documents_bool_param' into fix_lsp_open_docs_root
dsherret Mar 29, 2023
08ef6af
Working.
dsherret Mar 29, 2023
2dc17b0
Failing test because our textDocument/references implementation is broken
dsherret Mar 29, 2023
bab99d7
fix(lsp): `textDocument/references` should respect `includeDeclaration`
dsherret Mar 29, 2023
b7dee99
Small refactor.
dsherret Mar 29, 2023
a8cb329
Fix codelens test
dsherret Mar 29, 2023
5894f85
Merge branch 'main' into fix_lsp_find_references_respect_include_decl…
dsherret Mar 29, 2023
c5cfb24
Merge branch 'main' into fix_lsp_find_references_respect_include_decl…
dsherret Mar 30, 2023
b7d7541
Add test and improve code
dsherret Mar 30, 2023
41b3004
Merge branch 'main' into fix_lsp_find_references_respect_include_decl…
dsherret Mar 30, 2023
8dec2d3
Oops... Fix test
dsherret Mar 30, 2023
5a55a7e
Reduce flakiness of package_json_uncached_no_error
dsherret Mar 30, 2023
74fb03f
Merge branch 'main' into fix_lsp_open_docs_root
dsherret Mar 30, 2023
818811e
Merge branch 'fix_lsp_find_references_respect_include_declaration' into fix_lsp_open_docs_root
dsherret Mar 30, 2023
7b96f00
Add enabled_root_urls tests
dsherret Mar 30, 2023
bc3c42e
Merge branch 'main' into fix_lsp_open_docs_root
dsherret Mar 30, 2023
697ace6
Refactor document finder to be more easily unit testable
dsherret Mar 30, 2023
2216402
Another test.
dsherret Mar 30, 2023
8fe2597
Merge branch 'main' into fix_lsp_open_docs_root
dsherret Mar 30, 2023
4d8a9b0
Fix tests
dsherret Mar 30, 2023
b416267
Merge branch 'main' into fix_lsp_open_docs_root
dsherret Mar 30, 2023
055f94d
Fix two failing tests.
dsherret Mar 30, 2023
6cc5a97
Update lsp tests to use a temp cwd
dsherret Mar 30, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
57 changes: 57 additions & 0 deletions cli/lsp/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -485,6 +485,24 @@ impl Config {
.unwrap_or_else(|| self.settings.workspace.enable)
}

/// Returns the set of directory URLs the LSP should treat as roots for
/// document discovery: the explicitly enabled paths of each workspace
/// folder when present, otherwise the workspace folder itself, falling
/// back to the root URI when nothing else is configured.
pub fn root_dirs(&self) -> Vec<Url> {
  let mut dirs = Vec::new();
  for (workspace, enabled_paths) in &self.enabled_paths {
    if enabled_paths.is_empty() {
      // no explicit paths enabled for this workspace, so use the
      // workspace folder itself
      dirs.push(workspace.clone());
    } else {
      dirs.extend(enabled_paths.iter().cloned());
    }
  }
  if dirs.is_empty() {
    if let Some(root_uri) = &self.root_uri {
      dirs.push(root_uri.clone());
    }
  }
  // collapse nested directories so the same tree isn't walked twice
  sort_and_remove_child_dirs(&mut dirs);
  dirs
}

pub fn specifier_code_lens_test(&self, specifier: &ModuleSpecifier) -> bool {
let value = self
.settings
Expand Down Expand Up @@ -621,6 +639,20 @@ impl Config {
}
}

fn sort_and_remove_child_dirs(dirs: &mut Vec<Url>) {
if dirs.is_empty() {
return;
}

dirs.sort();
for i in (0..dirs.len() - 1).rev() {
let prev = &dirs[i + 1];
if prev.as_str().starts_with(dirs[i].as_str()) {
dirs.remove(i + 1);
}
}
}

#[cfg(test)]
mod tests {
use super::*;
Expand Down Expand Up @@ -785,4 +817,29 @@ mod tests {
WorkspaceSettings::default()
);
}

#[test]
fn test_sort_and_remove_child_dirs() {
  // helper: parses the inputs, runs the function under test, and
  // compares the serialized output against the expectation
  fn run_test(dirs: Vec<&str>, expected_output: Vec<&str>) {
    let mut dirs = dirs
      .into_iter()
      .map(|dir| Url::parse(dir).unwrap())
      .collect::<Vec<_>>();
    sort_and_remove_child_dirs(&mut dirs);
    let dirs: Vec<_> = dirs.iter().map(|dir| dir.as_str()).collect();
    assert_eq!(dirs, expected_output);
  }

  // duplicates and nested child dirs are removed, ancestors kept
  run_test(
    vec![
      "file:///test/asdf/test/asdf/",
      "file:///test/asdf/",
      "file:///test/asdf/",
      "file:///testing/456/893/",
      "file:///testing/456/893/test/",
    ],
    vec!["file:///test/asdf/", "file:///testing/456/893/"],
  );
  // empty input stays empty
  run_test(vec![], vec![]);
}
}
135 changes: 120 additions & 15 deletions cli/lsp/documents.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::permissions::PermissionsContainer;
use indexmap::IndexMap;
use lsp::Url;
use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::collections::HashSet;
Expand Down Expand Up @@ -774,18 +775,6 @@ impl FileSystemDocuments {
self.docs.insert(specifier.clone(), doc.clone());
Some(doc)
}

/// Re-resolves the dependencies of every cached file system document
/// with the provided resolver and marks the collection dirty.
pub fn refresh_dependencies(
&mut self,
resolver: &dyn deno_graph::source::Resolver,
) {
for doc in self.docs.values_mut() {
// NOTE(review): `maybe_with_new_resolver` appears to return `Some`
// only when the resolver change produces a new document — confirm
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
*doc = new_doc;
}
}
// flag cached state derived from these docs as stale
self.dirty = true;
}
}

fn get_document_path(
Expand Down Expand Up @@ -1168,18 +1157,26 @@ impl Documents {

pub fn update_config(
&mut self,
root_dirs: Vec<Url>,
maybe_import_map: Option<Arc<import_map::ImportMap>>,
maybe_config_file: Option<&ConfigFile>,
maybe_package_json: Option<&PackageJson>,
npm_registry_api: NpmRegistryApi,
npm_resolution: NpmResolution,
) {
fn calculate_resolver_config_hash(
root_dirs: &[Url],
maybe_import_map: Option<&import_map::ImportMap>,
maybe_jsx_config: Option<&JsxImportSourceConfig>,
maybe_package_json_deps: Option<&PackageJsonDeps>,
) -> u64 {
let mut hasher = FastInsecureHasher::default();
hasher.write_hashable(&{
// ensure these are sorted
let mut root_dirs = root_dirs.to_vec();
root_dirs.sort_unstable();
root_dirs
});
if let Some(import_map) = maybe_import_map {
hasher.write_str(&import_map.to_json());
hasher.write_str(import_map.base_url().as_str());
Expand All @@ -1195,6 +1192,7 @@ impl Documents {
let maybe_jsx_config =
maybe_config_file.and_then(|cf| cf.to_maybe_jsx_import_source_config());
let new_resolver_config_hash = calculate_resolver_config_hash(
&root_dirs,
maybe_import_map.as_deref(),
maybe_jsx_config.as_ref(),
maybe_package_json_deps.as_ref(),
Expand Down Expand Up @@ -1248,21 +1246,126 @@ impl Documents {

// only refresh the dependencies if the underlying configuration has changed
if self.resolver_config_hash != new_resolver_config_hash {
self.refresh_dependencies();
self.refresh_dependencies(root_dirs);
self.resolver_config_hash = new_resolver_config_hash;
}

self.dirty = true;
}

fn refresh_dependencies(&mut self) {
fn refresh_dependencies(&mut self, root_dirs: Vec<Url>) {
fn is_auto_discoverable_dir(dir_path: &Path) -> bool {
if let Some(dir_name) = dir_path.file_name() {
let dir_name = dir_name.to_string_lossy().to_lowercase();
matches!(dir_name.as_str(), "node_modules" | ".git")
} else {
false
}
}

fn is_auto_discoverable_file(file_path: &Path) -> bool {
// Don't auto-discover minified files as they are likely to be very large
// and likely not to have dependencies on code outside them that would
// be useful in the LSP
if let Some(file_name) = file_path.file_name() {
let file_name = file_name.to_string_lossy().to_lowercase();
!file_name.as_str().contains(".min.")
} else {
false
}
}

let resolver = self.resolver.as_graph_resolver();
for doc in self.open_docs.values_mut() {
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
*doc = new_doc;
}
}
self.file_system_docs.lock().refresh_dependencies(resolver);

// update the file system documents
let mut fs_docs = self.file_system_docs.lock();
let mut not_found_docs =
fs_docs.docs.keys().cloned().collect::<HashSet<_>>();

let mut pending_dirs = VecDeque::new();
for dir in root_dirs {
if let Ok(path) = dir.to_file_path() {
pending_dirs.push_back(path.to_path_buf());
}
}

while let Some(dir_path) = pending_dirs.pop_front() {
if !is_auto_discoverable_dir(&dir_path) {
continue;
}
let read_dir = match std::fs::read_dir(&dir_path) {
Ok(entry) => entry,
Err(_) => continue,
};

for entry in read_dir {
let entry = match entry {
Ok(entry) => entry,
Err(_) => continue,
};
let file_type = match entry.file_type() {
Ok(file_type) => file_type,
Err(_) => continue,
};
let new_path = dir_path.join(entry.file_name());
if file_type.is_dir() {
pending_dirs.push_back(new_path);
} else if file_type.is_file() {
let media_type: MediaType = (&new_path).into();
let is_diagnosable = match media_type {
dsherret marked this conversation as resolved.
Show resolved Hide resolved
MediaType::JavaScript
| MediaType::Jsx
| MediaType::Mjs
| MediaType::Cjs
| MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts
| MediaType::Tsx => true,
MediaType::Json
| MediaType::Wasm
| MediaType::SourceMap
| MediaType::TsBuildInfo
| MediaType::Unknown => false,
};
if !is_diagnosable || !is_auto_discoverable_file(&new_path) {
continue;
}
let specifier = match ModuleSpecifier::from_file_path(&new_path) {
Ok(specifier) => specifier,
Err(_) => continue,
};

if !self.open_docs.contains_key(&specifier)
&& !fs_docs.docs.contains_key(&specifier)
{
fs_docs.refresh_document(&self.cache, resolver, &specifier);
} else {
not_found_docs.remove(&specifier);
// update the existing entry to have the new resolver
if let Some(doc) = fs_docs.docs.get_mut(&specifier) {
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
*doc = new_doc;
}
}
}
}
}
}

// clean up and remove any documents that weren't found
for uri in not_found_docs {
fs_docs.docs.remove(&uri);
}

fs_docs.dirty = true;
}

/// Iterate through the documents, building a map where the key is a unique
Expand Down Expand Up @@ -1633,6 +1736,7 @@ console.log(b, "hello deno");
.unwrap();

documents.update_config(
vec![],
Some(Arc::new(import_map)),
None,
None,
Expand Down Expand Up @@ -1672,6 +1776,7 @@ console.log(b, "hello deno");
.unwrap();

documents.update_config(
vec![],
Some(Arc::new(import_map)),
None,
None,
Expand Down
1 change: 1 addition & 0 deletions cli/lsp/language_server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1133,6 +1133,7 @@ impl Inner {

fn refresh_documents_config(&mut self) {
self.documents.update_config(
self.config.root_dirs(),
self.maybe_import_map.clone(),
self.maybe_config_file.as_ref(),
self.maybe_package_json.as_ref(),
Expand Down
21 changes: 8 additions & 13 deletions cli/lsp/tsc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2748,9 +2748,9 @@ fn op_respond(state: &mut OpState, args: Response) -> bool {
fn op_script_names(state: &mut OpState) -> Vec<String> {
let state = state.borrow_mut::<State>();
let documents = &state.state_snapshot.documents;
let open_docs = documents.documents(true, true);
let mut result = Vec::new();
let all_docs = documents.documents(false, true);
let mut seen = HashSet::new();
let mut result = Vec::new();

if documents.has_injected_types_node_package() {
// ensure this is first so it resolves the node types first
Expand All @@ -2766,23 +2766,18 @@ fn op_script_names(state: &mut OpState) -> Vec<String> {
}
}

// finally include the documents and all their dependencies
for doc in &open_docs {
// finally, include the documents and all their dependencies
for doc in &all_docs {
let specifier = doc.specifier();
if seen.insert(specifier.as_str()) {
if seen.insert(specifier.as_str()) && documents.exists(specifier) {
// only include dependencies we know to exist otherwise typescript will error
result.push(specifier.to_string());
}
}

// and then all their dependencies (do this after to avoid exists calls)
for doc in &open_docs {
for dep in doc.dependencies().values() {
if let Some(specifier) = dep.get_type().or_else(|| dep.get_code()) {
if seen.insert(specifier.as_str()) {
// only include dependencies we know to exist otherwise typescript will error
if documents.exists(specifier) {
result.push(specifier.to_string());
}
if seen.insert(specifier.as_str()) && documents.exists(specifier) {
result.push(specifier.to_string());
}
}
}
Expand Down