Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: faster LSP by caching file managers #6047

Merged
merged 1 commit into from
Sep 16, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 17 additions & 7 deletions tooling/lsp/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ use nargo::{
use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection};
use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING};
use noirc_frontend::{
graph::{CrateId, CrateName},
graph::{CrateGraph, CrateId, CrateName},
hir::{
def_map::{parse_file, CrateDefMap},
Context, FunctionNameMatch, ParsedFiles,
Expand Down Expand Up @@ -92,15 +92,26 @@ pub struct LspState {
open_documents_count: usize,
input_files: HashMap<String, String>,
cached_lenses: HashMap<String, Vec<CodeLens>>,
cached_definitions: HashMap<PathBuf, NodeInterner>,
cached_parsed_files: HashMap<PathBuf, (usize, (ParsedModule, Vec<ParserError>))>,
cached_def_maps: HashMap<PathBuf, BTreeMap<CrateId, CrateDefMap>>,
workspace_cache: HashMap<PathBuf, WorkspaceCacheData>,
package_cache: HashMap<PathBuf, PackageCacheData>,
options: LspInitializationOptions,

// Tracks files that currently have errors, by package root.
files_with_errors: HashMap<PathBuf, HashSet<Url>>,
}

/// Per-workspace cached state, stored in `LspState::workspace_cache` keyed by the
/// workspace's root directory. Holds the `FileManager` built while processing a
/// document so subsequent requests can reuse it instead of re-reading every file
/// in the workspace.
struct WorkspaceCacheData {
// File manager populated for the whole workspace (stdlib included) during
// `process_workspace_for_noir_document`; reused by `process_request`.
file_manager: FileManager,
}

/// Per-package cached compilation results, stored in `LspState::package_cache`
/// keyed by the package's root directory. Populated after a package is
/// type-checked in `process_workspace_for_noir_document` and cleared once no
/// documents remain open, so LSP requests can be answered from the cache
/// instead of re-checking the crate.
struct PackageCacheData {
// Root crate id of the package within `crate_graph`.
crate_id: CrateId,
// Crate graph for the package; used e.g. to look up `crate_graph[crate_id].dependencies`.
crate_graph: CrateGraph,
// Interner produced by type-checking; queried for definitions/references.
node_interner: NodeInterner,
// Def maps for every crate compiled for this package, keyed by crate id.
def_maps: BTreeMap<CrateId, CrateDefMap>,
}

impl LspState {
fn new(
client: &ClientSocket,
Expand All @@ -112,12 +123,11 @@ impl LspState {
solver: WrapperSolver(Box::new(solver)),
input_files: HashMap::new(),
cached_lenses: HashMap::new(),
cached_definitions: HashMap::new(),
open_documents_count: 0,
cached_parsed_files: HashMap::new(),
cached_def_maps: HashMap::new(),
workspace_cache: HashMap::new(),
package_cache: HashMap::new(),
open_documents_count: 0,
options: Default::default(),

files_with_errors: HashMap::new(),
}
}
Expand Down
23 changes: 19 additions & 4 deletions tooling/lsp/src/notifications/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@ use std::collections::HashSet;
use std::ops::ControlFlow;
use std::path::PathBuf;

use crate::insert_all_files_for_workspace_into_file_manager;
use crate::{
insert_all_files_for_workspace_into_file_manager, PackageCacheData, WorkspaceCacheData,
};
use async_lsp::{ErrorCode, LanguageClient, ResponseError};
use fm::{FileManager, FileMap};
use fxhash::FxHashMap as HashMap;
Expand Down Expand Up @@ -79,7 +81,8 @@ pub(super) fn on_did_close_text_document(
state.open_documents_count -= 1;

if state.open_documents_count == 0 {
state.cached_definitions.clear();
state.package_cache.clear();
state.workspace_cache.clear();
}

let document_uri = params.text_document.uri;
Expand Down Expand Up @@ -155,8 +158,15 @@ pub(crate) fn process_workspace_for_noir_document(
Some(&file_path),
);
state.cached_lenses.insert(document_uri.to_string(), collected_lenses);
state.cached_definitions.insert(package.root_dir.clone(), context.def_interner);
state.cached_def_maps.insert(package.root_dir.clone(), context.def_maps);
state.package_cache.insert(
package.root_dir.clone(),
PackageCacheData {
crate_id,
crate_graph: context.crate_graph,
node_interner: context.def_interner,
def_maps: context.def_maps,
},
);

let fm = &context.file_manager;
let files = fm.as_file_map();
Expand All @@ -166,6 +176,11 @@ pub(crate) fn process_workspace_for_noir_document(
}
}

state.workspace_cache.insert(
workspace.root_dir.clone(),
WorkspaceCacheData { file_manager: workspace_file_manager },
);

Ok(())
}

Expand Down
82 changes: 70 additions & 12 deletions tooling/lsp/src/requests/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@
use std::path::PathBuf;
use std::{collections::HashMap, future::Future};

use crate::insert_all_files_for_workspace_into_file_manager;
use crate::{insert_all_files_for_workspace_into_file_manager, parse_diff, PackageCacheData};
use crate::{
parse_diff, resolve_workspace_for_source_path,
resolve_workspace_for_source_path,
types::{CodeLensOptions, InitializeParams},
};
use async_lsp::{ErrorCode, ResponseError};
Expand Down Expand Up @@ -248,14 +248,14 @@
signature_help_provider: Some(lsp_types::OneOf::Right(
lsp_types::SignatureHelpOptions {
trigger_characters: Some(vec!["(".to_string(), ",".to_string()]),
retrigger_characters: None,

Check warning on line 251 in tooling/lsp/src/requests/mod.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (retrigger)
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: None,
},
},
)),
code_action_provider: Some(lsp_types::OneOf::Right(lsp_types::CodeActionOptions {
code_action_kinds: Some(vec![CodeActionKind::QUICKFIX]),

Check warning on line 258 in tooling/lsp/src/requests/mod.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (QUICKFIX)
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: None,
},
Expand Down Expand Up @@ -407,7 +407,7 @@
location: noirc_errors::Location,
files: &'a FileMap,
interner: &'a NodeInterner,
interners: &'a HashMap<PathBuf, NodeInterner>,
package_cache: &'a HashMap<PathBuf, PackageCacheData>,
crate_id: CrateId,
crate_name: String,
dependencies: &'a Vec<Dependency>,
Expand All @@ -432,6 +432,63 @@
ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file")
})?;

// In practice when `process_request` is called, a document in the project should already have been
// open so both the workspace and package cache will have data. However, just in case this isn't true
// for some reason, and also for tests (some tests just test a request without going through the full
// LSP workflow), we have a fallback where we type-check the workspace/package, then continue with
// processing the request.
let Some(workspace_cache_data) = state.workspace_cache.get(&workspace.root_dir) else {
return process_request_no_workspace_cache(state, text_document_position_params, callback);
};

let Some(package_cache_data) = state.package_cache.get(&package.root_dir) else {
return process_request_no_workspace_cache(state, text_document_position_params, callback);
};

let file_manager = &workspace_cache_data.file_manager;
let interner = &package_cache_data.node_interner;
let def_maps = &package_cache_data.def_maps;
let crate_graph = &package_cache_data.crate_graph;
let crate_id = package_cache_data.crate_id;

let files = file_manager.as_file_map();

let location = position_to_location(
files,
&PathString::from(file_path),
&text_document_position_params.position,
)?;

Ok(callback(ProcessRequestCallbackArgs {
location,
files,
interner,
package_cache: &state.package_cache,
crate_id,
crate_name: package.name.to_string(),
dependencies: &crate_graph[crate_id].dependencies,
def_maps,
}))
}

pub(crate) fn process_request_no_workspace_cache<F, T>(
state: &mut LspState,
text_document_position_params: TextDocumentPositionParams,
callback: F,
) -> Result<T, ResponseError>
where
F: FnOnce(ProcessRequestCallbackArgs) -> T,
{
let file_path =
text_document_position_params.text_document.uri.to_file_path().map_err(|_| {
ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path")
})?;

let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap();
let package = crate::workspace_package_for_file(&workspace, &file_path).ok_or_else(|| {
ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file")
})?;

let mut workspace_file_manager = workspace.new_file_manager();
insert_all_files_for_workspace_into_file_manager(
state,
Expand All @@ -445,17 +502,17 @@

let interner;
let def_maps;
if let Some(def_interner) = state.cached_definitions.get(&package.root_dir) {
interner = def_interner;
def_maps = state.cached_def_maps.get(&package.root_dir).unwrap();
if let Some(package_cache) = state.package_cache.get(&package.root_dir) {
interner = &package_cache.node_interner;
def_maps = &package_cache.def_maps;
} else {
// We ignore the warnings and errors produced by compilation while resolving the definition
let _ = noirc_driver::check_crate(&mut context, crate_id, &Default::default());
interner = &context.def_interner;
def_maps = &context.def_maps;
}

let files = context.file_manager.as_file_map();
let files = workspace_file_manager.as_file_map();

let location = position_to_location(
files,
Expand All @@ -467,17 +524,18 @@
location,
files,
interner,
interners: &state.cached_definitions,
package_cache: &state.package_cache,
crate_id,
crate_name: package.name.to_string(),
dependencies: &context.crate_graph[context.root_crate_id()].dependencies,
dependencies: &context.crate_graph[crate_id].dependencies,
def_maps,
}))
}

pub(crate) fn find_all_references_in_workspace(
location: noirc_errors::Location,
interner: &NodeInterner,
cached_interners: &HashMap<PathBuf, NodeInterner>,
package_cache: &HashMap<PathBuf, PackageCacheData>,
files: &FileMap,
include_declaration: bool,
include_self_type_name: bool,
Expand All @@ -489,8 +547,8 @@
// If we found the referenced node, find its location
let referenced_location = interner.reference_location(referenced);

// Now we find all references that point to this location, in all interners

Check warning on line 550 in tooling/lsp/src/requests/mod.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (interners)
// (there's one interner per package, and all interners in a workspace rely on the

Check warning on line 551 in tooling/lsp/src/requests/mod.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (interners)
// same FileManager so a Location/FileId in one package is the same as in another package)
let mut locations = find_all_references(
referenced_location,
Expand All @@ -499,17 +557,17 @@
include_declaration,
include_self_type_name,
);
for interner in cached_interners.values() {
for cache_data in package_cache.values() {
locations.extend(find_all_references(
referenced_location,
interner,
&cache_data.node_interner,
files,
include_declaration,
include_self_type_name,
));
}

// The LSP client usually removes duplicate locations, but we do it here just in case they don't

Check warning on line 570 in tooling/lsp/src/requests/mod.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (loctions)
locations.sort_by_key(|location| {
(
location.uri.to_string(),
Expand Down
2 changes: 1 addition & 1 deletion tooling/lsp/src/requests/references.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
find_all_references_in_workspace(
args.location,
args.interner,
args.interners,
args.package_cache,
args.files,
include_declaration,
true,
Expand Down Expand Up @@ -99,7 +99,7 @@
// See https://github.com/noir-lang/noir/issues/5460
#[ignore]
#[test]
async fn test_on_references_request_works_accross_workspace_packages() {

Check warning on line 102 in tooling/lsp/src/requests/references.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (accross)
let (mut state, noir_text_document) = test_utils::init_lsp_server("workspace").await;

// noir_text_document is always `src/main.nr` in the workspace directory, so let's go to the workspace dir
Expand Down
2 changes: 1 addition & 1 deletion tooling/lsp/src/requests/rename.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ pub(crate) fn on_rename_request(
let rename_changes = find_all_references_in_workspace(
args.location,
args.interner,
args.interners,
args.package_cache,
args.files,
true,
false,
Expand Down
Loading