diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs
index 0e090a07336..39d4c3faa61 100644
--- a/tooling/lsp/src/lib.rs
+++ b/tooling/lsp/src/lib.rs
@@ -35,7 +35,7 @@ use nargo::{
 use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING};
 use noirc_frontend::{
-    graph::{CrateId, CrateName},
+    graph::{CrateGraph, CrateId, CrateName},
     hir::{
         def_map::{parse_file, CrateDefMap},
         Context, FunctionNameMatch, ParsedFiles,
@@ -92,15 +92,26 @@ pub struct LspState {
     open_documents_count: usize,
     input_files: HashMap<String, String>,
     cached_lenses: HashMap<String, Vec<CodeLens>>,
-    cached_definitions: HashMap<PathBuf, NodeInterner>,
     cached_parsed_files: HashMap<PathBuf, (usize, (ParsedModule, Vec<ParserError>))>,
-    cached_def_maps: HashMap<PathBuf, BTreeMap<CrateId, CrateDefMap>>,
+    workspace_cache: HashMap<PathBuf, WorkspaceCacheData>,
+    package_cache: HashMap<PathBuf, PackageCacheData>,
     options: LspInitializationOptions,
 
     // Tracks files that currently have errors, by package root.
     files_with_errors: HashMap<PathBuf, HashSet<Url>>,
 }
 
+struct WorkspaceCacheData {
+    file_manager: FileManager,
+}
+
+struct PackageCacheData {
+    crate_id: CrateId,
+    crate_graph: CrateGraph,
+    node_interner: NodeInterner,
+    def_maps: BTreeMap<CrateId, CrateDefMap>,
+}
+
 impl LspState {
     fn new(
         client: &ClientSocket,
@@ -112,12 +123,11 @@ impl LspState {
             solver: WrapperSolver(Box::new(solver)),
             input_files: HashMap::new(),
             cached_lenses: HashMap::new(),
-            cached_definitions: HashMap::new(),
-            open_documents_count: 0,
             cached_parsed_files: HashMap::new(),
-            cached_def_maps: HashMap::new(),
+            workspace_cache: HashMap::new(),
+            package_cache: HashMap::new(),
+            open_documents_count: 0,
             options: Default::default(),
-
             files_with_errors: HashMap::new(),
         }
     }
diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs
index 96e339ee212..c3dc1bca9c0 100644
--- a/tooling/lsp/src/notifications/mod.rs
+++ b/tooling/lsp/src/notifications/mod.rs
@@ -2,13 +2,18 @@ use std::collections::HashSet;
 use std::ops::ControlFlow;
 use std::path::PathBuf;
 
-use crate::insert_all_files_for_workspace_into_file_manager;
+use crate::{
+    insert_all_files_for_workspace_into_file_manager, PackageCacheData, WorkspaceCacheData,
+};
 use async_lsp::{ErrorCode, LanguageClient, ResponseError};
-use fm::{FileManager, FileMap};
+use fm::{FileId, FileManager, FileMap};
 use fxhash::FxHashMap as HashMap;
 use lsp_types::{DiagnosticTag, Url};
 use noirc_driver::check_crate;
 use noirc_errors::{DiagnosticKind, FileDiagnostic};
+use noirc_frontend::ast::{NoirFunction, NoirTraitImpl, TraitImplItemKind, TypeImpl};
+use noirc_frontend::parser::ItemKind;
+use noirc_frontend::ParsedModule;
 
 use crate::types::{
     notification, Diagnostic, DiagnosticSeverity, DidChangeConfigurationParams,
@@ -43,8 +48,14 @@ pub(crate) fn on_did_open_text_document(
     let document_uri = params.text_document.uri;
     let output_diagnostics = true;
+    let only_check_open_files = false;
 
-    match process_workspace_for_noir_document(state, document_uri, output_diagnostics) {
+    match process_workspace_for_noir_document(
+        state,
+        document_uri,
+        output_diagnostics,
+        only_check_open_files,
+    ) {
         Ok(_) => {
             state.open_documents_count += 1;
             ControlFlow::Continue(())
         }
@@ -62,8 +73,14 @@ pub(super) fn on_did_change_text_document(
     let document_uri = params.text_document.uri;
     let output_diagnostics = false;
+    let only_check_open_files = true;
 
-    match process_workspace_for_noir_document(state, document_uri, output_diagnostics) {
+    match process_workspace_for_noir_document(
+        state,
+        document_uri,
+        output_diagnostics,
+        only_check_open_files,
+    ) {
         Ok(_) => ControlFlow::Continue(()),
         Err(err) => ControlFlow::Break(Err(err)),
     }
 }
@@ -79,13 +96,20 @@ pub(super) fn on_did_close_text_document(
     state.open_documents_count -= 1;
     if state.open_documents_count == 0 {
-        state.cached_definitions.clear();
+        state.package_cache.clear();
+        state.workspace_cache.clear();
     }
 
     let document_uri = params.text_document.uri;
     let output_diagnostics = false;
+    let only_check_open_files = false;
 
-    match process_workspace_for_noir_document(state, document_uri, output_diagnostics) {
+    match process_workspace_for_noir_document(
+        state,
+        document_uri,
+        output_diagnostics,
+        only_check_open_files,
+    ) {
         Ok(_) => ControlFlow::Continue(()),
         Err(err) => ControlFlow::Break(Err(err)),
     }
 }
@@ -97,8 +121,14 @@ pub(super) fn on_did_save_text_document(
 ) -> ControlFlow<Result<(), async_lsp::Error>> {
     let document_uri = params.text_document.uri;
     let output_diagnostics = true;
+    let only_check_open_files = false;
-    match process_workspace_for_noir_document(state, document_uri, output_diagnostics) {
+    match process_workspace_for_noir_document(
+        state,
+        document_uri,
+        output_diagnostics,
+        only_check_open_files,
+    ) {
         Ok(_) => ControlFlow::Continue(()),
         Err(err) => ControlFlow::Break(Err(err)),
     }
 }
@@ -111,6 +141,7 @@ pub(crate) fn process_workspace_for_noir_document(
     state: &mut LspState,
     document_uri: Url,
     output_diagnostics: bool,
+    only_check_open_files: bool,
 ) -> Result<(), async_lsp::Error> {
     let file_path = document_uri.to_file_path().map_err(|_| {
         ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path")
@@ -128,7 +159,28 @@ pub(crate) fn process_workspace_for_noir_document(
         &mut workspace_file_manager,
     );
 
-    let parsed_files = parse_diff(&workspace_file_manager, state);
+    let mut parsed_files = parse_diff(&workspace_file_manager, state);
+
+    // If we only want to check the currently open files, we empty function bodies of files that aren't open.
+    // These function bodies will error, but we are not interested in those errors (we don't report errors in this
+    // case). The function types are correct, though, because the types in the signature remain unchanged.
+    // Doing this greatly speeds up the time it takes to reanalyze a crate after incremental edits without saving
+    // (type-checking body functions is much slower than emptying them)
+    if only_check_open_files {
+        let mut currently_open_files: HashSet<FileId> = HashSet::new();
+        for filename in state.input_files.keys() {
+            let filename = filename.strip_prefix("file://").unwrap();
+            if let Some(file_id) = workspace_file_manager.name_to_id(PathBuf::from(filename)) {
+                currently_open_files.insert(file_id);
+            }
+        }
+
+        for (file_id, (parsed_module, _errors)) in parsed_files.iter_mut() {
+            if !currently_open_files.is_empty() && !currently_open_files.contains(file_id) {
+                empty_parsed_module_function_bodies(parsed_module);
+            }
+        }
+    };
 
     for package in workspace.into_iter() {
         let (mut context, crate_id) =
@@ -154,9 +206,17 @@ pub(crate) fn process_workspace_for_noir_document(
             package,
             Some(&file_path),
         );
+        state.cached_lenses.insert(document_uri.to_string(), collected_lenses);
 
-        state.cached_definitions.insert(package.root_dir.clone(), context.def_interner);
-        state.cached_def_maps.insert(package.root_dir.clone(), context.def_maps);
+        state.package_cache.insert(
+            package.root_dir.clone(),
+            PackageCacheData {
+                crate_id,
+                crate_graph: context.crate_graph,
+                node_interner: context.def_interner,
+                def_maps: context.def_maps,
+            },
+        );
 
         let fm = &context.file_manager;
         let files = fm.as_file_map();
@@ -166,6 +226,11 @@ pub(crate) fn process_workspace_for_noir_document(
         }
     }
 
+    state.workspace_cache.insert(
+        workspace.root_dir.clone(),
+        WorkspaceCacheData { file_manager: workspace_file_manager },
+    );
+
     Ok(())
 }
 
@@ -256,6 +321,47 @@ pub(super) fn on_exit(
     ControlFlow::Continue(())
 }
 
+fn empty_parsed_module_function_bodies(parsed_module: &mut ParsedModule) {
+    for item in &mut parsed_module.items {
+        match &mut item.kind {
+            ItemKind::Function(noir_function) => empty_noir_function_body(noir_function),
+            ItemKind::TraitImpl(noir_trait_impl) => {
+                empty_noir_trait_impl_function_bodies(noir_trait_impl);
+            }
+            ItemKind::Impl(noir_impl) => empty_noir_impl_function_bodies(noir_impl),
+            ItemKind::Submodules(parsed_sub_module) => {
+                empty_parsed_module_function_bodies(&mut parsed_sub_module.contents);
+            }
+            ItemKind::Import(_, _)
+            | ItemKind::Struct(_)
+            | ItemKind::Trait(_)
+            | ItemKind::TypeAlias(_)
+            | ItemKind::Global(_)
+            | ItemKind::ModuleDecl(_)
+            | ItemKind::InnerAttribute(_) => (),
+        }
+    }
+}
+
+fn empty_noir_trait_impl_function_bodies(noir_trait_impl: &mut NoirTraitImpl) {
+    for item in &mut noir_trait_impl.items {
+        match &mut item.item.kind {
+            TraitImplItemKind::Function(noir_function) => empty_noir_function_body(noir_function),
} => (), + } + } +} + +fn empty_noir_impl_function_bodies(noir_impl: &mut TypeImpl) { + for (noir_function, _span) in &mut noir_impl.methods { + empty_noir_function_body(&mut noir_function.item); + } +} + +fn empty_noir_function_body(noir_function: &mut NoirFunction) { + noir_function.def.body.statements.clear(); +} + #[cfg(test)] mod notification_tests { use crate::test_utils; diff --git a/tooling/lsp/src/requests/mod.rs b/tooling/lsp/src/requests/mod.rs index fea758e0e52..c8d3e2f5f0e 100644 --- a/tooling/lsp/src/requests/mod.rs +++ b/tooling/lsp/src/requests/mod.rs @@ -2,9 +2,9 @@ use std::collections::BTreeMap; use std::path::PathBuf; use std::{collections::HashMap, future::Future}; -use crate::insert_all_files_for_workspace_into_file_manager; +use crate::{insert_all_files_for_workspace_into_file_manager, parse_diff, PackageCacheData}; use crate::{ - parse_diff, resolve_workspace_for_source_path, + resolve_workspace_for_source_path, types::{CodeLensOptions, InitializeParams}, }; use async_lsp::{ErrorCode, ResponseError}; @@ -407,7 +407,7 @@ pub(crate) struct ProcessRequestCallbackArgs<'a> { location: noirc_errors::Location, files: &'a FileMap, interner: &'a NodeInterner, - interners: &'a HashMap, + package_cache: &'a HashMap, crate_id: CrateId, crate_name: String, dependencies: &'a Vec, @@ -432,6 +432,63 @@ where ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file") })?; + // In practice when `process_request` is called, a document in the project should already have been + // open so both the workspace and package cache will have data. However, just in case this isn't true + // for some reason, and also for tests (some tests just test a request without going through the full + // LSP workflow), we have a fallback where we type-check the workspace/package, then continue with + // processing the request. 
+    let Some(workspace_cache_data) = state.workspace_cache.get(&workspace.root_dir) else {
+        return process_request_no_workspace_cache(state, text_document_position_params, callback);
+    };
+
+    let Some(package_cache_data) = state.package_cache.get(&package.root_dir) else {
+        return process_request_no_workspace_cache(state, text_document_position_params, callback);
+    };
+
+    let file_manager = &workspace_cache_data.file_manager;
+    let interner = &package_cache_data.node_interner;
+    let def_maps = &package_cache_data.def_maps;
+    let crate_graph = &package_cache_data.crate_graph;
+    let crate_id = package_cache_data.crate_id;
+
+    let files = file_manager.as_file_map();
+
+    let location = position_to_location(
+        files,
+        &PathString::from(file_path),
+        &text_document_position_params.position,
+    )?;
+
+    Ok(callback(ProcessRequestCallbackArgs {
+        location,
+        files,
+        interner,
+        package_cache: &state.package_cache,
+        crate_id,
+        crate_name: package.name.to_string(),
+        dependencies: &crate_graph[crate_id].dependencies,
+        def_maps,
+    }))
+}
+
+pub(crate) fn process_request_no_workspace_cache<F, T>(
+    state: &mut LspState,
+    text_document_position_params: TextDocumentPositionParams,
+    callback: F,
+) -> Result<T, ResponseError>
+where
+    F: FnOnce(ProcessRequestCallbackArgs) -> T,
+{
+    let file_path =
+        text_document_position_params.text_document.uri.to_file_path().map_err(|_| {
+            ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path")
+        })?;
+
+    let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap();
+    let package = crate::workspace_package_for_file(&workspace, &file_path).ok_or_else(|| {
+        ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file")
+    })?;
+
     let mut workspace_file_manager = workspace.new_file_manager();
     insert_all_files_for_workspace_into_file_manager(
         state,
@@ -445,9 +502,9 @@ where
     let interner;
     let def_maps;
 
-    if let Some(def_interner) = state.cached_definitions.get(&package.root_dir) {
-        interner = def_interner;
-        def_maps = state.cached_def_maps.get(&package.root_dir).unwrap();
+    if let Some(package_cache) = state.package_cache.get(&package.root_dir) {
+        interner = &package_cache.node_interner;
+        def_maps = &package_cache.def_maps;
     } else {
         // We ignore the warnings and errors produced by compilation while resolving the definition
         let _ = noirc_driver::check_crate(&mut context, crate_id, &Default::default());
@@ -455,7 +512,7 @@ where
         def_maps = &context.def_maps;
     }
 
-    let files = context.file_manager.as_file_map();
+    let files = workspace_file_manager.as_file_map();
 
     let location = position_to_location(
         files,
@@ -467,17 +524,18 @@ where
         location,
         files,
         interner,
-        interners: &state.cached_definitions,
+        package_cache: &state.package_cache,
         crate_id,
         crate_name: package.name.to_string(),
-        dependencies: &context.crate_graph[context.root_crate_id()].dependencies,
+        dependencies: &context.crate_graph[crate_id].dependencies,
         def_maps,
     }))
 }
+
 pub(crate) fn find_all_references_in_workspace(
     location: noirc_errors::Location,
     interner: &NodeInterner,
-    cached_interners: &HashMap<PathBuf, NodeInterner>,
+    package_cache: &HashMap<PathBuf, PackageCacheData>,
     files: &FileMap,
     include_declaration: bool,
     include_self_type_name: bool,
@@ -499,10 +557,10 @@ pub(crate) fn find_all_references_in_workspace(
             include_declaration,
             include_self_type_name,
         );
-        for interner in cached_interners.values() {
+        for cache_data in package_cache.values() {
             locations.extend(find_all_references(
                 referenced_location,
-                interner,
+                &cache_data.node_interner,
                 files,
                 include_declaration,
                 include_self_type_name,
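
The fallback added to `process_request` above amounts to a cache-or-recompute lookup keyed by the package root: answer from `package_cache` when it is populated, otherwise re-type-check and then answer. A minimal, self-contained sketch of that pattern follows; `Server`, `PackageCache`, and `answer_request` are illustrative names and are not part of the Noir codebase.

    use std::collections::HashMap;
    use std::path::PathBuf;

    // Hypothetical stand-in for the cached per-package data; illustrative only.
    struct PackageCache {
        type_checked: bool,
    }

    struct Server {
        package_cache: HashMap<PathBuf, PackageCache>,
    }

    impl Server {
        // Answer from the cache when the package was already checked; otherwise
        // fall back to a full re-check, store the result, and then answer.
        fn answer_request(&mut self, package_root: PathBuf) -> bool {
            if let Some(cache) = self.package_cache.get(&package_root) {
                return cache.type_checked;
            }
            let cache = PackageCache { type_checked: true };
            let answer = cache.type_checked;
            self.package_cache.insert(package_root, cache);
            answer
        }
    }

    fn main() {
        let mut server = Server { package_cache: HashMap::new() };
        // The first request misses the cache and falls back; the second one hits it.
        assert!(server.answer_request(PathBuf::from("/proj/pkg")));
        assert!(server.answer_request(PathBuf::from("/proj/pkg")));
    }
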
diff --git a/tooling/lsp/src/requests/references.rs b/tooling/lsp/src/requests/references.rs
index c720156659d..ea2f84f1f62 100644
--- a/tooling/lsp/src/requests/references.rs
+++ b/tooling/lsp/src/requests/references.rs
@@ -16,7 +16,7 @@ pub(crate) fn on_references_request(
         find_all_references_in_workspace(
             args.location,
             args.interner,
-            args.interners,
+            args.package_cache,
             args.files,
             include_declaration,
             true,
@@ -113,11 +113,13 @@ mod references_tests {
 
         // We call this to open the document, so that the entire workspace is analyzed
         let output_diagnostics = true;
+        let only_check_open_files = false;
         notifications::process_workspace_for_noir_document(
             &mut state,
             one_lib.clone(),
             output_diagnostics,
+            only_check_open_files,
         )
         .unwrap();
 
diff --git a/tooling/lsp/src/requests/rename.rs b/tooling/lsp/src/requests/rename.rs
index 84956681167..95dd6b506be 100644
--- a/tooling/lsp/src/requests/rename.rs
+++ b/tooling/lsp/src/requests/rename.rs
@@ -38,7 +38,7 @@ pub(crate) fn on_rename_request(
     let rename_changes = find_all_references_in_workspace(
         args.location,
         args.interner,
-        args.interners,
+        args.package_cache,
         args.files,
         true,
         false,
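
The `only_check_open_files` path added in notifications/mod.rs keeps every function signature intact while clearing statement bodies in files that are not currently open, so other files still resolve and type-check against those signatures while the expensive body checking is skipped. A self-contained sketch of the same idea, using illustrative types (`ParsedFunction`, and `FileId` as a plain `u32`) rather than the real `noirc_frontend` AST:

    use std::collections::{HashMap, HashSet};

    type FileId = u32;

    // Illustrative stand-in for a parsed function; not the real noirc_frontend AST.
    struct ParsedFunction {
        signature: String,
        body_statements: Vec<String>,
    }

    // Clear the bodies of every function in files that are not currently open.
    // Signatures are kept, so code in other files still resolves against them;
    // only the bodies whose errors would not be reported anyway are dropped.
    fn empty_closed_file_bodies(
        parsed_files: &mut HashMap<FileId, Vec<ParsedFunction>>,
        open_files: &HashSet<FileId>,
    ) {
        for (file_id, functions) in parsed_files.iter_mut() {
            if !open_files.is_empty() && !open_files.contains(file_id) {
                for function in functions.iter_mut() {
                    function.body_statements.clear();
                }
            }
        }
    }

    fn main() {
        let mut parsed_files = HashMap::new();
        parsed_files.insert(
            1,
            vec![ParsedFunction {
                signature: "fn helper() -> u32".to_string(),
                body_statements: vec!["1 + 1".to_string()],
            }],
        );
        // Only file 2 is open, so file 1 gets its bodies emptied.
        let open_files: HashSet<FileId> = HashSet::from([2]);

        empty_closed_file_bodies(&mut parsed_files, &open_files);
        assert!(parsed_files[&1][0].body_statements.is_empty());
        assert_eq!(parsed_files[&1][0].signature, "fn helper() -> u32");
    }
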