fix: correctly track sources for open LSP documents #5561

Merged · 9 commits · Jul 23, 2024
18 changes: 17 additions & 1 deletion tooling/lsp/src/lib.rs
@@ -30,7 +30,7 @@ use lsp_types::{
use nargo::{
package::{Package, PackageType},
parse_all,
workspace::Workspace,
workspace::{self, Workspace},
};
use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection};
use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING};
@@ -398,6 +398,22 @@ fn parse_diff(file_manager: &FileManager, state: &mut LspState) -> ParsedFiles {
}
}

pub fn insert_all_files_for_workspace_into_file_manager(
state: &LspState,
workspace: &workspace::Workspace,
file_manager: &mut FileManager,
) {
// First add files we cached: these have the source code of files that are modified
// but not saved to disk yet, and we want to make sure all LSP features work well
// according to these unsaved buffers, not what's saved on disk.
for (path, source) in &state.input_files {
let path = path.strip_prefix("file://").unwrap();
file_manager.add_file_with_source_canonical_path(Path::new(path), source.clone());
}

nargo::insert_all_files_for_workspace_into_file_manager(workspace, file_manager);
}

#[test]
fn prepare_package_from_source_string() {
let source = r#"
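A minimal, self-contained sketch of the overlay idea behind the new insert_all_files_for_workspace_into_file_manager above: unsaved editor buffers (keyed by file:// URIs in state.input_files) are registered with the file manager before the on-disk workspace sources, so LSP features see the in-editor text. InMemoryFiles below is a hypothetical stand-in rather than the real nargo FileManager API, and it assumes the first source registered for a path wins, which is what the overlay-before-disk ordering suggests.

use std::collections::HashMap;
use std::path::{Path, PathBuf};

#[derive(Default)]
struct InMemoryFiles {
    sources: HashMap<PathBuf, String>,
}

impl InMemoryFiles {
    // First insertion wins, mirroring the "unsaved buffers before disk files" ordering above.
    fn add_if_absent(&mut self, path: &Path, source: String) {
        self.sources.entry(path.to_path_buf()).or_insert(source);
    }
}

fn main() {
    let mut files = InMemoryFiles::default();

    // An unsaved editor buffer, keyed by a file:// URI as in state.input_files.
    let uri = "file:///workspace/src/main.nr";
    let path = uri.strip_prefix("file://").unwrap();
    files.add_if_absent(Path::new(path), "fn main() { /* edited, unsaved */ }".into());

    // The on-disk contents, added afterwards, do not overwrite the buffer.
    files.add_if_absent(Path::new("/workspace/src/main.nr"), "fn main() {}".into());

    assert!(files.sources[Path::new("/workspace/src/main.nr")].contains("unsaved"));
}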
235 changes: 153 additions & 82 deletions tooling/lsp/src/notifications/mod.rs
@@ -1,20 +1,19 @@
use std::ops::ControlFlow;

use crate::insert_all_files_for_workspace_into_file_manager;
use async_lsp::{ErrorCode, LanguageClient, ResponseError};
use nargo::insert_all_files_for_workspace_into_file_manager;
use noirc_driver::{check_crate, file_manager_with_stdlib};
use noirc_errors::{DiagnosticKind, FileDiagnostic};

use crate::requests::collect_lenses_for_package;
use crate::types::{
notification, Diagnostic, DiagnosticSeverity, DidChangeConfigurationParams,
DidChangeTextDocumentParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams,
DidSaveTextDocumentParams, InitializedParams, NargoPackageTests, PublishDiagnosticsParams,
};

use crate::{
byte_span_to_range, get_package_tests_in_crate, parse_diff, prepare_source,
resolve_workspace_for_source_path, workspace_package_for_file, LspState,
byte_span_to_range, get_package_tests_in_crate, parse_diff, resolve_workspace_for_source_path,
LspState,
};

pub(super) fn on_initialized(
@@ -39,7 +38,7 @@ pub(super) fn on_did_open_text_document(

let document_uri = params.text_document.uri;

match process_workspace_for_noir_document(document_uri, state) {
match process_workspace_for_noir_document(state, document_uri, false, true) {
Ok(_) => {
state.open_documents_count += 1;
ControlFlow::Continue(())
@@ -55,41 +54,12 @@ pub(super) fn on_did_change_text_document(
let text = params.content_changes.into_iter().next().unwrap().text;
state.input_files.insert(params.text_document.uri.to_string(), text.clone());

let file_path = params.text_document.uri.to_file_path().unwrap();

let (mut context, crate_id) = prepare_source(text, state);
let _ = check_crate(&mut context, crate_id, false, false, None);

let workspace = match resolve_workspace_for_source_path(
params.text_document.uri.to_file_path().unwrap().as_path(),
&state.root_path,
) {
Ok(workspace) => workspace,
Err(lsp_error) => {
return ControlFlow::Break(Err(ResponseError::new(
ErrorCode::REQUEST_FAILED,
lsp_error.to_string(),
)
.into()))
}
};

let package = match workspace_package_for_file(&workspace, &file_path) {
Some(package) => package,
None => {
return ControlFlow::Break(Err(ResponseError::new(
ErrorCode::REQUEST_FAILED,
"Selected workspace has no members",
)
.into()))
}
};

let lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None);

state.cached_lenses.insert(params.text_document.uri.to_string(), lenses);
let document_uri = params.text_document.uri;

ControlFlow::Continue(())
match process_workspace_for_noir_document(state, document_uri, true, false) {
Ok(_) => ControlFlow::Continue(()),
Err(err) => ControlFlow::Break(Err(err)),
}
}

pub(super) fn on_did_close_text_document(
@@ -105,7 +75,12 @@ pub(super) fn on_did_close_text_document(
state.cached_definitions.clear();
}

ControlFlow::Continue(())
let document_uri = params.text_document.uri;

match process_workspace_for_noir_document(state, document_uri, true, false) {
Ok(_) => ControlFlow::Continue(()),
Err(err) => ControlFlow::Break(Err(err)),
}
}

pub(super) fn on_did_save_text_document(
@@ -114,7 +89,7 @@ pub(super) fn on_did_save_text_document(
) -> ControlFlow<Result<(), async_lsp::Error>> {
let document_uri = params.text_document.uri;

match process_workspace_for_noir_document(document_uri, state) {
match process_workspace_for_noir_document(state, document_uri, false, true) {
Ok(_) => ControlFlow::Continue(()),
Err(err) => ControlFlow::Break(Err(err)),
}
@@ -124,8 +99,10 @@ pub(super) fn on_did_save_text_document(
// it's only contained in a package), then type-checks the workspace's packages,
// caching code lenses and type definitions, and notifying about compilation errors.
pub(crate) fn process_workspace_for_noir_document(
document_uri: lsp_types::Url,
state: &mut LspState,
document_uri: lsp_types::Url,
only_process_document_uri_package: bool,
output_diagnostics: bool,
) -> Result<(), async_lsp::Error> {
let file_path = document_uri.to_file_path().map_err(|_| {
ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path")
@@ -137,13 +114,23 @@ pub(crate) fn process_workspace_for_noir_document(
})?;

let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
insert_all_files_for_workspace_into_file_manager(
state,
&workspace,
&mut workspace_file_manager,
);

let parsed_files = parse_diff(&workspace_file_manager, state);

let diagnostics: Vec<_> = workspace
.into_iter()
.flat_map(|package| -> Vec<Diagnostic> {
let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into();

if only_process_document_uri_package && !file_path.starts_with(&package.root_dir) {
return vec![];
}

let (mut context, crate_id) =
crate::prepare_package(&workspace_file_manager, &parsed_files, package);

@@ -152,8 +139,6 @@ pub(crate) fn process_workspace_for_noir_document(
Err(errors_and_warnings) => errors_and_warnings,
};

let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into();

// We don't add test headings for a package if it contains no `#[test]` functions
if let Some(tests) = get_package_tests_in_crate(&context, &crate_id, &package.name) {
let _ = state.client.notify::<notification::NargoUpdateTests>(NargoPackageTests {
@@ -176,46 +161,53 @@ pub(crate) fn process_workspace_for_noir_document(
let fm = &context.file_manager;
let files = fm.as_file_map();

file_diagnostics
.into_iter()
.filter_map(|FileDiagnostic { file_id, diagnostic, call_stack: _ }| {
// Ignore diagnostics for any file that wasn't the file we saved
// TODO: In the future, we could create "related" diagnostics for these files
if fm.path(file_id).expect("file must exist to have emitted diagnostic")
!= file_path
{
return None;
}

// TODO: Should this be the first item in secondaries? Should we bail when we find a range?
let range = diagnostic
.secondaries
.into_iter()
.filter_map(|sec| byte_span_to_range(files, file_id, sec.span.into()))
.last()
.unwrap_or_default();

let severity = match diagnostic.kind {
DiagnosticKind::Error => DiagnosticSeverity::ERROR,
DiagnosticKind::Warning => DiagnosticSeverity::WARNING,
DiagnosticKind::Info => DiagnosticSeverity::INFORMATION,
DiagnosticKind::Bug => DiagnosticSeverity::WARNING,
};
Some(Diagnostic {
range,
severity: Some(severity),
message: diagnostic.message,
..Default::default()
if output_diagnostics {
file_diagnostics
.into_iter()
.filter_map(|FileDiagnostic { file_id, diagnostic, call_stack: _ }| {
// Ignore diagnostics for any file that wasn't the file we saved
// TODO: In the future, we could create "related" diagnostics for these files
if fm.path(file_id).expect("file must exist to have emitted diagnostic")
!= file_path
{
return None;
}

// TODO: Should this be the first item in secondaries? Should we bail when we find a range?
let range = diagnostic
.secondaries
.into_iter()
.filter_map(|sec| byte_span_to_range(files, file_id, sec.span.into()))
.last()
.unwrap_or_default();

let severity = match diagnostic.kind {
DiagnosticKind::Error => DiagnosticSeverity::ERROR,
DiagnosticKind::Warning => DiagnosticSeverity::WARNING,
DiagnosticKind::Info => DiagnosticSeverity::INFORMATION,
DiagnosticKind::Bug => DiagnosticSeverity::WARNING,
};
Some(Diagnostic {
range,
severity: Some(severity),
message: diagnostic.message,
..Default::default()
})
})
})
.collect()
.collect()
} else {
vec![]
}
})
.collect();
let _ = state.client.publish_diagnostics(PublishDiagnosticsParams {
uri: document_uri,
version: None,
diagnostics,
});

if output_diagnostics {
let _ = state.client.publish_diagnostics(PublishDiagnosticsParams {
uri: document_uri,
version: None,
diagnostics,
});
}

Ok(())
}
@@ -226,3 +218,82 @@ pub(super) fn on_exit(
) -> ControlFlow<Result<(), async_lsp::Error>> {
ControlFlow::Continue(())
}

#[cfg(test)]
mod notification_tests {
use crate::test_utils;

use super::*;
use lsp_types::{
InlayHintLabel, InlayHintParams, Position, TextDocumentContentChangeEvent,
TextDocumentIdentifier, TextDocumentItem, VersionedTextDocumentIdentifier,
WorkDoneProgressParams,
};
use tokio::test;

#[test]
async fn test_caches_open_files() {
let (mut state, noir_text_document) = test_utils::init_lsp_server("inlay_hints").await;

// Open the document, fake the text to be empty
on_did_open_text_document(
&mut state,
DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: noir_text_document.clone(),
language_id: "noir".to_string(),
version: 0,
text: "".to_string(),
},
},
);

// Fake the text to change to "global a = 1;"
on_did_change_text_document(
&mut state,
DidChangeTextDocumentParams {
text_document: VersionedTextDocumentIdentifier {
uri: noir_text_document.clone(),
version: 1,
},
content_changes: vec![TextDocumentContentChangeEvent {
range: None,
range_length: None,
// Should get an inlay hint for ": bool" after "a"
text: "global a = true;".to_string(),
}],
},
);

// Get inlay hints. These should now be relative to the changed text,
// not the saved file's text.
let inlay_hints = crate::requests::on_inlay_hint_request(
&mut state,
InlayHintParams {
work_done_progress_params: WorkDoneProgressParams { work_done_token: None },
text_document: TextDocumentIdentifier { uri: noir_text_document },
range: lsp_types::Range {
start: lsp_types::Position { line: 0, character: 0 },
end: lsp_types::Position { line: 1, character: 0 },
},
},
)
.await
.expect("Could not execute on_inlay_hint_request")
.unwrap();

assert_eq!(inlay_hints.len(), 1);

let inlay_hint = &inlay_hints[0];
assert_eq!(inlay_hint.position, Position { line: 0, character: 8 });

if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label {
assert_eq!(labels.len(), 2);
assert_eq!(labels[0].value, ": ");
assert_eq!(labels[0].location, None);
assert_eq!(labels[1].value, "bool");
} else {
panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label);
}
}
}
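As a quick reference for the two new boolean parameters of process_workspace_for_noir_document: in this diff, didOpen and didSave reprocess every package in the workspace and publish diagnostics, while didChange and didClose restrict work to the package that owns the edited file and skip publishing. The sketch below only restates that mapping; Event and Flags are illustrative types, not part of the LSP crate.

#[derive(Debug, Clone, Copy)]
enum Event {
    DidOpen,
    DidChange,
    DidClose,
    DidSave,
}

struct Flags {
    only_process_document_uri_package: bool,
    output_diagnostics: bool,
}

fn flags_for(event: Event) -> Flags {
    match event {
        // Full reprocess with published diagnostics on open/save.
        Event::DidOpen | Event::DidSave => Flags {
            only_process_document_uri_package: false,
            output_diagnostics: true,
        },
        // Cheaper, quiet update on change/close.
        Event::DidChange | Event::DidClose => Flags {
            only_process_document_uri_package: true,
            output_diagnostics: false,
        },
    }
}

fn main() {
    for event in [Event::DidOpen, Event::DidChange, Event::DidClose, Event::DidSave] {
        let flags = flags_for(event);
        println!(
            "{:?}: only_package={}, diagnostics={}",
            event, flags.only_process_document_uri_package, flags.output_diagnostics
        );
    }
}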
8 changes: 6 additions & 2 deletions tooling/lsp/src/requests/mod.rs
@@ -1,5 +1,6 @@
use std::{collections::HashMap, future::Future};

use crate::insert_all_files_for_workspace_into_file_manager;
use crate::{
parse_diff, resolve_workspace_for_source_path,
types::{CodeLensOptions, InitializeParams},
@@ -11,7 +12,6 @@
TextDocumentSyncCapability, TextDocumentSyncKind, TypeDefinitionProviderCapability, Url,
WorkDoneProgressOptions,
};
use nargo::insert_all_files_for_workspace_into_file_manager;
use nargo_fmt::Config;
use noirc_driver::file_manager_with_stdlib;
use noirc_frontend::{graph::Dependency, macros_api::NodeInterner};
@@ -340,7 +340,7 @@
location: noirc_errors::Location,
files: &'a FileMap,
interner: &'a NodeInterner,
interners: &'a HashMap<String, NodeInterner>,
root_crate_name: String,
root_crate_dependencies: &'a Vec<Dependency>,
}
@@ -367,7 +367,11 @@
let package_root_path: String = package.root_dir.as_os_str().to_string_lossy().into();

let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
insert_all_files_for_workspace_into_file_manager(
state,
&workspace,
&mut workspace_file_manager,
);
let parsed_files = parse_diff(&workspace_file_manager, state);

let (mut context, crate_id) =
@@ -394,7 +398,7 @@
location,
files,
interner,
interners: &state.cached_definitions,
root_crate_name: package.name.to_string(),
root_crate_dependencies: &context.crate_graph[context.root_crate_id()].dependencies,
}))
@@ -402,7 +406,7 @@
pub(crate) fn find_all_references_in_workspace(
location: noirc_errors::Location,
interner: &NodeInterner,
cached_interners: &HashMap<String, NodeInterner>,
files: &FileMap,
include_declaration: bool,
include_self_type_name: bool,
@@ -414,8 +418,8 @@
// If we found the referenced node, find its location
let referenced_location = interner.reference_location(referenced);

// Now we find all references that point to this location, in all interners
// (there's one interner per package, and all interners in a workspace rely on the
// same FileManager so a Location/FileId in one package is the same as in another package)
let mut locations = find_all_references(
referenced_location,
@@ -434,7 +438,7 @@
));
}

// The LSP client usually removes duplicate locations, but we do it here just in case they don't
locations.sort_by_key(|location| {
(
location.uri.to_string(),
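The comment in find_all_references_in_workspace leans on one detail worth spelling out: each package in a workspace has its own interner, but they all index files through the same FileManager, so a FileId (and therefore a Location) means the same thing in every interner, and per-package results can simply be concatenated and deduplicated. A rough sketch of that lookup, with Interner, FileId, and Location as simplified stand-ins for the noirc_frontend types:

use std::collections::HashMap;

type FileId = u32;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Location {
    file: FileId,
    offset: u32,
}

struct Interner {
    // Maps each reference site to the definition it points at.
    references: HashMap<Location, Location>,
}

impl Interner {
    fn find_all_references(&self, definition: Location) -> Vec<Location> {
        self.references
            .iter()
            .filter(|(_, def)| **def == definition)
            .map(|(reference, _)| *reference)
            .collect()
    }
}

fn main() {
    let definition = Location { file: 1, offset: 10 };

    // Two packages, two interners, but FileIds are comparable across both
    // because both were built against the same file manager.
    let package_a = Interner {
        references: HashMap::from([(Location { file: 1, offset: 40 }, definition)]),
    };
    let package_b = Interner {
        references: HashMap::from([(Location { file: 2, offset: 5 }, definition)]),
    };

    let mut locations = Vec::new();
    for interner in [&package_a, &package_b] {
        locations.extend(interner.find_all_references(definition));
    }

    // Sort and dedup defensively, mirroring the PR's duplicate-removal step.
    locations.sort_by_key(|loc| (loc.file, loc.offset));
    locations.dedup();
    assert_eq!(locations.len(), 2);
}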