From 30ac61e1768ad3f94bb6badad5006269e2c664ff Mon Sep 17 00:00:00 2001 From: jneem Date: Fri, 1 Sep 2023 12:26:09 -0500 Subject: [PATCH] Make an enum for different input sources (#1560) * Make an enum for different input sources * Fix doc link * Review comments * Add a comment --- core/src/cache.rs | 168 ++++++++++++++++-------- core/src/program.rs | 9 +- core/src/repl/mod.rs | 8 +- core/src/repl/rustyline_frontend.rs | 2 +- core/src/transform/import_resolution.rs | 18 +-- lsp/nls/src/cache.rs | 10 +- lsp/nls/src/error.rs | 8 ++ lsp/nls/src/files.rs | 31 +++-- lsp/nls/src/incomplete.rs | 9 +- lsp/nls/src/requests/formatting.rs | 7 +- lsp/nls/src/requests/symbols.rs | 8 +- lsp/nls/test.ncl | 1 + 12 files changed, 176 insertions(+), 103 deletions(-) create mode 100644 lsp/nls/test.ncl diff --git a/core/src/cache.rs b/core/src/cache.rs index fa2de0c6a7..bea2ca308c 100644 --- a/core/src/cache.rs +++ b/core/src/cache.rs @@ -66,8 +66,9 @@ impl InputFormat { pub struct Cache { /// The content of the program sources plus imports. files: Files, + file_paths: HashMap, /// The name-id table, holding file ids stored in the database indexed by source names. - file_ids: HashMap, + file_ids: HashMap, /// Map containing for each FileId a list of files they import (directly). imports: HashMap>, /// Map containing for each FileId a list of files importing them (directly). @@ -129,7 +130,7 @@ pub struct TermEntry { /// are not auto-refreshed. If an in-memory buffer has a path that also exists in the /// filesystem, we will not even check that file to see if it has changed. #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Copy, Clone)] -enum Source { +enum SourceKind { Filesystem(SystemTime), Memory, } @@ -150,7 +151,7 @@ enum Source { #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Copy, Clone)] pub struct NameIdEntry { id: FileId, - source: Source, + source: SourceKind, } /// The state of an entry of the term cache. @@ -221,6 +222,68 @@ impl CacheError { } } +/// Input data usually comes from files on the file system, but there are also +/// lots of cases where we want to synthesize other kinds of inputs. +/// +/// Note that a `SourcePath` does not uniquely identify a cached input: +/// - Some functions (like [`Cache::add_file`]) add a new cached input unconditionally. +/// - [`Cache::get_or_add_file`] will add a new cached input at the same `SourcePath` if +/// the file on disk was updated. +/// +/// The equality checking of `SourcePath` only affects [`Cache::replace_string`], which +/// overwrites any previous cached input with the same `SourcePath`. +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub enum SourcePath { + /// A file at the given path. + /// + /// Note that this does not need to be a real file on the filesystem: it could still + /// be loaded from memory by, e.g, [`Cache::add_string`]. + /// + /// This is the only `SourcePath` variant that can be resolved as the target + /// of an import statement. + Path(PathBuf), + /// A subrange of a file at the given path. + /// + /// This is used by nls to analyze small parts of files that don't fully parse. The + /// original file path is preserved, because it's needed for resolving imports. 
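    /// A hypothetical illustration of the distinction (paths invented for the example):
    ///
    /// ```ignore
    /// // nls re-parses a broken fragment of /home/user/conf.ncl as a snippet:
    /// let snippet = SourcePath::Snippet(PathBuf::from("/home/user/conf.ncl"));
    /// // the buffer itself stays cached under the plain path:
    /// let buffer = SourcePath::Path(PathBuf::from("/home/user/conf.ncl"));
    /// // both resolve relative imports against /home/user, but only `buffer`
    /// // can itself be the target of an `import` statement.
    /// ```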
+ Snippet(PathBuf), + Std(StdlibModule), + Query, + ReplInput(usize), + ReplTypecheck, + ReplQuery, + Override(Vec), +} + +impl<'a> TryFrom<&'a SourcePath> for &'a OsStr { + type Error = (); + + fn try_from(value: &'a SourcePath) -> Result { + match value { + SourcePath::Path(p) | SourcePath::Snippet(p) => Ok(p.as_os_str()), + _ => Err(()), + } + } +} + +// [`Files`] needs to have an OsString for each file, so we synthesize names even for +// sources that don't have them. They don't need to be unique; they're just used for +// diagnostics. +impl From for OsString { + fn from(source_path: SourcePath) -> Self { + match source_path { + SourcePath::Path(p) | SourcePath::Snippet(p) => p.into(), + SourcePath::Std(StdlibModule::Std) => "".into(), + SourcePath::Std(StdlibModule::Internals) => "".into(), + SourcePath::Query => "".into(), + SourcePath::ReplInput(idx) => format!(" "".into(), + SourcePath::ReplQuery => "".into(), + SourcePath::Override(path) => format!("", path.join(".")).into(), + } + } +} + /// Return status indicating if an import has been resolved from a file (first encounter), or was /// retrieved from the cache. /// @@ -246,6 +309,7 @@ impl Cache { Cache { files: Files::new(), file_ids: HashMap::new(), + file_paths: HashMap::new(), terms: HashMap::new(), wildcards: HashMap::new(), imports: HashMap::new(), @@ -258,29 +322,18 @@ impl Cache { } } - /// Load a file in the file database. Do not insert an entry in the name-id table. - fn load_file(&mut self, path: impl Into) -> io::Result { - let path = path.into(); - let mut buffer = String::new(); - fs::File::open(&path) - .and_then(|mut file| file.read_to_string(&mut buffer)) - .map(|_| self.files.add(path, buffer)) - } - /// Same as [Self::add_file], but assume that the path is already normalized, and take the /// timestamp as a parameter. - fn add_file_( - &mut self, - path: impl Into, - timestamp: SystemTime, - ) -> io::Result { - let path = path.into(); - let file_id = self.load_file(path.clone())?; + fn add_file_(&mut self, path: PathBuf, timestamp: SystemTime) -> io::Result { + let contents = std::fs::read_to_string(&path)?; + let file_id = self.files.add(&path, contents); + self.file_paths + .insert(file_id, SourcePath::Path(path.clone())); self.file_ids.insert( - path, + SourcePath::Path(path), NameIdEntry { id: file_id, - source: Source::Filesystem(timestamp), + source: SourceKind::Filesystem(timestamp), }, ); Ok(file_id) @@ -303,7 +356,7 @@ impl Cache { pub fn get_or_add_file(&mut self, path: impl Into) -> io::Result> { let path = path.into(); let normalized = normalize_path(&path)?; - match self.id_or_new_timestamp_of(&path)? { + match self.id_or_new_timestamp_of(path.as_ref())? { SourceState::UpToDate(id) => Ok(CacheOp::Cached(id)), SourceState::Stale(timestamp) => { self.add_file_(normalized, timestamp).map(CacheOp::Done) @@ -315,10 +368,9 @@ impl Cache { /// /// Do not check if a source with the same name already exists: if it is the /// case, this one will override the old entry in the name-id table. - pub fn add_source(&mut self, source_name: S, mut source: T) -> io::Result + pub fn add_source(&mut self, source_name: SourcePath, mut source: T) -> io::Result where T: Read, - S: Into, { let mut buffer = String::new(); source.read_to_string(&mut buffer)?; @@ -333,14 +385,14 @@ impl Cache { /// /// Do not check if a source with the same name already exists: if it is the case, this one /// will override the old entry in the name-id table but the old `FileId` will remain valid. 
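    /// A minimal sketch of that contract, using only APIs shown in this patch
    /// (`Cache::new`, `add_string`, `source`):
    ///
    /// ```ignore
    /// let mut cache = Cache::new(ErrorTolerance::Strict);
    /// let old = cache.add_string(SourcePath::Query, String::from("1 + 1"));
    /// let new = cache.add_string(SourcePath::Query, String::from("2 + 2"));
    /// assert_ne!(old, new);      // a fresh FileId is handed out for the new source...
    /// let _ = cache.source(old); // ...while the old FileId still resolves to its content
    /// ```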
- pub fn add_string(&mut self, source_name: impl Into, s: String) -> FileId { - let source_name = source_name.into(); + pub fn add_string(&mut self, source_name: SourcePath, s: String) -> FileId { let id = self.files.add(source_name.clone(), s); + self.file_paths.insert(id, source_name.clone()); self.file_ids.insert( source_name, NameIdEntry { id, - source: Source::Memory, + source: SourceKind::Memory, }, ); id @@ -353,19 +405,19 @@ impl Cache { /// /// Used to store intermediate short-lived generated snippets that needs to have a /// corresponding `FileId`, such as when querying or reporting errors. - pub fn replace_string(&mut self, source_name: impl Into, s: String) -> FileId { - let source_name = source_name.into(); + pub fn replace_string(&mut self, source_name: SourcePath, s: String) -> FileId { if let Some(file_id) = self.id_of(&source_name) { self.files.update(file_id, s); self.terms.remove(&file_id); file_id } else { let file_id = self.files.add(source_name.clone(), s); + self.file_paths.insert(file_id, source_name.clone()); self.file_ids.insert( source_name, NameIdEntry { id: file_id, - source: Source::Memory, + source: SourceKind::Memory, }, ); file_id @@ -878,10 +930,13 @@ impl Cache { /// /// Note that files added via [Self::add_file] are indexed by their full normalized path (cf /// [normalize_path]). - pub fn id_of(&self, name: impl AsRef) -> Option { - match self.id_or_new_timestamp_of(name).ok()? { - SourceState::UpToDate(id) => Some(id), - SourceState::Stale(_) => None, + pub fn id_of(&self, name: &SourcePath) -> Option { + match name { + SourcePath::Path(p) => match self.id_or_new_timestamp_of(p).ok()? { + SourceState::UpToDate(id) => Some(id), + SourceState::Stale(_) => None, + }, + name => Some(self.file_ids.get(name)?.id), } } @@ -893,13 +948,12 @@ impl Cache { /// /// The main point of this awkward signature is to minimize I/O operations: if we accessed /// the timestamp, keep it around. - fn id_or_new_timestamp_of(&self, name: impl AsRef) -> io::Result { - let name = name.as_ref(); - match self.file_ids.get(name) { + fn id_or_new_timestamp_of(&self, name: &Path) -> io::Result { + match self.file_ids.get(&SourcePath::Path(name.to_owned())) { None => Ok(SourceState::Stale(timestamp(name)?)), Some(NameIdEntry { id, - source: Source::Filesystem(ts), + source: SourceKind::Filesystem(ts), }) => { let new_timestamp = timestamp(name)?; if ts == &new_timestamp { @@ -910,7 +964,7 @@ impl Cache { } Some(NameIdEntry { id, - source: Source::Memory, + source: SourceKind::Memory, }) => Ok(SourceState::UpToDate(*id)), } } @@ -1023,11 +1077,10 @@ impl Cache { let file_ids: HashMap = nickel_stdlib::modules() .into_iter() .map(|module| { - let name = module.file_name(); let content = module.content(); ( module, - self.add_string(OsString::from(name), String::from(content)), + self.add_string(SourcePath::Std(module), String::from(content)), ) }) .collect(); @@ -1205,24 +1258,25 @@ pub trait ImportResolver { fn resolve( &mut self, path: &OsStr, - parent: Option, + parent: Option, pos: &TermPos, ) -> Result<(ResolvedTerm, FileId), ImportError>; /// Get a resolved import from the term cache. fn get(&self, file_id: FileId) -> Option; /// Return the (potentially normalized) file path corresponding to the ID of a resolved import. 
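    /// Only `SourcePath::Path` and `SourcePath::Snippet` sources carry a path, so this
    /// now returns an `Option`. A hedged sketch of the expected behaviour (file name
    /// invented, and assuming `add_file` keeps its `io::Result<FileId>` signature):
    ///
    /// ```ignore
    /// let on_disk = cache.add_file("foo.ncl")?; // stored as SourcePath::Path(..)
    /// let in_repl = cache.add_string(SourcePath::ReplInput(0), String::new());
    /// assert!(cache.get_path(on_disk).is_some());
    /// assert!(cache.get_path(in_repl).is_none());
    /// ```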
- fn get_path(&self, file_id: FileId) -> &OsStr; + fn get_path(&self, file_id: FileId) -> Option<&OsStr>; } impl ImportResolver for Cache { fn resolve( &mut self, path: &OsStr, - parent: Option, + parent: Option, pos: &TermPos, ) -> Result<(ResolvedTerm, FileId), ImportError> { - let path_buf = with_parent(path, parent.clone()); + let parent_path = parent.and_then(|p| self.get_path(p)).map(PathBuf::from); + let path_buf = with_parent(path, parent_path); let format = InputFormat::from_path(&path_buf).unwrap_or(InputFormat::Nickel); let id_op = self.get_or_add_file(&path_buf).map_err(|err| { ImportError::IOError( @@ -1237,12 +1291,8 @@ impl ImportResolver for Cache { }; if let Some(parent) = parent { - let parent_id = self.id_of(parent).unwrap(); - self.imports.entry(parent_id).or_default().insert(file_id); - self.rev_imports - .entry(file_id) - .or_default() - .insert(parent_id); + self.imports.entry(parent).or_default().insert(file_id); + self.rev_imports.entry(file_id).or_default().insert(parent); } self.parse_multi(file_id, format) @@ -1260,8 +1310,10 @@ impl ImportResolver for Cache { }) } - fn get_path(&self, file_id: FileId) -> &OsStr { - self.files.name(file_id) + fn get_path(&self, file_id: FileId) -> Option<&OsStr> { + self.file_paths + .get(&file_id) + .and_then(|p| p.try_into().ok()) } } @@ -1340,7 +1392,7 @@ pub mod resolvers { fn resolve( &mut self, _path: &OsStr, - _parent: Option, + _parent: Option, _pos: &TermPos, ) -> Result<(ResolvedTerm, FileId), ImportError> { panic!("cache::resolvers: dummy resolver should not have been invoked"); @@ -1350,7 +1402,7 @@ pub mod resolvers { panic!("cache::resolvers: dummy resolver should not have been invoked"); } - fn get_path(&self, _file_id: FileId) -> &OsStr { + fn get_path(&self, _file_id: FileId) -> Option<&OsStr> { panic!("cache::resolvers: dummy resolver should not have been invoked"); } } @@ -1381,7 +1433,7 @@ pub mod resolvers { fn resolve( &mut self, path: &OsStr, - _parent: Option, + _parent: Option, pos: &TermPos, ) -> Result<(ResolvedTerm, FileId), ImportError> { let file_id = self @@ -1417,8 +1469,8 @@ pub mod resolvers { self.term_cache.get(&file_id).cloned() } - fn get_path(&self, file_id: FileId) -> &OsStr { - self.files.name(file_id) + fn get_path(&self, file_id: FileId) -> Option<&OsStr> { + Some(self.files.name(file_id)) } } } diff --git a/core/src/program.rs b/core/src/program.rs index a3dc4f4c15..aea4a5f22b 100644 --- a/core/src/program.rs +++ b/core/src/program.rs @@ -35,6 +35,7 @@ use crate::{ use codespan::FileId; use codespan_reporting::term::termcolor::Ansi; +use std::path::PathBuf; use std::{ ffi::OsString, @@ -66,8 +67,7 @@ impl QueryPath { grammar::FieldPathParser, lexer::Lexer, utils::FieldPathElem, ErrorTolerantParser, }; - let format_name = "query-path"; - let input_id = cache.replace_string(format_name, input); + let input_id = cache.replace_string(SourcePath::Query, input); let s = cache.source(input_id); let parser = FieldPathParser::new(); @@ -166,7 +166,8 @@ impl Program { S: Into + Clone, { let mut cache = Cache::new(ErrorTolerance::Strict); - let main_id = cache.add_source(source_name, source)?; + let path = PathBuf::from(source_name.into()); + let main_id = cache.add_source(SourcePath::Path(path), source)?; let vm = VirtualMachine::new(cache, trace); Ok(Self { @@ -242,7 +243,7 @@ impl Program { let value_file_id = self .vm .import_resolver_mut() - .add_string(format!("", ovd.path.join(".")), ovd.value); + .add_string(SourcePath::Override(ovd.path.clone()), ovd.value); 
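            // A hedged note on the call above (field path invented for illustration):
            // each field override now gets its own in-memory input keyed by
            // `SourcePath::Override(field_path)`, e.g. overriding `foo.bar` is cached
            // under `SourcePath::Override(vec!["foo".into(), "bar".into()])`. The
            // synthetic name derived from that path is only used for diagnostics.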
self.vm.prepare_eval(value_file_id)?; record = record .path(ovd.path) diff --git a/core/src/repl/mod.rs b/core/src/repl/mod.rs index 8594a540ea..1ac3e0bd01 100644 --- a/core/src/repl/mod.rs +++ b/core/src/repl/mod.rs @@ -6,7 +6,7 @@ //! Dually, the frontend is the user-facing part, which may be a CLI, a web application, a //! jupyter-kernel (which is not exactly user-facing, but still manages input/output and //! formatting), etc. -use crate::cache::{Cache, Envs, ErrorTolerance}; +use crate::cache::{Cache, Envs, ErrorTolerance, SourcePath}; use crate::error::{Error, EvalError, IOError, ParseError, ParseErrors, ReplError}; use crate::eval::cache::Cache as EvalCache; use crate::eval::{Closure, VirtualMachine}; @@ -171,7 +171,7 @@ impl ReplImpl { }; let file_id = self.vm.import_resolver_mut().add_string( - format!("repl-input-{}", InputNameCounter::next()), + SourcePath::ReplInput(InputNameCounter::next()), String::from(exp), ); @@ -256,7 +256,7 @@ impl Repl for ReplImpl { let file_id = self .vm .import_resolver_mut() - .replace_string("", String::from(exp)); + .replace_string(SourcePath::ReplTypecheck, String::from(exp)); // We ignore non fatal errors while type checking. let (term, _) = self.vm.import_resolver().parse_nocache(file_id)?; let import_resolution::strict::ResolveResult { @@ -305,7 +305,7 @@ impl Repl for ReplImpl { let file_id = self .vm .import_resolver_mut() - .replace_string("", target.label().into()); + .replace_string(SourcePath::ReplQuery, target.label().into()); program::query(&mut self.vm, file_id, &self.env, query_path) } diff --git a/core/src/repl/rustyline_frontend.rs b/core/src/repl/rustyline_frontend.rs index ee31f7f2f5..0db89f3b52 100644 --- a/core/src/repl/rustyline_frontend.rs +++ b/core/src/repl/rustyline_frontend.rs @@ -46,7 +46,7 @@ pub fn repl(histfile: PathBuf, color_opt: ColorOpt) -> Result<(), InitError> { let validator = InputParser::new( repl.cache_mut() - .replace_string("", String::new()), + .replace_string(SourcePath::ReplInput(0), String::new()), ); let mut editor = Editor::with_config(config(color_opt)) diff --git a/core/src/transform/import_resolution.rs b/core/src/transform/import_resolution.rs index 3e2b8f3f5c..ee0741961b 100644 --- a/core/src/transform/import_resolution.rs +++ b/core/src/transform/import_resolution.rs @@ -4,7 +4,6 @@ use super::ImportResolver; use crate::error::ImportError; use codespan::FileId; -use std::path::PathBuf; /// The state passed around during the imports resolution. struct ImportsResolutionState<'a, R> { @@ -12,7 +11,7 @@ struct ImportsResolutionState<'a, R> { /// Pending imported terms to be transformed. stack: &'a mut Vec, /// Path of the import being currently processed, if any. - parent: Option, + parent: Option, /// Errors of imports that couldn't be resolved correctly. import_errors: &'a mut Vec, } @@ -23,7 +22,6 @@ pub mod strict { use crate::error::ImportError; use crate::term::RichTerm; use codespan::FileId; - use std::path::PathBuf; /// The result of an import resolution transformation. pub struct ResolveResult { @@ -73,7 +71,7 @@ pub mod strict { pub fn transform_one( rt: RichTerm, resolver: &mut R, - parent: &Option, + parent: Option, ) -> Result where R: ImportResolver, @@ -94,7 +92,6 @@ pub mod tolerant { use crate::error::ImportError; use crate::term::{RichTerm, Term, Traverse, TraverseOrder}; use codespan::FileId; - use std::path::PathBuf; /// The result of an error tolerant import resolution. 
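/// A hedged usage sketch, assuming `rt` is a parsed term and `resolver` implements
/// `ImportResolver` as elsewhere in this module:
///
/// ```ignore
/// let result = tolerant::resolve_imports(rt, &mut resolver);
/// // Unlike `strict::resolve_imports`, this does not abort on a failed import: the
/// // failures are collected in the result, and the term keeps its unresolved
/// // `Term::Import` nodes.
/// ```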
pub struct ResolveResult { @@ -115,10 +112,7 @@ pub mod tolerant { let mut stack = Vec::new(); let mut import_errors = Vec::new(); - let source_file: Option = rt.pos.as_opt_ref().map(|x| { - let path = resolver.get_path(x.src_id); - PathBuf::from(path) - }); + let source_file = rt.pos.as_opt_ref().map(|x| x.src_id); let mut state = ImportsResolutionState { resolver, @@ -133,7 +127,7 @@ pub mod tolerant { &|rt: RichTerm, state: &mut ImportsResolutionState| -> Result { - let (rt, err) = transform_one(rt, state.resolver, &state.parent); + let (rt, err) = transform_one(rt, state.resolver, state.parent); if let Some(err) = err { state.import_errors.push(err); } @@ -164,14 +158,14 @@ pub mod tolerant { pub fn transform_one( rt: RichTerm, resolver: &mut R, - parent: &Option, + parent: Option, ) -> (RichTerm, Option) where R: ImportResolver, { let term = rt.as_ref(); match term { - Term::Import(path) => match resolver.resolve(path, parent.clone(), &rt.pos) { + Term::Import(path) => match resolver.resolve(path, parent, &rt.pos) { Ok((_, file_id)) => (RichTerm::new(Term::ResolvedImport(file_id), rt.pos), None), Err(err) => (rt, Some(err)), }, diff --git a/lsp/nls/src/cache.rs b/lsp/nls/src/cache.rs index 6647151816..e05346cfe6 100644 --- a/lsp/nls/src/cache.rs +++ b/lsp/nls/src/cache.rs @@ -4,7 +4,7 @@ use codespan::{ByteIndex, FileId}; use lsp_types::TextDocumentPositionParams; use nickel_lang_core::position::TermPos; use nickel_lang_core::{ - cache::{Cache, CacheError, CacheOp, EntryState, TermEntry}, + cache::{Cache, CacheError, CacheOp, EntryState, SourcePath, TermEntry}, error::{Error, ImportError}, position::RawPos, typecheck::{self, linearization::Linearization}, @@ -111,11 +111,11 @@ impl CacheExt for Cache { lsp_pos: &TextDocumentPositionParams, ) -> Result { let uri = &lsp_pos.text_document.uri; + let path = uri + .to_file_path() + .map_err(|_| crate::error::Error::FileNotFound(uri.clone()))?; let file_id = self - .id_of( - uri.to_file_path() - .map_err(|_| crate::error::Error::FileNotFound(uri.clone()))?, - ) + .id_of(&SourcePath::Path(path)) .ok_or_else(|| crate::error::Error::FileNotFound(uri.clone()))?; let pos = lsp_pos.position; diff --git a/lsp/nls/src/error.rs b/lsp/nls/src/error.rs index 500322cc36..35d017f2ad 100644 --- a/lsp/nls/src/error.rs +++ b/lsp/nls/src/error.rs @@ -4,6 +4,12 @@ use lsp_types::{Position, Url}; /// Non-fatal errors generated by nls. #[derive(Debug, thiserror::Error)] pub enum Error { + #[error("scheme not supported: {0}")] + SchemeNotSupported(String), + + #[error("invalid path: {0}")] + InvalidPath(Url), + #[error("file {0} not found")] FileNotFound(Url), @@ -22,6 +28,8 @@ impl From for ResponseError { let code = match value { Error::FileNotFound(_) => ErrorCode::InvalidParams, Error::InvalidPosition { .. } => ErrorCode::InvalidParams, + Error::SchemeNotSupported(_) => ErrorCode::InvalidParams, + Error::InvalidPath(_) => ErrorCode::InvalidParams, Error::MethodNotFound => ErrorCode::MethodNotFound, Error::FormattingFailed { .. 
} => ErrorCode::InternalError, }; diff --git a/lsp/nls/src/files.rs b/lsp/nls/src/files.rs index 2d00840292..19945f2030 100644 --- a/lsp/nls/src/files.rs +++ b/lsp/nls/src/files.rs @@ -1,3 +1,5 @@ +use std::path::PathBuf; + use anyhow::Result; use codespan::FileId; use codespan_reporting::diagnostic::Diagnostic; @@ -5,18 +7,30 @@ use log::trace; use lsp_server::RequestId; use lsp_types::{ notification::{DidOpenTextDocument, Notification}, - DidChangeTextDocumentParams, DidOpenTextDocumentParams, + DidChangeTextDocumentParams, DidOpenTextDocumentParams, Url, }; use nickel_lang_core::{ - cache::{CacheError, CacheOp}, + cache::{CacheError, CacheOp, SourcePath}, error::IntoDiagnostics, }; -use crate::trace::{param::FileUpdate, Enrich, Trace}; +use crate::{ + error::Error, + trace::{param::FileUpdate, Enrich, Trace}, +}; use super::cache::CacheExt; use super::server::Server; +pub(crate) fn uri_to_path(uri: &Url) -> std::result::Result { + if uri.scheme() != "file" { + Err(Error::SchemeNotSupported(uri.scheme().into())) + } else { + uri.to_file_path() + .map_err(|_| Error::InvalidPath(uri.clone())) + } +} + pub fn handle_open(server: &mut Server, params: DidOpenTextDocumentParams) -> Result<()> { let id: RequestId = format!( "{}#{}", @@ -31,10 +45,10 @@ pub fn handle_open(server: &mut Server, params: DidOpenTextDocumentParams) -> Re content: ¶ms.text_document.text, }, ); - let file_id = server.cache.add_string( - params.text_document.uri.to_file_path().unwrap(), - params.text_document.text, - ); + let path = uri_to_path(¶ms.text_document.uri)?; + let file_id = server + .cache + .add_string(SourcePath::Path(path), params.text_document.text); server.file_uris.insert(file_id, params.text_document.uri); parse_and_typecheck(server, file_id)?; @@ -57,8 +71,9 @@ pub fn handle_save(server: &mut Server, params: DidChangeTextDocumentParams) -> }, ); + let path = uri_to_path(¶ms.text_document.uri)?; let file_id = server.cache.replace_string( - params.text_document.uri.to_file_path().unwrap(), + SourcePath::Path(path), params.content_changes[0].text.to_owned(), ); diff --git a/lsp/nls/src/incomplete.rs b/lsp/nls/src/incomplete.rs index 84161a2973..8ae9966063 100644 --- a/lsp/nls/src/incomplete.rs +++ b/lsp/nls/src/incomplete.rs @@ -8,6 +8,7 @@ use std::path::PathBuf; use nickel_lang_core::{ + cache::SourcePath, parser::lexer::{self, NormalToken, SpannedToken, Token}, position::RawSpan, term::RichTerm, @@ -145,10 +146,10 @@ pub fn parse_path_from_incomplete_input(range: RawSpan, server: &mut Server) -> // In order to help the input resolver find relative imports, we add a fake input whose parent // is the same as the real file. 
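    // An illustrative sketch (file name invented): for an incomplete input typed inside
    // /home/user/conf.ncl, the fragment is now cached as
    // `SourcePath::Snippet(PathBuf::from("/home/user/conf.ncl"))`, so a relative
    // `import "./lib.ncl"` in the fragment still resolves against /home/user, without
    // inventing a fake file name inside that directory as the old code did.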
- let mut path = PathBuf::from(server.cache.files().name(range.src_id)); - path.pop(); - path.push(""); - let file_id = server.cache.replace_string(&path, to_parse); + let path = PathBuf::from(server.cache.files().name(range.src_id)); + let file_id = server + .cache + .replace_string(SourcePath::Snippet(path), to_parse); match server.cache.parse_nocache(file_id) { Ok((rt, _errors)) => Some(resolve_imports(rt, server)), diff --git a/lsp/nls/src/requests/formatting.rs b/lsp/nls/src/requests/formatting.rs index e13e91e5de..be8c60808c 100644 --- a/lsp/nls/src/requests/formatting.rs +++ b/lsp/nls/src/requests/formatting.rs @@ -1,7 +1,8 @@ use lsp_server::{RequestId, Response, ResponseError}; use lsp_types::{DocumentFormattingParams, Position, Range, TextEdit}; +use nickel_lang_core::cache::SourcePath; -use crate::{error::Error, server::Server}; +use crate::{error::Error, files::uri_to_path, server::Server}; /// Handle the LSP formatting request from a client using an external binary as a formatter. /// If this succeds, it sends a reponse to the server and returns `Ok(..)`, otherwise, @@ -11,8 +12,8 @@ pub fn handle_format_document( id: RequestId, server: &mut Server, ) -> Result<(), ResponseError> { - let document_id = params.text_document.uri.to_file_path().unwrap(); - let file_id = server.cache.id_of(document_id).unwrap(); + let path = uri_to_path(¶ms.text_document.uri)?; + let file_id = server.cache.id_of(&SourcePath::Path(path)).unwrap(); let text = server.cache.files().source(file_id).clone(); let document_length = text.lines().count() as u32; let last_line_length = text.lines().next_back().unwrap().len() as u32; diff --git a/lsp/nls/src/requests/symbols.rs b/lsp/nls/src/requests/symbols.rs index 1ab387ca6f..4289c2d731 100644 --- a/lsp/nls/src/requests/symbols.rs +++ b/lsp/nls/src/requests/symbols.rs @@ -1,10 +1,12 @@ use crate::{ + files::uri_to_path, linearization::interface::TermKind, term::RawSpanExt, trace::{Enrich, Trace}, }; use lsp_server::{RequestId, Response, ResponseError}; use lsp_types::{DocumentSymbol, DocumentSymbolParams, SymbolKind}; +use nickel_lang_core::cache::SourcePath; use serde_json::Value; use crate::server::Server; @@ -14,10 +16,8 @@ pub fn handle_document_symbols( id: RequestId, server: &mut Server, ) -> Result<(), ResponseError> { - let file_id = server - .cache - .id_of(params.text_document.uri.to_file_path().unwrap()) - .unwrap(); + let path = uri_to_path(¶ms.text_document.uri)?; + let file_id = server.cache.id_of(&SourcePath::Path(path)).unwrap(); if let Some(completed) = server.lin_registry.map.get(&file_id) { Trace::enrich(&id, completed); diff --git a/lsp/nls/test.ncl b/lsp/nls/test.ncl new file mode 100644 index 0000000000..21c906d8b9 --- /dev/null +++ b/lsp/nls/test.ncl @@ -0,0 +1 @@ +let x = { ab } | { abcde | Number } in x
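
The nls handlers above all converge on the same lookup pattern. A minimal sketch of it, assuming the `Server`, `uri_to_path`, and error types shown in this patch, with `uri` a hypothetical `file://` URL sent by the client and the code running in a handler that returns `Result<_, Error>`:

    let path = uri_to_path(&uri)?; // rejects non-file:// schemes with SchemeNotSupported
    let file_id = server
        .cache
        .id_of(&SourcePath::Path(path))
        .ok_or_else(|| Error::FileNotFound(uri.clone()))?;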