From c3b830ee3a35f88af6564d969e6eab1806ffe5ab Mon Sep 17 00:00:00 2001
From: Taeer Bar-Yam
Date: Tue, 25 Jul 2023 09:31:19 -0400
Subject: [PATCH 1/3] report errors on transitive imports

Before this change, we would crash instead of reporting the error.
---
 core/src/repl/mod.rs | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/core/src/repl/mod.rs b/core/src/repl/mod.rs
index a623fa21af..c90c416c3d 100644
--- a/core/src/repl/mod.rs
+++ b/core/src/repl/mod.rs
@@ -6,7 +6,7 @@
 //! Dually, the frontend is the user-facing part, which may be a CLI, a web application, a
 //! jupyter-kernel (which is not exactly user-facing, but still manages input/output and
 //! formatting), etc.
-use crate::cache::{Cache, Envs, ErrorTolerance};
+use crate::cache::{Cache, CacheError, Envs, ErrorTolerance};
 use crate::error::{Error, EvalError, IOError, ParseError, ParseErrors, ReplError};
 use crate::eval::cache::Cache as EvalCache;
 use crate::eval::{Closure, VirtualMachine};
@@ -113,7 +113,12 @@ impl<EC: EvalCache> ReplImpl<EC> {
             resolved_ids: pending,
         } = import_resolution::strict::resolve_imports(t, self.vm.import_resolver_mut())?;
         for id in &pending {
-            dbg!(self.vm.import_resolver_mut().resolve_imports(*id)).unwrap();
+            self.vm
+                .import_resolver_mut()
+                .resolve_imports(*id)
+                .map_err(|cache_err| {
+                    cache_err.unwrap_error("repl::eval_(): expected imports to be parsed")
+                })?;
         }
 
         let wildcards =

From 1f6d451a99d22c828c595c65dbf822069c11d877 Mon Sep 17 00:00:00 2001
From: Taeer Bar-Yam
Date: Tue, 25 Jul 2023 09:33:27 -0400
Subject: [PATCH 2/3] slight refactor of resolve_imports

---
 core/src/cache.rs | 118 +++++++++++++++++++++++-----------------------
 1 file changed, 59 insertions(+), 59 deletions(-)

diff --git a/core/src/cache.rs b/core/src/cache.rs
index 846fa568f6..605b76bd19 100644
--- a/core/src/cache.rs
+++ b/core/src/cache.rs
@@ -688,69 +688,69 @@ impl Cache {
         file_id: FileId,
     ) -> Result<CacheOp<(Vec<FileId>, Vec<ImportError>)>, CacheError<ImportError>> {
         match self.entry_state(file_id) {
-            Some(state) if state >= EntryState::ImportsResolved => {
-                Ok(CacheOp::Cached((Vec::new(), Vec::new())))
-            }
-            Some(state) if state >= EntryState::Parsed => {
-                let (pending, errors) = if state < EntryState::ImportsResolving {
-                    let TermEntry { term, .. } = self.terms.get(&file_id).unwrap();
-                    let term = term.clone();
-
-                    let import_resolution::tolerant::ResolveResult {
-                        transformed_term,
-                        resolved_ids: pending,
-                        import_errors,
-                    } = match self.error_tolerance {
-                        ErrorTolerance::Tolerant => {
-                            import_resolution::tolerant::resolve_imports(term, self)
-                        }
-                        ErrorTolerance::Strict => {
-                            import_resolution::strict::resolve_imports(term, self)?.into()
-                        }
-                    };
-
-                    // unwrap!(): we called `unwrap()` at the beggining of the enclosing if branch
-                    // on the result of `self.terms.get(&file_id)`. We only made recursive calls to
-                    // `resolve_imports` in between, which don't remove anything from `self.terms`.
-                    let cached_term = self.terms.get_mut(&file_id).unwrap();
-                    cached_term.term = transformed_term;
-                    cached_term.state = EntryState::ImportsResolving;
-
-                    let mut done = Vec::new();
-
-                    // Transitively resolve the imports, and accumulate the ids of the resolved
-                    // files along the way.
-                    for id in pending {
-                        if let CacheOp::Done((mut done_local, _)) = self.resolve_imports(id)? {
-                            done.push(id);
-                            done.append(&mut done_local)
-                        }
-                    }
-
-                    self.update_state(file_id, EntryState::ImportsResolved);
-                    (done, import_errors)
-                } else {
-                    // [transitory_entry_state]:
-                    //
-                    // In this branch, the state is `EntryState::ImportsResolving`. Which means we
-                    // recursively called `resolve_imports` on the same entry: there is a cyclic
-                    // import. In that case, we don't do anything, as the entry is being treated
-                    // by a on-going call to `resolve_import` higher up in the call chain.
-                    //
-                    // Note that in some cases, this intermediate state can be observed by an
-                    // external caller: if a first call to `resolve_imports` fails in the middle of
-                    // resolving the transitive imports, the end state of the entry is
-                    // `ImportsResolving`. Subsequent calls to `resolve_imports` will succeed, but
-                    // won't change the state to `EntryState::ImportsResolved` (and for a good
-                    // reason: we wouldn't even know what are the pending imports to resolve). The
-                    // Nickel pipeline should however fail if `resolve_imports` failed at some
-                    // point, anyway.
-                    (Vec::new(), Vec::new())
-                };
-
-                Ok(CacheOp::Done((pending, errors)))
-            }
-            _ => Err(CacheError::NotParsed),
+            Some(EntryState::Parsed) => {
+                let TermEntry { term, .. } = self.terms.get(&file_id).unwrap();
+                let term = term.clone();
+
+                let import_resolution::tolerant::ResolveResult {
+                    transformed_term,
+                    resolved_ids: pending,
+                    import_errors,
+                } = match self.error_tolerance {
+                    ErrorTolerance::Tolerant => {
+                        import_resolution::tolerant::resolve_imports(term, self)
+                    }
+                    ErrorTolerance::Strict => {
+                        import_resolution::strict::resolve_imports(term, self)?.into()
+                    }
+                };
+
+                // unwrap!(): we called `unwrap()` at the beginning of this match arm on the
+                // result of `self.terms.get(&file_id)`. We only made recursive calls to
+                // `resolve_imports` in between, which don't remove anything from `self.terms`.
+                let cached_term = self.terms.get_mut(&file_id).unwrap();
+                cached_term.term = transformed_term;
+                cached_term.state = EntryState::ImportsResolving;
+
+                let mut done = Vec::new();
+
+                // Transitively resolve the imports, and accumulate the ids of the resolved
+                // files along the way.
+                for id in pending {
+                    if let CacheOp::Done((mut done_local, _)) = self.resolve_imports(id)? {
+                        done.push(id);
+                        done.append(&mut done_local)
+                    }
+                }
+
+                self.update_state(file_id, EntryState::ImportsResolved);
+
+                Ok(CacheOp::Done((done, import_errors)))
+            }
+            // [transitory_entry_state]:
+            //
+            // This case is triggered by a cyclic import. The entry is already
+            // being treated by an ongoing call to `resolve_imports` higher up in
+            // the call chain, so we don't do anything here.
+            //
+            // Note that in some cases, this intermediate state can be observed by an
+            // external caller: if a first call to `resolve_imports` fails in the middle of
+            // resolving the transitive imports, the end state of the entry is
+            // `ImportsResolving`. Subsequent calls to `resolve_imports` will succeed, but
+            // won't change the state to `EntryState::ImportsResolved` (and for a good
+            // reason: we wouldn't even know what the pending imports to resolve are). The
+            // Nickel pipeline should fail anyway if `resolve_imports` failed at some
+            // point.
+            Some(EntryState::ImportsResolving) => Ok(CacheOp::Done((Vec::new(), Vec::new()))),
+            // >= EntryState::ImportsResolved
+            Some(
+                EntryState::ImportsResolved
+                | EntryState::Typechecking
+                | EntryState::Typechecked
+                | EntryState::Transforming
+                | EntryState::Transformed,
+            ) => Ok(CacheOp::Cached((Vec::new(), Vec::new()))),
+            None => Err(CacheError::NotParsed),
         }
     }

From 6282ac3378348e3196e9c7075f9f8ea4b2d1344c Mon Sep 17 00:00:00 2001
From: Taeer Bar-Yam
Date: Wed, 26 Jul 2023 06:45:40 -0400
Subject: [PATCH 3/3] clippy

---
 core/src/repl/mod.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/repl/mod.rs b/core/src/repl/mod.rs
index c90c416c3d..d92a859341 100644
--- a/core/src/repl/mod.rs
+++ b/core/src/repl/mod.rs
@@ -6,7 +6,7 @@
 //! Dually, the frontend is the user-facing part, which may be a CLI, a web application, a
 //! jupyter-kernel (which is not exactly user-facing, but still manages input/output and
 //! formatting), etc.
-use crate::cache::{Cache, CacheError, Envs, ErrorTolerance};
+use crate::cache::{Cache, Envs, ErrorTolerance};
 use crate::error::{Error, EvalError, IOError, ParseError, ParseErrors, ReplError};
 use crate::eval::cache::Cache as EvalCache;
 use crate::eval::{Closure, VirtualMachine};
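Note on the error-handling shape in PATCH 1/3: the patch replaces `dbg!(...).unwrap()`, which panics when a transitive import fails to resolve, with `map_err(...)` followed by `?`, so the failure surfaces to the REPL as an error instead of a crash. The sketch below is a minimal, self-contained illustration of that pattern only; `CacheError`, `ImportError`, `resolve_imports`, and `resolve_pending` here are simplified stand-ins invented for the example, not the real Nickel definitions.

    // Simplified stand-in for a cache error: either a real error,
    // or "the file was never parsed".
    enum CacheError<E> {
        Error(E),
        NotParsed,
    }

    impl<E> CacheError<E> {
        // Stand-in for the `unwrap_error` call used in the patch: extract the
        // inner error, panicking (with the given message) only in the case the
        // caller knows to be impossible.
        fn unwrap_error(self, msg: &str) -> E {
            match self {
                CacheError::Error(e) => e,
                CacheError::NotParsed => panic!("{}", msg),
            }
        }
    }

    #[derive(Debug)]
    struct ImportError(String);

    // Stand-in for resolving the transitive imports of a single file id.
    fn resolve_imports(id: u32) -> Result<(), CacheError<ImportError>> {
        if id == 42 {
            Err(CacheError::Error(ImportError("missing import".into())))
        } else {
            Ok(())
        }
    }

    // The shape of the fix: propagate the error with `?` instead of unwrapping,
    // so a failing transitive import becomes a reported error, not a panic.
    fn resolve_pending(pending: &[u32]) -> Result<(), ImportError> {
        for id in pending {
            resolve_imports(*id)
                .map_err(|cache_err| cache_err.unwrap_error("expected imports to be parsed"))?;
        }
        Ok(())
    }

    fn main() {
        // Prints `Err(ImportError("missing import"))` rather than panicking.
        println!("{:?}", resolve_pending(&[1, 2, 42]));
    }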