From 4fca2e46900ebf02191ba33c83827a17508905a4 Mon Sep 17 00:00:00 2001 From: Grant Wuerker Date: Tue, 12 Oct 2021 19:49:31 -0600 Subject: [PATCH] Multi-file support --- Cargo.lock | 28 ++ crates/abi/src/builder.rs | 25 +- crates/analyzer/src/context.rs | 5 + crates/analyzer/src/db.rs | 44 ++- crates/analyzer/src/db/queries.rs | 1 + crates/analyzer/src/db/queries/ingots.rs | 105 +++++++ crates/analyzer/src/db/queries/module.rs | 290 +++++++++++++++++- crates/analyzer/src/lib.rs | 27 +- crates/analyzer/src/namespace/items.rs | 210 +++++++++++-- crates/analyzer/src/namespace/scopes.rs | 30 +- crates/analyzer/src/traversal/expressions.rs | 147 +++++++-- crates/analyzer/src/traversal/types.rs | 74 +++-- crates/analyzer/tests/analysis.rs | 88 +++++- crates/analyzer/tests/errors.rs | 81 ++++- .../snapshots/analysis__basic_ingot.snap | 195 ++++++++++++ .../tests/snapshots/errors__bad_ingot.snap | 72 +++++ .../snapshots/errors__mainless_ingot.snap | 9 + ...rrors__type_constructor_from_variable.snap | 12 - crates/common/src/files.rs | 15 +- crates/common/src/lib.rs | 2 + crates/common/src/upcast.rs | 3 + crates/driver/Cargo.toml | 1 + crates/driver/src/lib.rs | 181 +++++++++-- crates/fe/Cargo.toml | 1 + crates/fe/src/main.rs | 85 ++++- crates/lowering/Cargo.toml | 1 + crates/lowering/src/db.rs | 33 ++ crates/lowering/src/db/queries.rs | 33 ++ crates/lowering/src/lib.rs | 22 +- crates/lowering/src/mappers/expressions.rs | 1 + crates/lowering/src/mappers/module.rs | 2 + crates/lowering/src/mappers/types.rs | 2 +- crates/lowering/tests/lowering.rs | 34 +- crates/parser/src/ast.rs | 33 +- crates/parser/src/grammar/expressions.rs | 63 +++- crates/parser/src/grammar/module.rs | 77 +---- crates/parser/src/grammar/types.rs | 37 ++- crates/parser/tests/cases/errors.rs | 4 + crates/parser/tests/cases/parse_ast.rs | 3 +- .../cases__errors__expr_dotted_number.snap | 12 + .../cases__errors__expr_path_left.snap | 14 + .../cases__errors__expr_path_right.snap | 14 + .../snapshots/cases__errors__self_use1.snap | 6 +- .../cases__errors__type_desc_path_number.snap | 14 + .../cases__parse_ast__expr_path_call.snap | 52 ++++ .../cases__parse_ast__module_stmts.snap | 89 ++---- .../cases__parse_ast__stmt_path_type.snap | 117 +++++++ .../snapshots/cases__parse_ast__use_glob.snap | 41 +-- .../cases__parse_ast__use_nested1.snap | 201 +++++------- .../cases__parse_ast__use_nested2.snap | 230 ++++++-------- .../cases__parse_ast__use_simple1.snap | 37 +-- .../cases__parse_ast__use_simple2.snap | 37 +-- crates/test-files/Cargo.toml | 2 + .../compile_errors/bad_ingot/src/bing.fe | 2 + .../compile_errors/bad_ingot/src/biz/bad.fe | 5 + .../compile_errors/bad_ingot/src/foo.fe | 8 + .../compile_errors/bad_ingot/src/main.fe | 12 + .../compile_errors/mainless_ingot/src/foo.fe | 0 .../ingots/basic_ingot/src/bar/baz.fe | 3 + .../fixtures/ingots/basic_ingot/src/bing.fe | 10 + .../ingots/basic_ingot/src/ding/dang.fe | 1 + .../ingots/basic_ingot/src/ding/dong.fe | 4 + .../fixtures/ingots/basic_ingot/src/main.fe | 26 ++ crates/test-files/fixtures/printing/defs.fe | 4 +- crates/test-files/src/lib.rs | 45 +++ crates/test-utils/Cargo.toml | 1 + crates/test-utils/src/lib.rs | 36 ++- crates/tests/src/crashes.rs | 2 +- crates/tests/src/ingots.rs | 33 ++ crates/tests/src/lib.rs | 2 + crates/yulc/Cargo.toml | 1 + crates/yulc/src/lib.rs | 30 +- crates/yulgen/Cargo.toml | 1 + crates/yulgen/src/db.rs | 44 +++ crates/yulgen/src/db/queries.rs | 44 +++ crates/yulgen/src/lib.rs | 54 +--- crates/yulgen/src/mappers/expressions.rs | 1 + 
newsfragments/562.feature.md | 3 + 78 files changed, 2560 insertions(+), 754 deletions(-) create mode 100644 crates/analyzer/src/db/queries/ingots.rs create mode 100644 crates/analyzer/tests/snapshots/analysis__basic_ingot.snap create mode 100644 crates/analyzer/tests/snapshots/errors__bad_ingot.snap create mode 100644 crates/analyzer/tests/snapshots/errors__mainless_ingot.snap delete mode 100644 crates/analyzer/tests/snapshots/errors__type_constructor_from_variable.snap create mode 100644 crates/common/src/upcast.rs create mode 100644 crates/lowering/src/db.rs create mode 100644 crates/lowering/src/db/queries.rs create mode 100644 crates/parser/tests/cases/snapshots/cases__errors__expr_dotted_number.snap create mode 100644 crates/parser/tests/cases/snapshots/cases__errors__expr_path_left.snap create mode 100644 crates/parser/tests/cases/snapshots/cases__errors__expr_path_right.snap create mode 100644 crates/parser/tests/cases/snapshots/cases__errors__type_desc_path_number.snap create mode 100644 crates/parser/tests/cases/snapshots/cases__parse_ast__expr_path_call.snap create mode 100644 crates/parser/tests/cases/snapshots/cases__parse_ast__stmt_path_type.snap create mode 100644 crates/test-files/fixtures/compile_errors/bad_ingot/src/bing.fe create mode 100644 crates/test-files/fixtures/compile_errors/bad_ingot/src/biz/bad.fe create mode 100644 crates/test-files/fixtures/compile_errors/bad_ingot/src/foo.fe create mode 100644 crates/test-files/fixtures/compile_errors/bad_ingot/src/main.fe create mode 100644 crates/test-files/fixtures/compile_errors/mainless_ingot/src/foo.fe create mode 100644 crates/test-files/fixtures/ingots/basic_ingot/src/bar/baz.fe create mode 100644 crates/test-files/fixtures/ingots/basic_ingot/src/bing.fe create mode 100644 crates/test-files/fixtures/ingots/basic_ingot/src/ding/dang.fe create mode 100644 crates/test-files/fixtures/ingots/basic_ingot/src/ding/dong.fe create mode 100644 crates/test-files/fixtures/ingots/basic_ingot/src/main.fe create mode 100644 crates/tests/src/ingots.rs create mode 100644 crates/yulgen/src/db.rs create mode 100644 crates/yulgen/src/db/queries.rs create mode 100644 newsfragments/562.feature.md diff --git a/Cargo.lock b/Cargo.lock index 30b2051ecf..f8447e90c8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -486,6 +486,7 @@ dependencies = [ "fe-common", "fe-driver", "fe-parser", + "walkdir", ] [[package]] @@ -544,6 +545,7 @@ dependencies = [ "ethabi", "evm", "evm-runtime", + "fe-analyzer", "fe-common", "fe-driver", "fe-test-files", @@ -594,6 +596,7 @@ dependencies = [ "fe-parser", "fe-yulc", "fe-yulgen", + "indexmap", "serde_json", ] @@ -610,6 +613,7 @@ dependencies = [ "pretty_assertions", "regex", "rstest", + "salsa", "wasm-bindgen-test", ] @@ -637,7 +641,9 @@ dependencies = [ name = "fe-test-files" version = "0.10.0-alpha" dependencies = [ + "fe-common", "include_dir", + "walkdir", ] [[package]] @@ -645,6 +651,7 @@ name = "fe-yulc" version = "0.10.0-alpha" dependencies = [ "fe-yulgen", + "indexmap", "serde_json", "solc", ] @@ -656,6 +663,7 @@ dependencies = [ "fe-abi", "fe-analyzer", "fe-common", + "fe-lowering", "fe-parser", "fe-test-files", "indexmap", @@ -1437,6 +1445,15 @@ dependencies = [ "syn", ] +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "scoped-tls" version = "1.0.0" @@ -1770,6 +1787,17 @@ version = "0.9.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" +[[package]] +name = "walkdir" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +dependencies = [ + "same-file", + "winapi", + "winapi-util", +] + [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" diff --git a/crates/abi/src/builder.rs b/crates/abi/src/builder.rs index 5f51452c7c..375af9c504 100644 --- a/crates/abi/src/builder.rs +++ b/crates/abi/src/builder.rs @@ -126,8 +126,11 @@ fn components(db: &dyn AnalyzerDb, typ: &types::FixedSize) -> Vec { #[cfg(test)] mod tests { use crate::builder; - use fe_analyzer::Db; + use fe_analyzer::namespace::items::{Global, Module, ModuleContext, ModuleFileContent}; + use fe_analyzer::{AnalyzerDb, TestDb}; + use fe_common::files::SourceFileId; use fe_parser::{grammar::module::parse_module, parse_code_chunk}; + use std::rc::Rc; #[test] fn build_contract_abi() { @@ -146,11 +149,25 @@ contract Foo: pub fn bar(x: u256) -> Array: revert"#; - let module = parse_code_chunk(parse_module, contract) + let ast = parse_code_chunk(parse_module, contract) .expect("unable to build module AST") .kind; - let db = Db::default(); - let module_id = fe_analyzer::analyze(&db, module).expect("failed to analyze source"); + let db = TestDb::default(); + + let global = Global::default(); + let global_id = db.intern_global(Rc::new(global)); + + let module = Module { + name: "test_module".to_string(), + context: ModuleContext::Global(global_id), + file_content: ModuleFileContent::File { + file: SourceFileId(0), + }, + ast, + }; + let module_id = db.intern_module(Rc::new(module)); + + fe_analyzer::analyze_module(&db, module_id).expect("failed to analyze source"); let abis = builder::module(&db, module_id).expect("unable to build ABI"); if let Some(abi) = abis.get("Foo") { diff --git a/crates/analyzer/src/context.rs b/crates/analyzer/src/context.rs index 9a646f133f..303535901c 100644 --- a/crates/analyzer/src/context.rs +++ b/crates/analyzer/src/context.rs @@ -6,6 +6,7 @@ use crate::AnalyzerDb; use fe_common::diagnostics::Diagnostic; pub use fe_common::diagnostics::Label; use fe_common::Span; +use fe_parser::ast; use fe_parser::node::NodeId; use indexmap::IndexMap; use std::collections::HashMap; @@ -28,6 +29,7 @@ impl Analysis { pub trait AnalyzerContext { fn resolve_name(&self, name: &str) -> Option; + fn resolve_path(&mut self, path: &ast::Path) -> Option; fn add_diagnostic(&mut self, diag: Diagnostic); fn db(&self) -> &dyn AnalyzerDb; @@ -157,6 +159,9 @@ impl AnalyzerContext for TempContext { fn add_diagnostic(&mut self, diag: Diagnostic) { self.diagnostics.push(diag) } + fn resolve_path(&mut self, _path: &ast::Path) -> Option { + panic!("TempContext can't resolve paths") + } } /// Indicates where an expression is stored. 
diff --git a/crates/analyzer/src/db.rs b/crates/analyzer/src/db.rs index 80f1a908bf..511d769a3c 100644 --- a/crates/analyzer/src/db.rs +++ b/crates/analyzer/src/db.rs @@ -1,11 +1,14 @@ use crate::context::{Analysis, FunctionBody}; use crate::errors::TypeError; use crate::namespace::items::{ - self, ContractFieldId, ContractId, EventId, FunctionId, Item, ModuleConstantId, ModuleId, - StructFieldId, StructId, TypeAliasId, + self, ContractFieldId, ContractId, EventId, FunctionId, GlobalId, IngotId, Item, + ModuleConstantId, ModuleId, StructFieldId, StructId, TypeAliasId, }; use crate::namespace::types; -use indexmap::IndexMap; +use fe_common::Span; +use fe_parser::ast; +use fe_parser::node::Node; +use indexmap::map::IndexMap; use std::rc::Rc; mod queries; @@ -28,6 +31,10 @@ macro_rules! impl_intern_key { #[salsa::query_group(AnalyzerDbStorage)] pub trait AnalyzerDb { + #[salsa::interned] + fn intern_global(&self, data: Rc) -> GlobalId; + #[salsa::interned] + fn intern_ingot(&self, data: Rc) -> IngotId; #[salsa::interned] fn intern_module(&self, data: Rc) -> ModuleId; #[salsa::interned] @@ -47,17 +54,38 @@ pub trait AnalyzerDb { #[salsa::interned] fn intern_event(&self, data: Rc) -> EventId; + // Ingot + #[salsa::invoke(queries::ingots::ingot_all_modules)] + fn ingot_all_modules(&self, ingot: IngotId) -> Rc>; + #[salsa::invoke(queries::ingots::ingot_main_module)] + fn ingot_main_module(&self, ingot: IngotId) -> Analysis>; + // Module #[salsa::invoke(queries::module::module_all_items)] fn module_all_items(&self, module: ModuleId) -> Rc>; #[salsa::invoke(queries::module::module_item_map)] fn module_item_map(&self, module: ModuleId) -> Analysis>>; - #[salsa::invoke(queries::module::module_imported_item_map)] - fn module_imported_item_map(&self, module: ModuleId) -> Rc>; #[salsa::invoke(queries::module::module_contracts)] fn module_contracts(&self, module: ModuleId) -> Rc>; #[salsa::invoke(queries::module::module_structs)] fn module_structs(&self, module: ModuleId) -> Rc>; + #[salsa::invoke(queries::module::module_used_item_map)] + fn module_used_item_map( + &self, + module: ModuleId, + ) -> Analysis>>; + #[salsa::invoke(queries::module::module_resolve_use_tree)] + fn module_resolve_use_tree( + &self, + module: ModuleId, + tree: Node, + ) -> Analysis>>; + #[salsa::invoke(queries::module::module_parent_module)] + fn module_parent_module(&self, module: ModuleId) -> Option; + #[salsa::invoke(queries::module::module_adjacent_modules)] + fn module_adjacent_modules(&self, module: ModuleId) -> Rc>; + #[salsa::invoke(queries::module::module_sub_modules)] + fn module_sub_modules(&self, module: ModuleId) -> Rc>; // Module Constant #[salsa::invoke(queries::module::module_constant_type)] @@ -127,7 +155,7 @@ pub trait AnalyzerDb { #[salsa::database(AnalyzerDbStorage)] #[derive(Default)] -pub struct Db { - storage: salsa::Storage, +pub struct TestDb { + storage: salsa::Storage, } -impl salsa::Database for Db {} +impl salsa::Database for TestDb {} diff --git a/crates/analyzer/src/db/queries.rs b/crates/analyzer/src/db/queries.rs index 7b9c6bb156..f6c5db8ad6 100644 --- a/crates/analyzer/src/db/queries.rs +++ b/crates/analyzer/src/db/queries.rs @@ -1,6 +1,7 @@ pub mod contracts; pub mod events; pub mod functions; +pub mod ingots; pub mod module; pub mod structs; pub mod types; diff --git a/crates/analyzer/src/db/queries/ingots.rs b/crates/analyzer/src/db/queries/ingots.rs new file mode 100644 index 0000000000..3340503548 --- /dev/null +++ b/crates/analyzer/src/db/queries/ingots.rs @@ -0,0 +1,105 @@ +use 
crate::context::Analysis; +use crate::namespace::items::{IngotId, Module, ModuleContext, ModuleFileContent, ModuleId}; +use crate::AnalyzerDb; +use fe_common::diagnostics::{Diagnostic, Severity}; +use fe_parser::ast; +use indexmap::set::IndexSet; +use std::path::Path; +use std::rc::Rc; + +pub fn ingot_all_modules(db: &dyn AnalyzerDb, ingot_id: IngotId) -> Rc> { + let ingot = &ingot_id.data(db); + + let file_modules = ingot + .fe_files + .values() + .into_iter() + .map(|(file, ast)| { + let module = Module { + name: Path::new(&file.name) + .file_stem() + .expect("file does not have stem") + .to_str() + .expect("could not convert file stem to string") + .to_string(), + ast: ast.clone(), + file_content: ModuleFileContent::File { file: file.id }, + context: ModuleContext::Ingot(ingot_id), + }; + + db.intern_module(Rc::new(module)) + }) + .collect::>(); + + let dir_modules = ingot + .fe_files + .values() + .into_iter() + .map(|(file, _)| { + Path::new(&file.name) + .parent() + .expect("file does not have parent path") + }) + .collect::>() + .into_iter() + .map(|dir| { + let module = Module { + name: dir + .file_name() + .expect("missing file name") + .to_str() + .expect("could not convert dir name to string") + .to_string(), + ast: ast::Module { body: vec![] }, + context: ModuleContext::Ingot(ingot_id), + file_content: ModuleFileContent::Dir { + dir_path: dir + .to_str() + .expect("could not convert dir path to string") + .to_string(), + }, + }; + + db.intern_module(Rc::new(module)) + }) + .collect::>(); + + let all_modules = [file_modules, dir_modules].concat(); + Rc::new(all_modules) +} + +pub fn ingot_main_module(db: &dyn AnalyzerDb, ingot_id: IngotId) -> Analysis> { + let main_id = ingot_id + .all_modules(db) + .iter() + .find(|module_id| { + module_id.name(db) == "main" && { + if let Some(parent_id) = module_id.parent_module(db) { + parent_id.name(db) == "src" + } else { + false + } + } + }) + .copied(); + + Analysis { + value: main_id, + diagnostics: Rc::new({ + if main_id.is_none() { + vec![Diagnostic { + severity: Severity::Error, + message: format!( + "The ingot named \"{}\" is missing a main module. 
\ + \nPlease add a `src/main.fe` file to the base directory.", + ingot_id.name(db) + ), + labels: vec![], + notes: vec![], + }] + } else { + vec![] + } + }), + } +} diff --git a/crates/analyzer/src/db/queries/module.rs b/crates/analyzer/src/db/queries/module.rs index ec69642aec..5be0bc7c6b 100644 --- a/crates/analyzer/src/db/queries/module.rs +++ b/crates/analyzer/src/db/queries/module.rs @@ -3,16 +3,21 @@ use crate::context::{Analysis, AnalyzerContext}; use crate::db::AnalyzerDb; use crate::errors::{self, TypeError}; use crate::namespace::items::{ - Contract, ContractId, Function, Item, ModuleConstant, ModuleConstantId, ModuleId, Struct, - StructId, TypeAlias, TypeDef, + Contract, ContractId, Function, Item, ModuleConstant, ModuleConstantId, ModuleContext, + ModuleFileContent, ModuleId, Struct, StructId, TypeAlias, TypeDef, }; use crate::namespace::scopes::ItemScope; use crate::namespace::types::{self, Type}; use crate::traversal::types::type_desc; use fe_common::diagnostics::Label; +use fe_common::Span; use fe_parser::ast; +use fe_parser::node::Node; use indexmap::indexmap; use indexmap::map::{Entry, IndexMap}; +use std::collections::HashSet; +use std::ops::Deref; +use std::path::Path; use std::rc::Rc; use strum::IntoEnumIterator; @@ -41,11 +46,6 @@ fn std_prelude_items() -> IndexMap { items } -// This is probably too simple for real module imports -pub fn module_imported_item_map(_: &dyn AnalyzerDb, _: ModuleId) -> Rc> { - Rc::new(std_prelude_items()) -} - pub fn module_all_items(db: &dyn AnalyzerDb, module: ModuleId) -> Rc> { let ast::Module { body } = &module.data(db).ast; @@ -85,7 +85,7 @@ pub fn module_all_items(db: &dyn AnalyzerDb, module: ModuleId) -> Rc> })))) } ast::ModuleStmt::Pragma(_) => None, - ast::ModuleStmt::Use(_) => todo!(), + ast::ModuleStmt::Use(_) => None, }) .collect(); Rc::new(items) @@ -95,14 +95,20 @@ pub fn module_item_map( db: &dyn AnalyzerDb, module: ModuleId, ) -> Analysis>> { - let mut diagnostics = vec![]; + let builtin_items = std_prelude_items(); + let sub_modules = module + .sub_modules(db) + .iter() + .map(|(name, id)| (name.clone(), Item::Module(*id))) + .collect::>(); + let used_items = db.module_used_item_map(module); + let mut diagnostics = (*used_items.diagnostics).clone(); - let imports = db.module_imported_item_map(module); let mut map = IndexMap::::new(); for item in module.all_items(db).iter() { let item_name = item.name(db); - if let Some(builtin) = imports.get(&item_name) { + if let Some(builtin) = builtin_items.get(&item_name) { let builtin_kind = builtin.item_kind_display_name(); diagnostics.push(errors::error( &format!("type name conflicts with built-in {}", builtin_kind), @@ -112,6 +118,19 @@ pub fn module_item_map( continue; } + if let Some((used_item_name_span, used_item)) = used_items.value.get(&item_name) { + diagnostics.push(errors::duplicate_name_error( + &format!( + "a {} with the same name has already been imported", + used_item.item_kind_display_name() + ), + &item.name(db), + *used_item_name_span, + item.name_span(db).expect("missing name span"), + )); + continue; + } + match map.entry(item_name) { Entry::Occupied(entry) => { diagnostics.push(errors::fancy_error( @@ -136,7 +155,18 @@ pub fn module_item_map( } } Analysis { - value: Rc::new(map), + value: Rc::new( + map.into_iter() + .chain(sub_modules) + .chain(builtin_items) + .chain( + used_items + .value + .iter() + .map(|(name, (_, item))| (name.to_owned(), *item)), + ) + .collect::>(), + ), diagnostics: Rc::new(diagnostics), } } @@ -159,6 +189,14 @@ pub fn module_structs(db: 
&dyn AnalyzerDb, module: ModuleId) -> Rc module .all_items(db) .iter() + // TODO: this needs dependency graph stuff + .chain( + module + .used_items(db) + .values() + .into_iter() + .map(|(_, item)| item), + ) .filter_map(|item| match item { Item::Type(TypeDef::Struct(id)) => Some(*id), _ => None, @@ -190,3 +228,231 @@ pub fn module_constant_type( diagnostics: Rc::new(scope.diagnostics), } } + +pub fn module_used_item_map( + db: &dyn AnalyzerDb, + module: ModuleId, +) -> Analysis>> { + let mut diagnostics = vec![]; + + let ast::Module { body } = &module.data(db).ast; + + let items = body + .iter() + .fold(indexmap! {}, |mut accum, stmt| { + if let ast::ModuleStmt::Use(use_stmt) = stmt { + let parent = module + .parent_module(db) + .expect("module does not have a parent"); + let items = db.module_resolve_use_tree(parent, use_stmt.kind.tree.clone()); + diagnostics.extend(items.diagnostics.deref().clone()); + + for (name, (name_span, item)) in items.value.iter() { + if let Some((other_name_span, other_item)) = + accum.insert(name.to_owned(), (*name_span, *item)) + { + diagnostics.push(errors::duplicate_name_error( + &format!( + "a {} with the same name has already been imported", + other_item.item_kind_display_name() + ), + name, + other_name_span, + *name_span, + )); + } + } + } + + accum + }) + .into_iter() + .filter_map(|(name, (name_span, item))| { + let builtin_items = std_prelude_items(); + + if let Some(builtin) = builtin_items.get(&name) { + let builtin_kind = builtin.item_kind_display_name(); + + diagnostics.push(errors::error( + &format!("import name conflicts with built-in {}", builtin_kind), + name_span, + &format!("`{}` is a built-in {}", name, builtin_kind), + )); + + None + } else { + Some((name, (name_span, item))) + } + }) + .collect::>(); + + Analysis { + value: Rc::new(items), + diagnostics: Rc::new(diagnostics), + } +} + +pub fn module_resolve_use_tree( + db: &dyn AnalyzerDb, + module: ModuleId, + tree: Node, +) -> Analysis>> { + let mut diagnostics = vec![]; + + match &tree.kind { + ast::UseTree::Glob { prefix } => { + let prefix_module = Item::Module(module).resolve_path(db, prefix); + diagnostics.extend(prefix_module.diagnostics.deref().clone()); + + let items = match prefix_module.value { + Some(Item::Module(module)) => (*module.items(db)) + .clone() + .into_iter() + .map(|(name, item)| (name, (tree.span, item))) + .collect(), + Some(item) => { + diagnostics.push(errors::error( + format!("cannot glob import from {}", item.item_kind_display_name()), + prefix.segments.last().expect("path is empty").span, + "prefix item must be a module", + )); + indexmap! {} + } + None => indexmap! {}, + }; + + Analysis { + value: Rc::new(items), + diagnostics: Rc::new(diagnostics), + } + } + ast::UseTree::Nested { prefix, children } => { + let prefix_module = Item::Module(module).resolve_path(db, prefix); + diagnostics.extend(prefix_module.diagnostics.deref().clone()); + + let items = match prefix_module.value { + Some(Item::Module(module)) => { + children.iter().fold(indexmap! 
{}, |mut accum, node| { + let child_items = db.module_resolve_use_tree(module, node.clone()); + diagnostics.extend(child_items.diagnostics.deref().clone()); + + for (name, (name_span, item)) in child_items.value.iter() { + if let Some((other_name_span, other_item)) = + accum.insert(name.to_owned(), (*name_span, *item)) + { + diagnostics.push(errors::duplicate_name_error( + &format!( + "a {} with the same name has already been imported", + other_item.item_kind_display_name() + ), + name, + other_name_span, + *name_span, + )); + } + } + + accum + }) + } + Some(item) => { + diagnostics.push(errors::error( + format!("cannot glob import from {}", item.item_kind_display_name()), + prefix.segments.last().unwrap().span, + "prefix item must be a module", + )); + indexmap! {} + } + None => indexmap! {}, + }; + + Analysis { + value: Rc::new(items), + diagnostics: Rc::new(diagnostics), + } + } + ast::UseTree::Simple { path, rename } => { + let item = Item::Module(module).resolve_path(db, path); + + let items = match item.value { + Some(item) => { + let (item_name, item_name_span) = if let Some(name) = rename { + (name.kind.clone(), name.span) + } else { + let name_segment_node = path.segments.last().expect("path is empty"); + (name_segment_node.kind.clone(), name_segment_node.span) + }; + + indexmap! { item_name => (item_name_span, item) } + } + None => indexmap! {}, + }; + + Analysis { + value: Rc::new(items), + diagnostics: item.diagnostics, + } + } + } +} + +pub fn module_parent_module(db: &dyn AnalyzerDb, module: ModuleId) -> Option { + match module.context(db) { + ModuleContext::Ingot(ingot) => { + let all_modules = ingot.all_modules(db); + + for curr_module in all_modules.iter() { + if curr_module + .sub_modules(db) + .values() + .collect::>() + .contains(&module) + { + return Some(*curr_module); + } + } + + None + } + ModuleContext::Global(_) => None, + } +} + +pub fn module_adjacent_modules( + db: &dyn AnalyzerDb, + module: ModuleId, +) -> Rc> { + if let Some(parent) = module.parent_module(db) { + parent.sub_modules(db) + } else { + Rc::new(indexmap! {}) + } +} + +pub fn module_sub_modules(db: &dyn AnalyzerDb, module: ModuleId) -> Rc> { + match module.context(db) { + ModuleContext::Ingot(ingot) => { + let all_modules = ingot.all_modules(db); + + match module.file_content(db) { + ModuleFileContent::Dir { dir_path } => { + let sub_modules = all_modules + .iter() + .filter(|module_id| { + Path::new(&module_id.ingot_path(db)) + .parent() + .expect("module file in ingot does not have parent path") + == Path::new(&dir_path) + }) + .map(|module_id| (module_id.name(db), *module_id)) + .collect::>(); + Rc::new(sub_modules) + } + // file modules do not have sub-modules (for now, at least) + ModuleFileContent::File { .. } => Rc::new(indexmap! {}), + } + } + // if we are compiling a module in the global context, then it will not have any sub-modules + ModuleContext::Global(_) => Rc::new(indexmap! 
{}), + } +} diff --git a/crates/analyzer/src/lib.rs b/crates/analyzer/src/lib.rs index a51d5c5f02..70f2a9fd63 100644 --- a/crates/analyzer/src/lib.rs +++ b/crates/analyzer/src/lib.rs @@ -7,28 +7,31 @@ pub mod builtins; pub mod constants; pub mod context; -mod db; +pub mod db; pub mod errors; pub mod namespace; mod operations; mod traversal; -pub use db::{AnalyzerDb, Db}; +use crate::namespace::items::{IngotId, ModuleId}; +pub use db::{AnalyzerDb, TestDb}; use fe_common::diagnostics::Diagnostic; -use fe_parser::ast; -use namespace::items; -use std::rc::Rc; -/// Performs semantic analysis of the source program -pub fn analyze( - db: &dyn AnalyzerDb, - module: ast::Module, -) -> Result> { - let module_id = db.intern_module(Rc::new(items::Module { ast: module })); +pub fn analyze_ingot(db: &dyn AnalyzerDb, ingot_id: IngotId) -> Result<(), Vec> { + let diagnostics = ingot_id.diagnostics(db); + if diagnostics.is_empty() { + Ok(()) + } else { + Err(diagnostics) + } +} + +pub fn analyze_module(db: &dyn AnalyzerDb, module_id: ModuleId) -> Result<(), Vec> { let diagnostics = module_id.diagnostics(db); + if diagnostics.is_empty() { - Ok(module_id) + Ok(()) } else { Err(diagnostics) } diff --git a/crates/analyzer/src/namespace/items.rs b/crates/analyzer/src/namespace/items.rs index b4b5b49e2a..89c43c2419 100644 --- a/crates/analyzer/src/namespace/items.rs +++ b/crates/analyzer/src/namespace/items.rs @@ -1,22 +1,28 @@ use crate::builtins; use crate::context; +use crate::context::Analysis; use crate::errors::{self, TypeError}; use crate::impl_intern_key; use crate::namespace::types::{self, GenericType}; use crate::traversal::pragma::check_pragma_version; use crate::AnalyzerDb; use fe_common::diagnostics::Diagnostic; +use fe_common::files::{SourceFile, SourceFileId}; use fe_parser::ast; use fe_parser::ast::Expr; use fe_parser::node::{Node, Span}; +use indexmap::indexmap; use indexmap::IndexMap; +use std::collections::BTreeMap; use std::rc::Rc; /// A named item. This does not include things inside of /// a function body. #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Item { - // Module // TODO: modules don't have names yet + Ingot(IngotId), + Module(ModuleId), + // Constant // TODO: when `const` is implemented Type(TypeDef), // GenericType probably shouldn't be a separate category. 
// Any of the items inside TypeDef (struct, alias, etc) @@ -47,6 +53,8 @@ impl Item { Item::BuiltinFunction(id) => id.as_ref().to_string(), Item::Object(id) => id.as_ref().to_string(), Item::Constant(id) => id.name(db), + Item::Ingot(id) => id.name(db), + Item::Module(id) => id.name(db), } } @@ -59,6 +67,8 @@ impl Item { Item::BuiltinFunction(_) => None, Item::Object(_) => None, Item::Constant(id) => Some(id.name_span(db)), + Item::Ingot(_) => None, + Item::Module(_) => None, } } @@ -72,6 +82,8 @@ impl Item { Item::BuiltinFunction(_) => true, Item::Object(_) => true, Item::Constant(_) => false, + Item::Ingot(_) => false, + Item::Module(_) => false, } } @@ -84,6 +96,47 @@ impl Item { Item::BuiltinFunction(_) => "function", Item::Object(_) => "object", Item::Constant(_) => "constant", + Item::Ingot(_) => "ingot", + Item::Module(_) => "module", + } + } + + pub fn items(&self, db: &dyn AnalyzerDb) -> Rc> { + match self { + Item::Ingot(_) => todo!("cannot access items in ingots yet"), + Item::Module(module) => module.items(db), + Item::Type(_) => todo!("cannot access items in types yet"), + Item::GenericType(_) + | Item::Event(_) + | Item::Function(_) + | Item::Constant(_) + | Item::BuiltinFunction(_) + | Item::Object(_) => Rc::new(indexmap! {}), + } + } + + pub fn resolve_path(&self, db: &dyn AnalyzerDb, path: &ast::Path) -> Analysis> { + let mut curr_item = *self; + + for node in path.segments.iter() { + curr_item = match curr_item.items(db).get(&node.kind) { + Some(item) => *item, + None => { + return Analysis { + value: None, + diagnostics: Rc::new(vec![errors::error( + "unresolved path item", + node.span, + "not found", + )]), + } + } + } + } + + Analysis { + value: Some(curr_item), + diagnostics: Rc::new(vec![]), } } @@ -96,16 +149,93 @@ impl Item { Item::BuiltinFunction(_) => {} Item::Object(_) => {} Item::Constant(id) => id.sink_diagnostics(db, sink), + Item::Ingot(id) => id.sink_diagnostics(db, sink), + Item::Module(id) => id.sink_diagnostics(db, sink), + } + } +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Default)] +pub struct Global { + ingots: BTreeMap, +} + +#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] +pub struct GlobalId(pub(crate) u32); +impl_intern_key!(GlobalId); +impl GlobalId {} + +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub struct Ingot { + pub name: String, + // pub version: String, + pub global: GlobalId, + // `BTreeMap` implements `Hash`, which is required for an ID. + pub fe_files: BTreeMap, +} + +#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] +pub struct IngotId(pub(crate) u32); +impl_intern_key!(IngotId); +impl IngotId { + pub fn data(&self, db: &dyn AnalyzerDb) -> Rc { + db.lookup_intern_ingot(*self) + } + + pub fn name(&self, db: &dyn AnalyzerDb) -> String { + self.data(db).name.clone() + } + + pub fn main_module(&self, db: &dyn AnalyzerDb) -> Option { + db.ingot_main_module(*self).value + } + + pub fn diagnostics(&self, db: &dyn AnalyzerDb) -> Vec { + let mut diagnostics = vec![]; + self.sink_diagnostics(db, &mut diagnostics); + diagnostics + } + + pub fn all_modules(&self, db: &dyn AnalyzerDb) -> Rc> { + db.ingot_all_modules(*self) + } + + pub fn sink_diagnostics(&self, db: &dyn AnalyzerDb, sink: &mut impl DiagnosticSink) { + let modules = self.all_modules(db); + + for module in modules.iter() { + module.sink_diagnostics(db, sink) } + + sink.push_all(db.ingot_main_module(*self).diagnostics.iter()); } } +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub enum ModuleFileContent { + Dir { + // directories will have a corresponding source file. 
we can remove + // the `dir_path` attribute when this is added. + // file: SourceFileId, + dir_path: String, + }, + File { + file: SourceFileId, + }, +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub enum ModuleContext { + Ingot(IngotId), + Global(GlobalId), +} + #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct Module { + pub name: String, + pub context: ModuleContext, + pub file_content: ModuleFileContent, pub ast: ast::Module, - // When we support multiple files, a module should know its file id, - // but for now this isn't used. - // pub file: SourceFileId, } #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] @@ -116,6 +246,32 @@ impl ModuleId { db.lookup_intern_module(*self) } + pub fn name(&self, db: &dyn AnalyzerDb) -> String { + self.data(db).name.clone() + } + + pub fn file_content(&self, db: &dyn AnalyzerDb) -> ModuleFileContent { + self.data(db).file_content.clone() + } + + pub fn ingot_path(&self, db: &dyn AnalyzerDb) -> String { + match self.context(db) { + ModuleContext::Ingot(ingot) => match self.file_content(db) { + ModuleFileContent::Dir { dir_path } => dir_path, + ModuleFileContent::File { file } => ingot.data(db).fe_files[&file].0.name.clone(), + }, + ModuleContext::Global(_) => panic!("cannot get path"), + } + } + + pub fn ast(&self, db: &dyn AnalyzerDb) -> ast::Module { + self.data(db).ast.clone() + } + + pub fn context(&self, db: &dyn AnalyzerDb) -> ModuleContext { + self.data(db).context.clone() + } + /// Returns a map of the named items in the module pub fn items(&self, db: &dyn AnalyzerDb) -> Rc> { db.module_item_map(*self).value @@ -126,21 +282,41 @@ impl ModuleId { db.module_all_items(*self) } - pub fn imported_items(&self, db: &dyn AnalyzerDb) -> Rc> { - db.module_imported_item_map(*self) + /// Returns a `name -> (name_span, external_item)` map for all `use` statements in a module. + pub fn used_items(&self, db: &dyn AnalyzerDb) -> Rc> { + db.module_used_item_map(*self).value + } + + /// Returns a `name -> (name_span, external_item)` map for a single `use` tree. + pub fn resolve_use_tree( + &self, + db: &dyn AnalyzerDb, + tree: &Node, + ) -> Rc> { + db.module_resolve_use_tree(*self, tree.to_owned()).value } pub fn resolve_name(&self, db: &dyn AnalyzerDb, name: &str) -> Option { - self.items(db) - .get(name) - .copied() - .or_else(|| self.imported_items(db).get(name).copied()) + self.items(db).get(name).copied() + } + + pub fn sub_modules(&self, db: &dyn AnalyzerDb) -> Rc> { + db.module_sub_modules(*self) + } + + pub fn adjacent_modules(&self, db: &dyn AnalyzerDb) -> Rc> { + db.module_adjacent_modules(*self) + } + + pub fn parent_module(&self, db: &dyn AnalyzerDb) -> Option { + db.module_parent_module(*self) } /// All contracts, including duplicates pub fn all_contracts(&self, db: &dyn AnalyzerDb) -> Rc> { db.module_contracts(*self) } + /// All structs, including duplicates pub fn all_structs(&self, db: &dyn AnalyzerDb) -> Rc> { db.module_structs(*self) @@ -155,16 +331,10 @@ impl ModuleId { pub fn sink_diagnostics(&self, db: &dyn AnalyzerDb, sink: &mut impl DiagnosticSink) { let ast::Module { body } = &self.data(db).ast; for stmt in body { - match stmt { - ast::ModuleStmt::Pragma(inner) => { - if let Some(diag) = check_pragma_version(inner) { - sink.push(&diag) - } - } - ast::ModuleStmt::Use(inner) => { - sink.push(&errors::not_yet_implemented("use", inner.span)); + if let ast::ModuleStmt::Pragma(inner) = stmt { + if let Some(diag) = check_pragma_version(inner) { + sink.push(&diag) } - _ => {} // everything else is a type def, handled below. 
} } @@ -211,7 +381,7 @@ impl ModuleConstantId { self.data(db).ast.kind.name.span } - pub fn value(&self, db: &dyn AnalyzerDb) -> fe_parser::ast::Expr { + pub fn value(&self, db: &dyn AnalyzerDb) -> ast::Expr { self.data(db).ast.kind.value.kind.clone() } diff --git a/crates/analyzer/src/namespace/scopes.rs b/crates/analyzer/src/namespace/scopes.rs index d5e1c88a89..1f58b8c3e4 100644 --- a/crates/analyzer/src/namespace/scopes.rs +++ b/crates/analyzer/src/namespace/scopes.rs @@ -2,7 +2,7 @@ use crate::context::{AnalyzerContext, CallType, ExpressionAttributes, FunctionBody, NamedThing}; use crate::errors::{AlreadyDefined, TypeError}; -use crate::namespace::items::{Class, EventId, FunctionId, ModuleId}; +use crate::namespace::items::{Class, EventId, FunctionId, Item, ModuleId}; use crate::namespace::types::FixedSize; use crate::AnalyzerDb; use fe_common::diagnostics::Diagnostic; @@ -39,6 +39,15 @@ impl<'a> AnalyzerContext for ItemScope<'a> { fn add_diagnostic(&mut self, diag: Diagnostic) { self.diagnostics.push(diag) } + fn resolve_path(&mut self, path: &ast::Path) -> Option { + let item = Item::Module(self.module).resolve_path(self.db(), path); + + for diagnostic in item.diagnostics.iter() { + self.add_diagnostic(diagnostic.to_owned()) + } + + item.value.map(NamedThing::Item) + } } pub struct FunctionScope<'a> { @@ -183,6 +192,16 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { .map(NamedThing::Item) }) } + + fn resolve_path(&mut self, path: &ast::Path) -> Option { + let item = Item::Module(self.function.module(self.db())).resolve_path(self.db(), path); + + for diagnostic in item.diagnostics.iter() { + self.add_diagnostic(diagnostic.to_owned()) + } + + item.value.map(NamedThing::Item) + } } pub struct BlockScope<'a, 'b> { @@ -220,6 +239,15 @@ impl AnalyzerContext for BlockScope<'_, '_> { } }) } + fn resolve_path(&mut self, path: &ast::Path) -> Option { + let item = Item::Module(self.root.function.module(self.db())).resolve_path(self.db(), path); + + for diagnostic in item.diagnostics.iter() { + self.add_diagnostic(diagnostic.to_owned()) + } + + item.value.map(NamedThing::Item) + } fn add_diagnostic(&mut self, diag: Diagnostic) { self.root.diagnostics.borrow_mut().push(diag) } diff --git a/crates/analyzer/src/traversal/expressions.rs b/crates/analyzer/src/traversal/expressions.rs index 17370422f7..ad3df903d5 100644 --- a/crates/analyzer/src/traversal/expressions.rs +++ b/crates/analyzer/src/traversal/expressions.rs @@ -17,6 +17,7 @@ use fe_common::diagnostics::Label; use fe_common::numeric; use fe_common::Span; use fe_parser::ast as fe; +use fe_parser::ast; use fe_parser::ast::UnaryOperator; use fe_parser::node::Node; use num_bigint::BigInt; @@ -33,6 +34,7 @@ pub fn expr( ) -> Result { let attributes = match &exp.kind { fe::Expr::Name(_) => expr_name(scope, exp, expected_type), + fe::Expr::Path(_) => expr_path(scope, exp, expected_type), fe::Expr::Num(_) => Ok(expr_num(scope, exp, expected_type.as_int())), fe::Expr::Bool(_) => expr_bool(exp), fe::Expr::Subscript { .. 
} => expr_subscript(scope, exp), @@ -265,7 +267,30 @@ fn expr_name( _ => unreachable!(), }; - match scope.resolve_name(name) { + expr_named_thing(scope, exp, scope.resolve_name(name), expected_type) +} + +fn expr_path( + scope: &mut BlockScope, + exp: &Node, + expected_type: Option<&Type>, +) -> Result { + let path = match &exp.kind { + fe::Expr::Path(path) => path, + _ => unreachable!(), + }; + + let named_thing = scope.resolve_path(path); + expr_named_thing(scope, exp, named_thing, expected_type) +} + +fn expr_named_thing( + scope: &mut BlockScope, + exp: &Node, + named_thing: Option, + expected_type: Option<&Type>, +) -> Result { + match named_thing { Some(NamedThing::Variable { typ, .. }) => { let typ = typ?; let location = Location::assign_location(&typ); @@ -322,14 +347,17 @@ fn expr_name( scope.fancy_error( &format!( "`{}` is a {} name, and can't be used as an expression", - name, item_kind + exp.kind, item_kind ), vec![ Label::primary( def_span, - &format!("`{}` is defined here as a {}", name, item_kind), + &format!("`{}` is defined here as a {}", exp.kind, item_kind), + ), + Label::primary( + exp.span, + &format!("`{}` is used here as a value", exp.kind), ), - Label::primary(exp.span, &format!("`{}` is used here as a value", name)), ], vec![], ) @@ -337,17 +365,17 @@ fn expr_name( scope.error( &format!( "`{}` is a built-in {} name, and can't be used as an expression", - name, item_kind + exp.kind, item_kind ), exp.span, - &format!("`{}` is used here as a value", name), + &format!("`{}` is used here as a value", exp.kind), ) }; Err(FatalError::new(diag)) } None => { let diag = scope.error( - &format!("cannot find value `{}` in this scope", name), + &format!("cannot find value `{}` in this scope", exp.kind), exp.span, "undefined", ); @@ -754,7 +782,8 @@ fn expr_call( args: &Node>>, ) -> Result { let (attributes, call_type) = match &func.kind { - fe::Expr::Name(name) => expr_call_name(scope, name, func.span, generic_args, args)?, + fe::Expr::Name(name) => expr_call_name(scope, name, func, generic_args, args)?, + fe::Expr::Path(path) => expr_call_path(scope, path, func, generic_args, args)?, fe::Expr::Attribute { value, attr } => { // TODO: err if there are generic args expr_call_method(scope, value, attr, generic_args, args)? @@ -794,16 +823,16 @@ fn expr_call( Ok(attributes) } -fn expr_call_name( +fn expr_call_name( scope: &mut BlockScope, name: &str, - name_span: Span, + func: &Node, generic_args: &Option>>, args: &Node>>, ) -> Result<(ExpressionAttributes, CallType), FatalError> { - check_for_call_to_init_fn(scope, name, name_span)?; + check_for_call_to_init_fn(scope, name, func.span)?; - let named_item = scope.resolve_name(name).ok_or_else(|| { + let named_thing = scope.resolve_name(name).ok_or_else(|| { // Check for call to a fn in the current class that takes self. 
if let Some(function) = scope .root @@ -820,7 +849,7 @@ fn expr_call_name( &format!("`{}` is defined here as a function that takes `self`", name), ), Label::primary( - name_span, + func.span, format!("`{}` is called here as a standalone function", name), ), ], @@ -832,15 +861,43 @@ fn expr_call_name( } else { FatalError::new(scope.error( &format!("`{}` is not defined", name), - name_span, + func.span, &format!("`{}` has not been defined in this scope", name), )) } })?; - match named_item { + expr_call_named_thing(scope, named_thing, func, generic_args, args) +} + +fn expr_call_path( + scope: &mut BlockScope, + path: &ast::Path, + func: &Node, + generic_args: &Option>>, + args: &Node>>, +) -> Result<(ExpressionAttributes, CallType), FatalError> { + let named_thing = scope.resolve_path(path).ok_or_else(|| { + FatalError::new(scope.error( + &format!("`{}` is not defined", func.kind), + func.span, + &format!("`{}` has not been defined in this scope", func.kind), + )) + })?; + + expr_call_named_thing(scope, named_thing, func, generic_args, args) +} + +fn expr_call_named_thing( + scope: &mut BlockScope, + named_thing: NamedThing, + func: &Node, + generic_args: &Option>>, + args: &Node>>, +) -> Result<(ExpressionAttributes, CallType), FatalError> { + match named_thing { NamedThing::Item(Item::BuiltinFunction(function)) => { - expr_call_builtin_function(scope, function, name_span, generic_args, args) + expr_call_builtin_function(scope, function, func.span, generic_args, args) } NamedThing::Item(Item::Function(function)) => { expr_call_pure(scope, function, generic_args, args) @@ -848,7 +905,7 @@ fn expr_call_name( NamedThing::Item(Item::Type(id)) => { if let Some(args) = generic_args { scope.fancy_error( - &format!("`{}` type is not generic", name), + &format!("`{}` type is not generic", func.kind), vec![Label::primary( args.span, "unexpected generic argument list", @@ -856,55 +913,77 @@ fn expr_call_name( vec![], ); } - expr_call_type_constructor(scope, id.typ(scope.db())?, name_span, args) + expr_call_type_constructor(scope, id.typ(scope.db())?, func.span, args) } NamedThing::Item(Item::GenericType(generic)) => { let concrete_type = - apply_generic_type_args(scope, generic, name_span, generic_args.as_ref())?; - expr_call_type_constructor(scope, concrete_type, name_span, args) + apply_generic_type_args(scope, generic, func.span, generic_args.as_ref())?; + expr_call_type_constructor(scope, concrete_type, func.span, args) } // Nothing else is callable (for now at least) NamedThing::SelfValue { .. } => Err(FatalError::new(scope.error( "`self` is not callable", - name_span, + func.span, "can't be used as a function", ))), NamedThing::Variable { typ, span, .. 
} => Err(FatalError::new(scope.fancy_error( - &format!("`{}` is not callable", name), + &format!("`{}` is not callable", func.kind), vec![ - Label::secondary(span, format!("`{}` has type `{}`", name, typ?)), - Label::primary(name_span, format!("`{}` can't be used as a function", name)), + Label::secondary(span, format!("`{}` has type `{}`", func.kind, typ?)), + Label::primary( + func.span, + format!("`{}` can't be used as a function", func.kind), + ), ], vec![], ))), NamedThing::Item(Item::Constant(id)) => Err(FatalError::new(scope.error( - &format!("`{}` is not callable", name), - name_span, + &format!("`{}` is not callable", func.kind), + func.span, &format!( "`{}` is a constant of type `{}`, and can't be used as a function", - name, + func.kind, id.typ(scope.db())?, ), ))), NamedThing::Item(Item::Object(_)) => Err(FatalError::new(scope.error( - &format!("`{}` is not callable", name), - name_span, + &format!("`{}` is not callable", func.kind), + func.span, &format!( "`{}` is a built-in object, and can't be used as a function", - name + func.kind ), ))), NamedThing::Item(Item::Event(_)) => Err(FatalError::new(scope.fancy_error( - &format!("`{}` is not callable", name), + &format!("`{}` is not callable", func.kind), vec![Label::primary( - name_span, + func.span, &format!( "`{}` is an event, and can't be constructed in this context", - name + func.kind ), )], - vec![format!("Hint: to emit an event, use `emit {}(..)`", name)], + vec![format!( + "Hint: to emit an event, use `emit {}(..)`", + func.kind + )], + ))), + NamedThing::Item(Item::Ingot(_)) => Err(FatalError::new(scope.error( + &format!("`{}` is not callable", func.kind), + func.span, + &format!( + "`{}` is an ingot, and can't be used as a function", + func.kind + ), + ))), + NamedThing::Item(Item::Module(_)) => Err(FatalError::new(scope.error( + &format!("`{}` is not callable", func.kind), + func.span, + &format!( + "`{}` is a module, and can't be used as a function", + func.kind + ), ))), } } diff --git a/crates/analyzer/src/traversal/types.rs b/crates/analyzer/src/traversal/types.rs index f25b2881fb..b315156e9a 100644 --- a/crates/analyzer/src/traversal/types.rs +++ b/crates/analyzer/src/traversal/types.rs @@ -109,25 +109,37 @@ fn friendly_generic_arg_example_string(generic: GenericType) -> String { format!("Example: `{}<{}>`", generic.name(), example_args.join(", ")) } -pub fn resolve_concrete_type( +pub fn resolve_concrete_type_name( context: &mut dyn AnalyzerContext, name: &str, - name_span: Span, + base_desc: &Node, generic_args: Option<&Node>>, ) -> Result { - let named_item = context.resolve_name(name).ok_or_else(|| { - TypeError::new(context.error( - "undefined type", - name_span, - &format!("`{}` has not been defined", name), - )) - })?; + let named_thing = context.resolve_name(name); + resolve_concrete_type_named_thing(context, named_thing, base_desc, generic_args) +} + +pub fn resolve_concrete_type_path( + context: &mut dyn AnalyzerContext, + path: &ast::Path, + base_desc: &Node, + generic_args: Option<&Node>>, +) -> Result { + let named_thing = context.resolve_path(path); + resolve_concrete_type_named_thing(context, named_thing, base_desc, generic_args) +} - match named_item { - NamedThing::Item(Item::Type(id)) => { +pub fn resolve_concrete_type_named_thing( + context: &mut dyn AnalyzerContext, + named_thing: Option, + base_desc: &Node, + generic_args: Option<&Node>>, +) -> Result { + match named_thing { + Some(NamedThing::Item(Item::Type(id))) => { if let Some(args) = generic_args { context.fancy_error( - &format!("`{}` 
type is not generic", name), + &format!("`{}` type is not generic", base_desc.kind), vec![Label::primary( args.span, "unexpected generic argument list", @@ -137,31 +149,43 @@ pub fn resolve_concrete_type( } id.typ(context.db()) } - NamedThing::Item(Item::GenericType(generic)) => { - apply_generic_type_args(context, generic, name_span, generic_args) + Some(NamedThing::Item(Item::GenericType(generic))) => { + apply_generic_type_args(context, generic, base_desc.span, generic_args) } - _ => Err(TypeError::new(context.fancy_error( - &format!("`{}` is not a type name", name), - if let Some(def_span) = named_item.name_span(context.db()) { + Some(named_thing) => Err(TypeError::new(context.fancy_error( + &format!("`{}` is not a type name", base_desc.kind), + if let Some(def_span) = named_thing.name_span(context.db()) { vec![ Label::primary( def_span, format!( "`{}` is defined here as a {}", - name, - named_item.item_kind_display_name() + base_desc.kind, + named_thing.item_kind_display_name() ), ), - Label::primary(name_span, format!("`{}` is used here as a type", name)), + Label::primary( + base_desc.span, + format!("`{}` is used here as a type", base_desc.kind), + ), ] } else { vec![Label::primary( - name_span, - format!("`{}` is a {}", name, named_item.item_kind_display_name()), + base_desc.span, + format!( + "`{}` is a {}", + base_desc.kind, + named_thing.item_kind_display_name() + ), )] }, vec![], ))), + None => Err(TypeError::new(context.error( + "undefined type", + base_desc.span, + &format!("`{}` has not been defined", base_desc.kind), + ))), } } @@ -171,9 +195,11 @@ pub fn type_desc( desc: &Node, ) -> Result { match &desc.kind { - ast::TypeDesc::Base { base } => resolve_concrete_type(context, base, desc.span, None), + ast::TypeDesc::Base { base } => resolve_concrete_type_name(context, base, desc, None), + ast::TypeDesc::Path(path) => resolve_concrete_type_path(context, path, desc, None), + // generic will need to allow for paths too ast::TypeDesc::Generic { base, args } => { - resolve_concrete_type(context, &base.kind, base.span, Some(args)) + resolve_concrete_type_name(context, &base.kind, base, Some(args)) } ast::TypeDesc::Tuple { items } => { let types = items diff --git a/crates/analyzer/tests/analysis.rs b/crates/analyzer/tests/analysis.rs index 13a39149d5..d89c93655b 100644 --- a/crates/analyzer/tests/analysis.rs +++ b/crates/analyzer/tests/analysis.rs @@ -1,6 +1,8 @@ -use fe_analyzer::namespace::items::{self, Item, TypeDef}; +use fe_analyzer::namespace::items::{ + self, Global, Ingot, Item, Module, ModuleContext, ModuleFileContent, TypeDef, +}; use fe_analyzer::namespace::types::{Event, FixedSize}; -use fe_analyzer::{AnalyzerDb, Db}; +use fe_analyzer::{AnalyzerDb, TestDb}; use fe_common::diagnostics::{diagnostics_string, print_diagnostics, Diagnostic, Label, Severity}; use fe_common::files::FileStore; use fe_parser::node::NodeId; @@ -24,7 +26,7 @@ macro_rules! test_analysis { let mut files = FileStore::new(); let src = test_files::fixture($path); let id = files.add_file($path, src); - let fe_module = match fe_parser::parse_file(id, &src) { + let ast = match fe_parser::parse_file(id, &src) { Ok((module, _)) => module, Err(diags) => { print_diagnostics(&diags, &files); @@ -32,9 +34,21 @@ macro_rules! 
test_analysis { } }; - let db = Db::default(); - let module = db.intern_module(Rc::new(items::Module { ast: fe_module })); - let diagnostics = module.diagnostics(&db); + let db = TestDb::default(); + + let global = Global::default(); + let global_id = db.intern_global(Rc::new(global)); + + let module = Module { + name: "test_module".to_string(), + context: ModuleContext::Global(global_id), + file_content: ModuleFileContent::File { file: id }, + ast, + }; + + let module_id = db.intern_module(Rc::new(module)); + + let diagnostics = module_id.diagnostics(&db); if !diagnostics.is_empty() { print_diagnostics(&diagnostics, &files); panic!("analysis failed") @@ -46,10 +60,64 @@ macro_rules! test_analysis { // for larger diffs. I recommend commenting out all tests but one. fe_common::assert_snapshot_wasm!( concat!("snapshots/analysis__", stringify!($name), ".snap"), - build_snapshot(&files, module, &db) + build_snapshot(&files, module_id, &db) ); } else { - assert_snapshot!(build_snapshot(&files, module, &db)); + assert_snapshot!(build_snapshot(&files, module_id, &db)); + } + } + }; +} + +macro_rules! test_analysis_ingot { + ($name:ident, $path:expr) => { + #[test] + #[wasm_bindgen_test] + fn $name() { + let files = test_files::build_filestore($path); + + let db = TestDb::default(); + + let global = Global::default(); + let global_id = db.intern_global(Rc::new(global)); + + let ingot = Ingot { + name: "test_ingot".to_string(), + global: global_id, + fe_files: files + .files + .values() + .into_iter() + .map(|file| { + ( + file.id, + ( + file.clone(), + fe_parser::parse_file(file.id, &file.content).unwrap().0, + ), + ) + }) + .collect(), + }; + let ingot_id = db.intern_ingot(Rc::new(ingot)); + + let snapshot = ingot_id + .all_modules(&db) + .iter() + .map(|module_id| build_snapshot(&files, *module_id, &db)) + .collect::>() + .join("\n"); + + if cfg!(target_arch = "wasm32") { + // NOTE: If this assertion fails, the generation of the output diff + // is very slow on wasm, and may result in an out-of-memory error + // for larger diffs. I recommend commenting out all tests but one. + fe_common::assert_snapshot_wasm!( + concat!("snapshots/analysis__", stringify!($name), ".snap"), + snapshot + ); + } else { + assert_snapshot!(snapshot); } } }; @@ -164,6 +232,8 @@ test_analysis! { data_copying_stress, "stress/data_copying_stress.fe"} test_analysis! { tuple_stress, "stress/tuple_stress.fe"} test_analysis! { type_aliases, "features/type_aliases.fe"} +test_analysis_ingot! { basic_ingot, "ingots/basic_ingot"} + fn build_snapshot(file_store: &FileStore, module: items::ModuleId, db: &dyn AnalyzerDb) -> String { let diagnostics = module .all_items(db) @@ -223,6 +293,8 @@ fn build_snapshot(file_store: &FileStore, module: items::ModuleId, db: &dyn Anal | Item::Type(TypeDef::Primitive(_)) | Item::GenericType(_) | Item::BuiltinFunction(_) + | Item::Ingot(_) + | Item::Module(_) | Item::Object(_) => vec![], }) .flatten() diff --git a/crates/analyzer/tests/errors.rs b/crates/analyzer/tests/errors.rs index 94301a0490..f61b8cc762 100644 --- a/crates/analyzer/tests/errors.rs +++ b/crates/analyzer/tests/errors.rs @@ -1,16 +1,22 @@ //! 
Tests for contracts that should cause compile errors -use fe_analyzer::Db; +use fe_analyzer::namespace::items; +use fe_analyzer::namespace::items::{Global, ModuleFileContent}; +use fe_analyzer::AnalyzerDb; +use fe_analyzer::TestDb; use fe_common::diagnostics::{diagnostics_string, print_diagnostics}; use fe_common::files::FileStore; +use fe_parser::parse_file; use insta::assert_snapshot; +use std::rc::Rc; +use test_files::build_filestore; use wasm_bindgen_test::wasm_bindgen_test; fn error_string(path: &str, src: &str) -> String { let mut files = FileStore::new(); let id = files.add_file(path, src); - let fe_module = match fe_parser::parse_file(id, src) { + let ast = match fe_parser::parse_file(id, src) { Ok((module, _)) => module, Err(diags) => { print_diagnostics(&diags, &files); @@ -18,13 +24,77 @@ fn error_string(path: &str, src: &str) -> String { } }; - let db = Db::default(); - match fe_analyzer::analyze(&db, fe_module) { + let db = TestDb::default(); + + let global = Global::default(); + let global_id = db.intern_global(Rc::new(global)); + + let module = items::Module { + name: path.to_string(), + context: items::ModuleContext::Global(global_id), + file_content: ModuleFileContent::File { file: id }, + ast, + }; + + let module_id = db.intern_module(Rc::new(module)); + + match fe_analyzer::analyze_module(&db, module_id) { + Ok(_) => panic!("expected analysis to fail with an error"), + Err(diags) => diagnostics_string(&diags, &files), + } +} + +fn error_string_ingot(path: &str) -> String { + let files = build_filestore(path); + + let db = TestDb::default(); + + let global = Global::default(); + let global_id = db.intern_global(Rc::new(global)); + + let ingot = items::Ingot { + name: path.to_string(), + global: global_id, + fe_files: files + .files + .values() + .into_iter() + .map(|file| { + ( + file.id, + (file.clone(), parse_file(file.id, &file.content).unwrap().0), + ) + }) + .collect(), + }; + + let ingot_id = db.intern_ingot(Rc::new(ingot)); + + match fe_analyzer::analyze_ingot(&db, ingot_id) { Ok(_) => panic!("expected analysis to fail with an error"), Err(diags) => diagnostics_string(&diags, &files), } } +macro_rules! test_ingot { + ($name:ident) => { + #[test] + #[wasm_bindgen_test] + fn $name() { + let path = concat!("compile_errors/", stringify!($name)); + + if cfg!(target_arch = "wasm32") { + fe_common::assert_snapshot_wasm!( + concat!("snapshots/errors__", stringify!($name), ".snap"), + error_string_ingot(&path) + ); + } else { + assert_snapshot!(error_string_ingot(&path)); + } + } + }; +} + macro_rules! test_file { ($name:ident) => { #[test] @@ -273,3 +343,6 @@ test_file! { self_not_first } test_file! { self_in_standalone_fn } test_file! { unsafe_misuse } test_file! { unsafe_nesting } + +test_ingot! { bad_ingot } +test_ingot! 
{ mainless_ingot } diff --git a/crates/analyzer/tests/snapshots/analysis__basic_ingot.snap b/crates/analyzer/tests/snapshots/analysis__basic_ingot.snap new file mode 100644 index 0000000000..99c88f32c5 --- /dev/null +++ b/crates/analyzer/tests/snapshots/analysis__basic_ingot.snap @@ -0,0 +1,195 @@ +--- +source: crates/analyzer/tests/analysis.rs +expression: snapshot + +--- +note: + ┌─ ingots/basic_ingot/src/ding/dang.fe:1:1 + │ +1 │ type Dang = Array + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Array + + +note: + ┌─ ingots/basic_ingot/src/main.fe:9:5 + │ + 9 │ ╭ pub fn get_my_baz() -> Baz: +10 │ │ return Baz(my_bool=true, my_u256=26) + │ ╰────────────────────────────────────────────^ attributes hash: 10853850528666400742 + │ + = FunctionSignature { + self_decl: None, + params: [], + return_type: Ok( + Struct( + Struct { + name: "Baz", + id: StructId( + 0, + ), + field_count: 2, + }, + ), + ), + } + +note: + ┌─ ingots/basic_ingot/src/main.fe:10:28 + │ +10 │ return Baz(my_bool=true, my_u256=26) + │ ^^^^ ^^ u256: Value + │ │ + │ bool: Value + +note: + ┌─ ingots/basic_ingot/src/main.fe:10:16 + │ +10 │ return Baz(my_bool=true, my_u256=26) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Baz: Memory + +note: + ┌─ ingots/basic_ingot/src/main.fe:10:16 + │ +10 │ return Baz(my_bool=true, my_u256=26) + │ ^^^ TypeConstructor(Struct(Struct { name: "Baz", id: StructId(0), field_count: 2 })) + +note: + ┌─ ingots/basic_ingot/src/main.fe:12:5 + │ +12 │ ╭ pub fn get_my_bing() -> Bong: +13 │ │ return Bong(my_address=address(42)) + │ ╰───────────────────────────────────────────^ attributes hash: 14834639838018463348 + │ + = FunctionSignature { + self_decl: None, + params: [], + return_type: Ok( + Struct( + Struct { + name: "Bing", + id: StructId( + 1, + ), + field_count: 1, + }, + ), + ), + } + +note: + ┌─ ingots/basic_ingot/src/main.fe:13:40 + │ +13 │ return Bong(my_address=address(42)) + │ ^^ u256: Value + +note: + ┌─ ingots/basic_ingot/src/main.fe:13:32 + │ +13 │ return Bong(my_address=address(42)) + │ ^^^^^^^^^^^ address: Value + +note: + ┌─ ingots/basic_ingot/src/main.fe:13:16 + │ +13 │ return Bong(my_address=address(42)) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Bing: Memory + +note: + ┌─ ingots/basic_ingot/src/main.fe:13:32 + │ +13 │ return Bong(my_address=address(42)) + │ ^^^^^^^ TypeConstructor(Base(Address)) + +note: + ┌─ ingots/basic_ingot/src/main.fe:13:16 + │ +13 │ return Bong(my_address=address(42)) + │ ^^^^ TypeConstructor(Struct(Struct { name: "Bing", id: StructId(1), field_count: 1 })) + +note: + ┌─ ingots/basic_ingot/src/main.fe:15:5 + │ +15 │ ╭ pub fn get_42() -> u256: +16 │ │ return get_42_backend() + │ ╰───────────────────────────────^ attributes hash: 17979516652885443340 + │ + = FunctionSignature { + self_decl: None, + params: [], + return_type: Ok( + Base( + Numeric( + U256, + ), + ), + ), + } + +note: + ┌─ ingots/basic_ingot/src/main.fe:16:16 + │ +16 │ return get_42_backend() + │ ^^^^^^^^^^^^^^^^ u256: Value + +note: + ┌─ ingots/basic_ingot/src/main.fe:16:16 + │ +16 │ return get_42_backend() + │ ^^^^^^^^^^^^^^ Pure(FunctionId(3)) + + +note: + ┌─ ingots/basic_ingot/src/ding/dong.fe:2:3 + │ +2 │ my_address: address + │ ^^^^^^^^^^^^^^^^^^^ address +3 │ my_u256: u256 + │ ^^^^^^^^^^^^^ u256 +4 │ my_i8: i8 + │ ^^^^^^^^^ i8 + + +note: + ┌─ ingots/basic_ingot/src/bar/baz.fe:2:5 + │ +2 │ my_bool: bool + │ ^^^^^^^^^^^^^ bool +3 │ my_u256: u256 + │ ^^^^^^^^^^^^^ u256 + + +note: + ┌─ ingots/basic_ingot/src/bing.fe:2:5 + │ +2 │ my_address: address + │ ^^^^^^^^^^^^^^^^^^^ address + +note: + ┌─ ingots/basic_ingot/src/bing.fe:4:1 + │ +4 
│ ╭ fn get_42_backend() -> u256: +5 │ │ return 42 + │ ╰─────────────^ attributes hash: 17979516652885443340 + │ + = FunctionSignature { + self_decl: None, + params: [], + return_type: Ok( + Base( + Numeric( + U256, + ), + ), + ), + } + +note: + ┌─ ingots/basic_ingot/src/bing.fe:5:12 + │ +5 │ return 42 + │ ^^ u256: Value + + + + + diff --git a/crates/analyzer/tests/snapshots/errors__bad_ingot.snap b/crates/analyzer/tests/snapshots/errors__bad_ingot.snap new file mode 100644 index 0000000000..5dddefee65 --- /dev/null +++ b/crates/analyzer/tests/snapshots/errors__bad_ingot.snap @@ -0,0 +1,72 @@ +--- +source: crates/analyzer/tests/errors.rs +expression: error_string_ingot(&path) + +--- +error: cannot glob import from type + ┌─ compile_errors/bad_ingot/src/foo.fe:1:11 + │ +1 │ use bing::Bong::* + │ ^^^^ prefix item must be a module + +error: unresolved path item + ┌─ compile_errors/bad_ingot/src/foo.fe:2:11 + │ +2 │ use bing::Tong + │ ^^^^ not found + +error: unresolved path item + ┌─ compile_errors/bad_ingot/src/main.fe:2:5 + │ +2 │ use bar::Baz + │ ^^^ not found + +error: unresolved path item + ┌─ compile_errors/bad_ingot/src/main.fe:3:33 + │ +3 │ use biz::bad::{Bur, Bud as Bar, Boo} + │ ^^^ not found + +error: unresolved path item + ┌─ compile_errors/bad_ingot/src/main.fe:4:10 + │ +4 │ use biz::Bark + │ ^^^^ not found + +error: unresolved path item + ┌─ compile_errors/bad_ingot/src/main.fe:5:5 + │ +5 │ use none::* + │ ^^^^ not found + +error: a type with the same name has already been imported + ┌─ compile_errors/bad_ingot/src/main.fe:3:16 + │ +3 │ use biz::bad::{Bur, Bud as Bar, Boo} + │ ^^^ `Bur` first defined here + · +6 │ use bing::Bong as Bur + │ --- `Bur` redefined here + +error: import name conflicts with built-in type + ┌─ compile_errors/bad_ingot/src/main.fe:7:17 + │ +7 │ use foo::Bar as address + │ ^^^^^^^ `address` is a built-in type + +error: a type with the same name has already been imported + ┌─ compile_errors/bad_ingot/src/main.fe:3:28 + │ +3 │ use biz::bad::{Bur, Bud as Bar, Boo} + │ ^^^ `Bar` first defined here + · +9 │ contract Bar: + │ --- `Bar` redefined here + +error: incorrect type for `Foo` argument `my_num` + ┌─ compile_errors/bad_ingot/src/main.fe:12:27 + │ +12 │ return Foo(my_num=true) + │ ^^^^ this has type `bool`; expected type `u256` + + diff --git a/crates/analyzer/tests/snapshots/errors__mainless_ingot.snap b/crates/analyzer/tests/snapshots/errors__mainless_ingot.snap new file mode 100644 index 0000000000..1428617924 --- /dev/null +++ b/crates/analyzer/tests/snapshots/errors__mainless_ingot.snap @@ -0,0 +1,9 @@ +--- +source: crates/analyzer/tests/errors.rs +expression: error_string_ingot(&path) + +--- +error: The ingot named "compile_errors/mainless_ingot" is missing a main module. +Please add a `src/main.fe` file to the base directory. 
+ + diff --git a/crates/analyzer/tests/snapshots/errors__type_constructor_from_variable.snap b/crates/analyzer/tests/snapshots/errors__type_constructor_from_variable.snap deleted file mode 100644 index 2b2c088fe0..0000000000 --- a/crates/analyzer/tests/snapshots/errors__type_constructor_from_variable.snap +++ /dev/null @@ -1,12 +0,0 @@ ---- -source: crates/analyzer/tests/errors.rs -expression: "error_string(\"[snippet]\", &src)" - ---- -error: type mismatch - ┌─ [snippet]:4:20 - │ -4 │ let y: u16 = u16(x) - │ ^ expected a number literal - - diff --git a/crates/common/src/files.rs b/crates/common/src/files.rs index cc073cb96f..0412c04c81 100644 --- a/crates/common/src/files.rs +++ b/crates/common/src/files.rs @@ -8,14 +8,15 @@ use std::ops::Range; use std::path::Path; use std::{fs, io}; +#[derive(PartialEq, Clone, Eq, Hash, Debug)] pub struct SourceFile { - id: SourceFileId, - name: String, - content: String, + pub id: SourceFileId, + pub name: String, + pub content: String, line_starts: Vec, } -#[derive(PartialEq, Copy, Clone, Eq, Hash, Debug, Default)] +#[derive(PartialEq, Copy, Clone, Eq, Hash, Debug, PartialOrd, Ord, Default)] pub struct SourceFileId(pub u128); impl SourceFile { @@ -59,7 +60,7 @@ impl FileLoader for OsFileLoader { } pub struct FileStore { - files: HashMap, + pub files: HashMap, loader: Box, } @@ -94,6 +95,10 @@ impl FileStore { pub fn get_file(&self, id: SourceFileId) -> Option<&SourceFile> { self.files.get(&id) } + + pub fn all_files(&self) -> Vec { + self.files.keys().copied().collect() + } } impl<'a> cs::files::Files<'a> for FileStore { diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs index b6c4f0214b..677986c163 100644 --- a/crates/common/src/lib.rs +++ b/crates/common/src/lib.rs @@ -5,6 +5,8 @@ mod span; pub mod utils; pub use span::{Span, Spanned}; pub mod panic; +mod upcast; +pub use upcast::Upcast; #[macro_export] #[cfg(target_arch = "wasm32")] diff --git a/crates/common/src/upcast.rs b/crates/common/src/upcast.rs new file mode 100644 index 0000000000..d73fa52a1e --- /dev/null +++ b/crates/common/src/upcast.rs @@ -0,0 +1,3 @@ +pub trait Upcast { + fn upcast(&self) -> &T; +} diff --git a/crates/driver/Cargo.toml b/crates/driver/Cargo.toml index b9d9b2872c..8e968f910b 100644 --- a/crates/driver/Cargo.toml +++ b/crates/driver/Cargo.toml @@ -19,3 +19,4 @@ fe-lowering = {path = "../lowering", version = "^0.10.0-alpha"} fe-parser = {path = "../parser", version = "^0.10.0-alpha"} fe-yulgen = {path = "../yulgen", version = "^0.10.0-alpha"} fe-yulc = {path = "../yulc", version = "^0.10.0-alpha", features = ["solc-backend"], optional = true} +indexmap = "1.6.2" diff --git a/crates/driver/src/lib.rs b/crates/driver/src/lib.rs index 19a57c93c1..378469ddd0 100644 --- a/crates/driver/src/lib.rs +++ b/crates/driver/src/lib.rs @@ -1,16 +1,20 @@ -use fe_analyzer::Db; -use fe_common::diagnostics::{print_diagnostics, Diagnostic}; +use fe_analyzer::namespace::items::{Global, Ingot, Module, ModuleContext, ModuleFileContent}; +use fe_analyzer::AnalyzerDb; +use fe_common::diagnostics::Diagnostic; use fe_common::files::{FileStore, SourceFileId}; use fe_parser::parse_file; +use fe_yulgen::Db; +use indexmap::IndexMap; #[cfg(feature = "solc-backend")] use serde_json::Value; -use std::collections::HashMap; +use std::path::Path; +use std::rc::Rc; /// The artifacts of a compiled module. pub struct CompiledModule { pub src_ast: String, pub lowered_ast: String, - pub contracts: HashMap, + pub contracts: IndexMap, } /// The artifacts of a compiled contract. 
@@ -24,29 +28,44 @@ pub struct CompiledContract { #[derive(Debug)] pub struct CompileError(pub Vec); -/// Compiles the given Fe source code to all targets. +/// Compiles a single input file. /// /// If `with_bytecode` is set to false, the compiler will skip the final Yul -> /// Bytecode pass. This is useful when debugging invalid Yul code. -pub fn compile( +pub fn compile_module( files: &FileStore, file_id: SourceFileId, - src: &str, _with_bytecode: bool, _optimize: bool, ) -> Result { - // parse source + let file = files.get_file(file_id).expect("missing file"); + let src = &file.content; let mut errors = vec![]; - let (fe_module, parser_diagnostics) = parse_file(file_id, src).map_err(CompileError)?; + let (ast, parser_diagnostics) = parse_file(file_id, src).map_err(CompileError)?; errors.extend(parser_diagnostics.into_iter()); - - let src_ast = format!("{:#?}", &fe_module); + let src_ast = format!("{:#?}", &ast); let db = Db::default(); - let module_id = match fe_analyzer::analyze(&db, fe_module) { - Ok(module_id) => module_id, + + let global = Global::default(); + let global_id = db.intern_global(Rc::new(global)); + + let module = Module { + name: Path::new(&file.name) + .file_name() + .expect("missing file name") + .to_string_lossy() + .to_string(), + context: ModuleContext::Global(global_id), + file_content: ModuleFileContent::File { file: file_id }, + ast, + }; + let module_id = db.intern_module(Rc::new(module)); + + match fe_analyzer::analyze_module(&db, module_id) { + Ok(_) => {} Err(diagnostics) => { errors.extend(diagnostics.into_iter()); return Err(CompileError(errors)); @@ -62,18 +81,131 @@ pub fn compile( let json_abis = fe_abi::build(&db, module_id).expect("failed to generate abi"); // lower the AST - let lowered_module = fe_lowering::lower(&db, module_id); - let lowered_ast = format!("{:#?}", &lowered_module); - - // analyze the lowered AST - let lowered_module_id = match fe_analyzer::analyze(&db, lowered_module) { - Ok(id) => id, - Err(diags) => { - print_diagnostics(&diags, files); - panic!("Internal compiler errror: failed to analyze lowered AST"); + let lowered_module_id = fe_lowering::lower_module(&db, module_id); + let lowered_ast = format!("{:#?}", &lowered_module_id.ast(&db)); + + fe_analyzer::analyze_module(&db, lowered_module_id).expect("failed to analyze lowered AST"); + + // compile to yul + let yul_contracts = fe_yulgen::compile(&db, lowered_module_id); + + // compile to bytecode if required + #[cfg(feature = "solc-backend")] + let bytecode_contracts = if _with_bytecode { + match fe_yulc::compile(yul_contracts.clone(), _optimize) { + Err(error) => { + for error in serde_json::from_str::(&error.0) + .expect("unable to deserialize json output")["errors"] + .as_array() + .expect("errors not an array") + { + eprintln!( + "Error: {}", + error["formattedMessage"] + .as_str() + .expect("error value not a string") + .replace("\\\n", "\n") + ) + } + + panic!("Yul compilation failed with the above errors") + } + Ok(contracts) => contracts, } + } else { + IndexMap::new() }; + // combine all of the named contract maps + let contracts = json_abis + .keys() + .map(|name| { + ( + name.to_owned(), + CompiledContract { + json_abi: json_abis[name].to_owned(), + yul: yul_contracts[name].to_owned(), + #[cfg(feature = "solc-backend")] + bytecode: if _with_bytecode { + bytecode_contracts[name].to_owned() + } else { + "".to_string() + }, + }, + ) + }) + .collect::>(); + + Ok(CompiledModule { + src_ast, + lowered_ast, + contracts, + }) +} + +/// Compiles a set of input files. 
+/// +/// If `with_bytecode` is set to false, the compiler will skip the final Yul -> +/// Bytecode pass. This is useful when debugging invalid Yul code. +pub fn compile_ingot( + name: &str, + files: &FileStore, + file_ids: &[SourceFileId], + _with_bytecode: bool, + _optimize: bool, +) -> Result { + let mut errors = vec![]; + + let db = Db::default(); + + let global = Global::default(); + let global_id = db.intern_global(Rc::new(global)); + + let ingot = Ingot { + name: name.to_string(), + global: global_id, + fe_files: file_ids + .iter() + .map(|file_id| { + let file = files.get_file(*file_id).expect("missing file for ID"); + let (ast, parser_diagnostics) = + parse_file(*file_id, &file.content).map_err(CompileError)?; + errors.extend(parser_diagnostics.into_iter()); + Ok((*file_id, (file.to_owned(), ast))) + }) + .collect::>()?, + }; + let ingot_id = db.intern_ingot(Rc::new(ingot)); + + match fe_analyzer::analyze_ingot(&db, ingot_id) { + Ok(_) => {} + Err(diagnostics) => { + errors.extend(diagnostics.into_iter()); + return Err(CompileError(errors)); + } + }; + + if !errors.is_empty() { + // There was a non-fatal parser error (eg missing parens in a fn def `fn foo: ...`) + return Err(CompileError(errors)); + } + + let module_id = ingot_id.main_module(&db).expect("missing main module"); + + // build abi + let json_abis = fe_abi::build(&db, module_id).expect("failed to generate abi"); + let src_ast = format!("{:#?}", &module_id.ast(&db)); + + let lowered_ingot_id = fe_lowering::lower_ingot(&db, ingot_id); + + fe_analyzer::analyze_ingot(&db, lowered_ingot_id).expect("failed to analyze lowered AST"); + + let lowered_module_id = lowered_ingot_id + .main_module(&db) + .expect("missing main module"); + + let lowered_ast = format!("{:#?}", &lowered_module_id.ast(&db)); + // compile to yul let yul_contracts = fe_yulgen::compile(&db, lowered_module_id); @@ -95,13 +227,12 @@ pub fn compile( .replace("\\\n", "\n") ) } - panic!("Yul compilation failed with the above errors") } Ok(contracts) => contracts, } } else { - std::collections::HashMap::new() + IndexMap::new() }; // combine all of the named contract maps @@ -122,7 +253,7 @@ pub fn compile( }, ) }) - .collect::>(); + .collect::>(); Ok(CompiledModule { src_ast, diff --git a/crates/fe/Cargo.toml b/crates/fe/Cargo.toml index 9476140995..e5d2640a66 100644 --- a/crates/fe/Cargo.toml +++ b/crates/fe/Cargo.toml @@ -15,6 +15,7 @@ solc-backend = ["fe-driver/solc-backend"] [dependencies] clap = "2.33.3" +walkdir = "2" fe-common = {path = "../common", version = "^0.10.0-alpha"} fe-driver = {path = "../driver", version = "^0.10.0-alpha"} fe-parser = {path = "../parser", version = "^0.10.0-alpha"} diff --git a/crates/fe/src/main.rs b/crates/fe/src/main.rs index 48fbc78d98..9002bed945 100644 --- a/crates/fe/src/main.rs +++ b/crates/fe/src/main.rs @@ -10,6 +10,8 @@ use fe_common::diagnostics::print_diagnostics; use fe_common::files::{FileStore, SourceFileId}; use fe_common::panic::install_panic_hook; use fe_driver::CompiledModule; +use std::ffi::OsStr; +use walkdir::WalkDir; const DEFAULT_OUTPUT_DIR_NAME: &str = "output"; const VERSION: &str = env!("CARGO_PKG_VERSION"); @@ -72,7 +74,7 @@ pub fn main() { ) .get_matches(); - let input_file = matches.value_of("input").unwrap(); + let input_path = matches.value_of("input").unwrap(); let output_dir = matches.value_of("output-dir").unwrap(); let overwrite = matches.is_present("overwrite"); let optimize = matches.value_of("optimize") == Some("true"); @@ -84,24 +86,56 @@ pub fn main() { eprintln!("Warning: bytecode 
output requires 'solc-backend' feature. Try `cargo build --release --features solc-backend`. Skipping."); } - let mut files = FileStore::new(); - let file = files.load_file(input_file).map_err(ioerr_to_string); - if let Err(err) = file { - eprintln!("Failed to load file: `{}`. Error: {}", input_file, err); - std::process::exit(1); - } - let (content, id) = file.unwrap(); + let (content, compiled_module) = if Path::new(input_path).is_file() { + let mut files = FileStore::new(); + let file = files.load_file(input_path).map_err(ioerr_to_string); + + let (content, id) = match file { + Err(err) => { + eprintln!("Failed to load file: `{}`. Error: {}", input_path, err); + std::process::exit(1) + } + Ok(file) => file, + }; + + let compiled_module = match fe_driver::compile_module(&files, id, with_bytecode, optimize) { + Ok(module) => module, + Err(error) => { + eprintln!("Unable to compile {}.", input_path); + print_diagnostics(&error.0, &files); + std::process::exit(1) + } + }; + (content, compiled_module) + } else { + let files = build_ingot_filestore_for_dir(input_path); - let compiled_module = match fe_driver::compile(&files, id, &content, with_bytecode, optimize) { - Ok(module) => module, - Err(error) => { - eprintln!("Unable to compile {}.", input_file); - print_diagnostics(&error.0, &files); + if !Path::new(input_path).exists() { + eprintln!("Input directory does not exist: `{}`.", input_path); std::process::exit(1) } + + let compiled_module = match fe_driver::compile_ingot( + input_path, + &files, + &files.all_files(), + with_bytecode, + optimize, + ) { + Ok(module) => module, + Err(error) => { + eprintln!("Unable to compile {}.", input_path); + print_diagnostics(&error.0, &files); + std::process::exit(1) + } + }; + + // no file content for ingots + ("".to_string(), compiled_module) }; + match write_compiled_module(compiled_module, &content, &targets, output_dir, overwrite) { - Ok(_) => println!("Compiled {}. Outputs in `{}`", input_file, output_dir), + Ok(_) => println!("Compiled {}. Outputs in `{}`", input_path, output_dir), Err(err) => { eprintln!( "Failed to write output to directory: `{}`. Error: {}", @@ -112,6 +146,27 @@ pub fn main() { } } +fn build_ingot_filestore_for_dir(path: &str) -> FileStore { + let path = Path::new(path); + let walker = WalkDir::new(path); + let mut files = FileStore::new(); + + for entry in walker { + let entry = entry.unwrap(); + let file_path = &entry.path().to_string_lossy().to_string(); + + if entry.path().extension() == Some(OsStr::new("fe")) { + let file = files.load_file(file_path); + if let Err(err) = file { + eprintln!("Failed to load file: `{}`. Error: {}", &file_path, err); + std::process::exit(1) + } + } + } + + files +} + fn write_compiled_module( mut module: CompiledModule, file_content: &str, @@ -149,7 +204,7 @@ fn write_compiled_module( write_output(&output_dir.join("module.tokens"), &format!("{:#?}", tokens))?; } - for (name, contract) in module.contracts.drain() { + for (name, contract) in module.contracts.drain(0..) 
{ let contract_output_dir = output_dir.join(&name); fs::create_dir_all(&contract_output_dir).map_err(ioerr_to_string)?; diff --git a/crates/lowering/Cargo.toml b/crates/lowering/Cargo.toml index ab5dc02345..c561c242a1 100644 --- a/crates/lowering/Cargo.toml +++ b/crates/lowering/Cargo.toml @@ -11,6 +11,7 @@ fe-common = {path = "../common", version = "^0.10.0-alpha"} fe-parser = {path = "../parser", version = "^0.10.0-alpha"} fe-analyzer = {path = "../analyzer", version = "^0.10.0-alpha"} indexmap = "1.6.2" +salsa = "0.16.1" [dev-dependencies] rstest = "0.6.4" diff --git a/crates/lowering/src/db.rs b/crates/lowering/src/db.rs new file mode 100644 index 0000000000..e5a5dd316d --- /dev/null +++ b/crates/lowering/src/db.rs @@ -0,0 +1,33 @@ +use fe_analyzer::namespace::items::{IngotId, ModuleId}; +use fe_analyzer::AnalyzerDb; +use fe_common::Upcast; + +mod queries; + +#[salsa::query_group(LoweringDbStorage)] +pub trait LoweringDb: AnalyzerDb + Upcast { + #[salsa::invoke(queries::lowered_ingot)] + fn lowered_ingot(&self, ingot: IngotId) -> IngotId; + + #[salsa::invoke(queries::lowered_module)] + fn lowered_module(&self, module: ModuleId) -> ModuleId; +} + +#[salsa::database(fe_analyzer::db::AnalyzerDbStorage, LoweringDbStorage)] +#[derive(Default)] +pub struct TestDb { + storage: salsa::Storage, +} +impl salsa::Database for TestDb {} + +impl Upcast for TestDb { + fn upcast(&self) -> &(dyn LoweringDb + 'static) { + &*self + } +} + +impl Upcast for TestDb { + fn upcast(&self) -> &(dyn AnalyzerDb + 'static) { + &*self + } +} diff --git a/crates/lowering/src/db/queries.rs b/crates/lowering/src/db/queries.rs new file mode 100644 index 0000000000..f6ac7f6536 --- /dev/null +++ b/crates/lowering/src/db/queries.rs @@ -0,0 +1,33 @@ +use crate::db::LoweringDb; +use crate::mappers; +use fe_analyzer::namespace::items::{IngotId, ModuleFileContent, ModuleId}; +use std::rc::Rc; + +pub fn lowered_module(db: &dyn LoweringDb, module_id: ModuleId) -> ModuleId { + let db = db.upcast(); + let mut module = (*module_id.data(db)).clone(); + module.ast = mappers::module::module(db, module_id); + db.intern_module(Rc::new(module)) +} + +pub fn lowered_ingot(lowering_db: &dyn LoweringDb, ingot_id: IngotId) -> IngotId { + let db = lowering_db.upcast(); + + let mut ingot = (*ingot_id.data(db)).clone(); + ingot.fe_files = ingot_id + .all_modules(db) + .iter() + .filter_map(|module_id| match module_id.file_content(db) { + // dir modules do not yet have ASTs to lower + ModuleFileContent::Dir { .. } => None, + ModuleFileContent::File { file: file_id } => Some(( + file_id, + ( + ingot_id.data(db).fe_files[&file_id].0.clone(), + lowered_module(lowering_db, *module_id).ast(db), + ), + )), + }) + .collect(); + db.intern_ingot(Rc::new(ingot)) +} diff --git a/crates/lowering/src/lib.rs b/crates/lowering/src/lib.rs index 0cab9df263..c394bed47f 100644 --- a/crates/lowering/src/lib.rs +++ b/crates/lowering/src/lib.rs @@ -1,15 +1,25 @@ //! Fe Lowering. -use fe_analyzer::namespace::items::ModuleId; -use fe_analyzer::AnalyzerDb; -use fe_parser::ast; +use fe_analyzer::namespace::items::{IngotId, ModuleId}; mod context; +pub mod db; mod mappers; mod names; mod utils; -/// Lowers the Fe source AST to a Fe HIR AST. -pub fn lower(db: &dyn AnalyzerDb, module: ModuleId) -> ast::Module { - mappers::module::module(db, module) +pub use db::{LoweringDb, TestDb}; + +/// Lower a Fe module +/// +/// Interns a module with the lowered AST and returns its ID. 
+pub fn lower_module(db: &dyn LoweringDb, module_id: ModuleId) -> ModuleId { + db.lowered_module(module_id) +} + +/// Lower a Fe ingot +/// +/// Interns an ingot with the lowered module ASTs and returns its ID. +pub fn lower_ingot(db: &dyn LoweringDb, ingot_id: IngotId) -> IngotId { + db.lowered_ingot(ingot_id) } diff --git a/crates/lowering/src/mappers/expressions.rs b/crates/lowering/src/mappers/expressions.rs index 0f624f18f0..04de57e054 100644 --- a/crates/lowering/src/mappers/expressions.rs +++ b/crates/lowering/src/mappers/expressions.rs @@ -12,6 +12,7 @@ pub fn expr(context: &mut FnContext, exp: Node) -> Node { let lowered_kind = match exp.kind { fe::Expr::Name(_) => expr_name(context, exp), + fe::Expr::Path(_) => exp.kind, fe::Expr::Num(_) => exp.kind, fe::Expr::Bool(_) => exp.kind, fe::Expr::Subscript { value, index } => fe::Expr::Subscript { diff --git a/crates/lowering/src/mappers/module.rs b/crates/lowering/src/mappers/module.rs index cf6c152d1b..01b2ca56a9 100644 --- a/crates/lowering/src/mappers/module.rs +++ b/crates/lowering/src/mappers/module.rs @@ -63,6 +63,8 @@ pub fn module(db: &dyn AnalyzerDb, module: ModuleId) -> ast::Module { // All name expressions referring to constants are handled at the time of lowering, // which causes the constants to no longer serve a purpose. Item::Constant(_) => None, + Item::Ingot(_) => unreachable!("ingots cannot be defined in a module"), + Item::Module(_) => unreachable!("modules cannot be defined in modules (at least not yet)"), })); let struct_defs_from_tuples = context diff --git a/crates/lowering/src/mappers/types.rs b/crates/lowering/src/mappers/types.rs index d2e00b28b6..6bafc00c59 100644 --- a/crates/lowering/src/mappers/types.rs +++ b/crates/lowering/src/mappers/types.rs @@ -6,7 +6,7 @@ use fe_parser::node::Node; pub fn type_desc(context: &mut ModuleContext, desc: Node, typ: &Type) -> Node { match desc.kind { - TypeDesc::Unit | TypeDesc::Base { .. } => desc, + TypeDesc::Unit | TypeDesc::Base { .. 
} | TypeDesc::Path(_) => desc, TypeDesc::Tuple { items } => { let typ = typ.as_tuple().expect("expected tuple type"); diff --git a/crates/lowering/tests/lowering.rs b/crates/lowering/tests/lowering.rs index 06e297b38b..5a34f2a3b8 100644 --- a/crates/lowering/tests/lowering.rs +++ b/crates/lowering/tests/lowering.rs @@ -1,26 +1,30 @@ -use fe_analyzer::namespace::items::ModuleId; -use fe_analyzer::Db; +use fe_analyzer::namespace::items::{Global, Module, ModuleContext, ModuleFileContent}; +use fe_analyzer::AnalyzerDb; use fe_common::diagnostics::print_diagnostics; use fe_common::files::{FileStore, SourceFileId}; +use fe_lowering::TestDb; use fe_parser::ast as fe; use insta::assert_snapshot; +use std::rc::Rc; use wasm_bindgen_test::wasm_bindgen_test; fn lower(src: &str, id: SourceFileId, files: &FileStore) -> fe::Module { - let fe_module = parse_file(src, id, files); - let (db, module_id) = analyze(fe_module, files); - fe_lowering::lower(&db, module_id) -} + let ast = parse_file(src, id, files); -fn analyze(module: fe::Module, files: &FileStore) -> (Db, ModuleId) { - let db = Db::default(); - match fe_analyzer::analyze(&db, module) { - Ok(id) => (db, id), - Err(diagnostics) => { - print_diagnostics(&diagnostics, files); - panic!("analysis failed"); - } - } + let db = TestDb::default(); + + let global = Global::default(); + let global_id = db.intern_global(Rc::new(global)); + + let module = Module { + name: "test_module".to_string(), + context: ModuleContext::Global(global_id), + file_content: ModuleFileContent::File { file: id }, + ast, + }; + let module_id = db.intern_module(Rc::new(module)); + + fe_lowering::lower_module(&db, module_id).ast(&db) } fn parse_file(src: &str, id: SourceFileId, files: &FileStore) -> fe::Module { diff --git a/crates/parser/src/ast.rs b/crates/parser/src/ast.rs index b1bbd7930e..57b9c0942e 100644 --- a/crates/parser/src/ast.rs +++ b/crates/parser/src/ast.rs @@ -30,8 +30,7 @@ pub struct Pragma { #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone)] pub struct Path { - pub names: Vec>, - pub trailing_delim: bool, + pub segments: Vec>, } #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone)] @@ -42,14 +41,14 @@ pub struct Use { #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone)] pub enum UseTree { Glob { - prefix: Node, + prefix: Path, }, Nested { - prefix: Node, + prefix: Path, children: Vec>, }, Simple { - path: Node, + path: Path, rename: Option>, }, } @@ -84,14 +83,17 @@ pub struct Struct { #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone)] pub enum TypeDesc { Unit, - // TODO: change `Base { base: String }` to `Name(String)` + // TODO: replace with `Name(String)`, or eliminate in favor of `Path`? 
Base { base: String, }, + Path(Path), Tuple { items: Vec1>, }, Generic { + // TODO: when we support user-defined generic types, + // this will have to be a `Path` base: Node, args: Node>, }, @@ -272,6 +274,7 @@ pub enum Expr { }, Bool(bool), Name(String), + Path(Path), Num(String), Str(String), Unit, @@ -440,16 +443,16 @@ impl fmt::Display for Use { impl fmt::Display for UseTree { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { - UseTree::Glob { prefix } => write!(f, "{}*", prefix.kind), + UseTree::Glob { prefix } => write!(f, "{}::*", prefix), UseTree::Simple { path, rename } => { if let Some(rename) = rename { - write!(f, "{} as {}", path.kind, rename.kind) + write!(f, "{} as {}", path, rename.kind) } else { - write!(f, "{}", path.kind) + write!(f, "{}", path) } } UseTree::Nested { prefix, children } => { - write!(f, "{}{{{}}}", prefix.kind, node_comma_joined(children)) + write!(f, "{}::{{{}}}", prefix, node_comma_joined(children)) } } } @@ -458,17 +461,13 @@ impl fmt::Display for UseTree { impl fmt::Display for Path { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let joined_names = self - .names + .segments .iter() .map(|name| name.kind.to_string()) .collect::>() .join("::"); write!(f, "{}", joined_names)?; - if self.trailing_delim { - write!(f, "::")?; - } - Ok(()) } } @@ -523,6 +522,7 @@ impl fmt::Display for TypeDesc { match self { TypeDesc::Unit => write!(f, "()"), TypeDesc::Base { base } => write!(f, "{}", base), + TypeDesc::Path(path) => write!(f, "{}", path), TypeDesc::Tuple { items } => write!(f, "({})", node_comma_joined(items)), TypeDesc::Generic { base, args } => { write!(f, "{}<{}>", base.kind, comma_joined(&args.kind)) @@ -755,6 +755,7 @@ impl fmt::Display for Expr { Expr::Tuple { elts } => write!(f, "({})", node_comma_joined(elts)), Expr::Bool(bool) => write!(f, "{}", bool), Expr::Name(name) => write!(f, "{}", name), + Expr::Path(path) => write!(f, "{}", path), Expr::Num(num) => write!(f, "{}", num), Expr::Str(str) => write!(f, "\"{}\"", str), Expr::Unit => write!(f, "()"), @@ -950,6 +951,7 @@ fn expr_left_binding_power(expr: &Expr) -> u8 { Expr::Tuple { .. } => max_power, Expr::Bool(_) => max_power, Expr::Name(_) => max_power, + Expr::Path(_) => max_power, Expr::Num(_) => max_power, Expr::Str(_) => max_power, Expr::Unit => max_power, @@ -972,6 +974,7 @@ fn expr_right_binding_power(expr: &Expr) -> u8 { Expr::Tuple { .. } => max_power, Expr::Bool(_) => max_power, Expr::Name(_) => max_power, + Expr::Path(_) => max_power, Expr::Num(_) => max_power, Expr::Str(_) => max_power, Expr::Unit => max_power, diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index d496008a7e..41d4e1cf51 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -1,4 +1,4 @@ -use crate::ast::{self, CallArg, Expr}; +use crate::ast::{self, CallArg, Expr, Path}; use crate::node::Node; use crate::{Label, ParseFailed, ParseResult, Parser, Token, TokenKind}; @@ -107,7 +107,7 @@ pub fn parse_expr_with_min_bp(par: &mut Parser, min_bp: u8) -> ParseResult Option<(u8, u8)> { // Prefix Plus | Minus | Tilde => 135 StarStar => (141, 140), Dot => (150, 151), - // ColonColon => + ColonColon => (160, 161), _ => return None, }; Some(bp) @@ -377,9 +377,14 @@ fn unescape_string(quoted_string: &str) -> Option { } /// Create an expr from the given infix operator and operands. 
-fn infix_op(left: Node, op: &Token, right: Node) -> Node { +fn infix_op( + par: &mut Parser, + left: Node, + op: &Token, + right: Node, +) -> ParseResult> { use TokenKind::*; - match op.kind { + let expr = match op.kind { Or | And => bool_op(left, op, right), Amper | Hat | Pipe | LtLt | GtGt | Plus | Minus | Star | Slash | Percent | StarStar => { @@ -399,12 +404,56 @@ fn infix_op(left: Node, op: &Token, right: Node) -> Node { span, ) } else { - todo!("handle dotted expr where right isn't a name") + // TODO: check for float number and say something helpful + par.fancy_error( + "failed to parse attribute expression", + vec![Label::primary(right.span, "expected a name")], + vec![], + ); + return Err(ParseFailed); } } + ColonColon => { + let mut path = match left.kind { + Expr::Name(name) => Path { + segments: vec![Node::new(name, left.span)], + }, + Expr::Path(path) => path, + _ => { + par.fancy_error( + "failed to parse path expression", + vec![ + Label::secondary(op.span, "path delimiter".to_string()), + Label::primary(left.span, "expected a name"), + ], + vec![], + ); + return Err(ParseFailed); + } + }; + // `right` can't be a Path (rbp > lbp); only valid option is `Name` + match right.kind { + Expr::Name(name) => { + path.segments.push(Node::new(name, right.span)); + Node::new(Expr::Path(path), left.span + right.span) + } + _ => { + par.fancy_error( + "failed to parse path expression", + vec![ + Label::secondary(op.span, "path delimiter".to_string()), + Label::primary(right.span, "expected a name"), + ], + vec![], + ); + return Err(ParseFailed); + } + } + } _ => panic!("Unexpected infix op token: {:?}", op), - } + }; + Ok(expr) } /// Create an `Expr::BoolOperation` node for the given operator and operands. diff --git a/crates/parser/src/grammar/module.rs b/crates/parser/src/grammar/module.rs index 3dd8c12299..cddfe3d93c 100644 --- a/crates/parser/src/grammar/module.rs +++ b/crates/parser/src/grammar/module.rs @@ -1,8 +1,8 @@ use super::contracts::parse_contract_def; use super::expressions::parse_expr; use super::functions::parse_fn_def; -use super::types::{parse_struct_def, parse_type_alias, parse_type_desc}; -use crate::ast::{ConstantDecl, Module, ModuleStmt, Path, Pragma, Use, UseTree}; +use super::types::{parse_path_tail, parse_struct_def, parse_type_alias, parse_type_desc}; +use crate::ast::{ConstantDecl, Module, ModuleStmt, Pragma, Use, UseTree}; use crate::node::{Node, Span}; use crate::{Label, ParseFailed, ParseResult, Parser, TokenKind}; @@ -130,60 +130,20 @@ pub fn parse_use(par: &mut Parser) -> ParseResult> { Ok(Node::new(Use { tree }, use_tok.span + tree_span)) } -/// Parse a `::` delimited path. 
-pub fn parse_path(par: &mut Parser) -> ParseResult> { - let mut names = vec![]; - - let name = par.expect_with_notes(TokenKind::Name, "failed to parse path", |_| { - vec![ - "Note: paths must start with a name".into(), - "Example: `foo::bar`".into(), - ] - })?; - - names.push(Node::new(name.text.to_string(), name.span)); - - loop { - if par.peek() == Some(TokenKind::ColonColon) { - let delim_tok = par.next()?; - - if par.peek() == Some(TokenKind::Name) { - let name = par.next()?; - - names.push(Node::new(name.text.to_string(), name.span)); - } else { - let span = - names.first().expect("`names` should not be empty").span + delim_tok.span; - - return Ok(Node::new( - Path { - names, - trailing_delim: true, - }, - span, - )); - } - } else { - let span = names.first().expect("`names` should not be empty").span - + names.last().expect("").span; - - return Ok(Node::new( - Path { - names, - trailing_delim: false, - }, - span, - )); - } - } -} - /// Parse a `use` tree. pub fn parse_use_tree(par: &mut Parser) -> ParseResult> { - let path = parse_path(par)?; - let path_span = path.span; + let (path, path_span, trailing_delim) = { + let path_head = + par.expect_with_notes(TokenKind::Name, "failed to parse `use` statement", |_| { + vec![ + "Note: `use` paths must start with a name".into(), + "Example: `use foo::bar`".into(), + ] + })?; + parse_path_tail(par, path_head.into()) + }; - if path.kind.trailing_delim { + if trailing_delim.is_some() { match par.peek() { Some(TokenKind::BraceOpen) => { par.next()?; @@ -193,19 +153,16 @@ pub fn parse_use_tree(par: &mut Parser) -> ParseResult> { loop { children.push(parse_use_tree(par)?); - - match par.peek() { - Some(TokenKind::Comma) => { - par.next()?; + let tok = par.next()?; + match tok.kind { + TokenKind::Comma => { continue; } - Some(TokenKind::BraceClose) => { - let tok = par.next()?; + TokenKind::BraceClose => { close_brace_span = tok.span; break; } _ => { - let tok = par.next()?; par.unexpected_token_error( tok.span, "failed to parse `use` tree", diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs index a9eb5533b1..c5e533548e 100644 --- a/crates/parser/src/grammar/types.rs +++ b/crates/parser/src/grammar/types.rs @@ -1,7 +1,8 @@ -use crate::ast::{self, EventField, Field, GenericArg, TypeAlias, TypeDesc}; +use crate::ast::{self, EventField, Field, GenericArg, Path, TypeAlias, TypeDesc}; use crate::grammar::expressions::parse_expr; use crate::grammar::functions::{parse_fn_def, parse_single_word_stmt}; use crate::node::{Node, Span}; +use crate::Token; use crate::{ParseFailed, ParseResult, Parser, TokenKind}; use fe_common::diagnostics::Label; use if_chain::if_chain; @@ -295,6 +296,24 @@ pub fn parse_generic_args(par: &mut Parser) -> ParseResult> Ok(Node::new(args, span)) } +/// Returns path and trailing `::` token, if present. +pub fn parse_path_tail<'a>( + par: &mut Parser<'a>, + head: Node, +) -> (Path, Span, Option>) { + let mut span = head.span; + let mut segments = vec![head]; + while let Some(delim) = par.optional(TokenKind::ColonColon) { + if let Some(name) = par.optional(TokenKind::Name) { + span += name.span; + segments.push(name.into()); + } else { + return (Path { segments }, span, Some(delim)); + } + } + (Path { segments }, span, None) +} + /// Parse a type description, e.g. `u8` or `Map`. 
pub fn parse_type_desc(par: &mut Parser) -> ParseResult> { use TokenKind::*; @@ -303,6 +322,22 @@ pub fn parse_type_desc(par: &mut Parser) -> ParseResult> { Name => { let name = par.next()?; match par.peek() { + Some(ColonColon) => { + let (path, span, trailing_delim) = parse_path_tail(par, name.into()); + if let Some(colons) = trailing_delim { + let next = par.next()?; + par.fancy_error( + "failed to parse type description", + vec![ + Label::secondary(colons.span, "path delimiter"), + Label::primary(next.span, "expected a name"), + ], + vec![], + ); + return Err(ParseFailed); + } + Node::new(TypeDesc::Path(path), span) + } Some(Lt) => { let args = parse_generic_args(par)?; let span = name.span + args.span; diff --git a/crates/parser/tests/cases/errors.rs b/crates/parser/tests/cases/errors.rs index 0befa61baf..f4b6851fed 100644 --- a/crates/parser/tests/cases/errors.rs +++ b/crates/parser/tests/cases/errors.rs @@ -75,6 +75,7 @@ contract C: "# } +test_parse_err! { type_desc_path_number, module::parse_module, true, "type Foo = some::mod::Foo::5000" } test_parse_err! { contract_pub_event, contracts::parse_contract_def, false, "contract C:\n pub event E:\n x: u8" } test_parse_err! { contract_const_pub, contracts::parse_contract_def, false, "contract C:\n const pub x: u8" } test_parse_err! { contract_const_fn, contracts::parse_contract_def, false, "contract C:\n const fn f():\n pass" } @@ -82,6 +83,9 @@ test_parse_err! { emit_no_args, functions::parse_stmt, true, "emit x" } test_parse_err! { emit_expr, functions::parse_stmt, true, "emit x + 1" } test_parse_err! { emit_bad_call, functions::parse_stmt, true, "emit MyEvent(1)()" } test_parse_err! { expr_bad_prefix, expressions::parse_expr, true, "*x + 1" } +test_parse_err! { expr_path_left, expressions::parse_expr, true, "(1 + 2)::foo::bar" } +test_parse_err! { expr_path_right, expressions::parse_expr, true, "foo::10::bar" } +test_parse_err! { expr_dotted_number, expressions::parse_expr, true, "3.14" } test_parse_err! { for_no_in, functions::parse_stmt, true, "for x:\n pass" } test_parse_err! { fn_no_args, module::parse_module, false, "fn f:\n return 5" } test_parse_err! { fn_unsafe_pub, module::parse_module, false, "unsafe pub fn f():\n return 5" } diff --git a/crates/parser/tests/cases/parse_ast.rs b/crates/parser/tests/cases/parse_ast.rs index 6ef1c3b97a..fdd25d2d97 100644 --- a/crates/parser/tests/cases/parse_ast.rs +++ b/crates/parser/tests/cases/parse_ast.rs @@ -66,6 +66,7 @@ test_parse! { expr_num1, expressions::parse_expr, "12345" } test_parse! { expr_num2, expressions::parse_expr, "00001" } test_parse! { expr_hex1, expressions::parse_expr, "0xbeefbeef" } test_parse! { expr_hex2, expressions::parse_expr, "0xFEED1234" } +test_parse! { expr_path_call, expressions::parse_expr, "foo::bar::abc1()" } test_parse! { expr_string, expressions::parse_expr, r#""hi \tmom\n""# } test_parse! { expr_list, expressions::parse_expr, "[]" } test_parse! { expr_list2, expressions::parse_expr, "[x, y, z,]" } @@ -102,11 +103,11 @@ test_parse! { stmt_aug_rsh, functions::parse_stmt, "x >>= y" } test_parse! { stmt_aug_exp, functions::parse_stmt, "x **= y" } test_parse! { stmt_emit1, functions::parse_stmt, "emit Foo()" } test_parse! { stmt_emit2, functions::parse_stmt, "emit Foo(1, 2, x=y)" } +test_parse! { stmt_path_type, functions::parse_stmt, "let x: foo::Bar = foo::Bar(1, 2)" } test_parse! { stmt_return1, functions::parse_stmt, "return" } test_parse! { stmt_return2, functions::parse_stmt, "return x" } test_parse! 
{ stmt_return3, functions::parse_stmt, "return not x" } test_parse! { stmt_revert1, functions::parse_stmt, "revert" } - test_parse! { stmt_revert2, functions::parse_stmt, "revert something" } test_parse! { stmt_if, functions::parse_stmt, "if a:\n b" } diff --git a/crates/parser/tests/cases/snapshots/cases__errors__expr_dotted_number.snap b/crates/parser/tests/cases/snapshots/cases__errors__expr_dotted_number.snap new file mode 100644 index 0000000000..469769b5a9 --- /dev/null +++ b/crates/parser/tests/cases/snapshots/cases__errors__expr_dotted_number.snap @@ -0,0 +1,12 @@ +--- +source: crates/parser/tests/cases/errors.rs +expression: "err_string(stringify!(expr_dotted_number), expressions::parse_expr, true,\n \"3.14\")" + +--- +error: failed to parse attribute expression + ┌─ expr_dotted_number:1:3 + │ +1 │ 3.14 + │ ^^ expected a name + + diff --git a/crates/parser/tests/cases/snapshots/cases__errors__expr_path_left.snap b/crates/parser/tests/cases/snapshots/cases__errors__expr_path_left.snap new file mode 100644 index 0000000000..331c396d6b --- /dev/null +++ b/crates/parser/tests/cases/snapshots/cases__errors__expr_path_left.snap @@ -0,0 +1,14 @@ +--- +source: crates/parser/tests/cases/errors.rs +expression: "err_string(stringify!(expr_path_left), expressions::parse_expr, true,\n \"(1 + 2)::foo::bar\")" + +--- +error: failed to parse path expression + ┌─ expr_path_left:1:1 + │ +1 │ (1 + 2)::foo::bar + │ ^^^^^^^-- path delimiter + │ │ + │ expected a name + + diff --git a/crates/parser/tests/cases/snapshots/cases__errors__expr_path_right.snap b/crates/parser/tests/cases/snapshots/cases__errors__expr_path_right.snap new file mode 100644 index 0000000000..7a73d9c8aa --- /dev/null +++ b/crates/parser/tests/cases/snapshots/cases__errors__expr_path_right.snap @@ -0,0 +1,14 @@ +--- +source: crates/parser/tests/cases/errors.rs +expression: "err_string(stringify!(expr_path_right), expressions::parse_expr, true,\n \"foo::10::bar\")" + +--- +error: failed to parse path expression + ┌─ expr_path_right:1:6 + │ +1 │ foo::10::bar + │ --^^ expected a name + │ │ + │ path delimiter + + diff --git a/crates/parser/tests/cases/snapshots/cases__errors__self_use1.snap b/crates/parser/tests/cases/snapshots/cases__errors__self_use1.snap index e59a224007..ff2df665b6 100644 --- a/crates/parser/tests/cases/snapshots/cases__errors__self_use1.snap +++ b/crates/parser/tests/cases/snapshots/cases__errors__self_use1.snap @@ -3,13 +3,13 @@ source: crates/parser/tests/cases/errors.rs expression: "err_string(stringify!(self_use1), module::parse_module, true,\n \"use self as bar\")" --- -error: failed to parse path +error: failed to parse `use` statement ┌─ self_use1:1:5 │ 1 │ use self as bar │ ^^^^ expected a name, found keyword `self` │ - = Note: paths must start with a name - = Example: `foo::bar` + = Note: `use` paths must start with a name + = Example: `use foo::bar` diff --git a/crates/parser/tests/cases/snapshots/cases__errors__type_desc_path_number.snap b/crates/parser/tests/cases/snapshots/cases__errors__type_desc_path_number.snap new file mode 100644 index 0000000000..e0e5417905 --- /dev/null +++ b/crates/parser/tests/cases/snapshots/cases__errors__type_desc_path_number.snap @@ -0,0 +1,14 @@ +--- +source: crates/parser/tests/cases/errors.rs +expression: "err_string(stringify!(type_desc_path_number), module::parse_module, true,\n \"type Foo = some::mod::Foo::5000\")" + +--- +error: failed to parse type description + ┌─ type_desc_path_number:1:28 + │ +1 │ type Foo = some::mod::Foo::5000 + │ --^^^^ expected a name + │ 
│ + │ path delimiter + + diff --git a/crates/parser/tests/cases/snapshots/cases__parse_ast__expr_path_call.snap b/crates/parser/tests/cases/snapshots/cases__parse_ast__expr_path_call.snap new file mode 100644 index 0000000000..227ea5f720 --- /dev/null +++ b/crates/parser/tests/cases/snapshots/cases__parse_ast__expr_path_call.snap @@ -0,0 +1,52 @@ +--- +source: crates/parser/tests/cases/parse_ast.rs +expression: "ast_string(stringify!(expr_path_call), expressions::parse_expr,\n \"foo::bar::abc1()\")" + +--- +Node( + kind: Call( + func: Node( + kind: Path(Path( + segments: [ + Node( + kind: "foo", + span: Span( + start: 0, + end: 3, + ), + ), + Node( + kind: "bar", + span: Span( + start: 5, + end: 8, + ), + ), + Node( + kind: "abc1", + span: Span( + start: 10, + end: 14, + ), + ), + ], + )), + span: Span( + start: 0, + end: 14, + ), + ), + generic_args: None, + args: Node( + kind: [], + span: Span( + start: 14, + end: 16, + ), + ), + ), + span: Span( + start: 0, + end: 16, + ), +) diff --git a/crates/parser/tests/cases/snapshots/cases__parse_ast__module_stmts.snap b/crates/parser/tests/cases/snapshots/cases__parse_ast__module_stmts.snap index 8ceadcee74..9d519f9c35 100644 --- a/crates/parser/tests/cases/snapshots/cases__parse_ast__module_stmts.snap +++ b/crates/parser/tests/cases/snapshots/cases__parse_ast__module_stmts.snap @@ -25,51 +25,37 @@ Node( kind: Use( tree: Node( kind: Nested( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "foo", - span: Span( - start: 19, - end: 22, - ), + prefix: Path( + segments: [ + Node( + kind: "foo", + span: Span( + start: 19, + end: 22, ), - Node( - kind: "bar", - span: Span( - start: 24, - end: 27, - ), + ), + Node( + kind: "bar", + span: Span( + start: 24, + end: 27, ), - ], - trailing_delim: true, - ), - span: Span( - start: 19, - end: 29, - ), + ), + ], ), children: [ Node( kind: Simple( - path: Node( - kind: Path( - names: [ - Node( - kind: "bing", - span: Span( - start: 35, - end: 39, - ), + path: Path( + segments: [ + Node( + kind: "bing", + span: Span( + start: 35, + end: 39, ), - ], - trailing_delim: false, - ), - span: Span( - start: 35, - end: 39, - ), + ), + ], ), rename: Some(Node( kind: "bong", @@ -86,28 +72,21 @@ Node( ), Node( kind: Glob( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "food", - span: Span( - start: 53, - end: 57, - ), + prefix: Path( + segments: [ + Node( + kind: "food", + span: Span( + start: 53, + end: 57, ), - ], - trailing_delim: true, - ), - span: Span( - start: 53, - end: 59, - ), + ), + ], ), ), span: Span( start: 53, - end: 59, + end: 57, ), ), ], diff --git a/crates/parser/tests/cases/snapshots/cases__parse_ast__stmt_path_type.snap b/crates/parser/tests/cases/snapshots/cases__parse_ast__stmt_path_type.snap new file mode 100644 index 0000000000..6adab29298 --- /dev/null +++ b/crates/parser/tests/cases/snapshots/cases__parse_ast__stmt_path_type.snap @@ -0,0 +1,117 @@ +--- +source: crates/parser/tests/cases/parse_ast.rs +expression: "ast_string(stringify!(stmt_path_type), functions::parse_stmt,\n \"let x: foo::Bar = foo::Bar(1, 2)\")" + +--- +Node( + kind: VarDecl( + target: Node( + kind: Name("x"), + span: Span( + start: 4, + end: 5, + ), + ), + typ: Node( + kind: Path(Path( + segments: [ + Node( + kind: "foo", + span: Span( + start: 7, + end: 10, + ), + ), + Node( + kind: "Bar", + span: Span( + start: 12, + end: 15, + ), + ), + ], + )), + span: Span( + start: 7, + end: 15, + ), + ), + value: Some(Node( + kind: Call( + func: Node( + kind: Path(Path( + segments: [ + Node( + kind: "foo", + span: 
Span( + start: 18, + end: 21, + ), + ), + Node( + kind: "Bar", + span: Span( + start: 23, + end: 26, + ), + ), + ], + )), + span: Span( + start: 18, + end: 26, + ), + ), + generic_args: None, + args: Node( + kind: [ + Node( + kind: CallArg( + label: None, + value: Node( + kind: Num("1"), + span: Span( + start: 27, + end: 28, + ), + ), + ), + span: Span( + start: 27, + end: 28, + ), + ), + Node( + kind: CallArg( + label: None, + value: Node( + kind: Num("2"), + span: Span( + start: 30, + end: 31, + ), + ), + ), + span: Span( + start: 30, + end: 31, + ), + ), + ], + span: Span( + start: 26, + end: 32, + ), + ), + ), + span: Span( + start: 18, + end: 32, + ), + )), + ), + span: Span( + start: 0, + end: 32, + ), +) diff --git a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_glob.snap b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_glob.snap index dab7f82b06..b8dd133974 100644 --- a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_glob.snap +++ b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_glob.snap @@ -7,40 +7,33 @@ Node( kind: Use( tree: Node( kind: Glob( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "foo", - span: Span( - start: 4, - end: 7, - ), + prefix: Path( + segments: [ + Node( + kind: "foo", + span: Span( + start: 4, + end: 7, ), - Node( - kind: "bar", - span: Span( - start: 9, - end: 12, - ), + ), + Node( + kind: "bar", + span: Span( + start: 9, + end: 12, ), - ], - trailing_delim: true, - ), - span: Span( - start: 4, - end: 14, - ), + ), + ], ), ), span: Span( start: 4, - end: 14, + end: 12, ), ), ), span: Span( start: 0, - end: 14, + end: 12, ), ) diff --git a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_nested1.snap b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_nested1.snap index 40ae2232f3..d20bb93ee6 100644 --- a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_nested1.snap +++ b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_nested1.snap @@ -7,84 +7,63 @@ Node( kind: Use( tree: Node( kind: Nested( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "foo", - span: Span( - start: 4, - end: 7, - ), + prefix: Path( + segments: [ + Node( + kind: "foo", + span: Span( + start: 4, + end: 7, ), - Node( - kind: "bar", - span: Span( - start: 9, - end: 12, - ), + ), + Node( + kind: "bar", + span: Span( + start: 9, + end: 12, ), - ], - trailing_delim: true, - ), - span: Span( - start: 4, - end: 14, - ), + ), + ], ), children: [ Node( kind: Glob( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "bing", - span: Span( - start: 15, - end: 19, - ), + prefix: Path( + segments: [ + Node( + kind: "bing", + span: Span( + start: 15, + end: 19, ), - ], - trailing_delim: true, - ), - span: Span( - start: 15, - end: 21, - ), + ), + ], ), ), span: Span( start: 15, - end: 21, + end: 19, ), ), Node( kind: Simple( - path: Node( - kind: Path( - names: [ - Node( - kind: "bang", - span: Span( - start: 24, - end: 28, - ), + path: Path( + segments: [ + Node( + kind: "bang", + span: Span( + start: 24, + end: 28, ), - Node( - kind: "big", - span: Span( - start: 30, - end: 33, - ), + ), + Node( + kind: "big", + span: Span( + start: 30, + end: 33, ), - ], - trailing_delim: false, - ), - span: Span( - start: 24, - end: 33, - ), + ), + ], ), rename: None, ), @@ -95,23 +74,16 @@ Node( ), Node( kind: Simple( - path: Node( - kind: Path( - names: [ - Node( - kind: "bass", - span: Span( - start: 35, - end: 39, - ), + path: Path( + segments: [ + Node( + kind: "bass", + span: Span( + start: 35, + end: 39, ), - 
], - trailing_delim: false, - ), - span: Span( - start: 35, - end: 39, - ), + ), + ], ), rename: Some(Node( kind: "fish", @@ -128,44 +100,30 @@ Node( ), Node( kind: Nested( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "bong", - span: Span( - start: 49, - end: 53, - ), + prefix: Path( + segments: [ + Node( + kind: "bong", + span: Span( + start: 49, + end: 53, ), - ], - trailing_delim: true, - ), - span: Span( - start: 49, - end: 55, - ), + ), + ], ), children: [ Node( kind: Simple( - path: Node( - kind: Path( - names: [ - Node( - kind: "hello", - span: Span( - start: 56, - end: 61, - ), + path: Path( + segments: [ + Node( + kind: "hello", + span: Span( + start: 56, + end: 61, ), - ], - trailing_delim: false, - ), - span: Span( - start: 56, - end: 61, - ), + ), + ], ), rename: Some(Node( kind: "hi", @@ -182,23 +140,16 @@ Node( ), Node( kind: Simple( - path: Node( - kind: Path( - names: [ - Node( - kind: "goodbye", - span: Span( - start: 69, - end: 76, - ), + path: Path( + segments: [ + Node( + kind: "goodbye", + span: Span( + start: 69, + end: 76, ), - ], - trailing_delim: false, - ), - span: Span( - start: 69, - end: 76, - ), + ), + ], ), rename: None, ), diff --git a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_nested2.snap b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_nested2.snap index d65ce517e4..9390e296b5 100644 --- a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_nested2.snap +++ b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_nested2.snap @@ -7,98 +7,70 @@ Node( kind: Use( tree: Node( kind: Nested( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "std", - span: Span( - start: 4, - end: 7, - ), + prefix: Path( + segments: [ + Node( + kind: "std", + span: Span( + start: 4, + end: 7, ), - Node( - kind: "bar", - span: Span( - start: 9, - end: 12, - ), + ), + Node( + kind: "bar", + span: Span( + start: 9, + end: 12, ), - ], - trailing_delim: true, - ), - span: Span( - start: 4, - end: 14, - ), + ), + ], ), children: [ Node( kind: Glob( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "bing", - span: Span( - start: 20, - end: 24, - ), + prefix: Path( + segments: [ + Node( + kind: "bing", + span: Span( + start: 20, + end: 24, ), - ], - trailing_delim: true, - ), - span: Span( - start: 20, - end: 26, - ), + ), + ], ), ), span: Span( start: 20, - end: 26, + end: 24, ), ), Node( kind: Nested( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "bad", - span: Span( - start: 33, - end: 36, - ), + prefix: Path( + segments: [ + Node( + kind: "bad", + span: Span( + start: 33, + end: 36, ), - ], - trailing_delim: true, - ), - span: Span( - start: 33, - end: 38, - ), + ), + ], ), children: [ Node( kind: Simple( - path: Node( - kind: Path( - names: [ - Node( - kind: "food", - span: Span( - start: 39, - end: 43, - ), + path: Path( + segments: [ + Node( + kind: "food", + span: Span( + start: 39, + end: 43, ), - ], - trailing_delim: false, - ), - span: Span( - start: 39, - end: 43, - ), + ), + ], ), rename: Some(Node( kind: "burger", @@ -115,82 +87,61 @@ Node( ), Node( kind: Glob( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "barge", - span: Span( - start: 55, - end: 60, - ), + prefix: Path( + segments: [ + Node( + kind: "barge", + span: Span( + start: 55, + end: 60, ), - ], - trailing_delim: true, - ), - span: Span( - start: 55, - end: 62, - ), + ), + ], ), ), span: Span( start: 55, - end: 62, + end: 60, ), ), Node( kind: Nested( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "bill", - span: Span( - 
start: 65, - end: 69, - ), + prefix: Path( + segments: [ + Node( + kind: "bill", + span: Span( + start: 65, + end: 69, ), - Node( - kind: "bob", - span: Span( - start: 71, - end: 74, - ), + ), + Node( + kind: "bob", + span: Span( + start: 71, + end: 74, ), - ], - trailing_delim: true, - ), - span: Span( - start: 65, - end: 76, - ), + ), + ], ), children: [ Node( kind: Glob( - prefix: Node( - kind: Path( - names: [ - Node( - kind: "jkl", - span: Span( - start: 77, - end: 80, - ), + prefix: Path( + segments: [ + Node( + kind: "jkl", + span: Span( + start: 77, + end: 80, ), - ], - trailing_delim: true, - ), - span: Span( - start: 77, - end: 82, - ), + ), + ], ), ), span: Span( start: 77, - end: 82, + end: 80, ), ), ], @@ -209,23 +160,16 @@ Node( ), Node( kind: Simple( - path: Node( - kind: Path( - names: [ - Node( - kind: "evm", - span: Span( - start: 91, - end: 94, - ), + path: Path( + segments: [ + Node( + kind: "evm", + span: Span( + start: 91, + end: 94, ), - ], - trailing_delim: false, - ), - span: Span( - start: 91, - end: 94, - ), + ), + ], ), rename: Some(Node( kind: "mve", diff --git a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_simple1.snap b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_simple1.snap index 9f3032e06b..0350bf8788 100644 --- a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_simple1.snap +++ b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_simple1.snap @@ -7,30 +7,23 @@ Node( kind: Use( tree: Node( kind: Simple( - path: Node( - kind: Path( - names: [ - Node( - kind: "foo", - span: Span( - start: 4, - end: 7, - ), + path: Path( + segments: [ + Node( + kind: "foo", + span: Span( + start: 4, + end: 7, ), - Node( - kind: "bar", - span: Span( - start: 9, - end: 12, - ), + ), + Node( + kind: "bar", + span: Span( + start: 9, + end: 12, ), - ], - trailing_delim: false, - ), - span: Span( - start: 4, - end: 12, - ), + ), + ], ), rename: None, ), diff --git a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_simple2.snap b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_simple2.snap index eb83f80633..a987845d27 100644 --- a/crates/parser/tests/cases/snapshots/cases__parse_ast__use_simple2.snap +++ b/crates/parser/tests/cases/snapshots/cases__parse_ast__use_simple2.snap @@ -7,30 +7,23 @@ Node( kind: Use( tree: Node( kind: Simple( - path: Node( - kind: Path( - names: [ - Node( - kind: "foo", - span: Span( - start: 4, - end: 7, - ), + path: Path( + segments: [ + Node( + kind: "foo", + span: Span( + start: 4, + end: 7, ), - Node( - kind: "bar", - span: Span( - start: 9, - end: 12, - ), + ), + Node( + kind: "bar", + span: Span( + start: 9, + end: 12, ), - ], - trailing_delim: false, - ), - span: Span( - start: 4, - end: 12, - ), + ), + ], ), rename: Some(Node( kind: "baz", diff --git a/crates/test-files/Cargo.toml b/crates/test-files/Cargo.toml index 00fbadc2bf..9598e979fc 100644 --- a/crates/test-files/Cargo.toml +++ b/crates/test-files/Cargo.toml @@ -8,3 +8,5 @@ repository = "https://github.com/ethereum/fe" [dependencies] include_dir = "0.6.0" +walkdir = "2" +fe-common = {path = "../common", version = "^0.10.0-alpha"} diff --git a/crates/test-files/fixtures/compile_errors/bad_ingot/src/bing.fe b/crates/test-files/fixtures/compile_errors/bad_ingot/src/bing.fe new file mode 100644 index 0000000000..b4a02e7635 --- /dev/null +++ b/crates/test-files/fixtures/compile_errors/bad_ingot/src/bing.fe @@ -0,0 +1,2 @@ +struct Bong: + pass \ No newline at end of file diff --git 
a/crates/test-files/fixtures/compile_errors/bad_ingot/src/biz/bad.fe b/crates/test-files/fixtures/compile_errors/bad_ingot/src/biz/bad.fe new file mode 100644 index 0000000000..44b94fcc5c --- /dev/null +++ b/crates/test-files/fixtures/compile_errors/bad_ingot/src/biz/bad.fe @@ -0,0 +1,5 @@ +struct Bur: + pass + +struct Bud: + pass \ No newline at end of file diff --git a/crates/test-files/fixtures/compile_errors/bad_ingot/src/foo.fe b/crates/test-files/fixtures/compile_errors/bad_ingot/src/foo.fe new file mode 100644 index 0000000000..1455137c49 --- /dev/null +++ b/crates/test-files/fixtures/compile_errors/bad_ingot/src/foo.fe @@ -0,0 +1,8 @@ +use bing::Bong::* +use bing::Tong + +struct Foo: + my_num: u256 + +struct Bar: + pass \ No newline at end of file diff --git a/crates/test-files/fixtures/compile_errors/bad_ingot/src/main.fe b/crates/test-files/fixtures/compile_errors/bad_ingot/src/main.fe new file mode 100644 index 0000000000..56b0cf06ff --- /dev/null +++ b/crates/test-files/fixtures/compile_errors/bad_ingot/src/main.fe @@ -0,0 +1,12 @@ +use foo::Foo +use bar::Baz +use biz::bad::{Bur, Bud as Bar, Boo} +use biz::Bark +use none::* +use bing::Bong as Bur +use foo::Bar as address + +contract Bar: + + pub fn a() -> Foo: + return Foo(my_num=true) diff --git a/crates/test-files/fixtures/compile_errors/mainless_ingot/src/foo.fe b/crates/test-files/fixtures/compile_errors/mainless_ingot/src/foo.fe new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/test-files/fixtures/ingots/basic_ingot/src/bar/baz.fe b/crates/test-files/fixtures/ingots/basic_ingot/src/bar/baz.fe new file mode 100644 index 0000000000..05f4e974ee --- /dev/null +++ b/crates/test-files/fixtures/ingots/basic_ingot/src/bar/baz.fe @@ -0,0 +1,3 @@ +struct Baz: + my_bool: bool + my_u256: u256 \ No newline at end of file diff --git a/crates/test-files/fixtures/ingots/basic_ingot/src/bing.fe b/crates/test-files/fixtures/ingots/basic_ingot/src/bing.fe new file mode 100644 index 0000000000..a6ecad4944 --- /dev/null +++ b/crates/test-files/fixtures/ingots/basic_ingot/src/bing.fe @@ -0,0 +1,10 @@ +struct Bing: + my_address: address + +fn get_42_backend() -> u256: + return 42 + +# currently disallowed +#contract BingContract: +# pub fn foo(): +# pass \ No newline at end of file diff --git a/crates/test-files/fixtures/ingots/basic_ingot/src/ding/dang.fe b/crates/test-files/fixtures/ingots/basic_ingot/src/ding/dang.fe new file mode 100644 index 0000000000..050377d346 --- /dev/null +++ b/crates/test-files/fixtures/ingots/basic_ingot/src/ding/dang.fe @@ -0,0 +1 @@ +type Dang = Array \ No newline at end of file diff --git a/crates/test-files/fixtures/ingots/basic_ingot/src/ding/dong.fe b/crates/test-files/fixtures/ingots/basic_ingot/src/ding/dong.fe new file mode 100644 index 0000000000..e17327dfbf --- /dev/null +++ b/crates/test-files/fixtures/ingots/basic_ingot/src/ding/dong.fe @@ -0,0 +1,4 @@ +struct Dong: + my_address: address + my_u256: u256 + my_i8: i8 \ No newline at end of file diff --git a/crates/test-files/fixtures/ingots/basic_ingot/src/main.fe b/crates/test-files/fixtures/ingots/basic_ingot/src/main.fe new file mode 100644 index 0000000000..9a39cdf0a2 --- /dev/null +++ b/crates/test-files/fixtures/ingots/basic_ingot/src/main.fe @@ -0,0 +1,26 @@ +use bar::baz::Baz +use bing::Bing as Bong +use bing::get_42_backend +use ding::{dang::Dang as Dung, dong} + +#use bing::BingContract + +contract Foo: + pub fn get_my_baz() -> Baz: + return Baz(my_bool=true, my_u256=26) + + pub fn get_my_bing() -> Bong: + return 
Bong(my_address=address(42)) + + pub fn get_42() -> u256: + return get_42_backend() + +# pub fn get_my_dong() -> dong::Dong: +# return dong::Dong( +# my_address=address(26), +# my_u256=42, +# my_i8=-1 +# ) + +# pub fn create_bing_contract(): +# BingContract.create(0) \ No newline at end of file diff --git a/crates/test-files/fixtures/printing/defs.fe b/crates/test-files/fixtures/printing/defs.fe index 64b683a083..a6cfbc46a8 100644 --- a/crates/test-files/fixtures/printing/defs.fe +++ b/crates/test-files/fixtures/printing/defs.fe @@ -9,7 +9,7 @@ use test1::{test1_1, test1_2::{test_1_2_1::*}} type MyType = String<42> struct MyStruct: - field1: bool + field1: foo::Bar field2: u256 field3: address @@ -31,7 +31,7 @@ contract Foo: field2: String<42> pub fn my_func(): - pass + std::solidity::bytes2::from_array([1, 2]) fn my_other_func(): pass diff --git a/crates/test-files/src/lib.rs b/crates/test-files/src/lib.rs index 8697870910..dba25a47a5 100644 --- a/crates/test-files/src/lib.rs +++ b/crates/test-files/src/lib.rs @@ -1,4 +1,6 @@ +use fe_common::files::{FileLoader, FileStore}; use include_dir::{include_dir, Dir}; +use std::path::Path; const FIXTURES: Dir = include_dir!("fixtures"); @@ -16,3 +18,46 @@ pub fn fixture_bytes(path: &str) -> &'static [u8] { .unwrap_or_else(|| panic!("bad fixture file path {}", path)) .contents() } + +struct FixtureLoader {} + +impl FileLoader for FixtureLoader { + fn load_file(&self, path: &Path) -> std::io::Result<String> { + Ok(FIXTURES + .get_file(path) + .unwrap_or_else(|| panic!("bad fixture file path {:?}", path)) + .contents_utf8() + .expect("fixture file isn't utf8") + .to_string()) + } +} + +pub fn build_filestore(path: &str) -> FileStore { + let mut files = FileStore::with_loader(Box::new(FixtureLoader {})); + + for path in all_file_paths_in_fixture_dir(path) { + files.load_file(&path).unwrap(); + } + + files +} + +fn all_file_paths_in_fixture_dir(path: &str) -> Vec<String> { + let dir = FIXTURES + .get_dir(path) + .unwrap_or_else(|| panic!("no fixture dir named \"{}\"", path)); + + let mut files = vec![]; + + for file in dir.files() { + files.push(file.path().to_string_lossy().to_string()) + } + + for sub_dir in dir.dirs() { + files.append(&mut all_file_paths_in_fixture_dir( + &sub_dir.path().to_string_lossy().to_string(), + )); + } + + files +} diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml index 69f1d7035c..762dd98468 100644 --- a/crates/test-utils/Cargo.toml +++ b/crates/test-utils/Cargo.toml @@ -14,6 +14,7 @@ fe-common = {path = "../common", version = "^0.10.0-alpha"} fe-driver = {path = "../driver", version = "^0.10.0-alpha"} fe-yulgen = {path = "../yulgen", version = "^0.10.0-alpha"} fe-yulc = {path = "../yulc", version = "^0.10.0-alpha", optional = true, features = ["solc-backend"]} +fe-analyzer = {path = "../analyzer", version = "^0.10.0-alpha"} test-files = {path = "../test-files", package = "fe-test-files" } hex = "0.4" primitive-types = {version = "0.9", default-features = false, features = ["rlp"]} diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index b9556fff3c..9fa50fb881 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -245,7 +245,7 @@ pub fn deploy_contract( let mut files = FileStore::new(); let id = files.add_file(fixture, src); - let compiled_module = match driver::compile(&files, id, src, true, true) { + let compiled_module = match driver::compile_module(&files, id, true, true) { Ok(module) => module, Err(error) => { fe_common::diagnostics::print_diagnostics(&error.0, 
&files); @@ -266,6 +266,38 @@ pub fn deploy_contract( ) } +#[allow(dead_code)] +#[cfg(feature = "solc-backend")] +pub fn deploy_contract_from_ingot( + executor: &mut Executor, + path: &str, + contract_name: &str, + init_params: &[ethabi::Token], +) -> ContractHarness { + let files = test_files::build_filestore(path); + + let compiled_module = match driver::compile_ingot(path, &files, &files.all_files(), true, true) + { + Ok(module) => module, + Err(error) => { + fe_common::diagnostics::print_diagnostics(&error.0, &files); + panic!("failed to compile ingot: {}", path) + } + }; + + let compiled_contract = compiled_module + .contracts + .get(contract_name) + .expect("could not find contract in fixture"); + + _deploy_contract( + executor, + &compiled_contract.bytecode, + &compiled_contract.json_abi, + init_params, + ) +} + #[allow(dead_code)] #[cfg(feature = "solc-backend")] pub fn deploy_solidity_contract( @@ -448,7 +480,7 @@ pub fn load_contract(address: H160, fixture: &str, contract_name: &str) -> Contr let mut files = FileStore::new(); let src = test_files::fixture(fixture); let id = files.add_file(fixture, src); - let compiled_module = match driver::compile(&files, id, src, true, true) { + let compiled_module = match driver::compile_module(&files, id, true, true) { Ok(module) => module, Err(err) => { print_diagnostics(&err.0, &files); diff --git a/crates/tests/src/crashes.rs b/crates/tests/src/crashes.rs index a830ece38a..a0701e3226 100644 --- a/crates/tests/src/crashes.rs +++ b/crates/tests/src/crashes.rs @@ -10,7 +10,7 @@ macro_rules! test_file { let src = test_files::fixture(path); let mut files = FileStore::new(); let id = files.add_file(path, src); - fe_driver::compile(&files, id, src, true, true).ok(); + fe_driver::compile_module(&files, id, true, true).ok(); } }; } diff --git a/crates/tests/src/ingots.rs b/crates/tests/src/ingots.rs new file mode 100644 index 0000000000..5a5b1bf4d2 --- /dev/null +++ b/crates/tests/src/ingots.rs @@ -0,0 +1,33 @@ +#![cfg(feature = "solc-backend")] +use fe_compiler_test_utils::*; +use fe_compiler_test_utils::{self as test_utils}; + +pub fn deploy_ingot( + executor: &mut Executor, + fixture: &str, + contract_name: &str, + init_params: &[ethabi::Token], +) -> ContractHarness { + test_utils::deploy_contract_from_ingot( + executor, + &format!("ingots/{}", fixture), + contract_name, + init_params, + ) +} + +#[test] +fn test_basic_ingot() { + with_executor(&|mut executor| { + let harness = deploy_ingot(&mut executor, "basic_ingot", "Foo", &[]); + + harness.test_function( + &mut executor, + "get_my_baz", + &[], + Some(&tuple_token(&[bool_token(true), uint_token(26)])), + ); + + harness.test_function(&mut executor, "get_42", &[], Some(&uint_token(42))); + }) +} diff --git a/crates/tests/src/lib.rs b/crates/tests/src/lib.rs index d7c18ff9d0..88e524f638 100644 --- a/crates/tests/src/lib.rs +++ b/crates/tests/src/lib.rs @@ -11,6 +11,8 @@ mod differential; #[cfg(test)] mod features; #[cfg(test)] +mod ingots; +#[cfg(test)] mod runtime; #[cfg(test)] mod solidity; diff --git a/crates/yulc/Cargo.toml b/crates/yulc/Cargo.toml index b86154818e..4add103fec 100644 --- a/crates/yulc/Cargo.toml +++ b/crates/yulc/Cargo.toml @@ -11,6 +11,7 @@ fe-yulgen = {path = "../yulgen", version = "^0.10.0-alpha"} # This fork supports concurrent compilation, which is required for Rust tests. 
solc = { git = "https://github.com/g-r-a-n-t/solc-rust", optional = true} serde_json = "1.0" +indexmap = "1.6.2" [features] solc-backend = ["solc"] diff --git a/crates/yulc/src/lib.rs b/crates/yulc/src/lib.rs index 16cd72025d..de49d3ee92 100644 --- a/crates/yulc/src/lib.rs +++ b/crates/yulc/src/lib.rs @@ -1,38 +1,30 @@ -//! Fe to EVM compiler. - -use fe_yulgen::{NamedYulContracts, YulIr}; -use std::collections::HashMap; - -/// The name of a Fe contract. -pub type ContractName = String; -/// The bytecode of a contract as string object. -pub type Bytecode = String; -/// A mapping of contract names and their bytecode. -pub type NamedBytecodeContracts = HashMap<ContractName, Bytecode>; +use indexmap::map::IndexMap; #[derive(Debug)] pub struct YulcError(pub String); /// Compile a map of Yul contracts to a map of bytecode contracts. +/// +/// Returns a `contract_name -> hex_encoded_bytecode` map. pub fn compile( - mut contracts: NamedYulContracts, + mut contracts: IndexMap<String, String>, optimize: bool, -) -> Result<NamedBytecodeContracts, YulcError> { +) -> Result<IndexMap<String, String>, YulcError> { contracts - .drain() + .drain(0..) .map(|(name, yul_src)| { compile_single_contract(&name, yul_src, optimize).map(|bytecode| (name, bytecode)) }) - .collect::<Result<NamedBytecodeContracts, YulcError>>() + .collect() } #[cfg(feature = "solc-backend")] /// Compiles a single Yul contract to bytecode. pub fn compile_single_contract( name: &str, - yul_src: YulIr, + yul_src: String, optimize: bool, -) -> Result<Bytecode, YulcError> { +) -> Result<String, YulcError> { let solc_temp = include_str!("solc_temp.json"); let input = solc_temp .replace("{optimizer_enabled}", &optimize.to_string()) @@ -56,9 +48,9 @@ pub fn compile_single_contract( /// Compiles a single Yul contract to bytecode. pub fn compile_single_contract( _name: &str, - _yul_src: YulIr, + _yul_src: String, _optimize: bool, -) -> Result<Bytecode, YulcError> { +) -> Result<String, YulcError> { // This is ugly, but required (as far as I can tell) to make // `cargo test --workspace` work without solc. 
panic!("fe-yulc requires 'solc-backend' feature") diff --git a/crates/yulgen/Cargo.toml b/crates/yulgen/Cargo.toml index 802de932a5..2381aa782d 100644 --- a/crates/yulgen/Cargo.toml +++ b/crates/yulgen/Cargo.toml @@ -9,6 +9,7 @@ repository = "https://github.com/ethereum/fe" [dependencies] fe-abi = {path = "../abi", version = "^0.10.0-alpha"} fe-analyzer = {path = "../analyzer", version = "^0.10.0-alpha"} +fe-lowering = {path = "../lowering", version = "^0.10.0-alpha"} fe-common = {path = "../common", version = "^0.10.0-alpha"} fe-parser = {path = "../parser", version = "^0.10.0-alpha"} indexmap = "1.6.2" diff --git a/crates/yulgen/src/db.rs b/crates/yulgen/src/db.rs new file mode 100644 index 0000000000..e0c6c259c1 --- /dev/null +++ b/crates/yulgen/src/db.rs @@ -0,0 +1,44 @@ +use fe_analyzer::namespace::items::ModuleId; +use fe_analyzer::AnalyzerDb; +use fe_common::Upcast; +use fe_lowering::LoweringDb; +use indexmap::map::IndexMap; + +mod queries; + +#[salsa::query_group(YulgenDbStorage)] +pub trait YulgenDb: +    AnalyzerDb + LoweringDb + Upcast<dyn AnalyzerDb> + Upcast<dyn LoweringDb> +{ +    #[salsa::invoke(queries::compile_module)] +    fn compile_module(&self, module_id: ModuleId) -> IndexMap<String, String>; +} + +#[salsa::database( +    fe_analyzer::db::AnalyzerDbStorage, +    fe_lowering::db::LoweringDbStorage, +    YulgenDbStorage +)] +#[derive(Default)] +pub struct Db { +    storage: salsa::Storage<Db>, +} +impl salsa::Database for Db {} + +impl Upcast<dyn LoweringDb> for Db { +    fn upcast(&self) -> &(dyn LoweringDb + 'static) { +        &*self +    } +} + +impl Upcast<dyn AnalyzerDb> for Db { +    fn upcast(&self) -> &(dyn AnalyzerDb + 'static) { +        &*self +    } +} + +impl Upcast<dyn YulgenDb> for Db { +    fn upcast(&self) -> &(dyn YulgenDb + 'static) { +        &*self +    } +} diff --git a/crates/yulgen/src/db/queries.rs b/crates/yulgen/src/db/queries.rs new file mode 100644 index 0000000000..769c642882 --- /dev/null +++ b/crates/yulgen/src/db/queries.rs @@ -0,0 +1,44 @@ +use crate::db::YulgenDb; +use crate::mappers; +use fe_analyzer::namespace::items::ModuleId; +use indexmap::map::IndexMap; +use yultsur::yul; + +pub fn compile_module(db: &dyn YulgenDb, module: ModuleId) -> IndexMap<String, String> { +    let analyzer_db = db.upcast(); +    mappers::module::module(analyzer_db, module) +        .drain() +        .map(|(name, object)| (name, to_safe_json(object))) +        .collect() +} + +fn to_safe_json(obj: yul::Object) -> String { +    normalize_object(obj).to_string().replace("\"", "\\\"") +} + +fn normalize_object(obj: yul::Object) -> yul::Object { +    let data = obj +        .data +        .into_iter() +        .map(|data| yul::Data { +            name: data.name, +            value: data +                .value +                .replace('\\', "\\\\\\\\") +                .replace('\n', "\\\\n") +                .replace("\"", "\\\\\"") +                .replace('\r', "\\\\r") +                .replace('\t', "\\\\t"), +        }) +        .collect::<Vec<_>>(); +    yul::Object { +        name: obj.name, +        code: obj.code, +        objects: obj +            .objects +            .into_iter() +            .map(normalize_object) +            .collect::<Vec<_>>(), +        data, +    } +} diff --git a/crates/yulgen/src/lib.rs b/crates/yulgen/src/lib.rs index aa6f64f981..efa3304aba 100644 --- a/crates/yulgen/src/lib.rs +++ b/crates/yulgen/src/lib.rs @@ -1,13 +1,14 @@ //! Fe to Yul compiler. +pub use crate::db::{Db, YulgenDb}; use fe_analyzer::namespace::items::ModuleId; use fe_analyzer::AnalyzerDb; -use std::collections::HashMap; -use yultsur::yul; +use indexmap::map::IndexMap; pub mod constants; pub mod constructor; mod context; +mod db; mod mappers; pub mod names; pub mod operations; @@ -15,53 +16,14 @@ pub mod runtime; pub mod types; mod utils; -/// The name of a Fe contract. -pub type ContractName = String; -/// The intermediate representation of a contract as a string object. 
-pub type YulIr = String; -/// A mapping of contract names and their Yul IR. -pub type NamedYulContracts = HashMap<ContractName, YulIr>; - -/// Compiles Fe source code to Yul. +/// Compiles a lowered Fe module to Yul contracts. +/// +/// Returns a `contract_name -> Yul IR` map. /// /// # Panics /// /// Any failure to compile an AST to Yul is considered a bug, and thus panics. /// Invalid ASTs should be caught by an analysis step prior to Yul generation. -pub fn compile(db: &dyn AnalyzerDb, module: ModuleId) -> NamedYulContracts { - mappers::module::module(db, module) - .drain() - .map(|(name, object)| (name, to_safe_json(object))) - .collect::<NamedYulContracts>() -} - -fn to_safe_json(obj: yul::Object) -> String { - normalize_object(obj).to_string().replace("\"", "\\\"") -} - -fn normalize_object(obj: yul::Object) -> yul::Object { - let data = obj - .data - .into_iter() - .map(|data| yul::Data { - name: data.name, - value: data - .value - .replace('\\', "\\\\\\\\") - .replace('\n', "\\\\n") - .replace("\"", "\\\\\"") - .replace('\r', "\\\\r") - .replace('\t', "\\\\t"), - }) - .collect::<Vec<_>>(); - yul::Object { - name: obj.name, - code: obj.code, - objects: obj - .objects - .into_iter() - .map(normalize_object) - .collect::<Vec<_>>(), - data, - } +pub fn compile(db: &dyn YulgenDb, module: ModuleId) -> IndexMap<String, String> { + db.compile_module(module) } diff --git a/crates/yulgen/src/mappers/expressions.rs b/crates/yulgen/src/mappers/expressions.rs index c67b18ae8d..4fb5f86b9d 100644 --- a/crates/yulgen/src/mappers/expressions.rs +++ b/crates/yulgen/src/mappers/expressions.rs @@ -25,6 +25,7 @@ use yultsur::*; pub fn expr(context: &mut FnContext, exp: &Node<fe::Expr>) -> yul::Expression { let expression = match &exp.kind { fe::Expr::Name(_) => expr_name(exp), + fe::Expr::Path(_) => panic!("path expressions should be lowered or rejected"), fe::Expr::Num(_) => expr_num(exp), fe::Expr::Bool(_) => expr_bool(exp), fe::Expr::Subscript { .. } => expr_subscript(context, exp), diff --git a/newsfragments/562.feature.md b/newsfragments/562.feature.md new file mode 100644 index 0000000000..4c5c67867e --- /dev/null +++ b/newsfragments/562.feature.md @@ -0,0 +1,3 @@ +Added support for multi-file inputs. + +Fe projects can now span multiple files: an ingot is a directory of `.fe` modules under `src/` (with a `main.fe` entry point) whose items can be imported across modules with `use` statements.
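For context, here is a minimal sketch of how the new multi-file pipeline is driven, pieced together from the helpers this patch adds (`test_files::build_filestore`, `fe_driver::compile_ingot`, and the `contracts` map on the compiled module). The function name `compile_basic_ingot_sketch` is illustrative only, and the two boolean flags are simply mirrored from the test helpers above without interpreting them; treat this as a sketch under those assumptions, not as the definitive driver API.

```rust
use fe_common::diagnostics::print_diagnostics;

// Sketch: compile the `basic_ingot` fixture the same way the new
// `deploy_contract_from_ingot` test helper does.
fn compile_basic_ingot_sketch() {
    let path = "ingots/basic_ingot";

    // A FileStore backed by the embedded fixture files (see crates/test-files).
    let files = test_files::build_filestore(path);

    // Compile every file in the store as a single ingot. The two booleans mirror
    // the flags passed by the test helpers in this patch.
    match fe_driver::compile_ingot(path, &files, &files.all_files(), true, true) {
        Ok(module) => {
            // `module.contracts` maps contract names to compiled artifacts
            // (bytecode and JSON ABI), as used by `deploy_contract_from_ingot`.
            let contract = module
                .contracts
                .get("Foo")
                .expect("could not find contract in ingot");
            println!("Foo ABI: {}", contract.json_abi);
        }
        Err(error) => {
            print_diagnostics(&error.0, &files);
            panic!("failed to compile ingot: {}", path);
        }
    }
}
```

The same flow is what `crates/tests/src/ingots.rs` exercises end to end against the EVM executor.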