diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index aa8f9d7fc0..ebceb1540f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -102,7 +102,7 @@ jobs: # wasm-pack needs a Cargo.toml with a 'package' field. # (see https://github.com/rustwasm/wasm-pack/issues/642) # This will still run all tests in the workspace. - run: wasm-pack test --node crates/fe --workspace + run: wasm-pack test --node crates/fe --workspace --exclude fe-language-server release: # Only run this when we push a tag diff --git a/.vscode/launch.json b/.vscode/launch.json index 3b7fb3e6a5..564278de62 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -7,7 +7,8 @@ { "args": [ "--extensionDevelopmentPath=${workspaceFolder}/crates/language-server/editors/vscode", - "${workspaceFolder}/crates/" + "${workspaceFolder}/crates/", + "--disable-extensions" ], "name": "Launch Fe VSCode Extension", "outFiles": [ @@ -18,8 +19,10 @@ "type": "extensionHost", // we need to enable backtrace on the extension host "env": { - "RUST_BACKTRACE": "1" + "RUST_BACKTRACE": "full" } + // we need to disable rust-analyzer it's so slow + }, ] } \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 4c1f38447b..c36d2f25b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,21 @@ # It is not intended for manual editing. version = 3 +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + [[package]] name = "ahash" version = "0.7.6" @@ -100,6 +115,39 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "async-trait" +version = "0.1.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "atty" version = "0.2.14" @@ -129,6 +177,66 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core", + "bitflags", + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower", + "tower-layer", + 
"tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + [[package]] name = "base16ct" version = "0.2.0" @@ -144,6 +252,12 @@ dependencies = [ "byteorder", ] +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + [[package]] name = "beef" version = "0.5.2" @@ -252,7 +366,7 @@ dependencies = [ "bitflags", "clap_derive 3.2.18", "clap_lex 0.2.4", - "indexmap", + "indexmap 1.9.3", "once_cell", "strsim", "termcolor", @@ -304,7 +418,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -367,6 +481,43 @@ dependencies = [ "winapi", ] +[[package]] +name = "console-api" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" +dependencies = [ + "futures-core", + "prost", + "prost-types", + "tonic", + "tracing-core", +] + +[[package]] +name = "console-subscriber" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e" +dependencies = [ + "console-api", + "crossbeam-channel", + "crossbeam-utils", + "futures-task", + "hdrhistogram", + "humantime", + "prost-types", + "serde", + "serde_json", + "thread_local", + "tokio", + "tokio-stream", + "tonic", + "tracing", + "tracing-core", + "tracing-subscriber", +] + [[package]] name = "console_error_panic_hook" version = "0.1.7" @@ -410,6 +561,15 @@ version = "0.91.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a59bcbca89c3f1b70b93ab3cbba5e5e0cbf3e63dadb23c7525cb142e21a9d4c" +[[package]] +name = "crc32fast" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" +dependencies = [ + "cfg-if 1.0.0", +] + [[package]] name = "criterion" version = "0.3.6" @@ -724,28 +884,23 @@ checksum = "48016319042fb7c87b78d2993084a831793a897a5cd1a2a67cab9d1eeb4b7d76" dependencies = [ "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] -name = "errno" -version = "0.3.1" +name = "equivalent" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys 0.48.0", -] +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] -name = "errno-dragonfly" -version = "0.1.2" +name = "errno" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +checksum = 
"a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ - "cc", "libc", + "windows-sys 0.52.0", ] [[package]] @@ -882,7 +1037,7 @@ dependencies = [ "fe-test-runner", "fs_extra", "include_dir", - "indexmap", + "indexmap 1.9.3", "walkdir", ] @@ -906,7 +1061,7 @@ dependencies = [ "fe-test-files", "hex", "if_chain", - "indexmap", + "indexmap 1.9.3", "insta", "num-bigint", "num-traits", @@ -931,7 +1086,7 @@ dependencies = [ "fe-common", "fe-mir", "fxhash", - "indexmap", + "indexmap 1.9.3", "num-bigint", "salsa", "smol_str", @@ -947,7 +1102,7 @@ dependencies = [ "difference", "fe-library", "hex", - "indexmap", + "indexmap 1.9.3", "num-bigint", "num-traits", "once_cell", @@ -984,7 +1139,7 @@ dependencies = [ "fe-yulc", "getrandom", "hex", - "indexmap", + "indexmap 1.9.3", "insta", "primitive-types", "serde_json", @@ -1040,7 +1195,7 @@ dependencies = [ "fe-parser", "fe-test-runner", "fe-yulc", - "indexmap", + "indexmap 1.9.3", "serde_json", "smol_str", "toml", @@ -1110,31 +1265,44 @@ version = "0.23.0" dependencies = [ "anyhow", "camino", - "clap 4.3.12", "codespan-reporting", - "crossbeam-channel", + "console-subscriber", "dir-test", - "fe-analyzer", "fe-common2", "fe-compiler-test-utils", - "fe-driver2", "fe-hir", "fe-hir-analysis", - "fe-macros", + "fe-language-server-macros", + "fork_stream", + "futures", + "futures-batch", + "futures-concurrency", "fxhash", "glob", - "indexmap", - "log", - "lsp-server", "lsp-types", "patricia_tree", "rowan", + "rust-embed", "salsa-2022", - "serde", "serde_json", + "tokio", + "tokio-macros", + "tokio-stream", + "tower-lsp", + "tracing", + "tracing-subscriber", "url", ] +[[package]] +name = "fe-language-server-macros" +version = "0.23.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "fe-library" version = "0.23.0" @@ -1149,7 +1317,7 @@ dependencies = [ "glob", "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -1164,7 +1332,7 @@ dependencies = [ "fe-test-files", "fxhash", "id-arena", - "indexmap", + "indexmap 1.9.3", "num-bigint", "num-integer", "num-traits", @@ -1231,7 +1399,7 @@ dependencies = [ name = "fe-yulc" version = "0.23.0" dependencies = [ - "indexmap", + "indexmap 1.9.3", "serde_json", "solc", ] @@ -1264,12 +1432,32 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "flate2" +version = "1.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "fork_stream" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc54cf296aa5a82dfffcc911fc7a37b0dcba605725bbb4db486f7b24d7667f9d" +dependencies = [ + "futures", + "pin-project", +] + [[package]] name = "form_urlencoded" version = "1.2.0" @@ -1291,6 +1479,125 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-batch" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f444c45a1cb86f2a7e301469fd50a82084a60dadc25d94529a8312276ecb71a" +dependencies = [ + "futures", + "futures-timer", + "pin-utils", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-concurrency" +version = "7.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b590a729e1cbaf9ae3ec294143ea034d93cbb1de01c884d04bcd0af8b613d02" +dependencies = [ + "bitvec", + "futures-core", + "pin-project", + "slab", + "smallvec", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + [[package]] name = "fxhash" version = "0.2.1" @@ -1324,6 +1631,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + [[package]] name = "glob" version = "0.3.1" @@ -1341,6 +1654,25 @@ dependencies = [ "subtle", ] +[[package]] +name = "h2" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 2.2.5", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "1.8.2" @@ -1380,6 +1712,12 @@ dependencies = [ "ahash 0.8.3", ] +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + [[package]] name = "hashlink" version = "0.8.1" @@ -1389,6 +1727,19 @@ dependencies = [ "hashbrown 0.12.3", ] +[[package]] +name = "hdrhistogram" +version = "7.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" +dependencies = [ + "base64 0.21.7", + "byteorder", + "flate2", + "nom", + "num-traits", +] + [[package]] name = "heck" version = "0.3.3" @@ -1449,6 +1800,82 @@ dependencies = [ "digest", ] +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + [[package]] name = "id-arena" version = "2.2.1" @@ -1544,6 +1971,16 @@ dependencies = [ "hashbrown 0.12.3", ] +[[package]] +name = "indexmap" +version = "2.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" +dependencies = [ + "equivalent", + "hashbrown 0.14.3", +] + [[package]] name = "insta" version = "1.29.0" @@ -1644,9 +2081,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.142" +version = "0.2.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a987beff54b60ffa6d51982e1aa1146bc42f19bd26be28b0586f252fccf5317" 
+checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" [[package]] name = "libm" @@ -1708,23 +2145,11 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "lsp-server" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ea9ae5a5082ca3b6ae824fc7666cd206b99168a4d4c769ad8fe9cc740df6a6" -dependencies = [ - "crossbeam-channel", - "log", - "serde", - "serde_json", -] - [[package]] name = "lsp-types" -version = "0.94.0" +version = "0.94.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237" +checksum = "c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1" dependencies = [ "bitflags", "serde", @@ -1734,10 +2159,25 @@ dependencies = [ ] [[package]] -name = "memchr" -version = "2.6.4" +name = "matchers" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "memchr" +version = "2.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" [[package]] name = "memoffset" @@ -1748,6 +2188,58 @@ dependencies = [ "autocfg", ] +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num" version = "0.4.0" @@ -1835,6 +2327,15 @@ dependencies = [ "libc", ] +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" version = "1.18.0" @@ -1862,6 +2363,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "overload" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "parity-scale-codec" version = "3.4.0" @@ -1965,9 +2472,41 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" dependencies = [ "fixedbitset", - "indexmap", + "indexmap 1.9.3", ] +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + [[package]] name = "plotters" version = "0.3.4" @@ -2064,9 +2603,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.66" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] @@ -2089,6 +2628,38 @@ dependencies = [ "unarray", ] +[[package]] +name = "prost" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "prost-types" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" +dependencies = [ + "prost", +] + [[package]] name = "quick-error" version = "2.0.1" @@ -2097,9 +2668,9 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quote" -version = "1.0.32" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -2194,10 +2765,19 @@ checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" dependencies = [ "aho-corasick", "memchr", - "regex-automata", + "regex-automata 0.4.3", "regex-syntax 0.8.2", ] +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + 
[[package]] name = "regex-automata" version = "0.4.3" @@ -2329,7 +2909,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ece421e0c4129b90e4a35b6f625e472e96c552136f5093a2f4fa2bbb75a62d5" dependencies = [ - "base64", + "base64 0.10.1", "bitflags", "serde", ] @@ -2380,6 +2960,46 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62cc5760263ea229d367e7dff3c0cbf09e4797a125bd87059a6c095804f3b2d1" +[[package]] +name = "rust-embed" +version = "8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb78f46d0066053d16d4ca7b898e9343bc3530f71c61d5ad84cd404ada068745" +dependencies = [ + "rust-embed-impl", + "rust-embed-utils", + "walkdir", +] + +[[package]] +name = "rust-embed-impl" +version = "8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91ac2a3c6c0520a3fb3dd89321177c3c692937c4eb21893378219da10c44fc8" +dependencies = [ + "proc-macro2", + "quote", + "rust-embed-utils", + "syn 2.0.48", + "walkdir", +] + +[[package]] +name = "rust-embed-utils" +version = "8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86f69089032567ffff4eada41c573fc43ff466c7db7c5688b2e7969584345581" +dependencies = [ + "sha2", + "walkdir", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + [[package]] name = "rustc-hash" version = "1.1.0" @@ -2443,7 +3063,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b84d9f96071f3f3be0dc818eae3327625d8ebc95b58da37d6850724f31d3403" dependencies = [ "crossbeam-utils", - "indexmap", + "indexmap 1.9.3", "lock_api", "log", "oorandom", @@ -2463,7 +3083,7 @@ dependencies = [ "crossbeam-utils", "dashmap", "hashlink", - "indexmap", + "indexmap 1.9.3", "log", "parking_lot 0.12.1", "rustc-hash", @@ -2594,9 +3214,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.176" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76dc28c9523c5d70816e393136b86d48909cfb27cecaa902d338c19ed47164dc" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] @@ -2613,13 +3233,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.176" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e7b8c5dc823e3b90651ff1d3808419cd14e5ad76de04feaf37da114e7a306f" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -2641,7 +3261,7 @@ checksum = "e168eaaf71e8f9bd6037feb05190485708e019f4fd87d161b3c0a0d37daf85e5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -2674,6 +3294,24 @@ dependencies = [ "keccak", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" 
+dependencies = [ + "libc", +] + [[package]] name = "signature" version = "2.1.0" @@ -2690,11 +3328,20 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf" +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + [[package]] name = "smallvec" -version = "1.10.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" [[package]] name = "smol_str" @@ -2705,6 +3352,16 @@ dependencies = [ "serde", ] +[[package]] +name = "socket2" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "solc" version = "0.1.0" @@ -2773,7 +3430,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -2808,15 +3465,21 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.27" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + [[package]] name = "tap" version = "1.0.1" @@ -2870,7 +3533,17 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if 1.0.0", + "once_cell", ] [[package]] @@ -2907,6 +3580,73 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" +[[package]] +name = "tokio" +version = "1.35.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot 0.12.1", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "tracing", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + [[package]] name = "toml" version = "0.5.11" @@ -2928,11 +3668,165 @@ version = "0.19.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13" dependencies = [ - "indexmap", + "indexmap 1.9.3", "toml_datetime", "winnow", ] +[[package]] +name = "tonic" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.21.7", + "bytes", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-lsp" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4ba052b54a6627628d9b3c34c176e7eda8359b7da9acd497b9f20998d118508" +dependencies = [ + "async-trait", + "auto_impl", + "bytes", + "dashmap", + "futures", + "httparse", + "lsp-types", + "memchr", + "serde", + "serde_json", + "tokio", + "tokio-util", + "tower", + "tower-lsp-macros", + "tracing", +] + +[[package]] +name = "tower-lsp-macros" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + [[package]] name = "triehash" version = "0.8.4" @@ -2943,6 +3837,12 @@ dependencies = [ "rlp", ] +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + [[package]] name = "typenum" version = "1.16.0" @@ -3024,6 +3924,12 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + [[package]] name = "vec1" version = "1.10.1" @@ -3058,6 +3964,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -3213,6 +4128,15 @@ dependencies = [ "windows-targets 0.48.1", ] +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.4", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -3243,6 +4167,21 @@ dependencies = [ "windows_x86_64_msvc 0.48.0", ] +[[package]] +name = "windows-targets" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -3255,6 +4194,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -3267,6 +4212,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +[[package]] +name = 
"windows_aarch64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" + [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -3279,6 +4230,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +[[package]] +name = "windows_i686_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -3291,6 +4248,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +[[package]] +name = "windows_i686_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -3303,6 +4266,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -3315,6 +4284,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" @@ -3327,6 +4302,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" + [[package]] name = "winnow" version = "0.4.1" diff --git a/Makefile b/Makefile index f0849e0991..976cff8b66 100644 --- a/Makefile +++ b/Makefile @@ -71,7 +71,7 @@ docker-wasm-test: --volume "$(shell pwd):/mnt" \ --workdir '/mnt' \ davesque/rust-wasm \ - wasm-pack test --node -- --workspace + wasm-pack test --node -- --workspace --exclude fe-language-server .PHONY: coverage coverage: diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 34c0bd2748..3fe6c025dc 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -292,4 +292,8 @@ impl DiagnosticVoucher for NameResDiag { CompleteDiagnostic::new(self.severity(), message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box { + Box::new(self.clone()) + } } diff --git a/crates/hir-analysis/src/ty/def_analysis.rs b/crates/hir-analysis/src/ty/def_analysis.rs index 4a074b42a9..22323479c5 100644 --- a/crates/hir-analysis/src/ty/def_analysis.rs +++ 
b/crates/hir-analysis/src/ty/def_analysis.rs @@ -889,7 +889,7 @@ enum DefKind { Adt(AdtDef), Trait(TraitDef), ImplTrait(Implementor), - Impl(HirImpl, TyId), + Impl(HirImpl, #[allow(dead_code)] TyId), Func(FuncDef), } diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index 100a6328ef..44ef63b658 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -516,6 +516,10 @@ impl DiagnosticVoucher for TyLowerDiag { CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box<dyn DiagnosticVoucher> { + Box::new(self.clone()) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -614,6 +618,10 @@ impl DiagnosticVoucher for TraitLowerDiag { CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box<dyn DiagnosticVoucher> { + Box::new(self.clone()) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -813,6 +821,10 @@ impl DiagnosticVoucher for TraitConstraintDiag { CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box<dyn DiagnosticVoucher> { + Box::new(self.clone()) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -1259,4 +1271,8 @@ impl DiagnosticVoucher for ImplDiag { CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box<dyn DiagnosticVoucher> { + Box::new(self.clone()) + } } diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 4ec9530393..0157b8c4e8 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -24,6 +24,7 @@ pub trait DiagnosticVoucher: Send { fn error_code(&self) -> GlobalErrorCode; /// Makes a [`CompleteDiagnostic`]. fn to_complete(&self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; + fn clone_box(&self) -> Box<dyn DiagnosticVoucher>; } impl DiagnosticVoucher for CompleteDiagnostic { @@ -34,6 +35,10 @@ impl DiagnosticVoucher for CompleteDiagnostic { fn to_complete(&self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { self.clone() } + + fn clone_box(&self) -> Box<dyn DiagnosticVoucher> { + Box::new(self.clone()) + } } impl DiagnosticVoucher for Box<dyn DiagnosticVoucher> { @@ -44,4 +49,8 @@ impl DiagnosticVoucher for Box<dyn DiagnosticVoucher> { fn to_complete(&self, db: &dyn SpannedHirDb) -> CompleteDiagnostic { self.as_ref().to_complete(db) } + + fn clone_box(&self) -> Box<dyn DiagnosticVoucher> { + self.as_ref().clone_box() + } } diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs index c1e6fed2df..fbdabec065 100644 --- a/crates/hir/src/lower/parse.rs +++ b/crates/hir/src/lower/parse.rs @@ -54,4 +54,8 @@ impl DiagnosticVoucher for ParserError { error_code, ) } + + fn clone_box(&self) -> Box<dyn DiagnosticVoucher> { + Box::new(self.clone()) + } } diff --git a/crates/language-server-macros/Cargo.toml b/crates/language-server-macros/Cargo.toml new file mode 100644 index 0000000000..f3b42833e1 --- /dev/null +++ b/crates/language-server-macros/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "fe-language-server-macros" +version = "0.23.0" +edition = "2021" + +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = "1" +quote = "1" +syn = { version = "2", features = ["full"] } \ No newline at end of file diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs new file mode 100644 index 0000000000..3c0e5a316c --- /dev/null +++ b/crates/language-server-macros/src/lib.rs @@ -0,0 +1,346 @@ +extern crate proc_macro; + +use proc_macro::TokenStream; +use quote::{format_ident, quote}; +use syn::{parse_macro_input, FnArg, ImplItem, ItemImpl, ReturnType}; + +/// Generates message channels and dispatch 
methods for a `tower_lsp::LanguageServer` implementation. +/// +/// This macro generates two structs: +/// - `MessageSenders`: Contains `tokio::sync::mpsc::UnboundedSender` channels for each method in the `LanguageServer` trait. +/// - `MessageReceivers`: Contains `tokio_stream::wrappers::UnboundedReceiverStream` streams for each method in the `LanguageServer` trait. +/// +/// It also generates a `setup_message_channels` function that initializes the channels and returns instances of the `MessageSenders` and `MessageReceivers` structs. +/// +/// # Example +/// +/// ```rust,ignore +/// use tower_lsp::LanguageServer; +/// +/// #[language_server_macros::message_channels] +/// #[tower_lsp::async_trait] +/// impl LanguageServer for Server { +/// // ... +/// } +/// ``` +#[proc_macro_attribute] +pub fn message_channels(_attr: TokenStream, item: TokenStream) -> TokenStream { + let channel_senders_struct_name = format_ident!( + "MessageSenders", + // attr.clone().map_or("MessageSenders".to_string(), |attr| attr.to_string()) + ); + + let channel_receivers_struct_name = format_ident!( + "MessageReceivers", + // attr.map_or("MessageReceivers".to_string(), |attr| attr.to_string()) + ); + + let lang_server_trait_impl = parse_macro_input!(item as ItemImpl); + + let method_calls = parse_method_calls(&lang_server_trait_impl); + let channel_struct = gen_channel_structs( + &method_calls, + channel_senders_struct_name, + channel_receivers_struct_name, + ); + + let tokens = quote! { + #channel_struct + #lang_server_trait_impl + }; + + tokens.into() + // item +} + +struct MessageTypeChannel<'a> { + // handler_name: &'a syn::Ident, + tx_name: syn::Ident, + stream_name: syn::Ident, + sender_fn_name: syn::Ident, + // subscribe_fn_name: syn::Ident, + rx_name: syn::Ident, + params: Option<&'a syn::Type>, + result: Option<&'a syn::Type>, +} + +fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec<MessageTypeChannel> { + let mut calls = Vec::new(); + + for item in &lang_server_trait.items { + let method = match item { + ImplItem::Fn(m) => m, + _ => continue, + }; + + let params = method.sig.inputs.iter().nth(1).and_then(|arg| match arg { + FnArg::Typed(pat) => Some(&*pat.ty), + _ => None, + }); + + let result = match &method.sig.output { + ReturnType::Default => None, + ReturnType::Type(_, ty) => Some(&**ty), + }; + + let handler_name = &method.sig.ident; + let tx_name = format_ident!("{}_tx", handler_name); + let stream_name = format_ident!("{}_stream", handler_name); + let sender_fn_name = format_ident!("send_{}", handler_name); + + let rx_name = format_ident!("{}_rx", handler_name); + + calls.push(MessageTypeChannel { + tx_name, + stream_name, + rx_name, + sender_fn_name, + params, + result, + }); + } + + calls +} + +fn gen_channel_structs( + channels: &[MessageTypeChannel], + channel_senders_struct_name: syn::Ident, + channel_receivers_struct_name: syn::Ident, +) -> proc_macro2::TokenStream { + // unit type + let unit_type = syn::Type::Tuple(syn::TypeTuple { + paren_token: syn::token::Paren::default(), + elems: syn::punctuated::Punctuated::new(), + }); + + let channel_senders_declarations: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let tx = &channel.tx_name; + let params = channel.params; + let result = channel.result; + + // if params is None we need to use the type of () as the default + let params = match params { + Some(params) => params, + None => &unit_type, + }; + + let sender_type = match result { + Some(result) => quote! 
{ tokio::sync::mpsc::UnboundedSender<(#params, tokio::sync::oneshot::Sender<#result>)> }, + None => quote! { tokio::sync::mpsc::UnboundedSender<#params> }, + }; + + quote! { + pub #tx: #sender_type, + } + }) + .collect(); + + let channel_receivers_declarations: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let stream_name = &channel.stream_name; + let params = channel.params; + let result = channel.result; + + // if params is None we need to use the type of () as the default + let params = match params { + Some(params) => params, + None => &unit_type, + }; + let stream_type = match result { + Some(result) => quote! { tokio_stream::wrappers::UnboundedReceiverStream<(#params, tokio::sync::oneshot::Sender<#result>)> }, + None => quote! { tokio_stream::wrappers::UnboundedReceiverStream<#params> }, + }; + + quote! { + pub #stream_name: #stream_type, + } + }) + .collect(); + + let channel_instantiations: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let tx = &channel.tx_name; + let rx = &channel.rx_name; + quote! { + let (#tx, #rx) = tokio::sync::mpsc::unbounded_channel(); + } + }) + .collect(); + + let channel_senders_assignments: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let tx = &channel.tx_name; + quote! { + #tx, + } + }) + .collect(); + + let channel_receivers_assignments: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let stream_name = &channel.stream_name; + let rx = &channel.rx_name; + quote! { + // #rx, + #stream_name: tokio_stream::wrappers::UnboundedReceiverStream::new(#rx), + } + }) + .collect(); + + let sender_dispatch_functions: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let tx = &channel.tx_name; + let params = &channel.params; + let sender_fn_name = &channel.sender_fn_name; + let sender_fn_result = match channel.result { + Some(result) => quote! {tokio::sync::oneshot::Receiver<#result>}, + None => quote! {()}, + }; + + let payload = match params { + Some(_params) => quote! { params }, + None => quote! { () }, + }; + + let send_payload = match channel.result { + Some(result) => quote! { + let (oneshot_tx, oneshot_rx) = tokio::sync::oneshot::channel::<#result>(); + let mpsc = self.#tx.clone(); + info!("sending oneshot sender: {:?}", #payload); + mpsc.send((#payload, oneshot_tx)).expect("send payload with oneshot"); + info!("returning oneshot receiver: {:?}", oneshot_rx); + oneshot_rx + }, + None => quote! { + self.#tx.send(#payload).expect("send payload"); + }, + }; + + let dispatcher_fn = match params { + Some(params) => quote! { + /// Forward the LSP request parameters to the designated channel. + /// + /// An oneshot receiver is returned which can optionally be used to get a response back from the channel listener. + pub fn #sender_fn_name(&self, params: #params) -> #sender_fn_result { + #send_payload + } + }, + None => quote! { + /// Forward the LSP notification parameters to the designated channel. + pub fn #sender_fn_name(&self) -> #sender_fn_result { + #send_payload + } + }, + }; + + quote! { + #dispatcher_fn + } + }) + .collect(); + + quote! { + /// Struct containing `tokio::sync::mpsc::UnboundedSender` channels for each method in the `LanguageServer` trait. + /// + /// This struct is generated by the `#[message_channels]` macro. For each method in the `LanguageServer` trait, + /// it generates a corresponding field with a name in the format `<method_name>_tx`. 
+ /// + /// For each implemented LSP notification method, a channel of type `tokio::sync::mpsc::UnboundedSender<Params>` is generated, where `Params` is the method's parameter type. + /// For each implemented LSP request methods, a channel of type `tokio::sync::mpsc::UnboundedSender<(Params, tokio::sync::oneshot::Sender<Result>)>` is generated, where `Params` is the method's parameter type and `Result` is the method's return type. + /// + /// The macro also generates corresponding `send_<method_name>` helper methods for each implemented LSP method to allow sending + /// requests or notifications through the respective channels. + /// + /// # Example + /// + /// ```rust,ignore + /// use tower_lsp::{LanguageServer, Client, jsonrpc::Result}; + /// use lsp_types::{InitializeParams, InitializeResult}; + /// + /// struct Backend { + /// messaging: MessageSenders, + /// client: Client, + /// } + /// + /// #[tower_lsp::async_trait] + /// impl LanguageServer for Backend { + /// async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> { + /// let rx = self.messaging.send_initialize(params); + /// + /// match rx.await { + /// Ok(result) => { + /// self.client.log_message(lsp_types::MessageType::INFO, "Server initialized!").await; + /// Ok(result) + /// } + /// Err(e) => { + /// self.client.log_message(lsp_types::MessageType::ERROR, format!("Failed to initialize: {:?}", e)).await; + /// Err(jsonrpc::Error::internal_error()) + /// } + /// } + /// } + /// + /// // Other LanguageServer methods... + /// } + /// ``` + pub struct #channel_receivers_struct_name { + #channel_receivers_declarations + } + + /// Struct containing `tokio_stream::wrappers::UnboundedReceiverStream` streams for each implemented `LanguageServer` trait method. + /// + /// This struct is generated by the `#[message_channels]` macro. For each implemented method of the `LanguageServer` trait, + /// it generates a corresponding field with a name in the format `<method_name>_stream`. + /// + /// The type of each field depends on the signature of the corresponding `LanguageServer` method: + /// - If the method has a return type, the field type is `tokio_stream::wrappers::UnboundedReceiverStream<(Params, tokio::sync::oneshot::Sender<Result>)>`, + /// where `Params` is the type of the method's parameter and `Result` is the return type. + /// - If the method doesn't have a return type, the field type is `tokio_stream::wrappers::UnboundedReceiverStream<Params>`. + /// + /// These streams can be used to handle incoming requests or notifications for each `LanguageServer` method. + /// + /// # Example + /// + /// ```rust,ignore + /// let (senders, receivers) = setup_message_channels(); + /// let mut initialized_stream = receivers.initialize_stream.fuse(); + /// loop { + /// select! { + /// Some((params, responder)) = initialized_stream.next() => { + /// // Handle initialization request + /// let result = lsp_types::InitializeResult { ... }; + /// let _ = responder.send(Ok(result)); + /// } + /// // ... + /// } + /// } + /// ``` + pub struct #channel_senders_struct_name { + #channel_senders_declarations + } + + /// Initializes the message channels and returns instances of the `MessageSenders` and `MessageReceivers` structs. 
+ pub fn setup_message_channels() -> (#channel_senders_struct_name, #channel_receivers_struct_name) { + #channel_instantiations + ( + #channel_senders_struct_name { + #channel_senders_assignments + }, + #channel_receivers_struct_name { + #channel_receivers_assignments + } + ) + } + + impl #channel_senders_struct_name { + #sender_dispatch_functions + } + } +} diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 1d53425b3e..af212f6cbd 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -13,25 +13,29 @@ description = "An LSP language server for Fe lang" salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } codespan-reporting = "0.11" hir = { path = "../hir", package = "fe-hir" } -macros = { path = "../macros", package = "fe-macros" } +language-server-macros = { path = "../language-server-macros", package = "fe-language-server-macros" } hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } camino = "1.1.4" -clap = { version = "4.3", features = ["derive"] } -fe-analyzer = {path = "../analyzer", version = "^0.23.0"} -driver = { path = "../driver2", package = "fe-driver2" } common = { path = "../common2", package = "fe-common2" } anyhow = "1.0.71" -crossbeam-channel = "0.5.8" -lsp-server = "0.7.0" lsp-types = "0.94.0" -serde = "1.0.162" serde_json = "1.0.96" -indexmap = "1.6.2" rowan = "0.15.10" fxhash = "0.2.1" dir-test = "0.1" fe-compiler-test-utils = { path = "../test-utils" } -log = "0.4" patricia_tree = "0.6.2" glob = "0.3.1" url = "2.4.1" +tower-lsp = "0.20.0" +tokio = { version = "1.35.1", features = ["full", "io-std", "tracing"] } +tokio-macros = "2.2.0" +futures = "0.3.28" +tokio-stream = { version = "0.1.14", features = ["sync"] } +fork_stream = "0.1.0" +futures-concurrency = "7.5.0" +console-subscriber = "0.2.0" +rust-embed = "8.3.0" +futures-batch = "0.6.1" +tracing = "0.1.40" +tracing-subscriber = "0.3.18" diff --git a/crates/language-server/editors/vscode/language-configuration.json b/crates/language-server/editors/vscode/language-configuration.json new file mode 100644 index 0000000000..7ab55b9b0f --- /dev/null +++ b/crates/language-server/editors/vscode/language-configuration.json @@ -0,0 +1,36 @@ +{ + "comments": { + "lineComment": "//", + }, + "brackets": [ + ["{", "}"], + ["[", "]"], + ["(", ")"] + ], + "autoClosingPairs": [ + { "open": "{", "close": "}" }, + { "open": "[", "close": "]" }, + { "open": "(", "close": ")" }, + { "open": "'", "close": "'", "notIn": ["string", "comment"] }, + { "open": "\"", "close": "\"", "notIn": ["string"] }, + ], + "autoCloseBefore": ";:.,=}])>` \n\t", + "surroundingPairs": [ + ["{", "}"], + ["[", "]"], + ["(", ")"], + ["'", "'"], + ["\"", "\""], + ], + "folding": { + "markers": { + "start": "^\\s*//\\s*#?region\\b", + "end": "^\\s*//\\s*#?endregion\\b" + } + }, + "wordPattern": "(-?\\d*\\.\\d\\w*)|([^\\`\\~\\!\\@\\#\\%\\^\\&\\*\\(\\)\\-\\=\\+\\[\\{\\]\\}\\\\\\|\\;\\:\\'\\\"\\,\\.\\<\\>\\/\\?\\s]+)", + "indentationRules": { + "increaseIndentPattern": "^((?!\\/\\/).)*(\\{[^}\"'`]*|\\([^)\"'`]*|\\[[^\\]\"'`]*)$", + "decreaseIndentPattern": "^((?!.*?\\/\\*).*\\*/)?\\s*[\\)\\}\\]].*$" + } +} diff --git a/crates/language-server/editors/vscode/package.json b/crates/language-server/editors/vscode/package.json index eaff771874..d2e33c8b1d 100644 --- a/crates/language-server/editors/vscode/package.json +++ b/crates/language-server/editors/vscode/package.json @@ -25,14 +25,15 @@ "id": "fe", "extensions": [ ".fe" - ] + ], + "configuration": 
"./language-configuration.json" } ], "grammars": [ { - "language": "fe", - "scopeName": "source.fe", - "path": "./fe.tmLanguage.json" + "language": "fe", + "scopeName": "source.fe", + "path": "./fe.tmLanguage.json" } ] }, diff --git a/crates/language-server/src/backend/db.rs b/crates/language-server/src/backend/db.rs new file mode 100644 index 0000000000..0dd0fc1dd0 --- /dev/null +++ b/crates/language-server/src/backend/db.rs @@ -0,0 +1,58 @@ +use common::InputDb; + +use hir::{HirDb, LowerHirDb, SpannedHirDb}; +use hir_analysis::HirAnalysisDb; +use salsa::{ParallelDatabase, Snapshot}; + +#[salsa::jar(db = LanguageServerDb)] +pub struct Jar(crate::functionality::diagnostics::file_line_starts); + +pub trait LanguageServerDb: + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb +{ +} + +impl LanguageServerDb for DB where + DB: Sized + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb +{ +} + +#[salsa::db( + common::Jar, + hir::Jar, + hir::LowerJar, + hir::SpannedJar, + hir_analysis::Jar, + Jar +)] +pub struct LanguageServerDatabase { + storage: salsa::Storage, +} + +// impl LanguageServerDatabase { +// pub fn as_language_server_db(&self) -> &dyn LanguageServerDb { +// >::as_jar_db::<'_>(self) +// } +// } + +impl salsa::Database for LanguageServerDatabase { + fn salsa_event(&self, _: salsa::Event) {} +} + +impl Default for LanguageServerDatabase { + fn default() -> Self { + let db = Self { + storage: Default::default(), + }; + db.prefill(); + db + } +} + +impl ParallelDatabase for LanguageServerDatabase { + fn snapshot(&self) -> Snapshot { + Snapshot::new(LanguageServerDatabase { + storage: self.storage.snapshot(), + }) + } +} diff --git a/crates/language-server/src/backend/mod.rs b/crates/language-server/src/backend/mod.rs new file mode 100644 index 0000000000..e77ce263a1 --- /dev/null +++ b/crates/language-server/src/backend/mod.rs @@ -0,0 +1,32 @@ +pub(crate) mod db; +pub(crate) mod workspace; +use db::LanguageServerDatabase; +use workspace::Workspace; + +use tower_lsp::Client; + +pub struct Backend { + pub(super) client: Client, + pub(super) db: LanguageServerDatabase, + pub(super) workspace: Workspace, + pub(super) workers: tokio::runtime::Runtime, +} + +impl Backend { + pub fn new(client: Client) -> Self { + let db = LanguageServerDatabase::default(); + let workspace = Workspace::default(); + + let workers = tokio::runtime::Builder::new_multi_thread() + .worker_threads(4) + .enable_all() + .build() + .unwrap(); + Self { + client, + db, + workspace, + workers, + } + } +} diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/backend/workspace.rs similarity index 65% rename from crates/language-server/src/workspace.rs rename to crates/language-server/src/backend/workspace.rs index 9e933029bc..e1590fc236 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/backend/workspace.rs @@ -1,15 +1,25 @@ -use std::{collections::BTreeSet, path::PathBuf}; +use std::{ + borrow::Cow, + collections::BTreeSet, + path::{Path, PathBuf}, +}; use anyhow::Result; use common::{ input::{IngotKind, Version}, InputFile, InputIngot, }; -use hir::{hir_def::TopLevelMod, lower::map_file_to_mod}; -use log::info; +use hir::{hir_def::TopLevelMod, lower::map_file_to_mod, LowerHirDb}; use patricia_tree::StringPatriciaMap; +use tracing::info; + +use super::db::LanguageServerDatabase; + +use rust_embed::RustEmbed; -use crate::db::LanguageServerDatabase; +#[derive(RustEmbed)] +#[folder = "../library/std"] 
+struct StdLib; const FE_CONFIG_SUFFIX: &str = "fe.toml"; fn ingot_directory_key(path: String) -> String { @@ -19,21 +29,24 @@ fn ingot_directory_key(path: String) -> String { } pub trait IngotFileContext { - fn input_from_file_path( + fn get_input_for_file_path(&self, path: &str) -> Option; + fn touch_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option; - fn ingot_from_file_path( + fn get_ingot_for_file_path(&self, path: &str) -> Option; + fn touch_ingot_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option; - fn top_mod_from_file_path( + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option; + fn remove_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, - ) -> Option; + ) -> Result<()>; } pub struct LocalIngotContext { @@ -48,7 +61,7 @@ fn ingot_contains_file(ingot_path: &str, file_path: &str) -> bool { file_path.starts_with(ingot_path) } -pub fn get_containing_ingot<'a, T>( +pub fn get_containing_ingot_mut<'a, T>( ingots: &'a mut StringPatriciaMap, path: &'a str, ) -> Option<&'a mut T> { @@ -58,6 +71,16 @@ pub fn get_containing_ingot<'a, T>( .map(|(_, ingot)| ingot) } +pub fn get_containing_ingot<'a, T>( + ingots: &'a StringPatriciaMap, + path: &'a str, +) -> Option<&'a T> { + ingots + .get_longest_common_prefix(path) + .filter(|(ingot_path, _)| ingot_contains_file(ingot_path, path)) + .map(|(_, ingot)| ingot) +} + impl LocalIngotContext { pub fn new(db: &LanguageServerDatabase, config_path: &str) -> Option { let ingot = InputIngot::new( @@ -75,12 +98,12 @@ impl LocalIngotContext { } impl IngotFileContext for LocalIngotContext { - fn input_from_file_path( + fn touch_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { - let ingot = self.ingot_from_file_path(db, path)?; + let ingot = self.touch_ingot_for_file_path(db, path)?; let input = self.files.get(path).map_or_else( || { let file = InputFile::new(db, ingot, path.into(), String::new()); @@ -89,10 +112,15 @@ impl IngotFileContext for LocalIngotContext { |file| Some(*file), ); self.files.insert(path, input.unwrap()); + ingot.set_files(db, self.files.values().copied().collect()); input } - fn ingot_from_file_path( + fn get_input_for_file_path(&self, path: &str) -> Option { + self.files.get(path).copied() + } + + fn touch_ingot_for_file_path( &mut self, _db: &mut LanguageServerDatabase, _path: &str, @@ -100,13 +128,34 @@ impl IngotFileContext for LocalIngotContext { Some(self.ingot) } - fn top_mod_from_file_path( + fn get_ingot_for_file_path(&self, _path: &str) -> Option { + Some(self.ingot) + } + + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + let file = self.get_input_for_file_path(path)?; + Some(map_file_to_mod(db.as_lower_hir_db(), file)) + } + + fn remove_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, - ) -> Option { - let file = self.input_from_file_path(db, path)?; - Some(map_file_to_mod(db, file)) + ) -> Result<()> { + let file = self.files.remove(path); + + if let Some(_file) = file { + let ingot = self.ingot; + let new_ingot_files = self + .files + .values() + .copied() + .collect::>(); + ingot.set_files(db, new_ingot_files); + Ok(()) + } else { + Err(anyhow::anyhow!("File not found in ingot")) + } } } @@ -125,12 +174,12 @@ impl StandaloneIngotContext { } impl IngotFileContext for StandaloneIngotContext { - fn input_from_file_path( + fn touch_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: 
&str, ) -> Option { - let ingot = self.ingot_from_file_path(db, path)?; + let ingot = self.touch_ingot_for_file_path(db, path)?; let input_file = self.files.get(path).map_or_else( || { let file = InputFile::new(db, ingot, path.into(), String::new()); @@ -144,12 +193,16 @@ impl IngotFileContext for StandaloneIngotContext { input_file } - fn ingot_from_file_path( + fn get_input_for_file_path(&self, path: &str) -> Option { + self.files.get(path).copied() + } + + fn touch_ingot_for_file_path( &mut self, _db: &mut LanguageServerDatabase, path: &str, ) -> Option { - get_containing_ingot(&mut self.ingots, path) + get_containing_ingot_mut(&mut self.ingots, path) .as_deref() .copied() .map_or_else( @@ -168,13 +221,26 @@ impl IngotFileContext for StandaloneIngotContext { ) } - fn top_mod_from_file_path( + fn get_ingot_for_file_path(&self, path: &str) -> Option { + // this shouldn't mutate, it should only get the ingot or return `None` + get_containing_ingot(&self.ingots, path).copied() + } + + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + let file = self.get_input_for_file_path(path)?; + Some(map_file_to_mod(db.as_lower_hir_db(), file)) + } + + fn remove_input_for_file_path( &mut self, - db: &mut LanguageServerDatabase, + _db: &mut LanguageServerDatabase, path: &str, - ) -> Option { - let file = self.input_from_file_path(db, path)?; - Some(map_file_to_mod(db, file)) + ) -> Result<()> { + let file = self.files.remove(path); + if let Some(_file) = file { + self.ingots.remove(path); + } + Ok(()) } } @@ -193,13 +259,39 @@ impl Workspace { } } + pub fn load_std_lib( + &mut self, + db: &mut LanguageServerDatabase, + root_path: &Path, + ) -> Result<()> { + let root_path = root_path.to_str().unwrap(); + self.touch_ingot_for_file_path(db, &format!("{}/std/fe.toml", root_path)) + .unwrap(); + + info!("Loading std lib..."); + + StdLib::iter().for_each(|path: Cow<'static, str>| { + let path = path.as_ref(); + let std_path = format!("{}/std/{}", root_path, path); + info!("adding std file... 
{:?} --- {:?}", std_path, path); + if let Some(file) = StdLib::get(path) { + let contents = String::from_utf8(file.data.as_ref().to_vec()); + if let Ok(contents) = contents { + let input = self.touch_input_for_file_path(db, &std_path); + input.unwrap().set_text(db).to(contents); + }; + }; + }); + Ok(()) + } + pub fn set_workspace_root( &mut self, db: &mut LanguageServerDatabase, - root_path: PathBuf, + root_path: &Path, ) -> Result<()> { let path = root_path; - self.root_path = Some(path); + self.root_path = Some(path.to_path_buf()); self.sync(db) } @@ -251,28 +343,28 @@ impl Workspace { info!("Syncing ingot at {}", config_path); let ingot_root = config_path.strip_suffix(FE_CONFIG_SUFFIX).unwrap(); - let paths = &glob::glob(&format!("{ingot_root}/src/**/*.fe")) + let actual_paths = &glob::glob(&format!("{ingot_root}/src/**/*.fe")) .unwrap() .map(|p| p.unwrap().to_str().unwrap().to_string()) .collect::>(); - info!("Found {} files in ingot", paths.len()); - info!("Syncing ingot files: {:?}", paths); + info!("Found {} files in ingot", actual_paths.len()); + info!("Syncing ingot files: {:?}", actual_paths); let ingot_context = self .ingot_context_from_config_path(db, config_path) .unwrap(); - let ingot_context_file_keys = &ingot_context.files.keys().collect::>(); - for path in ingot_context_file_keys { - if !paths.contains(path) { - ingot_context.files.remove(path); + let previous_ingot_context_file_keys = &ingot_context.files.keys().collect::>(); + for path in previous_ingot_context_file_keys { + if !actual_paths.contains(path) { + let _ = ingot_context.remove_input_for_file_path(db, path); } } - for path in paths { - if !ingot_context_file_keys.contains(path) { - let file = ingot_context.input_from_file_path(db, path); + for path in actual_paths { + if !previous_ingot_context_file_keys.contains(path) { + let file = ingot_context.touch_input_for_file_path(db, path); let contents = std::fs::read_to_string(path).unwrap(); file.unwrap().set_text(db).to(contents); } @@ -303,38 +395,54 @@ impl Workspace { } impl IngotFileContext for Workspace { - fn input_from_file_path( + fn touch_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { - let ctx = get_containing_ingot(&mut self.ingot_contexts, path); + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.touch_input_for_file_path(db, path) + } else { + self.standalone_ingot_context + .touch_input_for_file_path(db, path) + } + } + + fn get_input_for_file_path(&self, path: &str) -> Option { + let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { - ctx.input_from_file_path(db, path) + ctx.get_input_for_file_path(path) } else { - self.standalone_ingot_context.input_from_file_path(db, path) + self.standalone_ingot_context.get_input_for_file_path(path) } } - fn ingot_from_file_path( + fn touch_ingot_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { - let ctx = get_containing_ingot(&mut self.ingot_contexts, path); + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); if let Some(ctx) = ctx { - Some(ctx.ingot_from_file_path(db, path).unwrap()) + Some(ctx.touch_ingot_for_file_path(db, path).unwrap()) } else { - self.standalone_ingot_context.ingot_from_file_path(db, path) + self.standalone_ingot_context + .touch_ingot_for_file_path(db, path) } } - fn top_mod_from_file_path( - &mut self, - db: &mut LanguageServerDatabase, - path: &str, - ) -> Option { - let ctx = get_containing_ingot(&mut 
self.ingot_contexts, path); + fn get_ingot_for_file_path(&self, path: &str) -> Option { + let ctx = get_containing_ingot(&self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.get_ingot_for_file_path(path) + } else { + self.standalone_ingot_context.get_ingot_for_file_path(path) + } + } + + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { Some(ctx.top_mod_from_file_path(db, path).unwrap()) } else { @@ -342,45 +450,21 @@ impl IngotFileContext for Workspace { .top_mod_from_file_path(db, path) } } -} -pub trait SyncableInputFile { - fn sync(&self, db: &mut LanguageServerDatabase, contents: Option) -> Result<()>; - fn sync_from_fs(&self, db: &mut LanguageServerDatabase) -> Result<()>; - fn sync_from_text(&self, db: &mut LanguageServerDatabase, contents: String) -> Result<()>; - fn remove_from_ingot(&self, db: &mut LanguageServerDatabase) -> Result<()>; -} - -impl SyncableInputFile for InputFile { - fn sync_from_fs(&self, db: &mut LanguageServerDatabase) -> Result<()> { - let path = self.path(db); - let contents = std::fs::read_to_string(path)?; - self.set_text(db).to(contents); - Ok(()) - } - fn sync_from_text(&self, db: &mut LanguageServerDatabase, contents: String) -> Result<()> { - self.set_text(db).to(contents); - Ok(()) - } - fn sync(&self, db: &mut LanguageServerDatabase, contents: Option) -> Result<()> { - // check to see if the file actually exists anymore: - let path = self.path(db); - if !path.exists() { - // if not let's remove it from the ingot - self.remove_from_ingot(db) - } else if let Some(contents) = contents { - self.sync_from_text(db, contents) + fn remove_input_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Result<()> { + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.remove_input_for_file_path(db, path) } else { - self.sync_from_fs(db) + self.standalone_ingot_context + .remove_input_for_file_path(db, path)?; + Ok(()) } } - fn remove_from_ingot(&self, db: &mut LanguageServerDatabase) -> Result<()> { - let ingot = self.ingot(db); - let mut files = ingot.files(db).clone(); - files.remove(self); - ingot.set_files(db, files); - Ok(()) - } } pub trait SyncableIngotFileContext { @@ -411,21 +495,6 @@ impl SyncableIngotFileContext for Workspace { for ingot_path in ingot_paths { self.sync_ingot_files(db, &ingot_path); } - - let paths = glob::glob(&format!("{path}/src/**/*.fe")) - .ok() - .unwrap() - .filter_map(|p| { - p.ok() - .unwrap() - .to_str() - .map(std::string::ToString::to_string) - }) - .collect::>(); - - for path in paths { - self.input_from_file_path(db, &path); - } Ok(()) } } @@ -433,22 +502,24 @@ impl SyncableIngotFileContext for Workspace { #[cfg(test)] mod tests { - use crate::workspace::{get_containing_ingot, IngotFileContext, Workspace, FE_CONFIG_SUFFIX}; + use crate::backend::workspace::{ + get_containing_ingot_mut, IngotFileContext, Workspace, FE_CONFIG_SUFFIX, + }; use std::path::PathBuf; use super::StandaloneIngotContext; #[test] fn test_standalone_context() { - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); let file_path = "tests/data/ingot1/src/main.fe"; let ctx = &mut StandaloneIngotContext::new(); - let file = ctx.input_from_file_path(&mut db, file_path); + let file = ctx.touch_input_for_file_path(&mut db, file_path); assert!(file.is_some()); - let ingot = 
ctx.ingot_from_file_path(&mut db, file_path); + let ingot = ctx.touch_ingot_for_file_path(&mut db, file_path); assert!(ingot.is_some()); assert_eq!( ingot.unwrap().kind(&db), @@ -460,9 +531,9 @@ mod tests { #[test] fn test_workspace_standalone_ingot() { let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); let file_path = "tests/data/ingot1/src/main.fe"; - let file = workspace.input_from_file_path(&mut db, file_path); + let file = workspace.touch_input_for_file_path(&mut db, file_path); assert!(file.is_some()); } @@ -473,7 +544,7 @@ mod tests { let _ingot_context_ingot = { let ingot_context = workspace.ingot_context_from_config_path( - &crate::db::LanguageServerDatabase::default(), + &crate::backend::db::LanguageServerDatabase::default(), config_path, ); @@ -489,12 +560,14 @@ mod tests { .get_longest_common_prefix(file_path) .is_some()); - let containing_ingot = get_containing_ingot(&mut workspace.ingot_contexts, file_path); + let containing_ingot = get_containing_ingot_mut(&mut workspace.ingot_contexts, file_path); assert!(containing_ingot.as_deref().is_some()); - let ingot = workspace - .ingot_from_file_path(&mut crate::db::LanguageServerDatabase::default(), file_path); + let ingot = workspace.touch_ingot_for_file_path( + &mut crate::backend::db::LanguageServerDatabase::default(), + file_path, + ); assert!(ingot.is_some()); } @@ -502,7 +575,7 @@ mod tests { fn test_workspace_local_ingot() { let config_path = "tests/data/ingot1/fe.toml"; let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); let ingot_context_ingot = { let ingot_context = workspace.ingot_context_from_config_path(&db, config_path); @@ -512,10 +585,10 @@ mod tests { }; let file_path = "tests/data/ingot1/src/main.fe"; - let file = workspace.input_from_file_path(&mut db, file_path); + let file = workspace.touch_input_for_file_path(&mut db, file_path); assert!(file.is_some()); - let ingot = workspace.ingot_from_file_path(&mut db, file_path); + let ingot = workspace.touch_ingot_for_file_path(&mut db, file_path); assert!(ingot.is_some()); assert_eq!(file.map(|f| f.ingot(&db)).unwrap(), ingot.unwrap()); @@ -536,15 +609,15 @@ mod tests { let _ingot_config_path = &ingot_base_dir.join("fe.toml"); let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); - let _ = workspace.set_workspace_root(&mut db, ingot_base_dir.clone()); + let _ = workspace.set_workspace_root(&mut db, &ingot_base_dir); // panic!("wtf? 
{:?}", ingot_base_dir); assert_eq!(workspace.ingot_contexts.len(), 1); let fe_source_path = ingot_base_dir.join("src/main.fe"); - let input = workspace.input_from_file_path(&mut db, fe_source_path.to_str().unwrap()); + let input = workspace.touch_input_for_file_path(&mut db, fe_source_path.to_str().unwrap()); assert!(input.is_some()); assert!(input.unwrap().ingot(&db).kind(&db) == common::input::IngotKind::Local); } @@ -561,13 +634,13 @@ mod tests { ); let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); workspace.sync_local_ingots(&mut db, &path); assert!(workspace.ingot_contexts.len() == 2); - let _ = workspace.set_workspace_root(&mut db, PathBuf::from(&path)); + let _ = workspace.set_workspace_root(&mut db, &PathBuf::from(&path)); // get all top level modules for .fe files in the workspace let fe_files = glob::glob(&format!("{path}/**/*.fe")) @@ -577,12 +650,14 @@ mod tests { .collect::>(); for src_path in fe_files { - let _file = workspace.input_from_file_path(&mut db, &src_path).unwrap(); + let _file = workspace + .touch_input_for_file_path(&mut db, &src_path) + .unwrap(); // normally would do this but it's not relevant here... // file.sync(&mut db, None); // this would panic if a file has been added to multiple ingots - let _top_mod = workspace.top_mod_from_file_path(&mut db, src_path.as_str()); + let _top_mod = workspace.top_mod_from_file_path(&db, src_path.as_str()); } } @@ -598,7 +673,7 @@ mod tests { ); let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); workspace.sync_local_ingots(&mut db, &path); @@ -616,7 +691,9 @@ mod tests { let foo_files = foo_context.files.keys().collect::>(); for file in foo_files { let contents = std::fs::read_to_string(&file).unwrap(); - let file = foo_context.input_from_file_path(&mut db, &file).unwrap(); + let file = foo_context + .touch_input_for_file_path(&mut db, &file) + .unwrap(); assert!(*file.text(&db) == contents); } @@ -629,11 +706,11 @@ mod tests { let dangling_path = format!("{crate_dir}/test_files/messy/dangling.fe"); let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); workspace.sync_local_ingots(&mut db, &messy_workspace_path); let dangling_file = workspace - .input_from_file_path(&mut db, &dangling_path) + .touch_input_for_file_path(&mut db, &dangling_path) .unwrap(); assert_eq!( @@ -654,7 +731,7 @@ mod tests { let non_dangling_file_path = format!("{crate_dir}/test_files/messy/foo/bar/src/main.fe"); let non_dangling_input = workspace - .input_from_file_path(&mut db, &non_dangling_file_path) + .touch_input_for_file_path(&mut db, &non_dangling_file_path) .unwrap(); assert_eq!( diff --git a/crates/language-server/src/cursor.rs b/crates/language-server/src/cursor.rs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs deleted file mode 100644 index 3bdadfd238..0000000000 --- a/crates/language-server/src/db.rs +++ /dev/null @@ -1,120 +0,0 @@ -use common::{diagnostics::CompleteDiagnostic, InputDb}; -use hir::{ - analysis_pass::AnalysisPassManager, - diagnostics::DiagnosticVoucher, - hir_def::{ItemKind, TopLevelMod}, - span::{DynLazySpan, LazySpan}, - HirDb, LowerHirDb, ParsingPass, SpannedHirDb, -}; -use 
hir_analysis::{ - name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, - HirAnalysisDb, -}; - -use crate::goto::Cursor; - -#[salsa::jar(db = LanguageServerDb)] -pub struct Jar(crate::diagnostics::file_line_starts); - -pub trait LanguageServerDb: - salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb -{ -} - -impl LanguageServerDb for DB where - DB: Sized + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb -{ -} - -#[salsa::db( - common::Jar, - hir::Jar, - hir::LowerJar, - hir::SpannedJar, - hir_analysis::Jar, - Jar -)] -pub struct LanguageServerDatabase { - storage: salsa::Storage, - diags: Vec>, -} - -impl LanguageServerDatabase { - pub fn analyze_top_mod(&mut self, top_mod: TopLevelMod) { - self.run_on_file_with_pass_manager(top_mod, initialize_analysis_pass); - } - - pub fn run_on_file_with_pass_manager(&mut self, top_mod: TopLevelMod, pm_builder: F) - where - F: FnOnce(&Self) -> AnalysisPassManager<'_>, - { - self.diags.clear(); - self.diags = { - let mut pass_manager = pm_builder(self); - pass_manager.run_on_module(top_mod) - }; - } - - pub fn find_enclosing_item( - &mut self, - top_mod: TopLevelMod, - cursor: Cursor, - ) -> Option { - let items = top_mod - .scope_graph(self.as_hir_db()) - .items_dfs(self.as_hir_db()); - - let mut smallest_enclosing_item = None; - let mut smallest_range_size = None; - - for item in items { - let lazy_item_span = DynLazySpan::from(item.lazy_span()); - let item_span = lazy_item_span - .resolve(SpannedHirDb::as_spanned_hir_db(self)) - .unwrap(); - - if item_span.range.contains(cursor) { - let range_size = item_span.range.end() - item_span.range.start(); - if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { - smallest_enclosing_item = Some(item); - smallest_range_size = Some(range_size); - } - } - } - - smallest_enclosing_item - } - - pub fn finalize_diags(&self) -> Vec { - let mut diags: Vec<_> = self.diags.iter().map(|d| d.to_complete(self)).collect(); - diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { - std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), - ord => ord, - }); - diags - } -} - -impl salsa::Database for LanguageServerDatabase { - fn salsa_event(&self, _: salsa::Event) {} -} - -impl Default for LanguageServerDatabase { - fn default() -> Self { - let db = Self { - storage: Default::default(), - diags: Vec::new(), - }; - db.prefill(); - db - } -} - -fn initialize_analysis_pass(db: &LanguageServerDatabase) -> AnalysisPassManager<'_> { - let mut pass_manager = AnalysisPassManager::new(); - pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); - pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); - pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); - pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); - pass_manager -} diff --git a/crates/language-server/src/functionality/capabilities.rs b/crates/language-server/src/functionality/capabilities.rs new file mode 100644 index 0000000000..a915da550c --- /dev/null +++ b/crates/language-server/src/functionality/capabilities.rs @@ -0,0 +1,99 @@ +use lsp_types::{HoverProviderCapability, ServerCapabilities}; + +#[cfg(target_arch = "wasm32")] +use crate::util::DummyFilePathConversion; + +pub(crate) fn server_capabilities() -> ServerCapabilities { + ServerCapabilities { + hover_provider: Some(HoverProviderCapability::Simple(true)), + // full sync mode for now + text_document_sync: 
Some(lsp_types::TextDocumentSyncCapability::Kind( + lsp_types::TextDocumentSyncKind::FULL, + )), + // goto definition + definition_provider: Some(lsp_types::OneOf::Left(true)), + // support for workspace add/remove changes + workspace: Some(lsp_types::WorkspaceServerCapabilities { + workspace_folders: Some(lsp_types::WorkspaceFoldersServerCapabilities { + supported: Some(true), + change_notifications: Some(lsp_types::OneOf::Left(true)), + }), + file_operations: Some(lsp_types::WorkspaceFileOperationsServerCapabilities { + did_create: Some(lsp_types::FileOperationRegistrationOptions { + filters: vec![lsp_types::FileOperationFilter { + scheme: Some(String::from("file")), + pattern: lsp_types::FileOperationPattern { + glob: String::from("**/*"), + options: None, + // options: Some(lsp_types::FileOperationPatternOptions { + // ignore_case: Some(true), + // }), + matches: None, + }, + }], + }), + did_rename: Some(lsp_types::FileOperationRegistrationOptions { + filters: vec![lsp_types::FileOperationFilter { + scheme: Some(String::from("file")), + pattern: lsp_types::FileOperationPattern { + glob: String::from("**/*"), + options: None, + // options: Some(lsp_types::FileOperationPatternOptions { + // ignore_case: Some(true), + // }), + matches: None, + }, + }], + }), + did_delete: Some(lsp_types::FileOperationRegistrationOptions { + filters: vec![lsp_types::FileOperationFilter { + scheme: Some(String::from("file")), + pattern: lsp_types::FileOperationPattern { + glob: String::from("**/*"), + options: None, + // options: Some(lsp_types::FileOperationPatternOptions { + // ignore_case: Some(true), + // }), + matches: None, + }, + }], + }), + will_create: None, + will_rename: None, + will_delete: None, + // TODO: implement file operation refactors and workspace cache updates + // will_create: Some(lsp_types::FileOperationRegistrationOptions { + // filters: vec![lsp_types::FileOperationFilter { + // scheme: Some(String::from("file")), + // pattern: lsp_types::FileOperationPattern { + // glob: String::from("**/*"), + // options: None, + // matches: None, + // }, + // }], + // }), + // will_rename: Some(lsp_types::FileOperationRegistrationOptions { + // filters: vec![lsp_types::FileOperationFilter { + // scheme: Some(String::from("file")), + // pattern: lsp_types::FileOperationPattern { + // glob: String::from("**/*"), + // options: None, + // matches: None, + // }, + // }], + // }), + // will_delete: Some(lsp_types::FileOperationRegistrationOptions { + // filters: vec![lsp_types::FileOperationFilter { + // scheme: Some(String::from("file")), + // pattern: lsp_types::FileOperationPattern { + // glob: String::from("**/*"), + // options: None, + // matches: None, + // }, + // }], + // }), + }), + }), + ..Default::default() + } +} diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/functionality/diagnostics.rs similarity index 57% rename from crates/language-server/src/diagnostics.rs rename to crates/language-server/src/functionality/diagnostics.rs index 4d1d577218..8540d0a5ea 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/functionality/diagnostics.rs @@ -1,16 +1,28 @@ use std::ops::Range; use camino::Utf8Path; + use codespan_reporting as cs; use cs::{diagnostic as cs_diag, files as cs_files}; use common::{ - diagnostics::{LabelStyle, Severity}, - InputFile, + diagnostics::{CompleteDiagnostic, LabelStyle, Severity}, + InputDb, InputFile, +}; + +use fxhash::FxHashMap; +use hir::{ + analysis_pass::AnalysisPassManager, 
diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, + lower::map_file_to_mod, ParsingPass, +}; +use hir_analysis::name_resolution::{ + DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass, }; -use hir::diagnostics::DiagnosticVoucher; -use crate::db::{LanguageServerDatabase, LanguageServerDb}; +use crate::{ + backend::db::{LanguageServerDatabase, LanguageServerDb}, + util::diag_to_lsp, +}; pub trait ToCsDiag { fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic; @@ -18,7 +30,7 @@ pub trait ToCsDiag { impl ToCsDiag for T where - T: DiagnosticVoucher, + T: DiagnosticVoucher + Sync, { fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic { let complete = self.to_complete(db); @@ -120,3 +132,50 @@ impl<'a> cs_files::Files<'a> for LanguageServerDatabase { Ok(Range { start, end }) } } + +impl LanguageServerDatabase { + pub fn analyze_top_mod(&self, top_mod: TopLevelMod) -> Vec> { + let mut pass_manager = initialize_analysis_pass(self); + pass_manager.run_on_module(top_mod) + } + + pub fn finalize_diags(&self, diags: &[Box]) -> Vec { + let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); + diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { + std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), + ord => ord, + }); + diags + } + + pub fn get_lsp_diagnostics( + &self, + files: Vec, + ) -> FxHashMap> { + let mut result = FxHashMap::>::default(); + files + .iter() + .flat_map(|file| { + let top_mod = map_file_to_mod(self, *file); + let diagnostics = self.analyze_top_mod(top_mod); + self.finalize_diags(&diagnostics) + .into_iter() + .flat_map(|diag| diag_to_lsp(diag, self.as_input_db()).clone()) + }) + .for_each(|(uri, more_diags)| { + let _ = result.entry(uri.clone()).or_insert_with(Vec::new); + let diags = result.entry(uri).or_insert_with(Vec::new); + diags.extend(more_diags); + }); + result + } +} + +fn initialize_analysis_pass(db: &LanguageServerDatabase) -> AnalysisPassManager<'_> { + let mut pass_manager = AnalysisPassManager::new(); + pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); + pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); + pass_manager +} diff --git a/crates/language-server/src/functionality/goto.rs b/crates/language-server/src/functionality/goto.rs new file mode 100644 index 0000000000..4403bba523 --- /dev/null +++ b/crates/language-server/src/functionality/goto.rs @@ -0,0 +1,418 @@ +use fxhash::FxHashMap; +use hir::{ + hir_def::{scope_graph::ScopeId, IdentId, ItemKind, Partial, PathId, TopLevelMod}, + span::DynLazySpan, + visitor::{prelude::LazyPathSpan, Visitor, VisitorCtxt}, + LowerHirDb, SpannedHirDb, +}; +use hir_analysis::name_resolution::{EarlyResolvedPath, NameDomain, NameRes}; + +use crate::{ + backend::{db::LanguageServerDb, Backend}, + util::{to_lsp_location_from_scope, to_offset_from_position}, +}; +use common::diagnostics::Span; +use hir::span::LazySpan; + +pub type Cursor = rowan::TextSize; +struct GotoEnclosingPathSegment { + path: PathId, + idx: usize, + scope: ScopeId, +} +impl GotoEnclosingPathSegment { + fn segments<'db>(&self, db: &'db dyn LanguageServerDb) -> &'db [Partial] { + &self.path.segments(db.as_hir_db())[0..self.idx + 1] + } + fn is_intermediate(&self, db: &dyn LanguageServerDb) -> bool { + self.idx < self.path.segments(db.as_hir_db()).len() - 1 + } +} + +struct 
PathSegmentSpanCollector<'db> { + segment_map: FxHashMap, + db: &'db dyn LanguageServerDb, +} + +impl<'db> PathSegmentSpanCollector<'db> { + fn new(db: &'db dyn LanguageServerDb) -> Self { + Self { + segment_map: FxHashMap::default(), + db, + } + } +} + +impl<'db> Visitor for PathSegmentSpanCollector<'db> { + fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { + let Some(path_span) = ctxt.span() else { + return; + }; + + let scope = ctxt.scope(); + for i in 0..path.segments(self.db.as_hir_db()).iter().len() { + let Some(segment_span) = path_span.segment(i).resolve(self.db.as_spanned_hir_db()) + else { + continue; + }; + + self.segment_map.insert( + segment_span, + GotoEnclosingPathSegment { + path, + idx: i, + scope, + }, + ); + } + } +} + +fn smallest_enclosing_segment( + cursor: Cursor, + ident_map: &FxHashMap, +) -> Option<&GotoEnclosingPathSegment> { + let mut smallest_enclosing_segment = None; + let mut smallest_range_size = None; + + for (span, enclosing_segment) in ident_map { + if span.range.contains(cursor) { + let range_size = span.range.end() - span.range.start(); + if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { + smallest_enclosing_segment = Some(enclosing_segment); + smallest_range_size = Some(range_size); + } + } + } + + smallest_enclosing_segment +} + +pub fn find_enclosing_item( + db: &dyn SpannedHirDb, + top_mod: TopLevelMod, + cursor: Cursor, +) -> Option { + let items = top_mod + .scope_graph(db.as_hir_db()) + .items_dfs(db.as_hir_db()); + + let mut smallest_enclosing_item = None; + let mut smallest_range_size = None; + + for item in items { + let lazy_item_span = DynLazySpan::from(item.lazy_span()); + let item_span = lazy_item_span + .resolve(SpannedHirDb::as_spanned_hir_db(db)) + .unwrap(); + + if item_span.range.contains(cursor) { + let range_size = item_span.range.end() - item_span.range.start(); + if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { + smallest_enclosing_item = Some(item); + smallest_range_size = Some(range_size); + } + } + } + + smallest_enclosing_item +} + +pub fn get_goto_target_scopes_for_cursor( + db: &dyn LanguageServerDb, + top_mod: TopLevelMod, + cursor: Cursor, +) -> Option> { + let item: ItemKind = find_enclosing_item(db.as_spanned_hir_db(), top_mod, cursor)?; + + let mut visitor_ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); + let mut path_segment_collector = PathSegmentSpanCollector::new(db); + path_segment_collector.visit_item(&mut visitor_ctxt, item); + + let cursor_segment = smallest_enclosing_segment(cursor, &path_segment_collector.segment_map)?; + + let segments = cursor_segment.segments(db); + let is_intermediate_segment = cursor_segment.is_intermediate(db); + // let is_partial = cursor_segment.idx < cursor_segment.path.segments(db.as_jar_db()).len(); + let resolved_segments = hir_analysis::name_resolution::resolve_segments_early( + db.as_hir_analysis_db(), + segments, + cursor_segment.scope, + ); + + let scopes = match resolved_segments { + EarlyResolvedPath::Full(bucket) => { + if is_intermediate_segment { + match bucket.pick(NameDomain::Type) { + Ok(res) => res.scope().iter().cloned().collect::>(), + _ => bucket.iter().filter_map(NameRes::scope).collect::>(), + } + } else { + bucket.iter().filter_map(NameRes::scope).collect::>() + } + } + EarlyResolvedPath::Partial { + res, + unresolved_from: _, + } => res.scope().iter().cloned().collect::>(), + }; + + Some(scopes) +} + +use crate::backend::workspace::IngotFileContext; + +impl Backend 
{ + pub(super) async fn handle_goto_definition( + &self, + params: lsp_types::GotoDefinitionParams, + responder: tokio::sync::oneshot::Sender< + Result, tower_lsp::jsonrpc::Error>, + >, + ) { + // Convert the position to an offset in the file + let params = params.text_document_position_params; + let file_text = std::fs::read_to_string(params.text_document.uri.path()).ok(); + let cursor: Cursor = to_offset_from_position(params.position, file_text.unwrap().as_str()); + + // Get the module and the goto info + let file_path = params.text_document.uri.path(); + let top_mod = self + .workspace + .top_mod_from_file_path(self.db.as_lower_hir_db(), file_path) + .unwrap(); + + let scopes = + get_goto_target_scopes_for_cursor(&self.db, top_mod, cursor).unwrap_or_default(); + + let locations = scopes + .iter() + .map(|scope| to_lsp_location_from_scope(*scope, self.db.as_spanned_hir_db())) + .collect::>(); + + let result: Result, ()> = + Ok(Some(lsp_types::GotoDefinitionResponse::Array( + locations + .into_iter() + .filter_map(std::result::Result::ok) + .collect(), + ))); + let response = match result { + Ok(response) => response, + Err(e) => { + eprintln!("Error handling goto definition: {:?}", e); + None + } + }; + let _ = responder.send(Ok(response)); + } +} +#[cfg(test)] +mod tests { + use crate::backend::{ + db::LanguageServerDatabase, + workspace::{IngotFileContext, Workspace}, + }; + + use super::*; + use common::input::IngotKind; + use dir_test::{dir_test, Fixture}; + use fe_compiler_test_utils::snap_test; + use hir::{HirDb, LowerHirDb}; + use std::{collections::BTreeMap, path::Path}; + + // given a cursor position and a string, convert to cursor line and column + fn line_col_from_cursor(cursor: Cursor, s: &str) -> (usize, usize) { + let mut line = 0; + let mut col = 0; + for (i, c) in s.chars().enumerate() { + if i == Into::::into(cursor) { + return (line, col); + } + if c == '\n' { + line += 1; + col = 0; + } else { + col += 1; + } + } + (line, col) + } + + fn extract_multiple_cursor_positions_from_spans( + db: &mut LanguageServerDatabase, + top_mod: TopLevelMod, + ) -> Vec { + let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); + // let mut path_collector = PathSpanCollector::new(db); + let mut path_collector = PathSegmentSpanCollector::new(db); + path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); + + let segment_map = path_collector.segment_map; + + let mut cursors = Vec::new(); + for (span, _) in segment_map { + let cursor = span.range.start(); + cursors.push(cursor); + } + + cursors + } + + fn make_goto_cursors_snapshot( + db: &mut LanguageServerDatabase, + fixture: &Fixture<&str>, + top_mod: TopLevelMod, + ) -> String { + let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); + let mut cursor_path_map: BTreeMap = BTreeMap::default(); + + for cursor in &cursors { + let scopes = + get_goto_target_scopes_for_cursor(db, top_mod, *cursor).unwrap_or_default(); + + if !scopes.is_empty() { + cursor_path_map.insert( + *cursor, + scopes + .iter() + .flat_map(|x| x.pretty_path(db)) + .collect::>() + .join("\n"), + ); + } + } + + let cursor_lines = cursor_path_map + .iter() + .map(|(cursor, path)| { + let (cursor_line, cursor_col) = line_col_from_cursor(*cursor, fixture.content()); + format!("cursor position ({cursor_line:?}, {cursor_col:?}), path: {path}") + }) + .collect::>(); + + format!( + "{}\n---\n{}", + fixture + .content() + .lines() + .enumerate() + .map(|(i, line)| format!("{i:?}: {line}")) + .collect::>() + .join("\n"), + 
cursor_lines.join("\n") + ) + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/single_ingot", + glob: "**/lib.fe", + )] + fn test_goto_multiple_files(fixture: Fixture<&str>) { + let cargo_manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let ingot_base_dir = Path::new(&cargo_manifest_dir).join("test_files/single_ingot"); + + let db = &mut LanguageServerDatabase::default(); + let workspace = &mut Workspace::default(); + + let _ = workspace.set_workspace_root(db, &ingot_base_dir); + + let fe_source_path = ingot_base_dir.join(fixture.path()); + let fe_source_path = fe_source_path.to_str().unwrap(); + let input = workspace.touch_input_for_file_path(db, fixture.path()); + assert_eq!(input.unwrap().ingot(db).kind(db), IngotKind::Local); + + input + .unwrap() + .set_text(db) + .to((*fixture.content()).to_string()); + let top_mod = workspace + .top_mod_from_file_path(db.as_lower_hir_db(), fe_source_path) + .unwrap(); + + let ingot = workspace.touch_ingot_for_file_path(db, fixture.path()); + assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); + + let snapshot = make_goto_cursors_snapshot(db, &fixture, top_mod); + snap_test!(snapshot, fixture.path()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files", + glob: "goto*.fe" + )] + fn test_goto_cursor_target(fixture: Fixture<&str>) { + let db = &mut LanguageServerDatabase::default(); + let workspace = &mut Workspace::default(); + let input = workspace + .touch_input_for_file_path(db, fixture.path()) + .unwrap(); + input.set_text(db).to((*fixture.content()).to_string()); + let top_mod = workspace + .top_mod_from_file_path(db.as_lower_hir_db(), fixture.path()) + .unwrap(); + + let snapshot = make_goto_cursors_snapshot(db, &fixture, top_mod); + snap_test!(snapshot, fixture.path()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files", + glob: "smallest_enclosing*.fe" + )] + fn test_smallest_enclosing_path(fixture: Fixture<&str>) { + let db = &mut LanguageServerDatabase::default(); + let workspace = &mut Workspace::default(); + + workspace + .touch_input_for_file_path(db, fixture.path()) + .unwrap() + .set_text(db) + .to((*fixture.content()).to_string()); + let top_mod = workspace + .top_mod_from_file_path(db.as_lower_hir_db(), fixture.path()) + .unwrap(); + + let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); + + let mut cursor_path_map: FxHashMap = FxHashMap::default(); + + for cursor in &cursors { + let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); + let mut path_collector = PathSegmentSpanCollector::new(db); + path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); + + let path_map = path_collector.segment_map; + let enclosing_path_segment = smallest_enclosing_segment(*cursor, &path_map); + + if let Some(GotoEnclosingPathSegment { path, scope, .. 
}) = enclosing_path_segment { + let resolved_enclosing_path = + hir_analysis::name_resolution::resolve_path_early(db, *path, *scope); + + let res = match resolved_enclosing_path { + EarlyResolvedPath::Full(bucket) => bucket + .iter() + .map(|x| x.pretty_path(db).unwrap()) + .collect::>() + .join("\n"), + EarlyResolvedPath::Partial { + res, + unresolved_from: _, + } => res.pretty_path(db).unwrap(), + }; + cursor_path_map.insert(*cursor, res); + } + } + + let result = format!( + "{}\n---\n{}", + fixture.content(), + cursor_path_map + .iter() + .map(|(cursor, path)| { format!("cursor position: {cursor:?}, path: {path}") }) + .collect::>() + .join("\n") + ); + snap_test!(result, fixture.path()); + } +} diff --git a/crates/language-server/src/functionality/handlers.rs b/crates/language-server/src/functionality/handlers.rs new file mode 100644 index 0000000000..38b71e1f3d --- /dev/null +++ b/crates/language-server/src/functionality/handlers.rs @@ -0,0 +1,156 @@ +use crate::backend::Backend; + +use crate::backend::workspace::SyncableIngotFileContext; + +use common::InputDb; +use futures::TryFutureExt; +use fxhash::FxHashSet; + +use salsa::ParallelDatabase; + +use super::{ + capabilities::server_capabilities, + hover::hover_helper, + streams::{ChangeKind, FileChange}, +}; + +use crate::backend::workspace::IngotFileContext; + +use tracing::{error, info}; + +impl Backend { + pub(super) async fn handle_initialized( + &mut self, + params: lsp_types::InitializeParams, + responder: tokio::sync::oneshot::Sender< + Result, + >, + ) { + info!("initializing language server!"); + + let root = params.root_uri.unwrap().to_file_path().ok().unwrap(); + + let _ = self.workspace.set_workspace_root(&mut self.db, &root); + let _ = self.workspace.load_std_lib(&mut self.db, &root); + let _ = self.workspace.sync(&mut self.db); + + let capabilities = server_capabilities(); + let initialize_result = lsp_types::InitializeResult { + capabilities, + server_info: Some(lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + let _ = responder.send(Ok(initialize_result)); + } + + pub(super) async fn handle_shutdown( + &mut self, + responder: tokio::sync::oneshot::Sender>, + ) { + info!("shutting down language server"); + let _ = responder.send(Ok(())); + } + + pub(super) async fn handle_change( + &mut self, + change: FileChange, + tx_needs_diagnostics: tokio::sync::mpsc::UnboundedSender, + ) { + let path = change + .uri + .to_file_path() + .unwrap_or_else(|_| panic!("Failed to convert URI to path: {:?}", change.uri)); + + let path = path.to_str().unwrap(); + + match change.kind { + ChangeKind::Open(contents) => { + info!("file opened: {:?}", &path); + self.update_input_file_text(path, contents); + } + ChangeKind::Create => { + info!("file created: {:?}", &path); + let contents = tokio::fs::read_to_string(&path).await.unwrap(); + self.update_input_file_text(path, contents) + } + ChangeKind::Edit(contents) => { + info!("file edited: {:?}", &path); + let contents = if let Some(text) = contents { + text + } else { + tokio::fs::read_to_string(&path).await.unwrap() + }; + self.update_input_file_text(path, contents); + } + ChangeKind::Delete => { + info!("file deleted: {:?}", path); + self.workspace + .remove_input_for_file_path(&mut self.db, path) + .unwrap(); + } + } + tx_needs_diagnostics.send(path.to_string()).unwrap(); + } + + fn update_input_file_text(&mut self, path: &str, contents: String) { + let input = self + .workspace + 
.touch_input_for_file_path(&mut self.db, path) + .unwrap(); + input.set_text(&mut self.db).to(contents); + } + + pub(super) async fn handle_diagnostics(&mut self, files_need_diagnostics: Vec) { + let ingot_files_need_diagnostics: FxHashSet<_> = files_need_diagnostics + .into_iter() + .filter_map(|file| self.workspace.get_ingot_for_file_path(&file)) + .flat_map(|ingot| ingot.files(self.db.as_input_db())) + .cloned() + .collect(); + + let db = self.db.snapshot(); + let client = self.client.clone(); + let compute_and_send_diagnostics = self + .workers + .spawn_blocking(move || { + db.get_lsp_diagnostics(ingot_files_need_diagnostics.into_iter().collect()) + }) + .and_then(|diagnostics| async move { + futures::future::join_all(diagnostics.into_iter().map(|(path, diagnostic)| { + let client = client.clone(); + async move { client.publish_diagnostics(path, diagnostic, None).await } + })) + .await; + Ok(()) + }); + tokio::spawn(compute_and_send_diagnostics); + } + + pub(super) async fn handle_hover( + &mut self, + params: lsp_types::HoverParams, + responder: tokio::sync::oneshot::Sender< + Result, tower_lsp::jsonrpc::Error>, + >, + ) { + // let db = self.db.snapshot(); + let file = self.workspace.get_input_for_file_path( + params + .text_document_position_params + .text_document + .uri + .path(), + ); + + let response = file.and_then(|file| { + hover_helper(&self.db, file, params).unwrap_or_else(|e| { + error!("Error handling hover: {:?}", e); + None + }) + }); + + let _ = responder.send(Ok(response)); + } +} diff --git a/crates/language-server/src/functionality/hover.rs b/crates/language-server/src/functionality/hover.rs new file mode 100644 index 0000000000..4a3570d34e --- /dev/null +++ b/crates/language-server/src/functionality/hover.rs @@ -0,0 +1,60 @@ +use common::InputFile; +use hir::lower::map_file_to_mod; + +use lsp_types::Hover; +use tracing::info; + +use tower_lsp::jsonrpc::Result; + +use crate::backend::db::LanguageServerDb; +use crate::util::to_offset_from_position; + +use super::goto::{get_goto_target_scopes_for_cursor, Cursor}; +use super::item_info::{get_item_definition_markdown, get_item_docstring, get_item_path_markdown}; + +pub fn hover_helper( + db: &dyn LanguageServerDb, + input: InputFile, + params: lsp_types::HoverParams, +) -> Result> { + info!("handling hover"); + let file_text = input.text(db.as_input_db()); + + let cursor: Cursor = to_offset_from_position( + params.text_document_position_params.position, + file_text.as_str(), + ); + + let top_mod = map_file_to_mod(db.as_lower_hir_db(), input); + let goto_info = &get_goto_target_scopes_for_cursor(db, top_mod, cursor).unwrap_or_default(); + + let hir_db = db.as_hir_db(); + let scopes_info = goto_info + .iter() + .map(|scope| { + let item = &scope.item(); + let pretty_path = get_item_path_markdown(*item, hir_db); + let definition_source = get_item_definition_markdown(*item, db.as_spanned_hir_db()); + let docs = get_item_docstring(*item, hir_db); + + let result = [pretty_path, definition_source, docs] + .iter() + .filter_map(|info| info.clone().map(|info| format!("{}\n", info))) + .collect::>() + .join("\n"); + + result + }) + .collect::>(); + + let info = scopes_info.join("\n---\n"); + + let result = lsp_types::Hover { + contents: lsp_types::HoverContents::Markup(lsp_types::MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: info, + }), + range: None, + }; + Ok(Some(result)) +} diff --git a/crates/language-server/src/functionality/item_info.rs b/crates/language-server/src/functionality/item_info.rs new file 
mode 100644 index 0000000000..94cc48c7b2 --- /dev/null +++ b/crates/language-server/src/functionality/item_info.rs @@ -0,0 +1,77 @@ +use hir::{ + hir_def::{Attr, ItemKind}, + span::LazySpan, + HirDb, SpannedHirDb, +}; + +pub fn get_item_docstring(item: ItemKind, hir_db: &dyn HirDb) -> Option { + let docstring = match item { + ItemKind::Func(func) => func.attributes(hir_db).data(hir_db), + ItemKind::Mod(mod_) => mod_.attributes(hir_db).data(hir_db), + ItemKind::Struct(struct_) => struct_.attributes(hir_db).data(hir_db), + ItemKind::Enum(enum_) => enum_.attributes(hir_db).data(hir_db), + ItemKind::TypeAlias(type_alias) => type_alias.attributes(hir_db).data(hir_db), + ItemKind::Trait(trait_) => trait_.attributes(hir_db).data(hir_db), + ItemKind::Impl(impl_) => impl_.attributes(hir_db).data(hir_db), + // ItemKind::Body(body) => body.attributes(hir_db).data(hir_db).clone(), + // ItemKind::Const(const_) => const_.attributes(hir_db).data(hir_db).clone(), + // ItemKind::Use(use_) => use_.attributes(hir_db).data(hir_db).clone(), + ItemKind::Contract(contract) => contract.attributes(hir_db).data(hir_db), + _ => return None, + } + .iter() + .filter_map(|attr| { + if let Attr::DocComment(doc) = attr { + Some(doc.text.data(hir_db).clone()) + } else { + None + } + }) + .collect::>(); + + if docstring.is_empty() { + None + } else { + Some(docstring.join("\n")) + } +} + +pub fn get_item_path_markdown(item: ItemKind, hir_db: &dyn HirDb) -> Option { + item.scope() + .pretty_path(hir_db) + .map(|path| format!("```fe\n{}\n```", path)) +} + +pub fn get_item_definition_markdown(item: ItemKind, db: &dyn SpannedHirDb) -> Option { + // TODO: use pending AST features to get the definition without all this text manipulation + let hir_db = db.as_hir_db(); + let span = item.lazy_span().resolve(db)?; + + let mut start: usize = span.range.start().into(); + let mut end: usize = span.range.end().into(); + + // if the item has a body or children, cut that stuff out + let body_start = match item { + ItemKind::Func(func) => Some(func.body(hir_db)?.lazy_span().resolve(db)?.range.start()), + ItemKind::Mod(module) => Some(module.scope().name_span(hir_db)?.resolve(db)?.range.end()), + // TODO: handle other item types + _ => None, + }; + if let Some(body_start) = body_start { + end = body_start.into(); + } + + // let's start at the beginning of the line where the name is defined + let name_span = item.name_span()?.resolve(db); + if let Some(name_span) = name_span { + let mut name_line_start = name_span.range.start().into(); + let file_text = span.file.text(db.as_input_db()).as_str(); + while name_line_start > 0 && file_text.chars().nth(name_line_start - 1).unwrap() != '\n' { + name_line_start -= 1; + } + start = name_line_start; + } + + let item_definition = span.file.text(db.as_input_db()).as_str()[start..end].to_string(); + Some(format!("```fe\n{}\n```", item_definition.trim())) +} diff --git a/crates/language-server/src/functionality/mod.rs b/crates/language-server/src/functionality/mod.rs new file mode 100644 index 0000000000..8d55ac65f8 --- /dev/null +++ b/crates/language-server/src/functionality/mod.rs @@ -0,0 +1,7 @@ +mod capabilities; +pub(super) mod diagnostics; +pub(super) mod goto; +pub(super) mod handlers; +pub(super) mod hover; +pub(super) mod item_info; +pub(crate) mod streams; diff --git a/crates/language-server/src/functionality/streams.rs b/crates/language-server/src/functionality/streams.rs new file mode 100644 index 0000000000..573e326e7e --- /dev/null +++ b/crates/language-server/src/functionality/streams.rs 
@@ -0,0 +1,79 @@ +use crate::backend::Backend; +use crate::server::MessageReceivers; +use futures::StreamExt; +use futures_batch::ChunksTimeoutStreamExt; +use futures_concurrency::prelude::*; +use lsp_types::FileChangeType; +use tokio_stream::wrappers::UnboundedReceiverStream; + +use tracing::info; + +pub struct FileChange { + pub uri: url::Url, + pub kind: ChangeKind, +} +pub enum ChangeKind { + Open(String), + Create, + Edit(Option), + Delete, +} + +pub async fn setup_streams(backend: &mut Backend, mut receivers: MessageReceivers) { + info!("setting up streams"); + let mut initialized_stream = receivers.initialize_stream.fuse(); + let mut shutdown_stream = receivers.shutdown_stream.fuse(); + + let mut change_stream = ( + receivers + .did_change_watched_files_stream + .map(|params| futures::stream::iter(params.changes)) + .flatten() + .fuse() + .map(|event| { + let kind = match event.typ { + FileChangeType::CHANGED => ChangeKind::Edit(None), + FileChangeType::CREATED => ChangeKind::Create, + FileChangeType::DELETED => ChangeKind::Delete, + _ => unreachable!(), + }; + FileChange { + uri: event.uri, + kind, + } + }), + receivers.did_open_stream.fuse().map(|params| FileChange { + uri: params.text_document.uri, + kind: ChangeKind::Open(params.text_document.text), + }), + receivers.did_change_stream.fuse().map(|params| FileChange { + uri: params.text_document.uri, + kind: ChangeKind::Edit(Some(params.content_changes[0].text.clone())), + }), + ) + .merge() + .fuse(); + + let (tx_needs_diagnostics, rx_needs_diagnostics) = + tokio::sync::mpsc::unbounded_channel::(); + + let mut diagnostics_stream = UnboundedReceiverStream::from(rx_needs_diagnostics) + .chunks_timeout(500, std::time::Duration::from_millis(30)) + .fuse(); + + let mut hover_stream = (&mut receivers.hover_stream).fuse(); + let mut goto_definition_stream = (&mut receivers.goto_definition_stream).fuse(); + + info!("streams set up, looping on them now"); + loop { + tokio::select! 
{ + Some((params, responder)) = initialized_stream.next() => backend.handle_initialized(params, responder).await, + Some((_, responder)) = shutdown_stream.next() => backend.handle_shutdown(responder).await, + Some(change) = change_stream.next() => backend.handle_change(change, tx_needs_diagnostics.clone()).await, + Some(files_need_diagnostics) = diagnostics_stream.next() => backend.handle_diagnostics(files_need_diagnostics).await, + Some((params, responder)) = hover_stream.next() => backend.handle_hover(params, responder).await, + Some((params, responder)) = goto_definition_stream.next() => backend.handle_goto_definition(params, responder).await, + } + tokio::task::yield_now().await; + } +} diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs deleted file mode 100644 index 038694cf4c..0000000000 --- a/crates/language-server/src/goto.rs +++ /dev/null @@ -1,301 +0,0 @@ -use fxhash::FxHashMap; -use hir::{ - hir_def::{scope_graph::ScopeId, ItemKind, PathId, TopLevelMod}, - visitor::{prelude::LazyPathSpan, Visitor, VisitorCtxt}, - HirDb, -}; -use hir_analysis::name_resolution::EarlyResolvedPath; - -use crate::db::{LanguageServerDatabase, LanguageServerDb}; -use common::diagnostics::Span; -use hir::span::LazySpan; - -pub type GotoEnclosingPath = (PathId, ScopeId); -pub type GotoPathMap = FxHashMap; - -pub struct PathSpanCollector<'db> { - path_map: GotoPathMap, - db: &'db dyn LanguageServerDb, -} - -impl<'db> PathSpanCollector<'db> { - pub fn new(db: &'db LanguageServerDatabase) -> Self { - Self { - path_map: FxHashMap::default(), - db, - } - } -} - -pub type Cursor = rowan::TextSize; - -impl<'db> Visitor for PathSpanCollector<'db> { - fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { - let Some(span) = ctxt - .span() - .and_then(|lazy_span| lazy_span.resolve(self.db.as_spanned_hir_db())) - else { - return; - }; - - let scope = ctxt.scope(); - self.path_map.insert(span, (path, scope)); - } -} - -fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option { - let mut smallest_enclosing_path = None; - let mut smallest_range_size = None; - - for (span, enclosing_path) in path_map { - if span.range.contains(cursor) { - let range_size = span.range.end() - span.range.start(); - if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { - smallest_enclosing_path = Some(*enclosing_path); - smallest_range_size = Some(range_size); - } - } - } - - smallest_enclosing_path -} - -pub fn goto_enclosing_path( - db: &mut LanguageServerDatabase, - top_mod: TopLevelMod, - cursor: Cursor, -) -> Option { - // Find the innermost item enclosing the cursor. - let item: ItemKind = db.find_enclosing_item(top_mod, cursor)?; - - let mut visitor_ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); - let mut path_collector = PathSpanCollector::new(db); - path_collector.visit_item(&mut visitor_ctxt, item); - - // can we do this without the cache? - let path_map = path_collector.path_map; - - // Find the path that encloses the cursor. - let goto_starting_path = smallest_enclosing_path(cursor, &path_map)?; - - let (path_id, scope_id) = goto_starting_path; - - // Resolve path. 
- let resolved_path = hir_analysis::name_resolution::resolve_path_early(db, path_id, scope_id); - - Some(resolved_path) -} - -#[cfg(test)] -mod tests { - use crate::workspace::{IngotFileContext, Workspace}; - - use super::*; - use common::input::IngotKind; - use dir_test::{dir_test, Fixture}; - use fe_compiler_test_utils::snap_test; - use std::path::Path; - - fn extract_multiple_cursor_positions_from_spans( - db: &mut LanguageServerDatabase, - top_mod: TopLevelMod, - ) -> Vec { - let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); - let mut path_collector = PathSpanCollector::new(db); - path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); - - let path_map = path_collector.path_map; - - let mut cursors = Vec::new(); - for (span, _) in path_map { - let cursor = span.range.start(); - // println!("cursor from span: {:?}, {:?}", span, cursor); - cursors.push(cursor); - } - - cursors - } - - #[dir_test( - dir: "$CARGO_MANIFEST_DIR/test_files/single_ingot", - glob: "**/lib.fe", - )] - fn test_goto_multiple_files(fixture: Fixture<&str>) { - let cargo_manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); - let ingot_base_dir = Path::new(&cargo_manifest_dir).join("test_files/single_ingot"); - - let db = &mut LanguageServerDatabase::default(); - let workspace = &mut Workspace::default(); - - let _ = workspace.set_workspace_root(db, ingot_base_dir.clone()); - - let fe_source_path = ingot_base_dir.join(fixture.path()); - let fe_source_path = fe_source_path.to_str().unwrap(); - let input = workspace.input_from_file_path(db, fixture.path()); - assert_eq!(input.unwrap().ingot(db).kind(db), IngotKind::Local); - - input - .unwrap() - .set_text(db) - .to((*fixture.content()).to_string()); - let top_mod = workspace - .top_mod_from_file_path(db, fe_source_path) - .unwrap(); - - let ingot = workspace.ingot_from_file_path(db, fixture.path()); - assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); - - let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); - let mut cursor_path_map: FxHashMap = FxHashMap::default(); - - for cursor in &cursors { - let early_resolution = goto_enclosing_path(db, top_mod, *cursor); - - let goto_info = match early_resolution { - Some(EarlyResolvedPath::Full(bucket)) => { - if !bucket.is_empty() { - bucket - .iter() - .map(|x| x.pretty_path(db).unwrap()) - .collect::>() - .join("\n") - } else { - String::from("`NameResBucket` is empty") - } - } - Some(EarlyResolvedPath::Partial { - res, - unresolved_from: _, - }) => res.pretty_path(db).unwrap(), - None => String::from("No resolution available"), - }; - - cursor_path_map.insert(*cursor, goto_info); - } - - let result = format!( - "{}\n---\n{}", - fixture.content(), - cursor_path_map - .iter() - .map(|(cursor, path)| { format!("cursor position: {cursor:?}, path: {path:?}") }) - .collect::>() - .join("\n") - ); - snap_test!(result, fixture.path()); - } - - #[dir_test( - dir: "$CARGO_MANIFEST_DIR/test_files", - glob: "goto*.fe" - )] - fn test_goto_enclosing_path(fixture: Fixture<&str>) { - let db = &mut LanguageServerDatabase::default(); - let workspace = &mut Workspace::default(); - let input = workspace.input_from_file_path(db, fixture.path()).unwrap(); - input.set_text(db).to((*fixture.content()).to_string()); - let top_mod = workspace - .top_mod_from_file_path(db, fixture.path()) - .unwrap(); - - let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); - - let mut cursor_path_map: FxHashMap = FxHashMap::default(); - - for cursor in &cursors { - let resolved_path = 
goto_enclosing_path(db, top_mod, *cursor); - - if let Some(path) = resolved_path { - match path { - EarlyResolvedPath::Full(bucket) => { - let path = bucket - .iter() - .map(|x| x.pretty_path(db).unwrap()) - .collect::>() - .join("\n"); - cursor_path_map.insert(*cursor, path); - } - EarlyResolvedPath::Partial { - res, - unresolved_from: _, - } => { - let path = res.pretty_path(db).unwrap(); - cursor_path_map.insert(*cursor, path); - } - } - } - } - - let result = format!( - "{}\n---\n{}", - fixture.content(), - cursor_path_map - .iter() - .map(|(cursor, path)| { format!("cursor position: {cursor:?}, path: {path}") }) - .collect::>() - .join("\n") - ); - snap_test!(result, fixture.path()); - } - - #[dir_test( - dir: "$CARGO_MANIFEST_DIR/test_files", - glob: "smallest_enclosing*.fe" - )] - fn test_smallest_enclosing_path(fixture: Fixture<&str>) { - let db = &mut LanguageServerDatabase::default(); - let workspace = &mut Workspace::default(); - - workspace - .input_from_file_path(db, fixture.path()) - .unwrap() - .set_text(db) - .to((*fixture.content()).to_string()); - let top_mod = workspace - .top_mod_from_file_path(db, fixture.path()) - .unwrap(); - - let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); - - let mut cursor_path_map: FxHashMap = FxHashMap::default(); - - for cursor in &cursors { - let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); - let mut path_collector = PathSpanCollector::new(db); - path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); - - let path_map = path_collector.path_map; - let enclosing_path = smallest_enclosing_path(*cursor, &path_map); - - let resolved_enclosing_path = hir_analysis::name_resolution::resolve_path_early( - db, - enclosing_path.unwrap().0, - enclosing_path.unwrap().1, - ); - - let res = match resolved_enclosing_path { - EarlyResolvedPath::Full(bucket) => bucket - .iter() - .map(|x| x.pretty_path(db).unwrap()) - .collect::>() - .join("\n"), - EarlyResolvedPath::Partial { - res, - unresolved_from: _, - } => res.pretty_path(db).unwrap(), - }; - cursor_path_map.insert(*cursor, res); - } - - let result = format!( - "{}\n---\n{}", - fixture.content(), - cursor_path_map - .iter() - .map(|(cursor, path)| { format!("cursor position: {cursor:?}, path: {path}") }) - .collect::>() - .join("\n") - ); - snap_test!(result, fixture.path()); - } -} diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs deleted file mode 100644 index 93ebb4f19c..0000000000 --- a/crates/language-server/src/handlers/notifications.rs +++ /dev/null @@ -1,196 +0,0 @@ -use anyhow::{Error, Result}; -use fxhash::FxHashMap; -// use log::info; -use serde::Deserialize; - -use crate::{ - state::ServerState, - util::diag_to_lsp, - workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile}, -}; - -#[cfg(target_arch = "wasm32")] -use crate::util::DummyFilePathConversion; - -fn run_diagnostics( - state: &mut ServerState, - path: &str, -) -> Vec { - let db = &mut state.db; - let workspace = &mut state.workspace; - let file_path = path; - let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); - db.analyze_top_mod(top_mod); - db.finalize_diags() -} - -pub fn get_diagnostics( - state: &mut ServerState, - uri: lsp_types::Url, -) -> Result>, Error> { - let diags = run_diagnostics(state, uri.to_file_path().unwrap().to_str().unwrap()); - - let diagnostics = diags - .into_iter() - .flat_map(|diag| diag_to_lsp(diag, &state.db).clone()); - - // we need to reduce 
the diagnostics to a map from URL to Vec - let mut result = FxHashMap::>::default(); - - // add a null diagnostic to the result for the given URL - let _ = result.entry(uri.clone()).or_insert_with(Vec::new); - - diagnostics.for_each(|(uri, more_diags)| { - let diags = result.entry(uri).or_insert_with(Vec::new); - diags.extend(more_diags); - }); - - Ok(result) -} - -pub fn handle_document_did_open( - state: &mut ServerState, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; - let input = state - .workspace - .input_from_file_path( - &mut state.db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(&mut state.db, None); - let diagnostics = get_diagnostics(state, params.text_document.uri.clone())?; - send_diagnostics(state, diagnostics) -} - -// Currently this is used to handle document renaming since the "document open" handler is called -// before the "document was renamed" handler. -// -// The fix: handle document renaming more explicitly in the "will rename" flow, along with the document -// rename refactor. -pub fn handle_document_did_close( - state: &mut ServerState, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = lsp_types::DidCloseTextDocumentParams::deserialize(note.params)?; - let input = state - .workspace - .input_from_file_path( - &mut state.db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - input.sync(&mut state.db, None) -} - -pub fn handle_document_did_change( - state: &mut ServerState, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = lsp_types::DidChangeTextDocumentParams::deserialize(note.params)?; - let input = state - .workspace - .input_from_file_path( - &mut state.db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(&mut state.db, Some(params.content_changes[0].text.clone())); - let diagnostics = get_diagnostics(state, params.text_document.uri.clone())?; - // info!("sending diagnostics... {:?}", diagnostics); - send_diagnostics(state, diagnostics) -} - -fn send_diagnostics( - state: &mut ServerState, - diagnostics: FxHashMap>, -) -> Result<(), Error> { - let results = diagnostics.into_iter().map(|(uri, diags)| { - let result = lsp_types::PublishDiagnosticsParams { - uri, - diagnostics: diags, - version: None, - }; - lsp_server::Message::Notification(lsp_server::Notification { - method: String::from("textDocument/publishDiagnostics"), - params: serde_json::to_value(result).unwrap(), - }) - }); - - results.for_each(|result| { - let sender = state.sender.lock().unwrap(); - let _ = sender.send(result); - }); - - Ok(()) -} - -pub fn handle_watched_file_changes( - state: &mut ServerState, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = lsp_types::DidChangeWatchedFilesParams::deserialize(note.params)?; - let changes = params.changes; - let mut diagnostics = FxHashMap::>::default(); - for change in changes { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - - match change.typ { - lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! 
- // this is inefficient, a hack for now - let _ = state.workspace.sync(&mut state.db); - let input = state - .workspace - .input_from_file_path(&mut state.db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(&mut state.db, None); - } - lsp_types::FileChangeType::CHANGED => { - let input = state - .workspace - .input_from_file_path(&mut state.db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(&mut state.db, None); - } - lsp_types::FileChangeType::DELETED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = state.workspace.sync(&mut state.db); - } - _ => {} - } - // collect diagnostics for the file - if change.typ != lsp_types::FileChangeType::DELETED { - let diags = get_diagnostics(state, uri.clone())?; - for (uri, more_diags) in diags { - let diags = diagnostics.entry(uri).or_insert_with(Vec::new); - diags.extend(more_diags); - } - } - } - // info!("sending diagnostics... {:?}", diagnostics); - send_diagnostics(state, diagnostics) - // Ok(()) -} diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs deleted file mode 100644 index f30f08fa8c..0000000000 --- a/crates/language-server/src/handlers/request.rs +++ /dev/null @@ -1,191 +0,0 @@ -use std::io::BufRead; - -use common::input::IngotKind; -use hir_analysis::name_resolution::{EarlyResolvedPath, NameRes}; -use log::info; -use lsp_server::{Response, ResponseError}; -use serde::Deserialize; - -use crate::{ - goto::{goto_enclosing_path, Cursor}, - state::ServerState, - util::{to_lsp_location_from_scope, to_offset_from_position}, - workspace::IngotFileContext, -}; - -pub fn handle_hover( - state: &mut ServerState, - req: lsp_server::Request, -) -> Result<(), anyhow::Error> { - // TODO: get more relevant information for the hover - let params = lsp_types::HoverParams::deserialize(req.params)?; - let file_path = ¶ms - .text_document_position_params - .text_document - .uri - .path(); - let file = std::fs::File::open(file_path)?; - let reader = std::io::BufReader::new(file); - let line = reader - .lines() - .nth(params.text_document_position_params.position.line as usize) - .unwrap() - .unwrap(); - - let file_text = std::fs::read_to_string(file_path)?; - - // let cursor: Cursor = params.text_document_position_params.position.into(); - let cursor: Cursor = to_offset_from_position( - params.text_document_position_params.position, - file_text.as_str(), - ); - // let file_path = std::path::Path::new(file_path); - info!("getting hover info for file_path: {:?}", file_path); - let ingot = state - .workspace - .input_from_file_path(&mut state.db, file_path) - .map(|input| input.ingot(&state.db)); - - // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); - - let ingot_info: Option = { - let ingot_type = match ingot { - Some(ingot) => match ingot.kind(&state.db) { - IngotKind::StandAlone => None, - IngotKind::Local => Some("Local ingot"), - IngotKind::External => Some("External ingot"), - IngotKind::Std => Some("Standard library"), - }, - None => Some("No ingot information available"), - }; - let ingot_file_count = ingot.unwrap().files(&state.db).len(); - let ingot_path = ingot - .unwrap() - .path(&state.db) - .strip_prefix(&state.workspace.root_path.clone().unwrap_or("".into())) - .ok(); - - ingot_type.map(|ingot_type| { - format!("{ingot_type} with {ingot_file_count} files at path: {ingot_path:?}") - }) - }; - - let top_mod = state - .workspace - .top_mod_from_file_path(&mut state.db, file_path) - 
.unwrap(); - let early_resolution = goto_enclosing_path(&mut state.db, top_mod, cursor); - - let goto_info = match early_resolution { - Some(EarlyResolvedPath::Full(bucket)) => bucket - .iter() - .map(|x| x.pretty_path(&state.db).unwrap()) - .collect::>() - .join("\n"), - Some(EarlyResolvedPath::Partial { - res, - unresolved_from: _, - }) => res.pretty_path(&state.db).unwrap(), - None => String::from("No goto info available"), - }; - - let result = lsp_types::Hover { - contents: lsp_types::HoverContents::Markup(lsp_types::MarkupContent { - kind: lsp_types::MarkupKind::Markdown, - value: format!( - "### Hovering over:\n```{}```\n\n{}\n\n### Goto Info: \n\n{}\n\n### Ingot info: \n\n{:?}", - &line, - serde_json::to_string_pretty(¶ms).unwrap(), - goto_info, - ingot_info, - ), - }), - range: None, - }; - let response_message = Response { - id: req.id, - result: Some(serde_json::to_value(result)?), - error: None, - }; - - state.send_response(response_message)?; - Ok(()) -} - -use lsp_types::TextDocumentPositionParams; - -pub fn handle_goto_definition( - state: &mut ServerState, - req: lsp_server::Request, -) -> Result<(), anyhow::Error> { - info!("handling goto definition request: {:?}", req); - let params = TextDocumentPositionParams::deserialize(req.params)?; - - // Convert the position to an offset in the file - let file_text = std::fs::read_to_string(params.text_document.uri.path())?; - let cursor: Cursor = to_offset_from_position(params.position, file_text.as_str()); - - // Get the module and the goto info - let file_path = params.text_document.uri.path(); - let top_mod = state - .workspace - .top_mod_from_file_path(&mut state.db, file_path) - .unwrap(); - let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); - - // Convert the goto info to a Location - let scopes = match goto_info { - Some(EarlyResolvedPath::Full(bucket)) => { - bucket.iter().map(NameRes::scope).collect::>() - } - Some(EarlyResolvedPath::Partial { - res, - unresolved_from: _, - }) => { - vec![res.scope()] - } - None => return Ok(()), - }; - - // info!("scopes: {:?}", scopes); - - let locations = scopes - .iter() - .filter_map(|scope| *scope) - .map(|scope| to_lsp_location_from_scope(scope, &state.db)) - .collect::>(); - - let errors = scopes - .iter() - .filter_map(|scope| *scope) - .map(|scope| to_lsp_location_from_scope(scope, &state.db)) - .filter_map(std::result::Result::err) - .map(|err| err.to_string()) - .collect::>() - .join("\n"); - - let error = (!errors.is_empty()).then_some(ResponseError { - code: lsp_types::error_codes::SERVER_CANCELLED as i32, - message: errors, - data: None, - }); - - // Send the response - let response_message = Response { - id: req.id, - result: Some(serde_json::to_value( - lsp_types::GotoDefinitionResponse::Array( - locations - .into_iter() - .filter_map(std::result::Result::ok) - .collect(), - ), - )?), - error, - }; - - info!("goto definition response: {:?}", response_message); - - state.send_response(response_message)?; - Ok(()) -} diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs new file mode 100644 index 0000000000..0af0cb9908 --- /dev/null +++ b/crates/language-server/src/logger.rs @@ -0,0 +1,80 @@ +use std::io::Write; + +use lsp_types::MessageType; +use tokio::task::yield_now; +use tower_lsp::Client; +use tracing_subscriber::fmt::writer::MakeWriterExt; +use tracing_subscriber::fmt::MakeWriter; +use tracing_subscriber::layer::SubscriberExt; +use tracing_subscriber::prelude::*; + +pub async fn handle_log_messages( + mut rx: 
tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)>,
+    client: Client,
+) -> tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)> {
+    loop {
+        if let Some((message, message_type)) = rx.recv().await {
+            client.log_message(message_type, message).await;
+            yield_now().await;
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct LoggerLayer {
+    log_sender: tokio::sync::mpsc::UnboundedSender<(String, MessageType)>,
+}
+
+impl Write for LoggerLayer {
+    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
+        let message = String::from_utf8_lossy(buf).to_string();
+        let _ = self.log_sender.send((message, MessageType::LOG));
+        Ok(buf.len())
+    }
+
+    fn flush(&mut self) -> std::io::Result<()> {
+        Ok(())
+    }
+}
+
+impl MakeWriter<'_> for LoggerLayer {
+    type Writer = Self;
+    fn make_writer(&self) -> Self::Writer {
+        self.clone()
+    }
+}
+
+pub fn setup_logger(
+    level: tracing::Level,
+) -> Result<tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)>, Box<dyn std::error::Error>>
+{
+    let (log_sender, log_receiver) =
+        tokio::sync::mpsc::unbounded_channel::<(String, MessageType)>();
+    let logger = LoggerLayer { log_sender };
+    let logger = logger
+        .with_filter(|metadata| {
+            metadata
+                .module_path()
+                .map_or(false, |path| path.starts_with("fe_language_server"))
+        })
+        .with_max_level(level);
+
+    let pretty_logger = tracing_subscriber::fmt::layer()
+        .event_format(tracing_subscriber::fmt::format::format().pretty())
+        .with_ansi(false)
+        .with_writer(logger);
+
+    #[cfg(tokio_unstable)]
+    let console_layer = console_subscriber::spawn();
+
+    #[cfg(tokio_unstable)]
+    tracing_subscriber::registry()
+        .with(pretty_logger)
+        .with(console_layer)
+        .init();
+
+    #[cfg(not(tokio_unstable))]
+    tracing_subscriber::registry().with(pretty_logger).init();
+
+    Ok(log_receiver)
+}
diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs
index e5f3e7dcac..b578a17ccb 100644
--- a/crates/language-server/src/main.rs
+++ b/crates/language-server/src/main.rs
@@ -1,19 +1,47 @@
-mod db;
-mod diagnostics;
-mod goto;
+mod backend;
+mod functionality;
+mod logger;
 mod server;
-mod state;
 mod util;
-mod workspace;
-use db::Jar;
-mod handlers {
-    pub mod notifications;
-    pub mod request;
-}
+use backend::db::Jar;
+use backend::Backend;
+use tracing::Level;
+
+use server::Server;
+
+use crate::logger::{handle_log_messages, setup_logger};
+
+#[tokio_macros::main]
+async fn main() {
+    let stdin = tokio::io::stdin();
+    let stdout = tokio::io::stdout();
+    let rx = setup_logger(Level::INFO).unwrap();
+
+    let (message_senders, message_receivers) = server::setup_message_channels();
+    let (service, socket) =
+        tower_lsp::LspService::build(|client| Server::new(client, message_senders)).finish();
+    let server = service.inner();
+
+    let client = server.client.clone();
+    let mut backend = Backend::new(client);
+
+    // separate runtime for the backend
+    // let backend_runtime = tokio::runtime::Builder::new_multi_thread()
+    //     .worker_threads(4)
+    //     .enable_all()
+    //     .build()
+    //     .unwrap();
-use server::run_server;
+    // backend_runtime.spawn(backend.handle_streams());
-fn main() {
-    let _ = run_server();
+    tokio::select!
{ + // setup logging + _ = handle_log_messages(rx, server.client.clone()) => {}, + // start the server + _ = tower_lsp::Server::new(stdin, stdout, socket) + .serve(service) => {} + // backend + _ = functionality::streams::setup_streams(&mut backend, message_receivers) => {} + } } diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index c3ef4ffcf9..87b68f6771 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -1,157 +1,97 @@ -use super::state::ServerState; -use anyhow::Result; -use lsp_server::{Connection, Notification}; -use lsp_types::{HoverProviderCapability, InitializeParams, ServerCapabilities}; +use lsp_types::{ + DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, + DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, + Registration, +}; +use tracing::{error, info}; -#[cfg(target_arch = "wasm32")] -use crate::util::DummyFilePathConversion; +use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; -fn server_capabilities() -> ServerCapabilities { - ServerCapabilities { - hover_provider: Some(HoverProviderCapability::Simple(true)), - // full sync mode for now - text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Kind( - lsp_types::TextDocumentSyncKind::FULL, - )), - // goto definition - definition_provider: Some(lsp_types::OneOf::Left(true)), - // support for workspace add/remove changes - workspace: Some(lsp_types::WorkspaceServerCapabilities { - workspace_folders: Some(lsp_types::WorkspaceFoldersServerCapabilities { - supported: Some(true), - change_notifications: Some(lsp_types::OneOf::Left(true)), - }), - file_operations: Some(lsp_types::WorkspaceFileOperationsServerCapabilities { - did_create: Some(lsp_types::FileOperationRegistrationOptions { - filters: vec![lsp_types::FileOperationFilter { - scheme: Some(String::from("file")), - pattern: lsp_types::FileOperationPattern { - glob: String::from("**/*"), - options: None, - // options: Some(lsp_types::FileOperationPatternOptions { - // ignore_case: Some(true), - // }), - matches: None, - }, - }], - }), - did_rename: Some(lsp_types::FileOperationRegistrationOptions { - filters: vec![lsp_types::FileOperationFilter { - scheme: Some(String::from("file")), - pattern: lsp_types::FileOperationPattern { - glob: String::from("**/*"), - options: None, - // options: Some(lsp_types::FileOperationPatternOptions { - // ignore_case: Some(true), - // }), - matches: None, - }, - }], - }), - did_delete: Some(lsp_types::FileOperationRegistrationOptions { - filters: vec![lsp_types::FileOperationFilter { - scheme: Some(String::from("file")), - pattern: lsp_types::FileOperationPattern { - glob: String::from("**/*"), - options: None, - // options: Some(lsp_types::FileOperationPatternOptions { - // ignore_case: Some(true), - // }), - matches: None, - }, +pub(crate) struct Server { + pub(crate) messaging: MessageSenders, + pub(crate) client: Client, +} + +impl Server { + pub(crate) async fn register_watchers(&self) -> Result<()> { + let registration = Registration { + id: String::from("watch-fe-files"), + method: String::from("workspace/didChangeWatchedFiles"), + register_options: Some( + serde_json::to_value(DidChangeWatchedFilesRegistrationOptions { + watchers: vec![FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.fe".to_string()), + kind: None, }], - }), - will_create: None, - will_rename: None, - will_delete: None, - // TODO: implement file operation refactors and workspace cache 
updates - // will_create: Some(lsp_types::FileOperationRegistrationOptions { - // filters: vec![lsp_types::FileOperationFilter { - // scheme: Some(String::from("file")), - // pattern: lsp_types::FileOperationPattern { - // glob: String::from("**/*"), - // options: None, - // matches: None, - // }, - // }], - // }), - // will_rename: Some(lsp_types::FileOperationRegistrationOptions { - // filters: vec![lsp_types::FileOperationFilter { - // scheme: Some(String::from("file")), - // pattern: lsp_types::FileOperationPattern { - // glob: String::from("**/*"), - // options: None, - // matches: None, - // }, - // }], - // }), - // will_delete: Some(lsp_types::FileOperationRegistrationOptions { - // filters: vec![lsp_types::FileOperationFilter { - // scheme: Some(String::from("file")), - // pattern: lsp_types::FileOperationPattern { - // glob: String::from("**/*"), - // options: None, - // matches: None, - // }, - // }], - // }), - }), - }), - // ..Default::default() - ..Default::default() + }) + .unwrap(), + ), + }; + self.client.register_capability(vec![registration]).await + } + + pub(crate) fn new(client: Client, messaging: MessageSenders) -> Self { + Self { messaging, client } } } -pub fn run_server() -> Result<()> { - let (connection, io_threads) = Connection::stdio(); +#[language_server_macros::message_channels] +#[tower_lsp::async_trait] +impl LanguageServer for Server { + async fn initialize(&self, initialize_params: InitializeParams) -> Result { + // forward the initialize request to the messaging system + let rx = self.messaging.send_initialize(initialize_params); - let (request_id, _initialize_params) = connection.initialize_start()?; - let initialize_params: InitializeParams = serde_json::from_value(_initialize_params)?; - // let debug_params = initialize_params.clone(); - // todo: actually use initialization params + info!("awaiting initialization result"); + match rx.await { + Ok(initialize_result) => initialize_result, + Err(e) => { + error!("Failed to initialize: {}", e); + return Err(tower_lsp::jsonrpc::Error::internal_error()); + } + } + } - let capabilities = server_capabilities(); + async fn initialized(&self, _params: lsp_types::InitializedParams) { + info!("initialized... 
registering file watchers"); + // register file watchers + if let Err(e) = self.register_watchers().await { + error!("Failed to register file watchers: {}", e); + } else { + info!("registered watchers"); + } + } - let initialize_result = lsp_types::InitializeResult { - capabilities, - server_info: Some(lsp_types::ServerInfo { - name: String::from("fe-language-server"), - version: Some(String::from(env!("CARGO_PKG_VERSION"))), - }), - }; + async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { + Ok(()) + } - let initialize_result = serde_json::to_value(initialize_result).unwrap(); + async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { + self.messaging.send_did_open(params); + } - connection.initialize_finish(request_id, initialize_result)?; - // send a "hello" message to the client - connection - .sender - .send(lsp_server::Message::Notification(Notification { - method: String::from("window/showMessage"), - params: serde_json::to_value(lsp_types::ShowMessageParams { - typ: lsp_types::MessageType::INFO, - message: String::from("hello from the Fe language server"), - }) - .unwrap(), - }))?; + async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { + self.messaging.send_did_change(params); + } - let mut state = ServerState::new(connection.sender); - let _ = state.init_logger(log::Level::Info); - state.workspace.set_workspace_root( - &mut state.db, - initialize_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(), - )?; - // info!("TESTING"); - // info!("initialized with params: {:?}", debug_params); + async fn did_close(&self, params: DidCloseTextDocumentParams) { + self.messaging.send_did_close(params); + } - let result = state.run(connection.receiver); + async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { + self.messaging.send_did_change_watched_files(params); + } - io_threads.join().unwrap(); + async fn hover(&self, params: lsp_types::HoverParams) -> Result> { + let rx = self.messaging.send_hover(params); + rx.await.expect("hover response") + } - result + async fn goto_definition( + &self, + params: lsp_types::GotoDefinitionParams, + ) -> Result> { + let rx = self.messaging.send_goto_definition(params); + rx.await.expect("goto definition response") + } } diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs deleted file mode 100644 index 2c063a7249..0000000000 --- a/crates/language-server/src/state.rs +++ /dev/null @@ -1,191 +0,0 @@ -use std::sync::{Arc, Mutex}; - -use crate::db::LanguageServerDatabase; -use crate::workspace::Workspace; -use anyhow::Result; -use crossbeam_channel::{Receiver, Sender}; -use log::{info, Level, Metadata, Record}; -use log::{LevelFilter, SetLoggerError}; -use lsp_server::Message; -use lsp_types::notification::Notification; -use lsp_types::request::Request; - -use crate::handlers::notifications::{ - handle_document_did_change, handle_document_did_close, handle_watched_file_changes, -}; -use crate::handlers::request::handle_goto_definition; -use crate::handlers::{notifications::handle_document_did_open, request::handle_hover}; - -pub struct ServerState { - pub(crate) sender: Arc>>, - pub(crate) db: LanguageServerDatabase, - pub(crate) workspace: Workspace, -} - -impl ServerState { - pub fn new(sender: Sender) -> Self { - let sender = Arc::new(Mutex::new(sender)); - - Self { - sender, - db: LanguageServerDatabase::default(), - workspace: Workspace::default(), - } - } - - fn send(&mut self, msg: Message) -> Result<()> { - info!("SEND: {:?}", 
msg); - let sender = self.sender.lock().unwrap(); - sender.send(msg)?; - Ok(()) - } - - pub fn run(&mut self, receiver: Receiver) -> Result<()> { - info!("Fe Language Server listening..."); - - // watch the workspace root for changes - self.send(lsp_server::Message::Request(lsp_server::Request::new( - 28_716_283.into(), - String::from("client/registerCapability"), - lsp_types::RegistrationParams { - registrations: vec![lsp_types::Registration { - id: String::from("watch-fe-files"), - method: String::from("workspace/didChangeWatchedFiles"), - register_options: Some( - serde_json::to_value(lsp_types::DidChangeWatchedFilesRegistrationOptions { - watchers: vec![lsp_types::FileSystemWatcher { - glob_pattern: lsp_types::GlobPattern::String("**/*.fe".to_string()), - kind: None, // kind: Some(WatchKind::Create | WatchKind::Change | WatchKind::Delete), - }], - }) - .unwrap(), - ), - }], - }, - )))?; - - while let Some(msg) = self.next_message(&receiver) { - if let lsp_server::Message::Notification(notification) = &msg { - if notification.method == lsp_types::notification::Exit::METHOD { - return Ok(()); - } - } - - let _ = self.handle_message(msg); - } - Ok(()) - } - - fn next_message(&self, receiver: &Receiver) -> Option { - crossbeam_channel::select! { - recv(receiver) -> msg => msg.ok() - } - } - - fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { - if let lsp_server::Message::Request(req) = msg { - info!("REQUEST: {:?}", req); - - match req.method.as_str() { - // TODO: implement actually useful hover handler - lsp_types::request::HoverRequest::METHOD => handle_hover(self, req)?, - // goto definition - lsp_types::request::GotoDefinition::METHOD => handle_goto_definition(self, req)?, - lsp_types::request::GotoTypeDefinition::METHOD => { - handle_goto_definition(self, req)?; - } - lsp_types::request::GotoImplementation::METHOD => { - handle_goto_definition(self, req)?; - } - lsp_types::request::GotoDeclaration::METHOD => handle_goto_definition(self, req)?, - _ => {} - } - } else if let lsp_server::Message::Notification(note) = msg { - // log the notification to the console - info!("NOTIFICATION: {:?}", note); - - match note.method.as_str() { - lsp_types::notification::DidOpenTextDocument::METHOD => { - handle_document_did_open(self, note)?; - } - // TODO: this is currently something of a hack to deal with - // file renames. We should be using the workspace - // "will change" requests instead. 
- lsp_types::notification::DidCloseTextDocument::METHOD => { - handle_document_did_close(self, note)?; - } - lsp_types::notification::DidChangeTextDocument::METHOD => { - handle_document_did_change(self, note)?; - } - lsp_types::notification::DidChangeWatchedFiles::METHOD => { - handle_watched_file_changes(self, note)?; - } - _ => {} - } - } else if let lsp_server::Message::Response(resp) = msg { - info!("RESPONSE: {:?}", resp); - } - - Ok(()) - } - - pub(crate) fn send_response(&mut self, response: lsp_server::Response) -> Result<()> { - self.send(lsp_server::Message::Response(response))?; - Ok(()) - } - - pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { - let logger = LspLogger { - level, - sender: self.sender.clone(), - }; - let static_logger = Box::leak(Box::new(logger)); - log::set_logger(static_logger)?; - log::set_max_level(LevelFilter::Debug); - Ok(()) - } -} - -pub struct LspLogger { - level: Level, - sender: Arc>>, -} - -impl LspLogger { - fn send(&self, msg: Message) -> Result<()> { - let sender = self.sender.lock().unwrap(); - sender.send(msg)?; - Ok(()) - } -} - -impl log::Log for LspLogger { - fn enabled(&self, metadata: &Metadata) -> bool { - let logger = self; - metadata.level() <= logger.level - } - - fn log(&self, record: &Record) { - if self.enabled(record.metadata()) { - let message = format!("{} - {}", record.level(), record.args()); - let _ = self.send(lsp_server::Message::Notification( - lsp_server::Notification { - method: String::from("window/logMessage"), - params: serde_json::to_value(lsp_types::LogMessageParams { - typ: match record.level() { - Level::Error => lsp_types::MessageType::ERROR, - Level::Warn => lsp_types::MessageType::WARNING, - Level::Info => lsp_types::MessageType::INFO, - Level::Debug => lsp_types::MessageType::LOG, - Level::Trace => lsp_types::MessageType::LOG, - }, - message, - }) - .unwrap(), - }, - )); - } - } - - fn flush(&self) {} -} diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index eb7e393c30..b762788142 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -4,8 +4,8 @@ use common::{ }; use fxhash::FxHashMap; use hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; -use log::error; use lsp_types::Position; +use tracing::error; use url::Url; pub fn calculate_line_offsets(text: &str) -> Vec { diff --git a/crates/language-server/test_files/goto.snap b/crates/language-server/test_files/goto.snap index a3956e2580..3a74e9e809 100644 --- a/crates/language-server/test_files/goto.snap +++ b/crates/language-server/test_files/goto.snap @@ -1,25 +1,23 @@ --- source: crates/language-server/src/goto.rs -assertion_line: 153 -expression: result +assertion_line: 283 +expression: snapshot input_file: crates/language-server/test_files/goto.fe --- -struct Foo {} -struct Bar {} - -fn main() { - let x: Foo - let y: Bar - let z: baz::Baz -} - -mod baz { - pub struct Baz {} -} +0: struct Foo {} +1: struct Bar {} +2: +3: fn main() { +4: let x: Foo +5: let y: Bar +6: let z: baz::Baz +7: } +8: +9: mod baz { +10: pub struct Baz {} +11: } --- -cursor position: 64, path: -cursor position: 82, path: goto::baz::Baz -cursor position: 79, path: -cursor position: 52, path: goto::Foo -cursor position: 49, path: -cursor position: 67, path: goto::Bar +cursor position (4, 11), path: goto::Foo +cursor position (5, 11), path: goto::Bar +cursor position (6, 11), path: goto::baz +cursor position (6, 16), path: goto::baz::Baz diff --git 
a/crates/language-server/src/config.rs b/crates/language-server/test_files/hoverable/fe.toml similarity index 100% rename from crates/language-server/src/config.rs rename to crates/language-server/test_files/hoverable/fe.toml diff --git a/crates/language-server/test_files/hoverable/src/lib.fe b/crates/language-server/test_files/hoverable/src/lib.fe new file mode 100644 index 0000000000..a91ad35b84 --- /dev/null +++ b/crates/language-server/test_files/hoverable/src/lib.fe @@ -0,0 +1,31 @@ +use stuff::calculations::{ return_three, return_four } + +/// ## `return_seven` +/// ### a function of numbers +/// #### returns the 3+4=7 +pub fn return_seven() { + return_three() + return_four() +} + +fn calculate() { + return_seven() + let x: stuff::calculations::ambiguous +} + +/// Anything that can be calculated ought to implement a +/// `calculate` function +pub trait Calculatable { + fn calculate(self) +} + +/// A struct for holding numbers like `x` and `y` +struct Numbers { + x: i32, + y: i32 +} + +impl Calculatable for Numbers { + fn calculate(self) { + self.x + self.y + } +} \ No newline at end of file diff --git a/crates/language-server/test_files/hoverable/src/stuff.fe b/crates/language-server/test_files/hoverable/src/stuff.fe new file mode 100644 index 0000000000..b97ffe7660 --- /dev/null +++ b/crates/language-server/test_files/hoverable/src/stuff.fe @@ -0,0 +1,19 @@ +/// ### Calculation helper functions +pub mod calculations { + /// A function that returns `3` + pub fn return_three() -> u32 { + 3 + } + + /// ## A function that returns 4 + pub fn return_four() { + 4 + } + + /// which one is it? + pub mod ambiguous { + + } + /// is it this one? + pub fn ambiguous() {} +} \ No newline at end of file diff --git a/crates/language-server/test_files/lol.fe b/crates/language-server/test_files/lol.fe new file mode 100644 index 0000000000..f08c02f075 --- /dev/null +++ b/crates/language-server/test_files/lol.fe @@ -0,0 +1,12 @@ +struct Foo {} +struct Bar {} + +fn main() { + let x: Foo + let y: Barrr + let z: baz::Bazzz +} + +mod baz { + pub struct Baz {} +} \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/foo.fe b/crates/language-server/test_files/single_ingot/src/foo.fe index c2251ee70c..99e9264c32 100644 --- a/crates/language-server/test_files/single_ingot/src/foo.fe +++ b/crates/language-server/test_files/single_ingot/src/foo.fe @@ -1,8 +1,8 @@ -pub fn foo() { +pub fn why() { let x = 5 x } -pub struct Foo { +pub struct Why { pub x: i32 } \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.fe b/crates/language-server/test_files/single_ingot/src/lib.fe index cc513131dc..5669526f6d 100644 --- a/crates/language-server/test_files/single_ingot/src/lib.fe +++ b/crates/language-server/test_files/single_ingot/src/lib.fe @@ -1,18 +1,23 @@ -use foo::Foo +use ingot::foo::Why -mod baz { - use super::Foo - - pub struct Bar { - x: Foo - } - - fn bar() -> () { - let x: Foo +mod who { + use super::Why + pub mod what { + pub fn how() {} + pub mod how { + use ingot::Why + pub struct When { + x: Why + } } + } + pub struct Bar { + x: Why + } } fn bar() -> () { - let y: Foo - let z: baz::Bar + let y: Why + let z = who::what::how + let z: who::what::how::When } \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.snap b/crates/language-server/test_files/single_ingot/src/lib.snap index 07d1a8119c..680a6b6afd 100644 --- a/crates/language-server/test_files/single_ingot/src/lib.snap +++ 
b/crates/language-server/test_files/single_ingot/src/lib.snap @@ -1,35 +1,41 @@ --- -source: crates/language-server/src/goto.rs -assertion_line: 170 -expression: result +source: crates/language-server/src/functionality/goto.rs +assertion_line: 337 +expression: snapshot input_file: crates/language-server/test_files/single_ingot/src/lib.fe --- -use foo::Foo - -mod baz { - use super::Foo - - pub struct Bar { - x: Foo - } - - fn bar() -> () { - let x: Foo - } -} - -fn bar() -> () { - let y: Foo - let z: baz::Bar -} +0: use ingot::foo::Why +1: +2: mod who { +3: use super::Why +4: pub mod what { +5: pub fn how() {} +6: pub mod how { +7: use ingot::Why +8: pub struct When { +9: x: Why +10: } +11: } +12: } +13: pub struct Bar { +14: x: Why +15: } +16: } +17: +18: fn bar() -> () { +19: let y: Why +20: let z = who::what::how +21: let z: who::what::how::When +22: } --- -cursor position: 80, path: "lib::foo::Foo" -cursor position: 29, path: "No resolution available" -cursor position: 183, path: "lib::baz::Bar" -cursor position: 180, path: "`NameResBucket` is empty" -cursor position: 168, path: "lib::foo::Foo" -cursor position: 21, path: "No resolution available" -cursor position: 165, path: "`NameResBucket` is empty" -cursor position: 127, path: "lib::foo::Foo" -cursor position: 124, path: "`NameResBucket` is empty" -cursor position: 60, path: "No resolution available" +cursor position (9, 11), path: lib::foo::Why +cursor position (14, 7), path: lib::foo::Why +cursor position (19, 11), path: lib::foo::Why +cursor position (20, 12), path: lib::who +cursor position (20, 17), path: lib::who::what +cursor position (20, 23), path: lib::who::what::how +lib::who::what::how +cursor position (21, 11), path: lib::who +cursor position (21, 16), path: lib::who::what +cursor position (21, 22), path: lib::who::what::how +cursor position (21, 27), path: lib::who::what::how::When diff --git a/crates/library/std/src/math.fe b/crates/library/std/src/math.fe index bc37ee6739..79f6317f03 100644 --- a/crates/library/std/src/math.fe +++ b/crates/library/std/src/math.fe @@ -1,3 +1,4 @@ + pub fn min(_ x: u256, _ y: u256) -> u256 { if x < y { return x diff --git a/crates/test-files/fixtures/features/numeric_sizes.fe b/crates/test-files/fixtures/features/numeric_sizes.fe index fdec05e3a9..9773757e61 100644 --- a/crates/test-files/fixtures/features/numeric_sizes.fe +++ b/crates/test-files/fixtures/features/numeric_sizes.fe @@ -27,8 +27,8 @@ const U128_MAX: u128 = 340282366920938463463374607431768211455 const I256_MIN: i256 = -57896044618658097711785492504343953926634992332820282019728792003956564819968 const I256_MAX: i256 = 57896044618658097711785492504343953926634992332820282019728792003956564819967 -const U256_MIN: u256 = 0 -const U256_MAX: u256 = 115792089237316195423570985008687907853269984665640564039457584007913129639935 +const u256_MIN: u256 = 0 +const u256_MAX: u256 = 115792089237316195423570985008687907853269984665640564039457584007913129639935 contract Foo { @@ -38,7 +38,7 @@ contract Foo { assert u32::min() == U32_MIN assert u64::min() == U64_MIN assert u128::min() == U128_MIN - assert u256::min() == U256_MIN + assert u256::min() == u256_MIN // TODO: Investigate why these can't be compared against their const values assert i8::min() == get_i8_const_min() assert i16::min() == get_i16_const_min() @@ -52,7 +52,7 @@ contract Foo { assert u32::max() == U32_MAX assert u64::max() == U64_MAX assert u128::max() == U128_MAX - assert u256::max() == U256_MAX + assert u256::max() == u256_MAX assert i8::max() == I8_MAX assert 
i16::max() == I16_MAX @@ -106,7 +106,7 @@ contract Foo { } pub fn get_u256_const_min() -> u256 { - return U256_MIN + return u256_MIN } pub fn get_i8_min() -> i8 { @@ -202,7 +202,7 @@ contract Foo { } pub fn get_u256_const_max() -> u256 { - return U256_MAX + return u256_MAX } pub fn get_i8_max() -> i8 {