diff --git a/Cargo.lock b/Cargo.lock index 87f5b19f4a726..97d092bbffa4e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -418,7 +418,7 @@ dependencies = [ "async-stream", "async-trait", "auto_impl", - "dashmap", + "dashmap 6.1.0", "either", "futures", "futures-utils-wasm", @@ -452,7 +452,7 @@ dependencies = [ "serde_json", "tokio", "tokio-stream", - "tower", + "tower 0.5.2", "tracing", "wasmtimer", ] @@ -499,7 +499,7 @@ dependencies = [ "serde_json", "tokio", "tokio-stream", - "tower", + "tower 0.5.2", "tracing", "url", "wasmtimer", @@ -832,7 +832,7 @@ dependencies = [ "serde_json", "thiserror 2.0.15", "tokio", - "tower", + "tower 0.5.2", "tracing", "url", "wasmtimer", @@ -848,7 +848,7 @@ dependencies = [ "alloy-transport", "reqwest", "serde_json", - "tower", + "tower 0.5.2", "tracing", "url", ] @@ -2004,7 +2004,7 @@ dependencies = [ "sync_wrapper", "tokio", "tokio-tungstenite", - "tower", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", @@ -2241,7 +2241,7 @@ dependencies = [ "boa_string", "bytemuck", "cfg-if", - "dashmap", + "dashmap 6.1.0", "fast-float2", "hashbrown 0.15.5", "icu_normalizer", @@ -3351,6 +3351,19 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "dashmap" version = "6.1.0" @@ -4028,6 +4041,7 @@ dependencies = [ "forge-doc", "forge-fmt", "forge-lint", + "forge-lsp", "forge-script", "forge-script-sequence", "forge-sol-macro-gen", @@ -4075,6 +4089,7 @@ dependencies = [ "tokio", "toml_edit 0.23.3", "tower-http", + "tower-lsp", "tracing", "watchexec", "watchexec-events", @@ -4138,6 +4153,20 @@ dependencies = [ "thiserror 2.0.15", ] +[[package]] +name = "forge-lsp" +version = "1.3.1" +dependencies = [ + "foundry-common", + "foundry-test-utils", + "serde", + "serde_json", + "tempfile", + "thiserror 2.0.15", + "tokio", + "tower-lsp", +] + [[package]] name = "forge-script" version = "1.3.2" @@ -4455,7 +4484,7 @@ dependencies = [ "terminal_size", "thiserror 2.0.15", "tokio", - "tower", + "tower 0.5.2", "tracing", "url", "vergen", @@ -5064,7 +5093,7 @@ dependencies = [ "serde_json", "tokio", "tonic", - "tower", + "tower 0.5.2", "tower-layer", "tower-util", "tracing", @@ -5850,7 +5879,7 @@ checksum = "62f17d4bce58d4380de6432e6b1a0ebb561dfbbe21fc123204870b7006189677" dependencies = [ "boxcar", "bumpalo", - "dashmap", + "dashmap 6.1.0", "hashbrown 0.14.5", "thread_local", ] @@ -6320,6 +6349,19 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" +[[package]] +name = "lsp-types" +version = "0.94.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1" +dependencies = [ + "bitflags 1.3.2", + "serde", + "serde_json", + "serde_repr", + "url", +] + [[package]] name = "mac" version = "0.1.1" @@ -8013,7 +8055,7 @@ dependencies = [ "tokio", "tokio-rustls", "tokio-util", - "tower", + "tower 0.5.2", "tower-http", "tower-service", "url", @@ -8833,6 +8875,17 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_repr" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" 
+dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "serde_spanned" version = "0.6.9" @@ -10071,7 +10124,7 @@ dependencies = [ "tokio", "tokio-rustls", "tokio-stream", - "tower", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", @@ -10083,6 +10136,20 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project 1.1.10", + "pin-project-lite", + "tower-layer", + "tower-service", +] + [[package]] name = "tower" version = "0.5.2" @@ -10124,7 +10191,7 @@ dependencies = [ "pin-project-lite", "tokio", "tokio-util", - "tower", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", @@ -10136,6 +10203,40 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" +[[package]] +name = "tower-lsp" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4ba052b54a6627628d9b3c34c176e7eda8359b7da9acd497b9f20998d118508" +dependencies = [ + "async-trait", + "auto_impl", + "bytes", + "dashmap 5.5.3", + "futures", + "httparse", + "lsp-types", + "memchr", + "serde", + "serde_json", + "tokio", + "tokio-util", + "tower 0.4.13", + "tower-lsp-macros", + "tracing", +] + +[[package]] +name = "tower-lsp-macros" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "tower-service" version = "0.3.3" @@ -10479,6 +10580,7 @@ dependencies = [ "form_urlencoded", "idna", "percent-encoding", + "serde", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 65a3a93fd4723..ac778db496123 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,7 @@ members = [ "crates/macros/", "crates/test-utils/", "crates/lint/", + "crates/lsp/", ] resolver = "2" @@ -180,6 +181,7 @@ forge = { path = "crates/forge" } forge-doc = { path = "crates/doc" } forge-fmt = { path = "crates/fmt" } forge-lint = { path = "crates/lint" } +forge-lsp = { path = "crates/lsp" } forge-verify = { path = "crates/verify" } forge-script = { path = "crates/script" } forge-sol-macro-gen = { path = "crates/sol-macro-gen" } diff --git a/crates/forge/Cargo.toml b/crates/forge/Cargo.toml index 3ef27d86695d0..41027952a7b08 100644 --- a/crates/forge/Cargo.toml +++ b/crates/forge/Cargo.toml @@ -45,6 +45,7 @@ chrono.workspace = true forge-doc.workspace = true forge-fmt.workspace = true forge-lint.workspace = true +forge-lsp.workspace = true forge-verify.workspace = true forge-script.workspace = true forge-sol-macro-gen.workspace = true @@ -89,6 +90,7 @@ watchexec-signals = "5.0" clearscreen = "4.0" evm-disassembler.workspace = true path-slash.workspace = true +tower-lsp = "0.20" # doc server axum = { workspace = true, features = ["ws"] } diff --git a/crates/forge/src/args.rs b/crates/forge/src/args.rs index 725dcad551286..3908bb715087b 100644 --- a/crates/forge/src/args.rs +++ b/crates/forge/src/args.rs @@ -148,5 +148,6 @@ pub fn run_command(args: Forge) -> Result<()> { ForgeSubcommand::Eip712(cmd) => cmd.run(), 
ForgeSubcommand::BindJson(cmd) => cmd.run(), ForgeSubcommand::Lint(cmd) => cmd.run(), + ForgeSubcommand::Lsp(cmd) => global.block_on(cmd.run()), } } diff --git a/crates/forge/src/cmd/lsp.rs b/crates/forge/src/cmd/lsp.rs new file mode 100644 index 0000000000000..51a705b5285df --- /dev/null +++ b/crates/forge/src/cmd/lsp.rs @@ -0,0 +1,31 @@ +use clap::Parser; +use eyre::Result; + +use forge_lsp::lsp::ForgeLsp; +use tower_lsp::{LspService, Server}; +use tracing::info; + +/// Start the Foundry Language Server Protocol (LSP) server +#[derive(Clone, Debug, Parser)] +pub struct LspArgs { + /// Communicate over stdio (standard input/output), the transport used by most LSP clients + #[arg(long)] + pub stdio: bool, +} + +impl LspArgs { + pub async fn run(self) -> Result<()> { + // Start stdio LSP server + info!("Starting Foundry LSP server..."); + + let stdin = tokio::io::stdin(); + let stdout = tokio::io::stdout(); + let (service, socket) = LspService::new(ForgeLsp::new); + + Server::new(stdin, stdout, socket).serve(service).await; + + info!("Foundry LSP server stopped"); + + Ok(()) + } +} diff --git a/crates/forge/src/cmd/mod.rs b/crates/forge/src/cmd/mod.rs index 0a0945bab99e9..24dec37a0e7a3 100644 --- a/crates/forge/src/cmd/mod.rs +++ b/crates/forge/src/cmd/mod.rs @@ -24,6 +24,7 @@ pub mod init; pub mod inspect; pub mod install; pub mod lint; +pub mod lsp; pub mod remappings; pub mod remove; pub mod selectors; diff --git a/crates/forge/src/opts.rs b/crates/forge/src/opts.rs index eea7f531c22d8..e2c03aa1e4309 100644 --- a/crates/forge/src/opts.rs +++ b/crates/forge/src/opts.rs @@ -2,8 +2,8 @@ use crate::cmd::{ bind::BindArgs, bind_json, build::BuildArgs, cache::CacheArgs, clone::CloneArgs, compiler::CompilerArgs, config, coverage, create::CreateArgs, doc::DocArgs, eip712, flatten, fmt::FmtArgs, geiger, generate, init::InitArgs, inspect, install::InstallArgs, lint::LintArgs, - remappings::RemappingArgs, remove::RemoveArgs, selectors::SelectorsSubcommands, snapshot, - soldeer, test, tree, update, + lsp::LspArgs, remappings::RemappingArgs, remove::RemoveArgs, selectors::SelectorsSubcommands, + snapshot, soldeer, test, tree, update, }; use clap::{Parser, Subcommand, ValueHint}; use forge_script::ScriptArgs; @@ -136,6 +136,9 @@ pub enum ForgeSubcommand { #[command(visible_alias = "l")] Lint(LintArgs), + /// Start the Foundry Language Server Protocol (LSP) server + Lsp(LspArgs), + /// Get specialized information about a smart contract.
#[command(visible_alias = "in")] Inspect(inspect::InspectArgs), diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml new file mode 100644 index 0000000000000..779e4ac3fddcd --- /dev/null +++ b/crates/lsp/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "forge-lsp" +description = "Language Server Protocol implementation for Solidity" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[lints] +workspace = true + +[dependencies] +foundry-common.workspace = true +tower-lsp = "0.20" +tokio = { workspace = true, features = ["full"] } +serde.workspace = true +serde_json.workspace = true +thiserror.workspace = true + +[dev-dependencies] +foundry-test-utils.workspace = true +tempfile.workspace = true + +[features] +default = [] diff --git a/crates/lsp/README.md b/crates/lsp/README.md new file mode 100644 index 0000000000000..a6a31777fff39 --- /dev/null +++ b/crates/lsp/README.md @@ -0,0 +1,104 @@ +# Language Server Protocol (`lsp`) + +A native Language Server Protocol (LSP) implementation for Solidity development using Foundry's compilation and linting infrastructure. + +## Usage + +Start the LSP server with: + +```bash +forge lsp --stdio +``` + +## Supported LSP Features + +### Status + +- [x] `forge lint` diagnostics +- [x] Build diagnostics (compilation errors and warnings) + - [ ] Go-to-definition + - [ ] Symbol search and references + - [ ] Code completion + - [ ] Hover information + - [ ] Code formatting + - [ ] Code Actions + +## Development + +### Building + +```bash +cargo build --bin forge +``` + +### Testing + +```bash +cargo test -p forge-lsp +``` + +### VSCode or Cursor + +> Install forge nightly with `foundryup -i nightly` to access the `forge lint` feature + +You can add the following to VSCode (or Cursor) via an LSP-proxy extension; see the comment [here](https://github.com/foundry-rs/foundry/pull/11187#issuecomment-3148743488): + +```json +[ + { + "languageId": "solidity", + "command": "forge", + "fileExtensions": [ + ".sol" + ], + "args": [ + "lsp", + "--stdio" + ] + } +] +``` + +### Neovim + +> Install forge nightly with `foundryup -i nightly` to access the `forge lint` feature + +If you have Neovim 0.11+ installed, add this to your config: + +```lua +-- lsp/forge_lsp.lua +return { + cmd = { "forge", "lsp", "--stdio" }, + filetypes = { "solidity" }, + root_markers = { "foundry.toml", ".git" }, + root_dir = vim.fs.root(0, { "foundry.toml", ".git" }), +} +-- init.lua +vim.lsp.enable("forge_lsp") +``` + +### Debugging in Neovim + +LSP logs are stored in `~/.local/state/nvim/lsp.log` + +To clear the LSP log, truncate the file: + +```bash +> ~/.local/state/nvim/lsp.log +``` + +To monitor logs in real time, run: + +```bash +tail -f ~/.local/state/nvim/lsp.log +``` + +Enable trace-level logging in Neovim to see full request/response traces in the log: + +```vim +:lua vim.lsp.set_log_level("trace") +``` + +## Contributing + +Check out the [foundry contribution guide](https://github.com/foundry-rs/foundry/blob/master/CONTRIBUTING.md).
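+ +## Example diagnostic payload + +Diagnostics are pushed to the client as standard `textDocument/publishDiagnostics` notifications. The sketch below is for orientation only: the `uri`, range, and lint `code` values are illustrative, while the `source` field and the `[forge lint]` message prefix match what the server emits (severity `3` is the LSP Information level, which the server uses for lint notes): + +```json +{ + "jsonrpc": "2.0", + "method": "textDocument/publishDiagnostics", + "params": { + "uri": "file:///path/to/src/A.sol", + "diagnostics": [ + { + "range": { "start": { "line": 4, "character": 13 }, "end": { "line": 4, "character": 20 } }, + "severity": 3, + "code": "mixed-case-function", + "source": "forge-lint", + "message": "[forge lint] function names should use mixedCase" + } + ] + } +} +```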
diff --git a/crates/lsp/src/build.rs b/crates/lsp/src/build.rs new file mode 100644 index 0000000000000..4af3222a07ae3 --- /dev/null +++ b/crates/lsp/src/build.rs @@ -0,0 +1,244 @@ +use crate::utils::byte_offset_to_position; +use std::path::Path; +use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString, Position, Range}; + +fn ignored_code_for_tests(value: &serde_json::Value) -> bool { + let error_code = value.get("errorCode").and_then(|v| v.as_str()).unwrap_or_default(); + let file_path = value + .get("sourceLocation") + .and_then(|loc| loc.get("file")) + .and_then(|f| f.as_str()) + .unwrap_or_default(); + + // Ignore error code 5574 for test files (code size limit) + error_code == "5574" && file_path.contains(".t.sol") +} + +/// Convert `forge build` JSON errors into LSP diagnostics for `filename`, using `content` to map byte offsets to line/column positions. +pub fn build_output_to_diagnostics( + forge_output: &serde_json::Value, + filename: &str, + content: &str, +) -> Vec<Diagnostic> { + let mut diagnostics = Vec::new(); + + if let Some(errors) = forge_output.get("errors").and_then(|e| e.as_array()) { + for err in errors { + if ignored_code_for_tests(err) { + continue; + } + + let source_file = err + .get("sourceLocation") + .and_then(|loc| loc.get("file")) + .and_then(|f| f.as_str()) + .and_then(|full_path| Path::new(full_path).file_name()) + .and_then(|os_str| os_str.to_str()); + + if source_file != Some(filename) { + continue; + } + + let start_offset = err + .get("sourceLocation") + .and_then(|loc| loc.get("start")) + .and_then(|s| s.as_u64()) + .unwrap_or(0) as usize; + + let end_offset = err + .get("sourceLocation") + .and_then(|loc| loc.get("end")) + .and_then(|s| s.as_u64()) + .map(|v| v as usize) + .unwrap_or(start_offset); + + let (start_line, start_col) = byte_offset_to_position(content, start_offset); + let (mut end_line, mut end_col) = byte_offset_to_position(content, end_offset); + + if end_col > 0 { + end_col -= 1; + } else if end_line > 0 { + end_line -= 1; + end_col = content + .lines() + .nth(end_line.try_into().unwrap()) + .map(|l| l.len() as u32) + .unwrap_or(0); + } + + let range = Range { + start: Position { line: start_line, character: start_col }, + end: Position { line: end_line, character: end_col + 1 }, + }; + + let message = + err.get("message").and_then(|m| m.as_str()).unwrap_or("Unknown error").to_string(); + + let severity = match err.get("severity").and_then(|s| s.as_str()) { + Some("error") => Some(DiagnosticSeverity::ERROR), + Some("warning") => Some(DiagnosticSeverity::WARNING), + Some("note") => Some(DiagnosticSeverity::INFORMATION), + Some("help") => Some(DiagnosticSeverity::HINT), + _ => Some(DiagnosticSeverity::INFORMATION), + }; + + let code = err + .get("errorCode") + .and_then(|c| c.as_str()) + .map(|s| NumberOrString::String(s.to_string())); + + diagnostics.push(Diagnostic { + range, + severity, + code, + code_description: None, + source: Some("forge-build".to_string()), + message: format!("[forge build] {message}"), + related_information: None, + tags: None, + data: None, + }); + } + } + + diagnostics +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::runner::{ForgeRunner, Runner}; + use std::io::Write; + + static CONTRACT: &str = r#"// SPDX-License-Identifier: MIT +pragma solidity ^0.8.29; + +contract A { + using B for string; + + function() internal c; + + function add_num(uint256 a) public pure returns (uint256) { + bool fad; + return a + 4; + } +}"#; + + fn setup(contents: &str) -> (tempfile::TempPath, ForgeRunner) { + let mut tmp = + tempfile::Builder::new().suffix(".sol").tempfile().expect("failed to create temp file"); + 
tmp.write_all(contents.as_bytes()).expect("failed to write temp file"); + tmp.flush().expect("flush failed"); + tmp.as_file().sync_all().expect("sync failed"); + + let path = tmp.into_temp_path(); + + let compiler = ForgeRunner; + (path, compiler) + } + + #[tokio::test] + async fn test_build_success() { + let (tmp_file, compiler) = setup(CONTRACT); + let file_path = tmp_file.to_string_lossy().to_string(); + + let result = compiler.build(&file_path).await; + assert!(result.is_ok(), "Expected build to succeed"); + } + + #[tokio::test] + async fn test_build_has_errors_array() { + let (file_, compiler) = setup(CONTRACT); + let file_path = file_.to_string_lossy().to_string(); + + let json = compiler.build(&file_path).await.unwrap(); + assert!(json.get("errors").is_some(), "Expected 'errors' array in build output"); + } + + #[tokio::test] + async fn test_build_error_formatting() { + let (file_, compiler) = setup(CONTRACT); + let file_path = file_.to_string_lossy().to_string(); + + let json = compiler.build(&file_path).await.unwrap(); + if let Some(errors) = json.get("errors") + && let Some(first) = errors.get(0) + { + assert!(first.get("message").is_some(), "Expected error object to have a message"); + } + } + + #[tokio::test] + async fn test_diagnostic_offsets_match_source() { + let (file_, compiler) = setup(CONTRACT); + let file_path = file_.to_string_lossy().to_string(); + let source_code = tokio::fs::read_to_string(&file_path).await.expect("read source"); + let build_output = compiler.build(&file_path).await.expect("build failed"); + let expected_start_byte = 81; + let expected_end_byte = 82; + let expected_start_pos = byte_offset_to_position(&source_code, expected_start_byte); + let expected_end_pos = byte_offset_to_position(&source_code, expected_end_byte); + let filename = std::path::Path::new(&file_path) + .file_name() + .and_then(|f| f.to_str()) + .expect("filename"); + let diagnostics = build_output_to_diagnostics(&build_output, filename, &source_code); + assert!(!diagnostics.is_empty(), "no diagnostics found"); + + let diag = &diagnostics[0]; + assert_eq!(diag.range.start.line, expected_start_pos.0); + assert_eq!(diag.range.start.character, expected_start_pos.1); + assert_eq!(diag.range.end.line, expected_end_pos.0); + assert_eq!(diag.range.end.character, expected_end_pos.1); + } + + #[tokio::test] + async fn test_build_output_to_diagnostics_from_file() { + let (file_, compiler) = setup(CONTRACT); + let file_path = file_.to_string_lossy().to_string(); + let source_code = + tokio::fs::read_to_string(&file_path).await.expect("Failed to read source file"); + let build_output = compiler.build(&file_path).await.expect("Compiler build failed"); + let filename = std::path::Path::new(&file_path) + .file_name() + .and_then(|f| f.to_str()) + .expect("Failed to get filename"); + + let diagnostics = build_output_to_diagnostics(&build_output, filename, &source_code); + assert!(!diagnostics.is_empty(), "Expected at least one diagnostic"); + + let diag = &diagnostics[0]; + assert_eq!(diag.severity, Some(DiagnosticSeverity::ERROR)); + assert!(diag.message.contains("Identifier is not a library name")); + assert_eq!(diag.code, Some(NumberOrString::String("9589".to_string()))); + assert!(diag.range.start.line > 0); + assert!(diag.range.start.character > 0); + } + + #[tokio::test] + async fn test_ignored_code_for_tests() { + let error_json = serde_json::json!({ + "errorCode": "5574", + "sourceLocation": { + "file": "test/ERC6909Claims.t.sol" + } + }); + assert!(ignored_code_for_tests(&error_json)); + + 
let error_json_non_test = serde_json::json!({ + "errorCode": "5574", + "sourceLocation": { + "file": "contracts/ERC6909Claims.sol" + } + }); + assert!(!ignored_code_for_tests(&error_json_non_test)); + + let error_json_other_code = serde_json::json!({ + "errorCode": "1234", + "sourceLocation": { + "file": "test/ERC6909Claims.t.sol" + } + }); + assert!(!ignored_code_for_tests(&error_json_other_code)); + } +} diff --git a/crates/lsp/src/goto.rs b/crates/lsp/src/goto.rs new file mode 100644 index 0000000000000..43fe84d6ba3ab --- /dev/null +++ b/crates/lsp/src/goto.rs @@ -0,0 +1,748 @@ +use serde_json::Value; +use std::collections::HashMap; +use tower_lsp::lsp_types::{Location, Position, Range, Url}; + +#[derive(Debug, Clone)] +pub struct NodeInfo { + pub src: String, + pub name_location: Option<String>, + pub referenced_declaration: Option<u64>, + pub node_type: Option<String>, + pub member_location: Option<String>, +} + +fn push_if_node_or_array<'a>(tree: &'a Value, key: &str, stack: &mut Vec<&'a Value>) { + if let Some(value) = tree.get(key) { + match value { + Value::Array(arr) => { + stack.extend(arr); + } + Value::Object(_) => { + stack.push(value); + } + _ => {} + } + } +} + +/// Walk every source AST, indexing node metadata by absolute file path and node id, and mapping source paths to absolute paths. +pub fn cache_ids( + sources: &Value, +) -> (HashMap<String, HashMap<u64, NodeInfo>>, HashMap<String, String>) { + let mut nodes: HashMap<String, HashMap<u64, NodeInfo>> = HashMap::new(); + let mut path_to_abs: HashMap<String, String> = HashMap::new(); + + if let Some(sources_obj) = sources.as_object() { + for (path, contents) in sources_obj { + if let Some(contents_array) = contents.as_array() + && let Some(first_content) = contents_array.first() + && let Some(source_file) = first_content.get("source_file") + && let Some(ast) = source_file.get("ast") + { + // Get the absolute path for this file + let abs_path = + ast.get("absolutePath").and_then(|v| v.as_str()).unwrap_or(path).to_string(); + + path_to_abs.insert(path.clone(), abs_path.clone()); + + // Initialize the nodes map for this file + if !nodes.contains_key(&abs_path) { + nodes.insert(abs_path.clone(), HashMap::new()); + } + + if let Some(id) = ast.get("id").and_then(|v| v.as_u64()) + && let Some(src) = ast.get("src").and_then(|v| v.as_str()) + { + nodes.get_mut(&abs_path).unwrap().insert( + id, + NodeInfo { + src: src.to_string(), + name_location: None, + referenced_declaration: None, + node_type: ast + .get("nodeType") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()), + member_location: None, + }, + ); + } + + let mut stack = vec![ast]; + + while let Some(tree) = stack.pop() { + if let Some(id) = tree.get("id").and_then(|v| v.as_u64()) + && let Some(src) = tree.get("src").and_then(|v| v.as_str()) + { + // Check for nameLocation first + let mut name_location = tree + .get("nameLocation") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + + // Check for nameLocations array and use first element if + // available + if name_location.is_none() + && let Some(name_locations) = tree.get("nameLocations") + && let Some(locations_array) = name_locations.as_array() + && !locations_array.is_empty() + { + name_location = locations_array[0].as_str().map(|s| s.to_string()); + } + + let node_info = NodeInfo { + src: src.to_string(), + name_location, + referenced_declaration: tree + .get("referencedDeclaration") + .and_then(|v| v.as_u64()), + node_type: tree + .get("nodeType") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()), + member_location: tree + .get("memberLocation") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()), + }; + + nodes.get_mut(&abs_path).unwrap().insert(id, node_info); + } + + push_if_node_or_array(tree, "arguments", &mut stack);
push_if_node_or_array(tree, "arguments", &mut stack); + push_if_node_or_array(tree, "baseContracts", &mut stack); + push_if_node_or_array(tree, "baseContracts", &mut stack); + push_if_node_or_array(tree, "baseExpression", &mut stack); + push_if_node_or_array(tree, "baseName", &mut stack); + push_if_node_or_array(tree, "baseType", &mut stack); + push_if_node_or_array(tree, "block", &mut stack); + push_if_node_or_array(tree, "body", &mut stack); + push_if_node_or_array(tree, "components", &mut stack); + push_if_node_or_array(tree, "components", &mut stack); + push_if_node_or_array(tree, "condition", &mut stack); + push_if_node_or_array(tree, "declarations", &mut stack); + push_if_node_or_array(tree, "endExpression", &mut stack); + push_if_node_or_array(tree, "errorCall", &mut stack); + push_if_node_or_array(tree, "eventCall", &mut stack); + push_if_node_or_array(tree, "expression", &mut stack); + push_if_node_or_array(tree, "externalCall", &mut stack); + push_if_node_or_array(tree, "falseBody", &mut stack); + push_if_node_or_array(tree, "falseExpression", &mut stack); + push_if_node_or_array(tree, "file", &mut stack); + push_if_node_or_array(tree, "foreign", &mut stack); + push_if_node_or_array(tree, "indexExpression", &mut stack); + push_if_node_or_array(tree, "initialValue", &mut stack); + push_if_node_or_array(tree, "initialValue", &mut stack); + push_if_node_or_array(tree, "initializationExpression", &mut stack); + push_if_node_or_array(tree, "keyType", &mut stack); + push_if_node_or_array(tree, "leftExpression", &mut stack); + push_if_node_or_array(tree, "leftHandSide", &mut stack); + push_if_node_or_array(tree, "libraryName", &mut stack); + push_if_node_or_array(tree, "literals", &mut stack); + push_if_node_or_array(tree, "loopExpression", &mut stack); + push_if_node_or_array(tree, "members", &mut stack); + push_if_node_or_array(tree, "modifierName", &mut stack); + push_if_node_or_array(tree, "modifiers", &mut stack); + push_if_node_or_array(tree, "name", &mut stack); + push_if_node_or_array(tree, "names", &mut stack); + push_if_node_or_array(tree, "nodes", &mut stack); + push_if_node_or_array(tree, "options", &mut stack); + push_if_node_or_array(tree, "options", &mut stack); + push_if_node_or_array(tree, "options", &mut stack); + push_if_node_or_array(tree, "overrides", &mut stack); + push_if_node_or_array(tree, "overrides", &mut stack); + push_if_node_or_array(tree, "parameters", &mut stack); + push_if_node_or_array(tree, "parameters", &mut stack); + push_if_node_or_array(tree, "pathNode", &mut stack); + push_if_node_or_array(tree, "returnParameters", &mut stack); + push_if_node_or_array(tree, "returnParameters", &mut stack); + push_if_node_or_array(tree, "rightExpression", &mut stack); + push_if_node_or_array(tree, "rightHandSide", &mut stack); + push_if_node_or_array(tree, "startExpression", &mut stack); + push_if_node_or_array(tree, "statements", &mut stack); + push_if_node_or_array(tree, "statements", &mut stack); + push_if_node_or_array(tree, "storageLayout", &mut stack); + push_if_node_or_array(tree, "subExpression", &mut stack); + push_if_node_or_array(tree, "subdenomination", &mut stack); + push_if_node_or_array(tree, "symbolAliases", &mut stack); + push_if_node_or_array(tree, "trueBody", &mut stack); + push_if_node_or_array(tree, "trueExpression", &mut stack); + push_if_node_or_array(tree, "typeName", &mut stack); + push_if_node_or_array(tree, "unitAlias", &mut stack); + push_if_node_or_array(tree, "value", &mut stack); + push_if_node_or_array(tree, "valueType", &mut stack); 
+ } + } + } + } + + (nodes, path_to_abs) +} + +/// Resolve the declaration referenced at byte `position` in `uri`, returning the target file path and byte offset. +pub fn goto_bytes( + nodes: &HashMap<String, HashMap<u64, NodeInfo>>, + path_to_abs: &HashMap<String, String>, + id_to_path: &HashMap<String, String>, + uri: &str, + position: usize, +) -> Option<(String, usize)> { + let path = match uri.starts_with("file://") { + true => &uri[7..], + false => uri, + }; + + // Get absolute path for this file + let abs_path = path_to_abs.get(path)?; + + // Get nodes for the current file only + let current_file_nodes = nodes.get(abs_path)?; + + let mut refs = HashMap::new(); + + // Only consider nodes from the current file that have references + for (id, content) in current_file_nodes { + if content.referenced_declaration.is_none() { + continue; + } + + let src_parts: Vec<&str> = content.src.split(':').collect(); + if src_parts.len() != 3 { + continue; + } + + let start_b: usize = src_parts[0].parse().ok()?; + let length: usize = src_parts[1].parse().ok()?; + let end_b = start_b + length; + + if start_b <= position && position < end_b { + let diff = end_b - start_b; + if !refs.contains_key(&diff) || refs[&diff] <= *id { + refs.insert(diff, *id); + } + } + } + + if refs.is_empty() { + return None; + } + + // Find the reference with minimum diff (most specific) + let min_diff = *refs.keys().min()?; + let chosen_id = refs[&min_diff]; + + // Get the referenced declaration ID + let ref_id = current_file_nodes[&chosen_id].referenced_declaration?; + + // Search for the referenced declaration across all files + let mut target_node: Option<&NodeInfo> = None; + for file_nodes in nodes.values() { + if let Some(node) = file_nodes.get(&ref_id) { + target_node = Some(node); + break; + } + } + + let node = target_node?; + + // Get location from nameLocation or src + let (location_str, file_id) = if let Some(name_location) = &node.name_location { + let parts: Vec<&str> = name_location.split(':').collect(); + if parts.len() == 3 { + (parts[0], parts[2]) + } else { + return None; + } + } else { + let parts: Vec<&str> = node.src.split(':').collect(); + if parts.len() == 3 { + (parts[0], parts[2]) + } else { + return None; + } + }; + + let location: usize = location_str.parse().ok()?; + let file_path = id_to_path.get(file_id)?.clone(); + + Some((file_path, location)) +} + +pub fn pos_to_bytes(source_bytes: &[u8], position: Position) -> usize { + let text = String::from_utf8_lossy(source_bytes); + let lines: Vec<&str> = text.lines().collect(); + + let mut byte_offset = 0; + + for (line_num, line_text) in lines.iter().enumerate() { + if line_num < position.line as usize { + byte_offset += line_text.len() + 1; // +1 for newline + } else if line_num == position.line as usize { + let char_offset = std::cmp::min(position.character as usize, line_text.len()); + byte_offset += char_offset; + break; + } + } + + byte_offset +} + +pub fn bytes_to_pos(source_bytes: &[u8], byte_offset: usize) -> Option<Position> { + let text = String::from_utf8_lossy(source_bytes); + let mut curr_offset = 0; + + for (line_num, line_text) in text.lines().enumerate() { + let line_bytes = line_text.len() + 1; // +1 for newline + if curr_offset + line_bytes > byte_offset { + let col = byte_offset - curr_offset; + return Some(Position::new(line_num as u32, col as u32)); + } + curr_offset += line_bytes; + } + + None +} + +pub fn goto_declaration( + ast_data: &Value, + file_uri: &Url, + position: Position, + source_bytes: &[u8], +) -> Option<Location> { + let sources = ast_data.get("sources")?; + let build_infos = ast_data.get("build_infos")?.as_array()?; + let first_build_info = build_infos.first()?; + let id_to_path =
first_build_info.get("source_id_to_path")?.as_object()?; + + let id_to_path_map: HashMap<String, String> = + id_to_path.iter().map(|(k, v)| (k.clone(), v.as_str().unwrap_or("").to_string())).collect(); + + let (nodes, path_to_abs) = cache_ids(sources); + let byte_position = pos_to_bytes(source_bytes, position); + + if let Some((file_path, location_bytes)) = + goto_bytes(&nodes, &path_to_abs, &id_to_path_map, file_uri.as_ref(), byte_position) + { + // Read the target file to convert byte position to line/column + let target_file_path = std::path::Path::new(&file_path); + + // Make the path absolute if it's relative + let absolute_path = if target_file_path.is_absolute() { + target_file_path.to_path_buf() + } else { + std::env::current_dir().ok()?.join(target_file_path) + }; + + if let Ok(target_source_bytes) = std::fs::read(&absolute_path) + && let Some(target_position) = bytes_to_pos(&target_source_bytes, location_bytes) + && let Ok(target_uri) = Url::from_file_path(&absolute_path) + { + return Some(Location { + uri: target_uri, + range: Range { start: target_position, end: target_position }, + }); + } + } + + // Fallback to current position + Some(Location { uri: file_uri.clone(), range: Range { start: position, end: position } }) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::process::Command; + + #[test] + fn test_pos_to_bytes() { + let source = b"line1\nline2\nline3"; + + // Test position at start of file + let pos = Position::new(0, 0); + assert_eq!(pos_to_bytes(source, pos), 0); + + // Test position at start of second line + let pos = Position::new(1, 0); + assert_eq!(pos_to_bytes(source, pos), 6); // "line1\n" = 6 bytes + + // Test position in middle of first line + let pos = Position::new(0, 2); + assert_eq!(pos_to_bytes(source, pos), 2); + } + + #[test] + fn test_bytes_to_pos() { + let source = b"line1\nline2\nline3"; + + // Test byte offset 0 + assert_eq!(bytes_to_pos(source, 0), Some(Position::new(0, 0))); + + // Test byte offset at start of second line + assert_eq!(bytes_to_pos(source, 6), Some(Position::new(1, 0))); + + // Test byte offset in middle of first line + assert_eq!(bytes_to_pos(source, 2), Some(Position::new(0, 2))); + } + + fn get_ast_data() -> Option<Value> { + let output = Command::new("forge") + .arg("build") + .arg("testdata/C.sol") + .arg("--json") + .arg("--no-cache") + .arg("--ast") + .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1") + .env("FOUNDRY_LINT_LINT_ON_BUILD", "false") + .output() + .ok()?; + + let stdout_str = String::from_utf8_lossy(&output.stdout); + serde_json::from_str(&stdout_str).ok() + } + + fn get_test_file_uri(relative_path: &str) -> Url { + let current_dir = std::env::current_dir().expect("Failed to get current directory"); + let absolute_path = current_dir.join(relative_path); + Url::from_file_path(absolute_path).expect("Failed to create file URI") + } + + #[test] + fn test_goto_declaration_basic() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let file_uri = get_test_file_uri("testdata/C.sol"); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test goto declaration on line 22, column 8 (position of "name" in add_vote function, + // 0-based = line 21) + let position = Position::new(21, 8); + let result = goto_declaration(&ast_data, &file_uri, position, &source_bytes); + + assert!(result.is_some()); + let location = result.unwrap(); + + // Should find the declaration of the "name" parameter + // Just verify we get a reasonable result within file bounds
assert!(location.range.start.line < 25, "Declaration should be within the file bounds"); + } + + #[test] + fn test_goto_declaration_variable_reference() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let file_uri = get_test_file_uri("testdata/C.sol"); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test goto declaration on "votes" usage (line 23, 0-based = line 22) + let position = Position::new(22, 25); // Position of "votes" in name.add_one(votes) + let result = goto_declaration(&ast_data, &file_uri, position, &source_bytes); + + assert!(result.is_some()); + let location = result.unwrap(); + + // Should find the declaration of the "votes" state variable + // Just verify we get a reasonable result within file bounds + assert!(location.range.start.line < 25, "Declaration should be within the file bounds"); + } + + #[test] + fn test_goto_declaration_function_call() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let file_uri = get_test_file_uri("testdata/C.sol"); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test goto declaration on function call "name" in constructor (line 17, 0-based = line 16) + let position = Position::new(16, 8); // Position of "name" function call + let result = goto_declaration(&ast_data, &file_uri, position, &source_bytes); + + assert!(result.is_some()); + // The result should point to the function declaration + let location = result.unwrap(); + // This should find a declaration (exact line depends on where the function is defined) + // Just verify we got a valid location + assert!(location.range.start.line < 100); // Reasonable upper bound + } + + #[test] + fn test_goto_declaration_state_variable() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let file_uri = get_test_file_uri("testdata/C.sol"); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test goto declaration on "votes" in constructor (line 16, 0-based = line 15) + let position = Position::new(15, 8); // Position of "votes" in constructor + let result = goto_declaration(&ast_data, &file_uri, position, &source_bytes); + + assert!(result.is_some()); + let location = result.unwrap(); + + // Should find the declaration of the "votes" state variable + // Just verify we get a reasonable result within file bounds + assert!(location.range.start.line < 25, "Declaration should be within the file bounds"); + } + + #[test] + fn test_goto_declaration_immutable_variable() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let file_uri = get_test_file_uri("testdata/C.sol"); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test goto declaration on immutable variable "SCREAM" (line 10, 0-based = line 9) + let position = Position::new(9, 20); // Position of "SCREAM" + let result = goto_declaration(&ast_data, &file_uri, position, &source_bytes); + + assert!(result.is_some()); + let location = result.unwrap(); + + // Should find the declaration of the "SCREAM" immutable variable (same line) + assert_eq!(location.range.start.line, 9); + } + + #[test] + fn test_goto_declaration_no_reference() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let file_uri = get_test_file_uri("testdata/C.sol"); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test goto declaration on a position 
with no reference (e.g., a comment or whitespace) + let position = Position::new(0, 0); // Start of file (comment) + let result = goto_declaration(&ast_data, &file_uri, position, &source_bytes); + + assert!(result.is_some()); + let location = result.unwrap(); + + // Should fallback to current position + assert_eq!(location.uri, file_uri); + assert_eq!(location.range.start, position); + } + + #[test] + fn test_cache_ids_functionality() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let sources = ast_data.get("sources").unwrap(); + let (nodes, path_to_abs) = cache_ids(sources); + + // Should have cached multiple files + assert!(!nodes.is_empty()); + assert!(!path_to_abs.is_empty()); + + // Check that nodes have the expected structure + nodes.iter().for_each(|(_file_path, file_nodes)| { + for node_info in file_nodes.values() { + assert!(!node_info.src.is_empty()); + // Some nodes should have referenced declarations + if node_info.referenced_declaration.is_some() {} + } + }); + } + + #[test] + fn test_goto_bytes_functionality() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let sources = ast_data.get("sources").unwrap(); + let build_infos = ast_data.get("build_infos").unwrap().as_array().unwrap(); + let first_build_info = build_infos.first().unwrap(); + let id_to_path = first_build_info.get("source_id_to_path").unwrap().as_object().unwrap(); + + let id_to_path_map: HashMap<String, String> = id_to_path + .iter() + .map(|(k, v)| (k.clone(), v.as_str().unwrap_or("").to_string())) + .collect(); + + let (nodes, path_to_abs) = cache_ids(sources); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test with a position that should have a reference + let position = Position::new(21, 8); // "name" in add_vote function + let byte_position = pos_to_bytes(&source_bytes, position); + + let file_uri_url = get_test_file_uri("testdata/C.sol"); + let file_uri = file_uri_url.as_str(); + let result = goto_bytes(&nodes, &path_to_abs, &id_to_path_map, file_uri, byte_position); + + // Should find a declaration + if let Some((file_path, _location_bytes)) = result { + assert!(!file_path.is_empty()); + } + } + + #[test] + fn test_goto_declaration_and_definition_consistency() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let file_uri = get_test_file_uri("testdata/C.sol"); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test that goto_declaration and goto_definition return the same result + let position = Position::new(21, 8); // "name" in add_vote function + + let declaration_result = goto_declaration(&ast_data, &file_uri, position, &source_bytes); + let definition_result = goto_declaration(&ast_data, &file_uri, position, &source_bytes); // Same function used for both + + assert!(declaration_result.is_some()); + assert!(definition_result.is_some()); + + let declaration_location = declaration_result.unwrap(); + let definition_location = definition_result.unwrap(); + + // Both should return the same location + assert_eq!(declaration_location.uri, definition_location.uri); + assert_eq!(declaration_location.range.start.line, definition_location.range.start.line); + assert_eq!( + declaration_location.range.start.character, + definition_location.range.start.character + ); + } + + #[test] + fn test_goto_definition_multiple_positions() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let file_uri =
get_test_file_uri("testdata/C.sol"); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test multiple positions to ensure goto_definition works consistently + let test_positions = vec![ + (Position::new(21, 8), "parameter reference"), // "name" in add_vote function + (Position::new(22, 25), "state variable reference"), // "votes" in name.add_one(votes) + (Position::new(15, 8), "state variable in constructor"), // "votes" in constructor + ]; + + for (position, description) in test_positions { + let result = goto_declaration(&ast_data, &file_uri, position, &source_bytes); + assert!(result.is_some(), "Failed to find definition for {description}"); + + let location = result.unwrap(); + // Verify we got a valid location + assert!(location.range.start.line < 100, "Invalid line number for {description}"); + assert!( + location.range.start.character < 1000, + "Invalid character position for {description}" + ); + } + } + + #[test] + fn test_name_locations_handling() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let sources = ast_data.get("sources").unwrap(); + let (nodes, _path_to_abs) = cache_ids(sources); + + // Verify that nodes have name_location set (either from nameLocation or nameLocations[0]) + let mut nodes_with_name_location = 0; + for file_nodes in nodes.values() { + for node_info in file_nodes.values() { + if node_info.name_location.is_some() { + nodes_with_name_location += 1; + } + } + } + + // Should have at least some nodes with name locations + assert!(nodes_with_name_location > 0, "Expected to find nodes with name locations"); + } + + #[test] + fn test_name_locations_array_parsing() { + use serde_json::json; + + // Create a mock AST structure with nameLocations array + let mock_sources = json!({ + "test.sol": [{ + "source_file": { + "ast": { + "id": 1, + "src": "0:100:0", + "nodeType": "SourceUnit", + "absolutePath": "test.sol", + "nodes": [{ + "id": 2, + "src": "10:20:0", + "nodeType": "ContractDefinition", + "nameLocations": ["15:8:0", "25:8:0"] + }, { + "id": 3, + "src": "30:15:0", + "nodeType": "VariableDeclaration", + "nameLocation": "35:5:0" + }] + } + } + }] + }); + + let (nodes, _path_to_abs) = cache_ids(&mock_sources); + + // Should have nodes for test.sol + assert!(nodes.contains_key("test.sol")); + let test_file_nodes = &nodes["test.sol"]; + + // Node 2 should have nameLocation from nameLocations[0] + assert!(test_file_nodes.contains_key(&2)); + let node2 = &test_file_nodes[&2]; + assert_eq!(node2.name_location, Some("15:8:0".to_string())); + + // Node 3 should have nameLocation from nameLocation field + assert!(test_file_nodes.contains_key(&3)); + let node3 = &test_file_nodes[&3]; + assert_eq!(node3.name_location, Some("35:5:0".to_string())); + } +} diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs new file mode 100644 index 0000000000000..32cbb4a44cc28 --- /dev/null +++ b/crates/lsp/src/lib.rs @@ -0,0 +1,17 @@ +//! Foundry Language Server Protocol implementation +//! +//! This crate provides a native LSP server for Solidity development using Foundry's +//! compilation and linting infrastructure. 
+ +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] + +pub mod build; +pub mod goto; +pub mod lint; +pub mod lsp; +pub mod references; +pub mod runner; +pub mod utils; + +pub use lsp::ForgeLsp; diff --git a/crates/lsp/src/lint.rs b/crates/lsp/src/lint.rs new file mode 100644 index 0000000000000..957c40406b5dd --- /dev/null +++ b/crates/lsp/src/lint.rs @@ -0,0 +1,188 @@ +use serde::{Deserialize, Serialize}; +use std::path::Path; +use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range}; + +/// Convert `forge lint` JSON output into LSP diagnostics for `target_file`. +pub fn lint_output_to_diagnostics( + forge_output: &serde_json::Value, + target_file: &str, +) -> Vec<Diagnostic> { + let mut diagnostics = Vec::new(); + + if let serde_json::Value::Array(items) = forge_output { + for item in items { + if let Ok(forge_diag) = serde_json::from_value::<ForgeDiagnostic>(item.clone()) { + // Only include diagnostics for the target file + for span in &forge_diag.spans { + let target_path = Path::new(target_file) + .canonicalize() + .unwrap_or_else(|_| Path::new(target_file).to_path_buf()); + let span_path = Path::new(&span.file_name) + .canonicalize() + .unwrap_or_else(|_| Path::new(&span.file_name).to_path_buf()); + if target_path == span_path && span.is_primary { + let diagnostic = Diagnostic { + range: Range { + start: Position { + line: (span.line_start - 1), // LSP is 0-based + character: (span.column_start - 1), // LSP is 0-based + }, + end: Position { + line: (span.line_end - 1), + character: (span.column_end - 1), + }, + }, + severity: Some(match forge_diag.level.as_str() { + "error" => DiagnosticSeverity::ERROR, + "warning" => DiagnosticSeverity::WARNING, + "note" => DiagnosticSeverity::INFORMATION, + "help" => DiagnosticSeverity::HINT, + _ => DiagnosticSeverity::INFORMATION, + }), + code: forge_diag.code.as_ref().map(|c| { + tower_lsp::lsp_types::NumberOrString::String(c.code.clone()) + }), + code_description: None, + source: Some("forge-lint".to_string()), + message: format!("[forge lint] {}", forge_diag.message), + related_information: None, + tags: None, + data: None, + }; + diagnostics.push(diagnostic); + break; // Only take the first primary span per diagnostic + } + } + } + } + } + + diagnostics +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ForgeDiagnostic { + #[serde(rename = "$message_type")] + pub message_type: String, + pub message: String, + pub code: Option<ForgeLintCode>, + pub level: String, + pub spans: Vec<ForgeLintSpan>, + pub children: Vec<ForgeLintChild>, + pub rendered: Option<String>, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ForgeLintCode { + pub code: String, + pub explanation: Option<String>, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ForgeLintSpan { + pub file_name: String, + pub byte_start: u32, + pub byte_end: u32, + pub line_start: u32, + pub line_end: u32, + pub column_start: u32, + pub column_end: u32, + pub is_primary: bool, + pub text: Vec<ForgeLintText>, + pub label: Option<String>, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ForgeLintText { + pub text: String, + pub highlight_start: u32, + pub highlight_end: u32, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ForgeLintChild { + pub message: String, + pub code: Option<ForgeLintCode>, + pub level: String, + pub spans: Vec<ForgeLintSpan>, + pub children: Vec<ForgeLintChild>, + pub rendered: Option<String>, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::runner::{ForgeRunner, Runner}; + use std::io::Write; + + static CONTRACT: &str = r#"// SPDX-License-Identifier: MIT +pragma solidity ^0.8.29; + +contract A { + function add_num(uint256 a) public pure returns (uint256) { + return a + 4;
} +}"#; + + fn setup(contents: &str) -> (tempfile::NamedTempFile, ForgeRunner) { + let mut tmp = tempfile::Builder::new() + .prefix("A") + .suffix(".sol") + .tempfile_in(".") + .expect("failed to create temp file"); + + tmp.write_all(contents.as_bytes()).expect("failed to write temp file"); + tmp.flush().expect("flush failed"); + tmp.as_file().sync_all().expect("sync failed"); + + let compiler = ForgeRunner; + (tmp, compiler) + } + + #[tokio::test] + async fn test_lint_valid_file() { + let (file_, compiler) = setup(CONTRACT); + let file_path = file_.path().to_string_lossy().to_string(); + + let result = compiler.lint(&file_path).await; + assert!(result.is_ok(), "Expected lint to succeed"); + + let json_value = result.unwrap(); + assert!(json_value.is_array(), "Expected lint output to be an array"); + } + + #[tokio::test] + async fn test_lint_diagnosis_output() { + let (file_, compiler) = setup(CONTRACT); + let file_path = file_.path().to_string_lossy().to_string(); + + let result = compiler.lint(&file_path).await; + assert!(result.is_ok()); + + let json_value = result.unwrap(); + let diagnostics = lint_output_to_diagnostics(&json_value, &file_path); + assert!(!diagnostics.is_empty(), "Expected diagnostics"); + } + + #[tokio::test] + async fn test_lint_to_lsp_diagnostics() { + let (file_, compiler) = setup(CONTRACT); + let file_path = file_.path().to_string_lossy().to_string(); + + let result = compiler.lint(&file_path).await; + assert!(result.is_ok(), "Expected lint to succeed"); + + let json_value = result.unwrap(); + let diagnostics = lint_output_to_diagnostics(&json_value, &file_path); + assert!(!diagnostics.is_empty(), "Expected at least one diagnostic"); + + let first_diag = &diagnostics[0]; + assert_eq!(first_diag.source, Some("forge-lint".to_string())); + assert_eq!(first_diag.message, "[forge lint] function names should use mixedCase"); + assert_eq!( + first_diag.severity, + Some(tower_lsp::lsp_types::DiagnosticSeverity::INFORMATION) + ); + assert_eq!(first_diag.range.start.line, 4); + assert_eq!(first_diag.range.start.character, 13); + } +} diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs new file mode 100644 index 0000000000000..5bf9da3cf70c1 --- /dev/null +++ b/crates/lsp/src/lsp.rs @@ -0,0 +1,489 @@ +use crate::{ + goto, references, + runner::{ForgeRunner, Runner}, +}; +use foundry_common::version::SHORT_VERSION; +use std::{collections::HashMap, sync::Arc}; +use tokio::sync::RwLock; +use tower_lsp::{Client, LanguageServer, lsp_types::*}; + +pub type FileId = usize; + +pub struct ForgeLsp { + client: Client, + compiler: Arc<dyn Runner + Send + Sync>, + ast_cache: Arc<RwLock<HashMap<String, serde_json::Value>>>, +} + +#[allow(dead_code)] +#[derive(Debug, Clone)] +struct TextDocumentItem<'a> { + uri: Url, + text: &'a str, + version: Option<i32>, +} + +impl ForgeLsp { + pub fn new(client: Client) -> Self { + let compiler = Arc::new(ForgeRunner) as Arc<dyn Runner + Send + Sync>; + let ast_cache = Arc::new(RwLock::new(HashMap::new())); + Self { client, compiler, ast_cache } + } + + async fn on_change<'a>(&self, params: TextDocumentItem<'a>) { + let uri = params.uri.clone(); + let version = params.version; + + // Get file path for AST caching + let file_path = match uri.to_file_path() { + Ok(path) => path, + Err(_) => { + self.client + .log_message(MessageType::ERROR, "Invalid file URI for AST caching") + .await; + return; + } + }; + + let path_str = match file_path.to_str() { + Some(s) => s, + None => { + self.client + .log_message(MessageType::ERROR, "Invalid file path for AST caching") + .await; + return; + } + }; + + let (lint_result, build_result, ast_result) =
tokio::join!( + self.compiler.get_lint_diagnostics(&uri), + self.compiler.get_build_diagnostics(&uri), + self.compiler.ast(path_str) + ); + + // Cache the AST data + match ast_result { + Ok(ast_data) => { + let mut cache = self.ast_cache.write().await; + cache.insert(uri.to_string(), ast_data); + self.client.log_message(MessageType::INFO, "AST data cached successfully").await; + } + Err(e) => { + self.client + .log_message(MessageType::WARNING, format!("Failed to cache AST data: {e}")) + .await; + } + } + + let mut all_diagnostics = vec![]; + + match lint_result { + Ok(mut lints) => { + self.client + .log_message( + MessageType::INFO, + format!("Found {} linting diagnostics", lints.len()), + ) + .await; + all_diagnostics.append(&mut lints); + } + Err(e) => { + self.client + .log_message( + MessageType::WARNING, + format!("Forge linting diagnostics failed: {e}"), + ) + .await; + } + } + + match build_result { + Ok(mut builds) => { + self.client + .log_message( + MessageType::INFO, + format!("Found {} build diagnostics", builds.len()), + ) + .await; + all_diagnostics.append(&mut builds); + } + Err(e) => { + self.client + .log_message( + MessageType::WARNING, + format!("Forge build diagnostics failed: {e}"), + ) + .await; + } + } + + self.client.publish_diagnostics(uri, all_diagnostics, version).await; + } +} + +#[tower_lsp::async_trait] +impl LanguageServer for ForgeLsp { + async fn initialize( + &self, + _: InitializeParams, + ) -> tower_lsp::jsonrpc::Result<InitializeResult> { + Ok(InitializeResult { + server_info: Some(ServerInfo { + name: "forge lsp".to_string(), + version: Some(SHORT_VERSION.to_string()), + }), + capabilities: ServerCapabilities { + definition_provider: Some(OneOf::Left(true)), + declaration_provider: Some(DeclarationCapability::Simple(true)), + references_provider: Some(OneOf::Left(true)), + text_document_sync: Some(TextDocumentSyncCapability::Kind( + TextDocumentSyncKind::FULL, + )), + ..ServerCapabilities::default() + }, + }) + } + + async fn initialized(&self, _: InitializedParams) { + self.client.log_message(MessageType::INFO, "lsp server initialized!").await; + } + + async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { + self.client.log_message(MessageType::INFO, "lsp server shutting down").await; + Ok(()) + } + + async fn did_open(&self, params: DidOpenTextDocumentParams) { + self.client.log_message(MessageType::INFO, "file opened").await; + + self.on_change(TextDocumentItem { + uri: params.text_document.uri, + text: &params.text_document.text, + version: Some(params.text_document.version), + }) + .await + } + + async fn did_change(&self, params: DidChangeTextDocumentParams) { + self.client.log_message(MessageType::INFO, "file changed").await; + + // Invalidate cached AST data for the changed file + let uri = params.text_document.uri; + let mut cache = self.ast_cache.write().await; + if cache.remove(&uri.to_string()).is_some() { + self.client + .log_message(MessageType::INFO, "Invalidated cached AST data for changed file") + .await; + } + } + + async fn did_save(&self, params: DidSaveTextDocumentParams) { + self.client.log_message(MessageType::INFO, "file saved - running diagnostics").await; + + // Run diagnostics on save, regardless of whether text is provided + // If text is provided, use it; otherwise read from file system + let text_content = if let Some(text) = params.text { + text + } else { + // Read the file from disk since many LSP clients don't send text on save + match std::fs::read_to_string(params.text_document.uri.path()) { + Ok(content) => content,
Err(e) => { + self.client + .log_message( + MessageType::ERROR, + format!("Failed to read file on save: {e}"), + ) + .await; + return; + } + } + }; + + let item = + TextDocumentItem { uri: params.text_document.uri, text: &text_content, version: None }; + + // Always run diagnostics on save to reflect the current file state + self.on_change(item).await; + _ = self.client.semantic_tokens_refresh().await; + } + + async fn did_close(&self, _: DidCloseTextDocumentParams) { + self.client.log_message(MessageType::INFO, "file closed").await; + } + + async fn did_change_configuration(&self, _: DidChangeConfigurationParams) { + self.client.log_message(MessageType::INFO, "configuration changed!").await; + } + + async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) { + self.client.log_message(MessageType::INFO, "workspace folders changed!").await; + } + + async fn did_change_watched_files(&self, _: DidChangeWatchedFilesParams) { + self.client.log_message(MessageType::INFO, "watched files have changed!").await; + } + + async fn goto_definition( + &self, + params: GotoDefinitionParams, + ) -> tower_lsp::jsonrpc::Result<Option<GotoDefinitionResponse>> { + self.client.log_message(MessageType::INFO, "Got a textDocument/definition request").await; + + let uri = params.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + + // Get the file path from URI + let file_path = match uri.to_file_path() { + Ok(path) => path, + Err(_) => { + self.client.log_message(MessageType::ERROR, "Invalid file URI").await; + return Ok(None); + } + }; + + // Read the source file + let source_bytes = match std::fs::read(&file_path) { + Ok(bytes) => bytes, + Err(e) => { + self.client + .log_message(MessageType::ERROR, format!("Failed to read file: {e}")) + .await; + return Ok(None); + } + }; + + // Try to get AST data from cache first + let ast_data = { + let cache = self.ast_cache.read().await; + if let Some(cached_ast) = cache.get(&uri.to_string()) { + self.client.log_message(MessageType::INFO, "Using cached AST data").await; + cached_ast.clone() + } else { + // Cache miss - get AST data and cache it + drop(cache); // Release read lock + + let path_str = match file_path.to_str() { + Some(s) => s, + None => { + self.client.log_message(MessageType::ERROR, "Invalid file path").await; + return Ok(None); + } + }; + + match self.compiler.ast(path_str).await { + Ok(data) => { + self.client + .log_message(MessageType::INFO, "Fetched and caching new AST data") + .await; + + // Cache the new AST data + let mut cache = self.ast_cache.write().await; + cache.insert(uri.to_string(), data.clone()); + data + } + Err(e) => { + self.client + .log_message(MessageType::ERROR, format!("Failed to get AST: {e}")) + .await; + return Ok(None); + } + } + } + }; + + // Use goto_declaration function (same logic for both definition and declaration) + if let Some(location) = goto::goto_declaration(&ast_data, &uri, position, &source_bytes) { + self.client + .log_message( + MessageType::INFO, + format!("Found definition at {}:{}", location.uri, location.range.start.line), + ) + .await; + Ok(Some(GotoDefinitionResponse::from(location))) + } else { + self.client.log_message(MessageType::INFO, "No definition found").await; + // Fallback to current position + let location = Location { uri, range: Range { start: position, end: position } }; + Ok(Some(GotoDefinitionResponse::from(location))) + } + } + + async fn goto_declaration( + &self, + params: request::GotoDeclarationParams, + ) ->
+ self.client.log_message(MessageType::INFO, "Got a textDocument/declaration request").await; + + let uri = params.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + + // Get the file path from URI + let file_path = match uri.to_file_path() { + Ok(path) => path, + Err(_) => { + self.client.log_message(MessageType::ERROR, "Invalid file URI").await; + return Ok(None); + } + }; + + // Read the source file + let source_bytes = match std::fs::read(&file_path) { + Ok(bytes) => bytes, + Err(e) => { + self.client + .log_message(MessageType::ERROR, format!("Failed to read file: {e}")) + .await; + return Ok(None); + } + }; + + // Try to get AST data from cache first + let ast_data = { + let cache = self.ast_cache.read().await; + if let Some(cached_ast) = cache.get(&uri.to_string()) { + self.client.log_message(MessageType::INFO, "Using cached AST data").await; + cached_ast.clone() + } else { + // Cache miss - get AST data and cache it + drop(cache); // Release read lock + + let path_str = match file_path.to_str() { + Some(s) => s, + None => { + self.client.log_message(MessageType::ERROR, "Invalid file path").await; + return Ok(None); + } + }; + + match self.compiler.ast(path_str).await { + Ok(data) => { + self.client + .log_message(MessageType::INFO, "Fetched and caching new AST data") + .await; + + // Cache the new AST data + let mut cache = self.ast_cache.write().await; + cache.insert(uri.to_string(), data.clone()); + data + } + Err(e) => { + self.client + .log_message(MessageType::ERROR, format!("Failed to get AST: {e}")) + .await; + return Ok(None); + } + } + } + }; + + // Use goto_declaration function + if let Some(location) = goto::goto_declaration(&ast_data, &uri, position, &source_bytes) { + self.client + .log_message( + MessageType::INFO, + format!("Found declaration at {}:{}", location.uri, location.range.start.line), + ) + .await; + Ok(Some(request::GotoDeclarationResponse::from(location))) + } else { + self.client.log_message(MessageType::INFO, "No declaration found").await; + // Fallback to current position + let location = Location { uri, range: Range { start: position, end: position } }; + Ok(Some(request::GotoDeclarationResponse::from(location))) + } + } + + async fn references( + &self, + params: ReferenceParams, + ) -> tower_lsp::jsonrpc::Result<Option<Vec<Location>>> { + self.client.log_message(MessageType::INFO, "Got a textDocument/references request").await; + + let uri = params.text_document_position.text_document.uri; + let position = params.text_document_position.position; + + // Get the file path from URI + let file_path = match uri.to_file_path() { + Ok(path) => path, + Err(_) => { + self.client.log_message(MessageType::ERROR, "Invalid file URI").await; + return Ok(None); + } + }; + + // Read the source file + let source_bytes = match std::fs::read(&file_path) { + Ok(bytes) => bytes, + Err(e) => { + self.client + .log_message(MessageType::ERROR, format!("Failed to read file: {e}")) + .await; + return Ok(None); + } + }; + + // Try to get AST data from cache first + let ast_data = { + let cache = self.ast_cache.read().await; + if let Some(cached_ast) = cache.get(&uri.to_string()) { + self.client.log_message(MessageType::INFO, "Using cached AST data").await; + cached_ast.clone() + } else { + // Cache miss - get AST data and cache it + drop(cache); // Release read lock + + let path_str = match file_path.to_str() { + Some(s) => s, + None => { + self.client.log_message(MessageType::ERROR, "Invalid file path").await; + return Ok(None); + } + };
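+ // Each cache miss costs a full `forge build --ast` run; did_change evicts the cached entry, so a stale AST is never served for an edited file.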
path").await; + return Ok(None); + } + }; + + match self.compiler.ast(path_str).await { + Ok(data) => { + self.client + .log_message(MessageType::INFO, "Fetched and caching new AST data") + .await; + + // Cache the new AST data + let mut cache = self.ast_cache.write().await; + cache.insert(uri.to_string(), data.clone()); + data + } + Err(e) => { + self.client + .log_message(MessageType::ERROR, format!("Failed to get AST: {e}")) + .await; + return Ok(None); + } + } + } + }; + + // Use goto_references function to find all references + let locations = references::goto_references(&ast_data, &uri, position, &source_bytes); + + if locations.is_empty() { + self.client.log_message(MessageType::INFO, "No references found").await; + Ok(None) + } else { + self.client + .log_message(MessageType::INFO, format!("Found {} references", locations.len())) + .await; + Ok(Some(locations)) + } + } + + async fn execute_command( + &self, + _: ExecuteCommandParams, + ) -> tower_lsp::jsonrpc::Result> { + self.client.log_message(MessageType::INFO, "command executed!").await; + + match self.client.apply_edit(WorkspaceEdit::default()).await { + Ok(res) if res.applied => self.client.log_message(MessageType::INFO, "applied").await, + Ok(_) => self.client.log_message(MessageType::INFO, "rejected").await, + Err(err) => self.client.log_message(MessageType::ERROR, err).await, + } + Ok(None) + } +} diff --git a/crates/lsp/src/references.rs b/crates/lsp/src/references.rs new file mode 100644 index 0000000000000..8ccd9cc9554d3 --- /dev/null +++ b/crates/lsp/src/references.rs @@ -0,0 +1,271 @@ +use serde_json::Value; +use std::collections::{HashMap, HashSet}; +use tower_lsp::lsp_types::{Location, Position, Range, Url}; + +use crate::goto::{NodeInfo, bytes_to_pos, cache_ids, pos_to_bytes}; + +/// Build a map of all reference relationships in the AST +/// Returns a HashMap where keys are node IDs and values are vectors of related node IDs +pub fn all_references(nodes: &HashMap>) -> HashMap> { + let mut all_refs: HashMap> = HashMap::new(); + + // Iterate through all files and nodes + for file_nodes in nodes.values() { + for (id, node_info) in file_nodes { + if let Some(ref_id) = node_info.referenced_declaration { + // Add the reference relationship + all_refs.entry(ref_id).or_default().push(*id); + all_refs.entry(*id).or_default().push(ref_id); + } + } + } + + all_refs +} + +/// Find the node ID at a specific byte position in a file +pub fn byte_to_id( + nodes: &HashMap>, + abs_path: &str, + byte_position: usize, +) -> Option { + let file_nodes = nodes.get(abs_path)?; + let mut refs: HashMap = HashMap::new(); + + for (id, node_info) in file_nodes { + let src_parts: Vec<&str> = node_info.src.split(':').collect(); + if src_parts.len() != 3 { + continue; + } + + let start: usize = src_parts[0].parse().ok()?; + let length: usize = src_parts[1].parse().ok()?; + let end = start + length; + + if start <= byte_position && byte_position < end { + let diff = end - start; + refs.entry(diff).or_insert(*id); + } + } + + refs.keys().min().map(|min_diff| refs[min_diff]) +} + +/// Convert a node ID to a Location for LSP +pub fn id_to_location( + nodes: &HashMap>, + id_to_path: &HashMap, + node_id: u64, +) -> Option { + // Find the file containing this node + let mut target_node: Option<&NodeInfo> = None; + for file_nodes in nodes.values() { + if let Some(node) = file_nodes.get(&node_id) { + target_node = Some(node); + break; + } + } + + let node = target_node?; + + // Get location from nameLocation or src + let (byte_str, length_str, file_id) = 
+ // Get location from nameLocation or src + let (byte_str, length_str, file_id) = if let Some(name_location) = &node.name_location { + let parts: Vec<&str> = name_location.split(':').collect(); + if parts.len() == 3 { + (parts[0], parts[1], parts[2]) + } else { + return None; + } + } else { + let parts: Vec<&str> = node.src.split(':').collect(); + if parts.len() == 3 { + (parts[0], parts[1], parts[2]) + } else { + return None; + } + }; + + let byte_offset: usize = byte_str.parse().ok()?; + let length: usize = length_str.parse().ok()?; + let file_path = id_to_path.get(file_id)?; + + // Read the file to convert byte positions to line/column + let absolute_path = if std::path::Path::new(file_path).is_absolute() { + std::path::PathBuf::from(file_path) + } else { + std::env::current_dir().ok()?.join(file_path) + }; + + let source_bytes = std::fs::read(&absolute_path).ok()?; + let start_pos = bytes_to_pos(&source_bytes, byte_offset)?; + let end_pos = bytes_to_pos(&source_bytes, byte_offset + length)?; + + let uri = Url::from_file_path(&absolute_path).ok()?; + + Some(Location { uri, range: Range { start: start_pos, end: end_pos } }) +} + +/// Find all references to a symbol at the given position +pub fn goto_references( + ast_data: &Value, + file_uri: &Url, + position: Position, + source_bytes: &[u8], +) -> Vec<Location> { + let sources = match ast_data.get("sources") { + Some(s) => s, + None => return vec![], + }; + + let build_infos = match ast_data.get("build_infos").and_then(|v| v.as_array()) { + Some(infos) => infos, + None => return vec![], + }; + + let first_build_info = match build_infos.first() { + Some(info) => info, + None => return vec![], + }; + + let id_to_path = match first_build_info.get("source_id_to_path").and_then(|v| v.as_object()) { + Some(map) => map, + None => return vec![], + }; + + let id_to_path_map: HashMap<String, String> = + id_to_path.iter().map(|(k, v)| (k.clone(), v.as_str().unwrap_or("").to_string())).collect(); + + let (nodes, path_to_abs) = cache_ids(sources); + let all_refs = all_references(&nodes); + + // Get the file path and convert to absolute path + let path = match file_uri.to_file_path() { + Ok(p) => p, + Err(_) => return vec![], + }; + + let path_str = match path.to_str() { + Some(s) => s, + None => return vec![], + }; + + let abs_path = match path_to_abs.get(path_str) { + Some(ap) => ap, + None => return vec![], + }; + + // Convert position to byte offset + let byte_position = pos_to_bytes(source_bytes, position); + + // Find the node ID at this position + let node_id = match byte_to_id(&nodes, abs_path, byte_position) { + Some(id) => id, + None => return vec![], + }; + + // Get all references for this node + let refs = match all_refs.get(&node_id) { + Some(r) => r, + None => return vec![], + }; + + // Collect all related references + let mut results = HashSet::new(); + results.extend(refs.iter().copied()); + + // For each reference, also get its references (transitive closure) + for ref_id in refs { + if let Some(transitive_refs) = all_refs.get(ref_id) { + results.extend(transitive_refs.iter().copied()); + } + } + + // Convert node IDs to locations + let mut locations = Vec::new(); + for id in results { + if let Some(location) = id_to_location(&nodes, &id_to_path_map, id) { + locations.push(location); + } + } + + locations +} + +#[cfg(test)] +mod tests { + use super::*; + use std::process::Command; + + fn get_ast_data() -> Option<Value> { + let output = Command::new("forge") + .args(["build", "--ast", "--silent", "--build-info"]) + .current_dir("testdata") + .output() + .ok()?; + + let stdout_str = String::from_utf8(output.stdout).ok()?;
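+ // forge prints the combined build JSON on stdout; if forge is missing or fails, this returns None and the tests below skip themselves.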
+ serde_json::from_str(&stdout_str).ok() + } + + fn get_test_file_uri(relative_path: &str) -> Url { + let current_dir = std::env::current_dir().expect("Failed to get current directory"); + let absolute_path = current_dir.join(relative_path); + Url::from_file_path(absolute_path).expect("Failed to create file URI") + } + + #[test] + fn test_goto_references_basic() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let file_uri = get_test_file_uri("testdata/C.sol"); + let source_bytes = std::fs::read("testdata/C.sol").unwrap(); + + // Test goto references on "name" in add_vote function (line 22, column 8) + let position = Position::new(21, 8); + let references = goto_references(&ast_data, &file_uri, position, &source_bytes); + + // The function should return a vector (may be empty if no references found) + // This is just testing that the function runs without panicking + + // If references are found, verify they have valid locations + for location in &references { + assert!(location.range.start.line < 100, "Reference line should be reasonable"); + assert!(!location.uri.as_str().is_empty(), "Reference URI should not be empty"); + } + } + + #[test] + fn test_all_references_basic() { + let ast_data = match get_ast_data() { + Some(data) => data, + None => { + return; + } + }; + + let sources = ast_data.get("sources").unwrap(); + let (nodes, _path_to_abs) = cache_ids(sources); + let all_refs = all_references(&nodes); + + // Should have some reference relationships (or be empty if none found) + // Just verify the function runs without panicking + + // If references exist, verify they are bidirectional + for refs in all_refs.values() { + for ref_id in refs { + if let Some(back_refs) = all_refs.get(ref_id) { + // This is a more lenient check - just verify the structure is reasonable + assert!( + !back_refs.is_empty(), + "Back references should exist if forward references exist" + ); + } + } + } + } +} diff --git a/crates/lsp/src/runner.rs b/crates/lsp/src/runner.rs new file mode 100644 index 0000000000000..9f13d53cfa8f3 --- /dev/null +++ b/crates/lsp/src/runner.rs @@ -0,0 +1,151 @@ +use crate::{build::build_output_to_diagnostics, lint::lint_output_to_diagnostics}; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +use thiserror::Error; +use tokio::process::Command; +use tower_lsp::{ + async_trait, + lsp_types::{Diagnostic, Url}, +}; + +pub struct ForgeRunner; + +#[async_trait] +pub trait Runner: Send + Sync { + async fn build(&self, file: &str) -> Result<serde_json::Value, RunnerError>; + async fn lint(&self, file: &str) -> Result<serde_json::Value, RunnerError>; + async fn ast(&self, file: &str) -> Result<serde_json::Value, RunnerError>; + async fn get_build_diagnostics(&self, file: &Url) -> Result<Vec<Diagnostic>, RunnerError>; + async fn get_lint_diagnostics(&self, file: &Url) -> Result<Vec<Diagnostic>, RunnerError>; +} + +#[async_trait] +impl Runner for ForgeRunner { + async fn lint(&self, file_path: &str) -> Result<serde_json::Value, RunnerError> { + let output = Command::new("forge") + .arg("lint") + .arg(file_path) + .arg("--json") + .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1") + .output() + .await?; + + let stderr_str = String::from_utf8_lossy(&output.stderr); + + // Parse JSON output line by line + let mut diagnostics = Vec::new(); + for line in stderr_str.lines() { + if line.trim().is_empty() { + continue; + } + + match serde_json::from_str::<serde_json::Value>(line) { + Ok(value) => diagnostics.push(value), + Err(_e) => { + continue; + } + } + } + + Ok(serde_json::Value::Array(diagnostics)) + } + + async fn build(&self, file_path: &str) -> Result<serde_json::Value, RunnerError> { + let output = Command::new("forge") + .arg("build")
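+ // Builds just the given file, bypassing the cache so diagnostics always reflect the file on disk; lint-on-build is disabled because lint() reports separately.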
+ .arg(file_path) + .arg("--json") + .arg("--no-cache") + .arg("--ast") + .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1") + .env("FOUNDRY_LINT_LINT_ON_BUILD", "false") + .output() + .await?; + + let stdout_str = String::from_utf8_lossy(&output.stdout); + let parsed: serde_json::Value = serde_json::from_str(&stdout_str)?; + + Ok(parsed) + } + + async fn ast(&self, file_path: &str) -> Result<serde_json::Value, RunnerError> { + let output = Command::new("forge") + .arg("build") + .arg(file_path) + .arg("--json") + .arg("--no-cache") + .arg("--ast") + .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1") + .env("FOUNDRY_LINT_LINT_ON_BUILD", "false") + .output() + .await?; + + let stdout_str = String::from_utf8_lossy(&output.stdout); + let parsed: serde_json::Value = serde_json::from_str(&stdout_str)?; + + Ok(parsed) + } + + async fn get_lint_diagnostics(&self, file: &Url) -> Result<Vec<Diagnostic>, RunnerError> { + let path: PathBuf = file.to_file_path().map_err(|_| RunnerError::InvalidUrl)?; + let path_str = path.to_str().ok_or(RunnerError::InvalidUrl)?; + let lint_output = self.lint(path_str).await?; + let diagnostics = lint_output_to_diagnostics(&lint_output, path_str); + Ok(diagnostics) + } + + async fn get_build_diagnostics(&self, file: &Url) -> Result<Vec<Diagnostic>, RunnerError> { + let path = file.to_file_path().map_err(|_| RunnerError::InvalidUrl)?; + let path_str = path.to_str().ok_or(RunnerError::InvalidUrl)?; + let filename = + path.file_name().and_then(|os_str| os_str.to_str()).ok_or(RunnerError::InvalidUrl)?; + let content = tokio::fs::read_to_string(&path).await.map_err(|_| RunnerError::ReadError)?; + let build_output = self.build(path_str).await?; + let diagnostics = build_output_to_diagnostics(&build_output, filename, &content); + Ok(diagnostics) + } +} + +#[derive(Error, Debug)] +pub enum RunnerError { + #[error("Invalid file URL")] + InvalidUrl, + #[error("Failed to run command: {0}")] + CommandError(#[from] std::io::Error), + #[error("JSON error: {0}")] + JsonError(#[from] serde_json::Error), + #[error("Empty output from compiler")] + EmptyOutput, + #[error("ReadError")] + ReadError, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct SourceLocation { + file: String, + start: i32, // Changed to i32 to handle -1 values + end: i32, // Changed to i32 to handle -1 values +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ForgeDiagnosticMessage { + #[serde(rename = "sourceLocation")] + source_location: SourceLocation, + #[serde(rename = "type")] + error_type: String, + component: String, + severity: String, + #[serde(rename = "errorCode")] + error_code: String, + message: String, + #[serde(rename = "formattedMessage")] + formatted_message: String, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct CompileOutput { + errors: Option<Vec<ForgeDiagnosticMessage>>, + sources: serde_json::Value, + contracts: serde_json::Value, + build_infos: Vec<serde_json::Value>, +} diff --git a/crates/lsp/src/utils.rs b/crates/lsp/src/utils.rs new file mode 100644 index 0000000000000..2e5bc34c77625 --- /dev/null +++ b/crates/lsp/src/utils.rs @@ -0,0 +1,122 @@ +pub fn byte_offset_to_position(source: &str, byte_offset: usize) -> (u32, u32) { + let mut line = 0; + let mut col = 0; + let mut i = 0; + + let bytes = source.as_bytes(); + while i < byte_offset && i < bytes.len() { + match bytes[i] { + b'\n' => { + line += 1; + col = 0; + i += 1; + } + b'\r' if i + 1 < bytes.len() && bytes[i + 1] == b'\n' => { + line += 1; + col = 0; + i += 2; + } + _ => { + col += 1; + i += 1; + } + } + } + + (line, col) +} + +pub fn position_to_byte_offset(source: &str, line: u32, character: u32) -> usize {
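+ // NOTE: columns are counted in chars here (and bytes above), while LSP positions default to UTF-16 code units, so columns on non-ASCII lines may drift.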
+ let mut current_line = 0; + let mut current_col = 0; + + for (i, ch) in source.char_indices() { + if current_line == line && current_col == character { + return i; + } + + match ch { + '\n' => { + if current_line == line && current_col < character { + return i; // clamp to end of line + } + current_line += 1; + current_col = 0; + } + _ => { + current_col += 1; + } + } + } + + source.len() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_byte_offset_to_position_unix_newlines() { + let source = "line1\nline2\nline3\n"; + assert_eq!(byte_offset_to_position(source, 0), (0, 0)); // 'l' in line1 + assert_eq!(byte_offset_to_position(source, 5), (0, 5)); // '\n' + assert_eq!(byte_offset_to_position(source, 6), (1, 0)); // 'l' in line2 + assert_eq!(byte_offset_to_position(source, 11), (1, 5)); // '\n' + assert_eq!(byte_offset_to_position(source, 12), (2, 0)); // 'l' in line3 + } + + #[test] + fn test_byte_offset_to_position_windows_newlines() { + let source = "line1\r\nline2\r\nline3\r\n"; + assert_eq!(byte_offset_to_position(source, 0), (0, 0)); + assert_eq!(byte_offset_to_position(source, 5), (0, 5)); + assert_eq!(byte_offset_to_position(source, 7), (1, 0)); // skips \r\n + assert_eq!(byte_offset_to_position(source, 12), (1, 5)); + assert_eq!(byte_offset_to_position(source, 14), (2, 0)); + } + + #[test] + fn test_byte_offset_to_position_no_newlines() { + let source = "justoneline"; + assert_eq!(byte_offset_to_position(source, 0), (0, 0)); + assert_eq!(byte_offset_to_position(source, 5), (0, 5)); + assert_eq!(byte_offset_to_position(source, 11), (0, 11)); + } + + #[test] + fn test_byte_offset_to_position_offset_out_of_bounds() { + let source = "short\nfile"; + let offset = source.len() + 10; + assert_eq!(byte_offset_to_position(source, offset), (1, 4)); + } + + #[test] + fn test_byte_offset_to_position_empty_source() { + let source = ""; + assert_eq!(byte_offset_to_position(source, 0), (0, 0)); + assert_eq!(byte_offset_to_position(source, 10), (0, 0)); + } + + #[test] + fn test_position_to_byte_offset_basic() { + let source = "line1\nline2\nline3\n"; + assert_eq!(position_to_byte_offset(source, 0, 0), 0); // 'l' + assert_eq!(position_to_byte_offset(source, 0, 5), 5); // '\n' + assert_eq!(position_to_byte_offset(source, 1, 0), 6); // 'l' in line2 + assert_eq!(position_to_byte_offset(source, 1, 3), 9); // 'e' in line2 + assert_eq!(position_to_byte_offset(source, 2, 0), 12); // 'l' in line3 + } + + #[test] + fn test_position_to_byte_offset_out_of_bounds() { + let source = "line1\nline2\n"; + assert_eq!(position_to_byte_offset(source, 10, 10), source.len()); + } + + #[test] + fn test_position_to_byte_offset_empty() { + let source = ""; + assert_eq!(position_to_byte_offset(source, 0, 0), 0); + } +} diff --git a/crates/lsp/testdata/A.sol b/crates/lsp/testdata/A.sol new file mode 100644 index 0000000000000..2c92ba5205ee4 --- /dev/null +++ b/crates/lsp/testdata/A.sol @@ -0,0 +1,18 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.29; + +contract A { + uint256 a; + bool hi; + uint256 cc; + + /// @dev returns a bool + function bar() external returns (bool) { + require(cc == 9); + return a++ == 0; + } + + function name(string memory) public returns (bool) { + return this.bar(); + } +} diff --git a/crates/lsp/testdata/B.sol b/crates/lsp/testdata/B.sol new file mode 100644 index 0000000000000..bb5640f2d3392 --- /dev/null +++ b/crates/lsp/testdata/B.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.29; + +library B { + /// @notice Some state stored and accessed with the
library + struct State { + string name; + mapping(string => uint256) count; + bool d; + } + + function add_one(string memory self, State storage state) internal { + state.count[self] += 1; + } + + function get_votes(string memory self, State storage state) internal view returns (uint256) { + return state.count[self]; + bool name; + } +} diff --git a/crates/lsp/testdata/C.sol b/crates/lsp/testdata/C.sol new file mode 100644 index 0000000000000..cf2d87ccc5f5d --- /dev/null +++ b/crates/lsp/testdata/C.sol @@ -0,0 +1,27 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.29; + +import {A} from "./A.sol"; +import {B as D} from "./B.sol"; + +contract C is A { + using D for *; + + uint256 immutable SCREAM = 124; + + D.State public votes; + function() internal c; + + constructor() { + votes.name = "2024 Elections"; + name("meek"); + } + + function add_vote(string memory name) public returns (uint256) { + bool fad; + name.add_one(votes); + return name.get_votes(votes); + } +} + +contract E {} diff --git a/crates/lsp/testdata/Reference.sol b/crates/lsp/testdata/Reference.sol new file mode 100644 index 0000000000000..0c68d06c8a755 --- /dev/null +++ b/crates/lsp/testdata/Reference.sol @@ -0,0 +1,14 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.29; + +contract Reference { + uint256 public myValue; + + function setMyValue(uint256 _value) public { + myValue = _value; + } + + function getMyValue() public view returns (uint256) { + return myValue; + } +} \ No newline at end of file diff --git a/crates/lsp/testdata/Simple.sol b/crates/lsp/testdata/Simple.sol new file mode 100644 index 0000000000000..5ab1da1594cf9 --- /dev/null +++ b/crates/lsp/testdata/Simple.sol @@ -0,0 +1,14 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.29; + +contract Simple { + uint256 public value; + + function setValue(uint256 _value) public { + value = _value; + } + + function getValue() public view returns (uint256) { + return value; + } +} \ No newline at end of file