From 3e4ff459ae335270bc867116b25acc603f7443f5 Mon Sep 17 00:00:00 2001 From: Alexander Brevig Date: Mon, 1 Dec 2025 12:13:11 +0100 Subject: [PATCH 1/4] feat: bugfix rename and dynamic load of user symbols --- Cargo.toml | 2 +- README.md | 38 +++--- lib/forth-lexer/src/parser.rs | 15 ++- src/main.rs | 20 +++- src/utils/definition_index.rs | 39 ++++--- src/utils/handlers/common.rs | 27 ++--- src/utils/handlers/mod.rs | 2 + src/utils/handlers/notification_did_change.rs | 12 ++ src/utils/handlers/notification_did_open.rs | 1 + src/utils/handlers/notification_did_save.rs | 48 ++++++++ src/utils/handlers/request_completion.rs | 92 ++++++++++++--- src/utils/handlers/request_find_references.rs | 2 +- src/utils/handlers/request_hover.rs | 66 +++++++---- src/utils/handlers/request_prepare_rename.rs | 110 ++++++++++++++++++ src/utils/handlers/request_rename.rs | 4 +- src/utils/handlers/request_signature_help.rs | 2 +- src/utils/ropey/word_at.rs | 10 ++ src/utils/server_capabilities.rs | 5 +- 18 files changed, 392 insertions(+), 103 deletions(-) create mode 100644 src/utils/handlers/notification_did_save.rs create mode 100644 src/utils/handlers/request_prepare_rename.rs diff --git a/Cargo.toml b/Cargo.toml index 4a18425..e71cf6c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ resolver = "2" [package] name = "forth-lsp" -version = "0.3.0" +version = "0.3.1" edition = "2021" license = "MIT" description = "LSP for the Forth programming language" diff --git a/README.md b/README.md index 1299417..4cd4f6b 100644 --- a/README.md +++ b/README.md @@ -2,39 +2,37 @@ [![CI](https://github.com/AlexanderBrevig/forth-lsp/actions/workflows/ci.yml/badge.svg)](https://github.com/AlexanderBrevig/forth-lsp/actions/workflows/ci.yml) -`forth-lsp` is an implementation of the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) for the [Forth](https://forth-standard.org/) programming language. +A [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) implementation for [Forth](https://forth-standard.org/), bringing modern IDE features to Forth development. -I like forth, and I love [helix](https://github.com/helix-editor/helix)! -This project is a companion to [tree-sitter-forth](https://github.com/AlexanderBrevig/tree-sitter-forth) in order to make forth barable on helix :) +## Features -Currently this simple LSP supports `Hover`, `Completion` and `GotoDefinition`. +- **Hover** - View documentation for built-in words and user-defined functions +- **Completion** - Auto-complete for built-in words and your definitions +- **Go to Definition** - Jump to where words are defined +- **Find References** - Find all usages of a word +- **Rename** - Rename symbols across your workspace +- **Document Symbols** - Outline view of word definitions in current file +- **Workspace Symbols** - Search for definitions across all files +- **Signature Help** - View parameter information while typing +- **Diagnostics** - Real-time error detection for undefined words -[Issues](https://github.com/AlexanderBrevig/forth-lsp/issues) and [PRs](https://github.com/AlexanderBrevig/forth-lsp/pulls) are very welcome! - -## Install +## Installation ```shell cargo install forth-lsp ``` -You can now configure your editor to use this LSP. - -## Development +Then configure your editor to use `forth-lsp`. Works with any LSP-compatible editor (VS Code, Neovim, Helix, Emacs, etc.). 
-This is a Cargo workspace containing: +## Contributing -- `forth-lsp` - The main LSP server -- `lib/forth-lexer` - The Forth lexer/tokenizer library +[Issues](https://github.com/AlexanderBrevig/forth-lsp/issues) and [PRs](https://github.com/AlexanderBrevig/forth-lsp/pulls) welcome! -### Testing +### Development ```shell -# Test all workspace members (both forth-lsp and forth-lexer) cargo test --workspace - -# Or use the convenient alias +# or cargo t - -# Test only the main forth-lsp package -cargo test ``` diff --git a/lib/forth-lexer/src/parser.rs b/lib/forth-lexer/src/parser.rs index cd85723..20bc42f 100644 --- a/lib/forth-lexer/src/parser.rs +++ b/lib/forth-lexer/src/parser.rs @@ -176,10 +176,11 @@ impl<'a> Lexer<'a> { self.read_char(); } + let end = self.position.min(self.raw.len()); Data { start, - end: self.position, - value: &self.raw[start..self.position], + end, + value: &self.raw[start..end], } } @@ -188,10 +189,11 @@ impl<'a> Lexer<'a> { while !self.ch.is_whitespace() && self.ch != '\0' { self.read_char(); } + let end = self.position.min(self.raw.len()); Data { start, - end: self.position, - value: &self.raw[start..self.position], + end, + value: &self.raw[start..end], } } @@ -207,10 +209,11 @@ { self.read_char(); } + let end = self.position.min(self.raw.len()); Data { start, - end: self.position, - value: &self.raw[start..self.position], + end, + value: &self.raw[start..end], } } diff --git a/src/main.rs b/src/main.rs index c80a873..62efd50 100644 --- a/src/main.rs +++ b/src/main.rs @@ -7,11 +7,13 @@ use crate::prelude::*; use crate::utils::definition_index::DefinitionIndex; use crate::utils::handlers::notification_did_change::handle_did_change_text_document; use crate::utils::handlers::notification_did_open::handle_did_open_text_document; +use crate::utils::handlers::notification_did_save::handle_did_save_text_document; use crate::utils::handlers::request_completion::handle_completion; use crate::utils::handlers::request_document_symbols::handle_document_symbols; use crate::utils::handlers::request_find_references::handle_find_references; use crate::utils::handlers::request_goto_definition::handle_goto_definition; use crate::utils::handlers::request_hover::handle_hover; +use crate::utils::handlers::request_prepare_rename::handle_prepare_rename; use crate::utils::handlers::request_rename::handle_rename; use crate::utils::handlers::request_signature_help::handle_signature_help; use crate::utils::handlers::request_workspace_symbols::handle_workspace_symbols; @@ -87,6 +89,9 @@ fn main_loop(connection: Connection, params: serde_json::Value) -> Result<()> { if handle_find_references(&request, &connection, &mut files, &def_index).is_ok() { continue; } + if handle_prepare_rename(&request, &connection, &files, &def_index).is_ok() { + continue; + } if handle_rename(&request, &connection, &mut files, &def_index).is_ok() { continue; } @@ -127,6 +132,17 @@ fn main_loop(connection: Connection, params: serde_json::Value) -> Result<()> { { continue; } + if handle_did_save_text_document( + &notification, + &connection, + &mut files, + &mut def_index, + &data, + ) + .is_ok() + { + continue; + } } } } @@ -147,7 +163,9 @@ fn load_dir( let raw_content = fs::read(entry)?; let content = String::from_utf8_lossy(&raw_content); let rope = Rope::from_str(&content); - files.insert(entry.to_string(), rope); + // Convert path to URI to match DidOpen/DidChange format + let file_uri = format!("file://{}", entry); + files.insert(file_uri, rope); } } } diff --git
a/src/utils/definition_index.rs b/src/utils/definition_index.rs index b2123d2..10a3bb8 100644 --- a/src/utils/definition_index.rs +++ b/src/utils/definition_index.rs @@ -65,17 +65,15 @@ impl DefinitionIndex { definition_token_indices.insert(data.start); } - let Some((name, _selection_start, _selection_end)) = + let Some((name, selection_start, selection_end)) = extract_word_name_with_range(result, 1) else { continue; }; - let tok = Token::Illegal(Data::new(0, 0, "")); - let begin = result.first().unwrap_or(&tok).get_data(); - let end = result.last().unwrap_or(&tok).get_data(); - - let range = data_range_from_to(begin, end, rope); + // Use the name's range, not the entire definition block + let name_data = Data::new(selection_start, selection_end, ""); + let range = data_range_from_to(&name_data, &name_data, rope); // Store with lowercase key for case-insensitive lookup self.definitions @@ -115,8 +113,8 @@ impl DefinitionIndex { let name = name_data.value.to_string(); - // Create range spanning from the defining word to the name - let range = data_range_from_to(defining_word_data, name_data, rope); + // Use only the name's range, not including the defining word + let range = name_data.to_range(rope); // Store with lowercase key for case-insensitive lookup self.definitions @@ -167,8 +165,15 @@ impl DefinitionIndex { let mut locations = Vec::new(); if let Some(defs) = self.definitions.get(&word.to_lowercase()) { - for (file_path, range) in defs { - if let Ok(uri) = Url::from_file_path(file_path) { + for (file_path_or_uri, range) in defs { + // Try parsing as URI first (file:// scheme), then fall back to file path + let uri = if file_path_or_uri.starts_with("file://") { + Url::parse(file_path_or_uri).ok() + } else { + Url::from_file_path(file_path_or_uri).ok() + }; + + if let Some(uri) = uri { locations.push(Location { uri, range: *range }); } } @@ -182,8 +187,15 @@ impl DefinitionIndex { let mut locations = Vec::new(); if let Some(refs) = self.references.get(&word.to_lowercase()) { - for (file_path, range) in refs { - if let Ok(uri) = Url::from_file_path(file_path) { + for (file_path_or_uri, range) in refs { + // Try parsing as URI first (file:// scheme), then fall back to file path + let uri = if file_path_or_uri.starts_with("file://") { + Url::parse(file_path_or_uri).ok() + } else { + Url::from_file_path(file_path_or_uri).ok() + }; + + if let Some(uri) = uri { locations.push(Location { uri, range: *range }); } } @@ -493,7 +505,8 @@ mod tests { let defs = index.find_definitions("DATE"); assert_eq!(defs.len(), 1); assert_eq!(defs[0].range.start.line, 0); - assert_eq!(defs[0].range.start.character, 0); + // Range should be for "DATE" (starts at character 9), not "VARIABLE" + assert_eq!(defs[0].range.start.character, 9); } #[test] diff --git a/src/utils/handlers/common.rs b/src/utils/handlers/common.rs index 20d98a9..402e126 100644 --- a/src/utils/handlers/common.rs +++ b/src/utils/handlers/common.rs @@ -3,40 +3,29 @@ use lsp_types::{Position, TextDocumentIdentifier, TextDocumentPositionParams}; /// Extracted position information from a TextDocumentPositionParams pub struct ExtractedPosition { - pub file_path: String, + pub file_uri: String, pub line: u32, pub character: u32, } impl ExtractedPosition { - /// Extract file path and position from TextDocumentPositionParams + /// Extract file URI and position from TextDocumentPositionParams pub fn from_text_document_position(params: &TextDocumentPositionParams) -> Result { - let file_path = params - .text_document - .uri - .to_file_path() - 
.map_err(|_| Error::Generic("Invalid file path".to_string()))? - .to_string_lossy() - .to_string(); + let file_uri = params.text_document.uri.to_string(); Ok(ExtractedPosition { - file_path, + file_uri, line: params.position.line, character: params.position.character, }) } - /// Extract file path and position from components + /// Extract file URI and position from components pub fn from_parts(text_document: &TextDocumentIdentifier, position: &Position) -> Result { - let file_path = text_document - .uri - .to_file_path() - .map_err(|_| Error::Generic("Invalid file path".to_string()))? - .to_string_lossy() - .to_string(); + let file_uri = text_document.uri.to_string(); Ok(ExtractedPosition { - file_path, + file_uri, line: position.line, character: position.character, }) @@ -44,6 +33,6 @@ impl ExtractedPosition { /// Format position for logging pub fn format(&self) -> String { - format!("{}:{}:{}", self.file_path, self.line, self.character) + format!("{}:{}:{}", self.file_uri, self.line, self.character) } } diff --git a/src/utils/handlers/mod.rs b/src/utils/handlers/mod.rs index b090698..b7e00b1 100644 --- a/src/utils/handlers/mod.rs +++ b/src/utils/handlers/mod.rs @@ -4,11 +4,13 @@ use crate::prelude::*; pub mod common; pub mod notification_did_change; pub mod notification_did_open; +pub mod notification_did_save; pub mod request_completion; pub mod request_document_symbols; pub mod request_find_references; pub mod request_goto_definition; pub mod request_hover; +pub mod request_prepare_rename; pub mod request_rename; pub mod request_signature_help; pub mod request_workspace_symbols; diff --git a/src/utils/handlers/notification_did_change.rs b/src/utils/handlers/notification_did_change.rs index ba09c0c..0db9317 100644 --- a/src/utils/handlers/notification_did_change.rs +++ b/src/utils/handlers/notification_did_change.rs @@ -22,10 +22,20 @@ pub fn handle_did_change_text_document( { Ok(params) => { let file_uri = params.text_document.uri.to_string(); + log_debug!("DidChange for file: {}", file_uri); let rope = files .get_mut(&file_uri) .expect("Must be able to get rope for lang"); + log_debug!( + "Processing {} content changes", + params.content_changes.len() + ); for change in params.content_changes { + log_debug!( + "Change range: {:?}, text length: {}", + change.range, + change.text.len() + ); let range = change.range.unwrap_or_default(); let start = rope.line_to_char(range.start.line as usize) + range.start.character as usize; @@ -33,7 +43,9 @@ pub fn handle_did_change_text_document( rope.remove(start..end); rope.insert(start, change.text.as_str()); } + log_debug!("After changes, rope has {} chars", rope.len_chars()); // Update definition index for the changed file + log_debug!("Updating definition index for: {}", file_uri); def_index.update_file(&file_uri, rope); // Publish diagnostics for the changed file diff --git a/src/utils/handlers/notification_did_open.rs b/src/utils/handlers/notification_did_open.rs index 7f9efae..d498204 100644 --- a/src/utils/handlers/notification_did_open.rs +++ b/src/utils/handlers/notification_did_open.rs @@ -21,6 +21,7 @@ pub fn handle_did_open_text_document( match cast_notification::(notification.clone()) { Ok(params) => { let file_uri = params.text_document.uri.to_string(); + log_debug!("DidOpen for file: {}", file_uri); if let std::collections::hash_map::Entry::Vacant(e) = files.entry(file_uri.clone()) { let rope = Rope::from_str(params.text_document.text.as_str()); // Update index for newly opened file diff --git 
a/src/utils/handlers/notification_did_save.rs b/src/utils/handlers/notification_did_save.rs new file mode 100644 index 0000000..55312c7 --- /dev/null +++ b/src/utils/handlers/notification_did_save.rs @@ -0,0 +1,48 @@ +#[allow(unused_imports)] +use crate::prelude::*; +use crate::utils::definition_index::DefinitionIndex; +use crate::utils::diagnostics::{get_diagnostics, publish_diagnostics}; +use crate::words::Words; + +use std::collections::HashMap; + +use lsp_server::{Connection, Notification}; +use ropey::Rope; + +use super::cast_notification; + +pub fn handle_did_save_text_document( + notification: &Notification, + connection: &Connection, + files: &mut HashMap<String, Rope>, + def_index: &mut DefinitionIndex, + builtin_words: &Words, +) -> Result<()> { + match cast_notification::<lsp_types::notification::DidSaveTextDocument>(notification.clone()) { + Ok(params) => { + let file_uri = params.text_document.uri.to_string(); + + // Get the rope for this file + if let Some(rope) = files.get(&file_uri) { + // Update definition index for the saved file + log_debug!("Updating definition index on save for: {}", file_uri); + def_index.update_file(&file_uri, rope); + + // Publish diagnostics for the saved file + let diagnostics = get_diagnostics(rope, def_index, builtin_words); + publish_diagnostics( + connection, + params.text_document.uri.clone(), + diagnostics, + 0, // No version for save notifications + )?; + } + + Ok(()) + } + Err(err) => { + log_handler_error!("Did save notification", err); + Err(err) + } + } +} diff --git a/src/utils/handlers/request_completion.rs b/src/utils/handlers/request_completion.rs index 134ce9b..1dfd44c 100644 --- a/src/utils/handlers/request_completion.rs +++ b/src/utils/handlers/request_completion.rs @@ -24,6 +24,7 @@ pub fn get_completions( use_lower: bool, data: &Words, def_index: Option<&DefinitionIndex>, + files: Option<&HashMap<String, Rope>>, ) -> Option<CompletionResponse> { if !word_prefix.is_empty() { let mut ret = vec![]; @@ -39,10 +40,73 @@ } else { word.clone() }; + + // Get definitions to extract documentation + let definitions = index.find_definitions(&word); + let (detail, documentation) = if !definitions.is_empty() { + let def = &definitions[0]; + let file_name = def.uri.path().split('/').last().unwrap_or("unknown"); + + // Try to extract source code like hover does + let mut doc_text = format!( + "**Defined in:** `{}:{}:{}`\n\n", + file_name, + def.range.start.line + 1, + def.range.start.character + 1 + ); + + // Extract source code if files are available + if let Some(files_map) = files { + if let Some(rope) = files_map.get(&def.uri.to_string()) { + let start_line = def.range.start.line as usize; + let end_line = def.range.end.line as usize; + + // For single-line definitions (just the word name), expand to show full definition + let (display_start, display_end) = if start_line == end_line { + let expanded_end = (end_line + 20).min(rope.len_lines().saturating_sub(1)); + (start_line, expanded_end) + } else { + (start_line, end_line) + }; + + // Extract source code lines + let mut source_lines = Vec::new(); + for line_idx in display_start..=display_end.min(display_start + 20) { + if let Some(line) = rope.get_line(line_idx) { + let line_str = line.to_string(); + source_lines.push(line_str.trim_end().to_string()); + // Stop at semicolon for colon definitions + if line_str.trim_end().ends_with(';') { + break; + } + } + } + + if !source_lines.is_empty() { + doc_text.push_str("```forth\n"); + doc_text.push_str(&source_lines.join("")); + doc_text.push_str("\n```"); + } + } + } + + ( + Some(format!("user-defined in 
file_name)), + Some(lsp_types::Documentation::MarkupContent( + lsp_types::MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: doc_text, + }, + )), + ) + } else { + (Some("user-defined".to_string()), None) + }; + ret.push(CompletionItem { label, - detail: Some("user-defined".to_string()), - documentation: None, + detail, + documentation, ..Default::default() }); } @@ -115,7 +179,7 @@ pub fn handle_completion( let result = if word.len_chars() > 0 { log_debug!("Found word {}", word); let use_lower = word.is_lowercase(); - get_completions(&word.to_string(), use_lower, data, Some(def_index)) + get_completions(&word.to_string(), use_lower, data, Some(def_index), Some(files)) } else { None }; @@ -137,7 +201,7 @@ mod tests { #[test] fn test_completion_finds_matching_words() { let words = Words::default(); - let result = get_completions("DU", false, &words, None); + let result = get_completions("DU", false, &words, None, None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { @@ -154,7 +218,7 @@ mod tests { #[test] fn test_completion_respects_lowercase() { let words = Words::default(); - let result = get_completions("du", true, &words, None); + let result = get_completions("du", true, &words, None, None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { @@ -172,7 +236,7 @@ mod tests { #[test] fn test_completion_respects_uppercase() { let words = Words::default(); - let result = get_completions("DU", false, &words, None); + let result = get_completions("DU", false, &words, None, None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { @@ -190,7 +254,7 @@ mod tests { #[test] fn test_completion_includes_stack_effects() { let words = Words::default(); - let result = get_completions("SWAP", false, &words, None); + let result = get_completions("SWAP", false, &words, None, None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { @@ -208,7 +272,7 @@ mod tests { #[test] fn test_completion_includes_documentation() { let words = Words::default(); - let result = get_completions("+", false, &words, None); + let result = get_completions("+", false, &words, None, None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { @@ -228,7 +292,7 @@ mod tests { #[test] fn test_completion_empty_prefix() { let words = Words::default(); - let result = get_completions("", false, &words, None); + let result = get_completions("", false, &words, None, None); assert!(result.is_none()); } @@ -236,7 +300,7 @@ mod tests { #[test] fn test_completion_no_matches() { let words = Words::default(); - let result = get_completions("ZZZZNONEXISTENT", false, &words, None); + let result = get_completions("ZZZZNONEXISTENT", false, &words, None, None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { @@ -249,7 +313,7 @@ mod tests { #[test] fn test_completion_single_character() { let words = Words::default(); - let result = get_completions("+", false, &words, None); + let result = get_completions("+", false, &words, None, None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { @@ -279,7 +343,7 @@ mod tests { &Rope::from_str(": myword 1 + ;\n: mytest 2 * ;"), ); - let result = get_completions("my", false, &words, Some(&index)); + let result = get_completions("my", false, &words, Some(&index), None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { @@ -305,7 +369,7 
@@ mod tests { // Define a word that exists in built-ins index.update_file(&file_path, &Rope::from_str(": DUP 1 + ;")); - let result = get_completions("du", false, &words, Some(&index)); + let result = get_completions("du", false, &words, Some(&index), None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { @@ -337,7 +401,7 @@ mod tests { index.update_file(&file_path, &Rope::from_str(": myword 1 + ;")); - let result = get_completions("+", false, &words, Some(&index)); + let result = get_completions("+", false, &words, Some(&index), None); assert!(result.is_some()); if let Some(CompletionResponse::Array(items)) = result { diff --git a/src/utils/handlers/request_find_references.rs b/src/utils/handlers/request_find_references.rs index bbb1f22..d5bd3cf 100644 --- a/src/utils/handlers/request_find_references.rs +++ b/src/utils/handlers/request_find_references.rs @@ -62,7 +62,7 @@ pub fn handle_find_references( eprintln!("#{id}: find references at {}", pos.format()); let references = get_references( - &pos.file_path, + &pos.file_uri, pos.line, pos.character, params.context.include_declaration, diff --git a/src/utils/handlers/request_hover.rs b/src/utils/handlers/request_hover.rs index c58821e..f690670 100644 --- a/src/utils/handlers/request_hover.rs +++ b/src/utils/handlers/request_hover.rs @@ -51,26 +51,38 @@ pub fn get_hover_result( // Try to extract source code if files are available if let Some(files_map) = files { - if let Ok(file_path) = def.uri.to_file_path() { - if let Some(rope) = - files_map.get(&file_path.to_string_lossy().to_string()) - { - let start_line = def.range.start.line as usize; - let end_line = def.range.end.line as usize; - - // Extract the source code lines - let mut source_lines = Vec::new(); - for line_idx in start_line..=end_line.min(start_line + 20) { - if let Some(line) = rope.get_line(line_idx) { - source_lines.push(line.to_string().trim_end().to_string()); + // Use URI string directly (files HashMap keys are URIs, not paths) + if let Some(rope) = files_map.get(&def.uri.to_string()) { + let start_line = def.range.start.line as usize; + let end_line = def.range.end.line as usize; + + // For single-line definitions (just the word name), try to expand to show the full definition + let (display_start, display_end) = if start_line == end_line { + // Expand to show context (up to 20 lines after the name) + let expanded_end = + (end_line + 20).min(rope.len_lines().saturating_sub(1)); + (start_line, expanded_end) + } else { + (start_line, end_line) + }; + + // Extract the source code lines + let mut source_lines = Vec::new(); + for line_idx in display_start..=display_end.min(display_start + 20) { + if let Some(line) = rope.get_line(line_idx) { + let line_str = line.to_string(); + source_lines.push(line_str.trim_end().to_string()); + // Stop at semicolon for colon definitions + if line_str.trim_end().ends_with(';') { + break; } } + } - if !source_lines.is_empty() { - hover_text.push_str("```forth\n"); - hover_text.push_str(&source_lines.join("")); - hover_text.push_str("\n```\n"); - } + if !source_lines.is_empty() { + hover_text.push_str("```forth\n"); + hover_text.push_str(&source_lines.join("")); + hover_text.push_str("\n```\n"); } } } @@ -228,13 +240,14 @@ mod tests { let mut index = DefinitionIndex::new(); let temp_dir = env::temp_dir(); let file_path = temp_dir.join("user.forth").to_string_lossy().to_string(); + let file_uri = format!("file://{}", file_path); // Define a word that exists in built-ins let rope = Rope::from_str(": DUP 1 
+ ;"); - index.update_file(&file_path, &rope); + index.update_file(&file_uri, &rope); let mut files = HashMap::new(); - files.insert(file_path.clone(), rope); + files.insert(file_uri.clone(), rope); let result = get_hover_result("DUP", &words, Some(&index), Some(&files)); @@ -261,13 +274,14 @@ mod tests { let mut index = DefinitionIndex::new(); let temp_dir = env::temp_dir(); let file_path = temp_dir.join("user.forth").to_string_lossy().to_string(); + let file_uri = format!("file://{}", file_path); // Define a word that doesn't exist in built-ins let rope = Rope::from_str(": myword 1 + ;"); - index.update_file(&file_path, &rope); + index.update_file(&file_uri, &rope); let mut files = HashMap::new(); - files.insert(file_path.clone(), rope); + files.insert(file_uri.clone(), rope); let result = get_hover_result("myword", &words, Some(&index), Some(&files)); @@ -292,13 +306,14 @@ mod tests { let mut index = DefinitionIndex::new(); let temp_dir = env::temp_dir(); let file_path = temp_dir.join("user.forth").to_string_lossy().to_string(); + let file_uri = format!("file://{}", file_path); // Define a variable let rope = Rope::from_str("VARIABLE counter"); - index.update_file(&file_path, &rope); + index.update_file(&file_uri, &rope); let mut files = HashMap::new(); - files.insert(file_path.clone(), rope); + files.insert(file_uri.clone(), rope); let result = get_hover_result("counter", &words, Some(&index), Some(&files)); @@ -323,15 +338,16 @@ mod tests { let mut index = DefinitionIndex::new(); let temp_dir = env::temp_dir(); let file_path = temp_dir.join("user.forth").to_string_lossy().to_string(); + let file_uri = format!("file://{}", file_path); // Define a multiline word let rope = Rope::from_str( ": factorial\n dup 0= if\n drop 1\n else\n dup 1- factorial *\n then\n;", ); - index.update_file(&file_path, &rope); + index.update_file(&file_uri, &rope); let mut files = HashMap::new(); - files.insert(file_path.clone(), rope); + files.insert(file_uri.clone(), rope); let result = get_hover_result("factorial", &words, Some(&index), Some(&files)); diff --git a/src/utils/handlers/request_prepare_rename.rs b/src/utils/handlers/request_prepare_rename.rs new file mode 100644 index 0000000..47fd6e1 --- /dev/null +++ b/src/utils/handlers/request_prepare_rename.rs @@ -0,0 +1,110 @@ +use crate::prelude::*; +use crate::utils::definition_index::DefinitionIndex; +use crate::utils::handlers::{common::ExtractedPosition, send_response}; + +use lsp_server::{Connection, Request}; +use lsp_types::{request::PrepareRenameRequest, PrepareRenameResponse, Range}; +use ropey::Rope; +use std::collections::HashMap; + +use super::cast; + +pub fn handle_prepare_rename( + req: &Request, + connection: &Connection, + files: &HashMap<String, Rope>, + def_index: &DefinitionIndex, +) -> Result<()> { + match cast::<PrepareRenameRequest>(req.clone()) { + Ok((id, params)) => { + log_request_msg!( + id, + "prepareRename at {}:{}", + params.position.line, + params.position.character + ); + + let pos = ExtractedPosition::from_text_document_position(&params)?; + + let response = get_prepare_rename_response( + &pos.file_uri, + pos.line, + pos.character, + files, + def_index, + ); + + send_response(connection, id, response)?; + Ok(()) + } + Err(Error::ExtractRequestError(req)) => Err(Error::ExtractRequestError(req)), + Err(err) => Err(err), + } +} + +fn get_prepare_rename_response( + file_uri: &str, + line: u32, + character: u32, + files: &HashMap<String, Rope>, + def_index: &DefinitionIndex, +) -> Option<PrepareRenameResponse> { + let rope = files.get(file_uri)?; + + if line as usize >= rope.len_lines() { + return
None; + } + + let ix = rope.line_to_char(line as usize) + character as usize; + + if ix >= rope.len_chars() { + return None; + } + + // Find the word boundaries by searching for whitespace + // Search backwards from cursor to find word start + let mut word_start = ix; + while word_start > 0 { + let ch = rope.char(word_start.saturating_sub(1)); + if ch.is_whitespace() { + break; + } + word_start -= 1; + } + + // Search forwards from cursor to find word end + let mut word_end = ix; + while word_end < rope.len_chars() { + let ch = rope.char(word_end); + if ch.is_whitespace() { + break; + } + word_end += 1; + } + + // Extract the word for validation + let word_str = rope.slice(word_start..word_end).to_string(); + + // Check if this word has any definitions/references + if def_index.find_all_references(&word_str, true).is_empty() { + return None; + } + + // Calculate line and column positions + let start_line = rope.char_to_line(word_start); + let start_col = word_start - rope.line_to_char(start_line); + + let end_line = rope.char_to_line(word_end); + let end_col = word_end - rope.line_to_char(end_line); + + Some(PrepareRenameResponse::Range(Range { + start: lsp_types::Position { + line: start_line as u32, + character: start_col as u32, + }, + end: lsp_types::Position { + line: end_line as u32, + character: end_col as u32, + }, + })) +} diff --git a/src/utils/handlers/request_rename.rs b/src/utils/handlers/request_rename.rs index 5fefc7b..a3e4f91 100644 --- a/src/utils/handlers/request_rename.rs +++ b/src/utils/handlers/request_rename.rs @@ -38,6 +38,8 @@ pub fn get_rename_edits( let word = rope.word_on_or_before(ix).to_string(); + eprintln!("Rename: word at {}:{} is '{}'", line, character, word); + if word.is_empty() { return None; } @@ -82,7 +84,7 @@ pub fn handle_rename( log_request_msg!(id, "rename at {} to '{}'", pos.format(), params.new_name); let workspace_edit = get_rename_edits( - &pos.file_path, + &pos.file_uri, pos.line, pos.character, ¶ms.new_name, diff --git a/src/utils/handlers/request_signature_help.rs b/src/utils/handlers/request_signature_help.rs index 8c17004..5dc2d52 100644 --- a/src/utils/handlers/request_signature_help.rs +++ b/src/utils/handlers/request_signature_help.rs @@ -72,7 +72,7 @@ pub fn handle_signature_help( eprintln!("#{id}: signature help at {}", pos.format()); - let rope = files.get(&pos.file_path).ok_or_else(|| { + let rope = files.get(&pos.file_uri).ok_or_else(|| { Error::NoSuchFile( params .text_document_position_params diff --git a/src/utils/ropey/word_at.rs b/src/utils/ropey/word_at.rs index afea899..cbf4068 100644 --- a/src/utils/ropey/word_at.rs +++ b/src/utils/ropey/word_at.rs @@ -103,4 +103,14 @@ mod tests { let word = rope.word_at(3); assert_eq!("test", word); } + + #[test] + fn word_at_hyphenated() { + let rope = Rope::from_str(": my-test 1 + ;"); + // Test at various positions in "my-test" + assert_eq!("my-test", rope.word_at(2).to_string()); // at 'm' + assert_eq!("my-test", rope.word_at(4).to_string()); // at '-' + assert_eq!("my-test", rope.word_at(6).to_string()); // at 'e' + assert_eq!("my-test", rope.word_at(8).to_string()); // at 't' + } } diff --git a/src/utils/server_capabilities.rs b/src/utils/server_capabilities.rs index d25f277..e2b0007 100644 --- a/src/utils/server_capabilities.rs +++ b/src/utils/server_capabilities.rs @@ -18,7 +18,10 @@ pub fn forth_lsp_capabilities() -> ServerCapabilities { completion_provider: Some(lsp_types::CompletionOptions::default()), document_symbol_provider: Some(OneOf::Left(true)), references_provider: 
Some(OneOf::Left(true)), - rename_provider: Some(OneOf::Left(true)), + rename_provider: Some(OneOf::Right(lsp_types::RenameOptions { + prepare_provider: Some(true), + work_done_progress_options: Default::default(), + })), signature_help_provider: Some(lsp_types::SignatureHelpOptions { trigger_characters: Some(vec![" ".to_string()]), retrigger_characters: None, From 210bb53517b237ae74ae9dec0728317885352457 Mon Sep 17 00:00:00 2001 From: Alexander Brevig Date: Mon, 1 Dec 2025 12:13:38 +0100 Subject: [PATCH 2/4] chore: add lock --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 2b8a860..56d3ed8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -65,7 +65,7 @@ dependencies = [ [[package]] name = "forth-lsp" -version = "0.3.0" +version = "0.3.1" dependencies = [ "anyhow", "forth-lexer", From 24d8f0cc85d7c0dce37bb907503b5c3dbb560805 Mon Sep 17 00:00:00 2001 From: Alexander Brevig Date: Mon, 1 Dec 2025 12:13:49 +0100 Subject: [PATCH 3/4] chore: fmt --- src/utils/handlers/request_completion.rs | 30 +++++++++++++++--------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/src/utils/handlers/request_completion.rs b/src/utils/handlers/request_completion.rs index 1dfd44c..bf45af1 100644 --- a/src/utils/handlers/request_completion.rs +++ b/src/utils/handlers/request_completion.rs @@ -40,13 +40,13 @@ pub fn get_completions( } else { word.clone() }; - + // Get definitions to extract documentation let definitions = index.find_definitions(&word); let (detail, documentation) = if !definitions.is_empty() { let def = &definitions[0]; let file_name = def.uri.path().split('/').last().unwrap_or("unknown"); - + // Try to extract source code like hover does let mut doc_text = format!( "**Defined in:** `{}:{}:{}`\n\n", @@ -54,24 +54,26 @@ pub fn get_completions( def.range.start.line + 1, def.range.start.character + 1 ); - + // Extract source code if files are available if let Some(files_map) = files { if let Some(rope) = files_map.get(&def.uri.to_string()) { let start_line = def.range.start.line as usize; let end_line = def.range.end.line as usize; - + // For single-line definitions (just the word name), expand to show full definition let (display_start, display_end) = if start_line == end_line { - let expanded_end = (end_line + 20).min(rope.len_lines().saturating_sub(1)); + let expanded_end = + (end_line + 20).min(rope.len_lines().saturating_sub(1)); (start_line, expanded_end) } else { (start_line, end_line) }; - + // Extract source code lines let mut source_lines = Vec::new(); - for line_idx in display_start..=display_end.min(display_start + 20) { + for line_idx in display_start..=display_end.min(display_start + 20) + { if let Some(line) = rope.get_line(line_idx) { let line_str = line.to_string(); source_lines.push(line_str.trim_end().to_string()); @@ -81,7 +83,7 @@ pub fn get_completions( } } } - + if !source_lines.is_empty() { doc_text.push_str("```forth\n"); doc_text.push_str(&source_lines.join("")); @@ -89,7 +91,7 @@ pub fn get_completions( } } } - + ( Some(format!("user-defined in {}", file_name)), Some(lsp_types::Documentation::MarkupContent( @@ -102,7 +104,7 @@ pub fn get_completions( } else { (Some("user-defined".to_string()), None) }; - + ret.push(CompletionItem { label, detail, @@ -179,7 +181,13 @@ pub fn handle_completion( let result = if word.len_chars() > 0 { log_debug!("Found word {}", word); let use_lower = word.is_lowercase(); - get_completions(&word.to_string(), use_lower, data, Some(def_index), Some(files)) + get_completions( + 
&word.to_string(), + use_lower, + data, + Some(def_index), + Some(files), + ) } else { None }; From 0b9ec44080ab4035a3f10e1d58afcca81fb5b8e9 Mon Sep 17 00:00:00 2001 From: Alexander Brevig Date: Mon, 1 Dec 2025 12:22:11 +0100 Subject: [PATCH 4/4] chore: clippy fix --- src/utils/handlers/request_completion.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/utils/handlers/request_completion.rs b/src/utils/handlers/request_completion.rs index bf45af1..0ce2398 100644 --- a/src/utils/handlers/request_completion.rs +++ b/src/utils/handlers/request_completion.rs @@ -45,7 +45,7 @@ pub fn get_completions( let definitions = index.find_definitions(&word); let (detail, documentation) = if !definitions.is_empty() { let def = &definitions[0]; - let file_name = def.uri.path().split('/').last().unwrap_or("unknown"); + let file_name = def.uri.path().split('/').next_back().unwrap_or("unknown"); // Try to extract source code like hover does let mut doc_text = format!(