diff --git a/pyrefly/lib/state/lsp.rs b/pyrefly/lib/state/lsp.rs
index e3e6ecb460..347bb92a7d 100644
--- a/pyrefly/lib/state/lsp.rs
+++ b/pyrefly/lib/state/lsp.rs
@@ -780,10 +780,17 @@ impl<'a> Transaction<'a> {
             }
             Some(IdentifierWithContext {
                 identifier: _,
-                context: IdentifierContext::ImportedName { .. },
+                context:
+                    IdentifierContext::ImportedName {
+                        name_after_import, ..
+                    },
             }) => {
-                // TODO(grievejia): handle definitions of imported names
-                None
+                let key = Key::Definition(ShortIdentifier::new(&name_after_import));
+                let bindings = self.get_bindings(handle)?;
+                if !bindings.is_valid_key(&key) {
+                    return None;
+                }
+                self.get_type(handle, &key)
             }
             Some(IdentifierWithContext {
                 identifier: _,
diff --git a/pyrefly/lib/state/semantic_tokens.rs b/pyrefly/lib/state/semantic_tokens.rs
index be6d5ecff6..3bb40b1af4 100644
--- a/pyrefly/lib/state/semantic_tokens.rs
+++ b/pyrefly/lib/state/semantic_tokens.rs
@@ -338,11 +338,19 @@ impl SemanticTokenBuilder {
                     }
                 }
             }
-            Stmt::ImportFrom(StmtImportFrom {
-                module: Some(module),
-                ..
-            }) => {
-                self.push_if_in_range(module.range, SemanticTokenType::NAMESPACE, vec![]);
+            Stmt::ImportFrom(StmtImportFrom { module, names, .. }) => {
+                if let Some(module) = module {
+                    self.push_if_in_range(module.range, SemanticTokenType::NAMESPACE, vec![]);
+                }
+                for alias in names {
+                    if alias.asname.is_some() {
+                        self.push_if_in_range(
+                            alias.name.range,
+                            SemanticTokenType::NAMESPACE,
+                            vec![],
+                        );
+                    }
+                }
             }
             Stmt::AnnAssign(ann_assign) => {
                 if let Expr::Name(name) = &*ann_assign.target {
diff --git a/pyrefly/lib/test/lsp/definition.rs b/pyrefly/lib/test/lsp/definition.rs
index 9b167fc853..8f0e03f762 100644
--- a/pyrefly/lib/test/lsp/definition.rs
+++ b/pyrefly/lib/test/lsp/definition.rs
@@ -1698,3 +1698,119 @@ x = None
     assert!(report.contains("Definition Result:"));
     assert!(report.contains("None"));
 }
+
+#[test]
+fn goto_def_on_import_same_name_alias_first_token_test() {
+    let lib = r#"
+def func():
+    pass
+"#;
+    let code = r#"
+from lib import func as func
+#               ^
+"#;
+    let report =
+        get_batched_lsp_operations_report(&[("main", code), ("lib", lib)], get_test_report);
+    assert_eq!(
+        r#"
+# main.py
+2 | from lib import func as func
+                    ^
+Definition Result:
+2 | def func():
+        ^^^^
+
+
+# lib.py
+"#
+        .trim(),
+        report.trim(),
+    );
+}
+
+#[test]
+fn goto_def_on_import_same_name_alias_second_token_test() {
+    let lib = r#"
+def func():
+    pass
+"#;
+    let code = r#"
+from lib import func as func
+#                       ^
+"#;
+    let report =
+        get_batched_lsp_operations_report(&[("main", code), ("lib", lib)], get_test_report);
+    assert_eq!(
+        r#"
+# main.py
+2 | from lib import func as func
+                            ^
+Definition Result:
+2 | def func():
+        ^^^^
+
+
+# lib.py
+"#
+        .trim(),
+        report.trim(),
+    );
+}
+
+#[test]
+fn goto_def_on_import_different_name_alias_first_token_test() {
+    let lib = r#"
+def bar():
+    pass
+"#;
+    let code = r#"
+from lib import bar as baz
+#               ^
+"#;
+    let report =
+        get_batched_lsp_operations_report(&[("main", code), ("lib", lib)], get_test_report);
+    assert_eq!(
+        r#"
+# main.py
+2 | from lib import bar as baz
+                    ^
+Definition Result:
+2 | def bar():
+        ^^^
+
+
+# lib.py
+"#
+        .trim(),
+        report.trim(),
+    );
+}
+
+#[test]
+fn goto_def_on_import_different_name_alias_second_token_test() {
+    let lib = r#"
+def bar():
+    pass
+"#;
+    let code = r#"
+from lib import bar as baz
+#                      ^
+"#;
+    let report =
+        get_batched_lsp_operations_report(&[("main", code), ("lib", lib)], get_test_report);
+    assert_eq!(
+        r#"
+# main.py
+2 | from lib import bar as baz
+                           ^
+Definition Result:
+2 | def bar():
+        ^^^
+
+
+# lib.py
+"#
+        .trim(),
+        report.trim(),
+    );
+}
diff --git a/pyrefly/lib/test/lsp/hover.rs b/pyrefly/lib/test/lsp/hover.rs
index 14b44297ff..1e24e437b3 100644
--- a/pyrefly/lib/test/lsp/hover.rs
+++ b/pyrefly/lib/test/lsp/hover.rs
@@ -653,3 +653,115 @@ greeter.attr
     );
     assert!(!report.contains("__call__"));
 }
+
+#[test]
+fn hover_on_import_same_name_alias_first_token_test() {
+    let lib = r#"
+def func() -> None: ...
+"#;
+    let code = r#"
+from lib import func as func
+#               ^
+"#;
+    let report =
+        get_batched_lsp_operations_report(&[("main", code), ("lib", lib)], get_test_report);
+    assert_eq!(
+        r#"
+# main.py
+2 | from lib import func as func
+                    ^
+```python
+(function) func: def func() -> None: ...
+```
+
+
+# lib.py
+"#
+        .trim(),
+        report.trim(),
+    );
+}
+
+#[test]
+fn hover_on_import_same_name_alias_second_token_test() {
+    let lib = r#"
+def func() -> None: ...
+"#;
+    let code = r#"
+from lib import func as func
+#                       ^
+"#;
+    let report =
+        get_batched_lsp_operations_report(&[("main", code), ("lib", lib)], get_test_report);
+    assert_eq!(
+        r#"
+# main.py
+2 | from lib import func as func
+                            ^
+```python
+(function) func: def func() -> None: ...
+```
+
+
+# lib.py
+"#
+        .trim(),
+        report.trim(),
+    );
+}
+
+#[test]
+fn hover_on_import_different_name_alias_first_token_test() {
+    let lib = r#"
+def bar() -> None: ...
+"#;
+    let code = r#"
+from lib import bar as baz
+#               ^
+"#;
+    let report =
+        get_batched_lsp_operations_report(&[("main", code), ("lib", lib)], get_test_report);
+    assert_eq!(
+        r#"
+# main.py
+2 | from lib import bar as baz
+                    ^
+```python
+(function) bar: def bar() -> None: ...
+```
+
+
+# lib.py
+"#
+        .trim(),
+        report.trim(),
+    );
+}
+
+#[test]
+fn hover_on_import_different_name_alias_second_token_test() {
+    let lib = r#"
+def bar() -> None: ...
+"#;
+    let code = r#"
+from lib import bar as baz
+#                      ^
+"#;
+    let report =
+        get_batched_lsp_operations_report(&[("main", code), ("lib", lib)], get_test_report);
+    assert_eq!(
+        r#"
+# main.py
+2 | from lib import bar as baz
+                           ^
+```python
+(function) bar: def bar() -> None: ...
+```
+
+
+# lib.py
+"#
+        .trim(),
+        report.trim(),
+    );
+}
diff --git a/pyrefly/lib/test/lsp/lsp_interaction/mod.rs b/pyrefly/lib/test/lsp/lsp_interaction/mod.rs
index 1c3424cc27..8c500eda05 100644
--- a/pyrefly/lib/test/lsp/lsp_interaction/mod.rs
+++ b/pyrefly/lib/test/lsp/lsp_interaction/mod.rs
@@ -33,6 +33,7 @@ mod object_model;
 mod provide_type;
 mod references;
 mod rename;
+mod semantic_tokens;
 mod type_definition;
 mod unsaved_file;
 mod util;
diff --git a/pyrefly/lib/test/lsp/lsp_interaction/semantic_tokens.rs b/pyrefly/lib/test/lsp/lsp_interaction/semantic_tokens.rs
new file mode 100644
index 0000000000..b4d3a034cb
--- /dev/null
+++ b/pyrefly/lib/test/lsp/lsp_interaction/semantic_tokens.rs
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+use std::fs;
+
+use lsp_types::SemanticTokensResult;
+use lsp_types::Url;
+use lsp_types::request::SemanticTokensFullRequest;
+use serde_json::json;
+
+use crate::state::semantic_tokens::SemanticTokensLegends;
+use crate::test::lsp::lsp_interaction::object_model::InitializeSettings;
+use crate::test::lsp::lsp_interaction::object_model::LspInteraction;
+use crate::test::lsp::lsp_interaction::util::get_test_files_root;
+
+#[test]
+fn semantic_tokens_import_submodule_alias() {
+    let root = get_test_files_root();
+    let root_path = root.path().join("semantic_tokens_imports");
+    let mut interaction = LspInteraction::new();
+    interaction.set_root(root_path.clone());
+    interaction
+        .initialize(InitializeSettings {
+            configuration: Some(None),
+            ..Default::default()
+        })
+        .unwrap();
+
+    let main_path = root_path.join("main.py");
+    let main_text = fs::read_to_string(&main_path).unwrap();
+    let main_uri = Url::from_file_path(&main_path).unwrap();
+
+    interaction.client.did_open("main.py");
+
+    let legend = SemanticTokensLegends::lsp_semantic_token_legends();
+    interaction
+        .client
+        .send_request::<SemanticTokensFullRequest>(json!({
+            "textDocument": { "uri": main_uri.to_string() }
+        }))
+        .expect_response_with(|response| match response {
+            Some(SemanticTokensResult::Tokens(tokens)) => {
+                let mut line = 0u32;
+                let mut col = 0u32;
+                let mut pkg_tokens = 0;
+                let mut sub_tokens = 0;
+                let lines: Vec<&str> = main_text.lines().collect();
+                for token in tokens.data {
+                    let delta_line = token.delta_line;
+                    let delta_start = token.delta_start;
+                    let length = token.length;
+                    let token_type = token.token_type;
+
+                    line += delta_line;
+                    col = if delta_line == 0 {
+                        col + delta_start
+                    } else {
+                        delta_start
+                    };
+
+                    let line_text = match lines.get(line as usize) {
+                        Some(line_text) => *line_text,
+                        None => continue,
+                    };
+                    let start = col as usize;
+                    let end = start + length as usize;
+                    let text = match line_text.get(start..end) {
+                        Some(text) => text,
+                        None => continue,
+                    };
+                    let token_type = legend
+                        .token_types
+                        .get(token_type as usize)
+                        .map(|token_type| token_type.as_str())
+                        .unwrap_or_default();
+
+                    if text == "pkg" && token_type == "namespace" {
+                        pkg_tokens += 1;
+                    }
+                    if text == "sub" && token_type == "namespace" {
+                        sub_tokens += 1;
+                    }
+                }
+                pkg_tokens == 1 && sub_tokens == 2
+            }
+            _ => false,
+        })
+        .unwrap();
+
+    interaction.shutdown().unwrap();
+}
diff --git a/pyrefly/lib/test/lsp/lsp_interaction/test_files/semantic_tokens_imports/main.py b/pyrefly/lib/test/lsp/lsp_interaction/test_files/semantic_tokens_imports/main.py
new file mode 100644
index 0000000000..6aa52836a2
--- /dev/null
+++ b/pyrefly/lib/test/lsp/lsp_interaction/test_files/semantic_tokens_imports/main.py
@@ -0,0 +1 @@
+from pkg import sub as sub
diff --git a/pyrefly/lib/test/lsp/lsp_interaction/test_files/semantic_tokens_imports/pkg/__init__.py b/pyrefly/lib/test/lsp/lsp_interaction/test_files/semantic_tokens_imports/pkg/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pyrefly/lib/test/lsp/lsp_interaction/test_files/semantic_tokens_imports/pkg/sub.py b/pyrefly/lib/test/lsp/lsp_interaction/test_files/semantic_tokens_imports/pkg/sub.py
new file mode 100644
index 0000000000..b9bfa6f123
--- /dev/null
+++ b/pyrefly/lib/test/lsp/lsp_interaction/test_files/semantic_tokens_imports/pkg/sub.py
@@ -0,0 +1,2 @@
+def func():
+    pass
diff --git a/pyrefly/lib/test/lsp/semantic_tokens.rs b/pyrefly/lib/test/lsp/semantic_tokens.rs
index 564597f378..072dead45d 100644
--- a/pyrefly/lib/test/lsp/semantic_tokens.rs
+++ b/pyrefly/lib/test/lsp/semantic_tokens.rs
@@ -790,3 +790,63 @@ token-type: variable, token-modifiers: [readonly]
 "#,
     );
 }
+
+#[test]
+fn import_with_same_name_alias_test() {
+    let lib = r#"
+def func():
+    pass
+"#;
+    let code = r#"
+from lib import func as func
+"#;
+    assert_full_semantic_tokens(
+        &[("main", code), ("lib", lib)],
+        r#"
+# main.py
+line: 1, column: 5, length: 3, text: lib
+token-type: namespace
+
+line: 1, column: 16, length: 4, text: func
+token-type: namespace
+
+line: 1, column: 24, length: 4, text: func
+token-type: function
+
+
+# lib.py
+line: 1, column: 4, length: 4, text: func
+token-type: function
+"#,
+    );
+}
+
+#[test]
+fn import_with_renamed_alias_test() {
+    let foo = r#"
+def bar():
+    pass
+"#;
+    let code = r#"
+from foo import bar as baz
+"#;
+    assert_full_semantic_tokens(
+        &[("main", code), ("foo", foo)],
+        r#"
+# main.py
+line: 1, column: 5, length: 3, text: foo
+token-type: namespace
+
+line: 1, column: 16, length: 3, text: bar
+token-type: namespace
+
+line: 1, column: 23, length: 3, text: baz
+token-type: function
+
+
+# foo.py
+line: 1, column: 4, length: 3, text: bar
+token-type: function
+"#,
+    );
+}