diff --git a/book/src/guides/tags.md b/book/src/guides/tags.md index 00794962a391..f33d3824d7c1 100644 --- a/book/src/guides/tags.md +++ b/book/src/guides/tags.md @@ -25,6 +25,14 @@ The following [captures][tree-sitter-captures] are recognized: | `definition.module` | | `definition.struct` | | `definition.type` | +| `reference.class` | +| `reference.constant` | +| `reference.function` | +| `reference.interface` | +| `reference.macro` | +| `reference.module` | +| `reference.struct` | +| `reference.type` | [Example query files][example-queries] can be found in the Helix GitHub repository. diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs index 929774c7999d..19d1e6f52a5b 100644 --- a/helix-term/src/commands/lsp.rs +++ b/helix-term/src/commands/lsp.rs @@ -11,7 +11,9 @@ use helix_lsp::{ use tokio_stream::StreamExt; use tui::{text::Span, widgets::Row}; -use super::{align_view, push_jump, Align, Context, Editor}; +use super::{ + align_view, push_jump, search_selection_impl, syntax_workspace_symbol_picker, syntax_workspace_symbol_reference_picker, Align, Context, Editor +}; use helix_core::{ diagnostic::DiagnosticProvider, syntax::config::LanguageServerFeature, @@ -951,38 +953,97 @@ where } pub fn goto_declaration(cx: &mut Context) { - goto_single_impl( - cx, - LanguageServerFeature::GotoDeclaration, - |ls, pos, doc_id| ls.goto_declaration(doc_id, pos, None), - ); + let doc = doc!(cx.editor); + + if doc + .language_servers_with_feature(LanguageServerFeature::GotoDeclaration) + .next() + .is_some() + { + goto_single_impl( + cx, + LanguageServerFeature::GotoDeclaration, + |ls, pos, doc_id| ls.goto_declaration(doc_id, pos, None), + ); + } else { + search_selection_impl(cx, false); + syntax_workspace_symbol_picker(cx); + } } pub fn goto_definition(cx: &mut Context) { - goto_single_impl( - cx, - LanguageServerFeature::GotoDefinition, - |ls, pos, doc_id| ls.goto_definition(doc_id, pos, None), - ); + let doc = doc!(cx.editor); + + if doc + 
.language_servers_with_feature(LanguageServerFeature::GotoDefinition) + .next() + .is_some() + { + goto_single_impl( + cx, + LanguageServerFeature::GotoDefinition, + |ls, pos, doc_id| ls.goto_definition(doc_id, pos, None), + ); + } else { + search_selection_impl(cx, false); + syntax_workspace_symbol_picker(cx); + } } pub fn goto_type_definition(cx: &mut Context) { - goto_single_impl( - cx, - LanguageServerFeature::GotoTypeDefinition, - |ls, pos, doc_id| ls.goto_type_definition(doc_id, pos, None), - ); + let doc = doc!(cx.editor); + + if doc + .language_servers_with_feature(LanguageServerFeature::GotoTypeDefinition) + .next() + .is_some() + { + goto_single_impl( + cx, + LanguageServerFeature::GotoTypeDefinition, + |ls, pos, doc_id| ls.goto_type_definition(doc_id, pos, None), + ); + } else { + search_selection_impl(cx, false); + syntax_workspace_symbol_picker(cx); + } } pub fn goto_implementation(cx: &mut Context) { - goto_single_impl( - cx, - LanguageServerFeature::GotoImplementation, - |ls, pos, doc_id| ls.goto_implementation(doc_id, pos, None), - ); + let doc = doc!(cx.editor); + + if doc + .language_servers_with_feature(LanguageServerFeature::GotoImplementation) + .next() + .is_some() + { + goto_single_impl( + cx, + LanguageServerFeature::GotoImplementation, + |ls, pos, doc_id| ls.goto_implementation(doc_id, pos, None), + ); + } else { + search_selection_impl(cx, false); + syntax_workspace_symbol_picker(cx); + } } pub fn goto_reference(cx: &mut Context) { + let doc = doc!(cx.editor); + + if doc + .language_servers_with_feature(LanguageServerFeature::GotoReference) + .next() + .is_some() { + goto_reference_picker(cx) + + } else { + search_selection_impl(cx, false); + syntax_workspace_symbol_reference_picker(cx); + } +} + +pub fn goto_reference_picker(cx: &mut Context) { let config = cx.editor.config(); let (view, doc) = current_ref!(cx.editor); diff --git a/helix-term/src/commands/syntax.rs b/helix-term/src/commands/syntax.rs index fec222ce8d8f..24d8de63dab0 
100644 --- a/helix-term/src/commands/syntax.rs +++ b/helix-term/src/commands/syntax.rs @@ -109,6 +109,27 @@ fn tags_iter<'a>( text: RopeSlice<'a>, doc: UriOrDocumentId, pattern: Option<&'a rope::Regex>, +) -> impl Iterator<Item = Tag> + 'a { + tags_iter_with_prefix(syntax, loader, text, doc, pattern, "definition.") +} + +fn references_iter<'a>( + syntax: &'a Syntax, + loader: &'a Loader, + text: RopeSlice<'a>, + doc: UriOrDocumentId, + pattern: Option<&'a rope::Regex>, +) -> impl Iterator<Item = Tag> + 'a { + tags_iter_with_prefix(syntax, loader, text, doc, pattern, "reference.") +} + +fn tags_iter_with_prefix<'a>( + syntax: &'a Syntax, + loader: &'a Loader, + text: RopeSlice<'a>, + doc: UriOrDocumentId, + pattern: Option<&'a rope::Regex>, + prefix: &'a str, ) -> impl Iterator<Item = Tag> + 'a { let mut tags_iter = syntax.tags(text, loader, ..); @@ -122,7 +143,7 @@ fn tags_iter<'a>( .query; let Some(kind) = query .capture_name(mat.capture) - .strip_prefix("definition.") + .strip_prefix(prefix) .and_then(TagKind::from_name) else { continue; @@ -391,6 +412,7 @@ pub fn syntax_workspace_symbol_picker(cx: &mut Context) { } .boxed() }; + let reg = cx.register.unwrap_or('/'); let picker = Picker::new( columns, 1, // name @@ -428,7 +450,255 @@ pub fn syntax_workspace_symbol_picker(cx: &mut Context) { } }) - .truncate_start(false); + .truncate_start(false) + .with_history_register(Some(reg)); + cx.push_layer(Box::new(overlaid(picker))); +} + +pub fn syntax_workspace_symbol_reference_picker(cx: &mut Context) { + #[derive(Debug)] + struct SearchState { + searcher_builder: SearcherBuilder, + walk_builder: WalkBuilder, + regex_matcher_builder: RegexMatcherBuilder, + rope_regex_builder: rope::RegexBuilder, + search_root: PathBuf, + /// A cache of files that have been parsed in prior searches.
+ syntax_cache: DashMap<PathBuf, Option<(Rope, Syntax)>>, + } + + let mut searcher_builder = SearcherBuilder::new(); + searcher_builder.binary_detection(BinaryDetection::quit(b'\x00')); + + // Search from the workspace that the currently focused document is within. This behaves like global + // search most of the time but helps when you have two projects open in splits. + let search_root = if let Some(path) = doc!(cx.editor).path() { + helix_loader::find_workspace_in(path).0 + } else { + helix_loader::find_workspace().0 + }; + + let absolute_root = search_root + .canonicalize() + .unwrap_or_else(|_| search_root.clone()); + + let config = cx.editor.config(); + let dedup_symlinks = config.file_picker.deduplicate_links; + + let mut walk_builder = WalkBuilder::new(&search_root); + walk_builder + .hidden(config.file_picker.hidden) + .parents(config.file_picker.parents) + .ignore(config.file_picker.ignore) + .follow_links(config.file_picker.follow_symlinks) + .git_ignore(config.file_picker.git_ignore) + .git_global(config.file_picker.git_global) + .git_exclude(config.file_picker.git_exclude) + .max_depth(config.file_picker.max_depth) + .filter_entry(move |entry| filter_picker_entry(entry, &absolute_root, dedup_symlinks)) + .add_custom_ignore_filename(helix_loader::config_dir().join("ignore")) + .add_custom_ignore_filename(".helix/ignore"); + + let mut regex_matcher_builder = RegexMatcherBuilder::new(); + regex_matcher_builder.case_smart(config.search.smart_case); + let mut rope_regex_builder = rope::RegexBuilder::new(); + rope_regex_builder.syntax(rope::Config::new().case_insensitive(config.search.smart_case)); + let state = SearchState { + searcher_builder, + walk_builder, + regex_matcher_builder, + rope_regex_builder, + search_root, + syntax_cache: DashMap::default(), + }; + let reg = cx.register.unwrap_or('/'); + cx.editor.registers.last_search_register = reg; + let columns = vec![ + PickerColumn::new("kind", |tag: &Tag, _| tag.kind.as_str().into()), + PickerColumn::new("name", |tag: &Tag, _|
 tag.name.as_str().into()).without_filtering(), + PickerColumn::new("path", |tag: &Tag, state: &SearchState| { + match &tag.doc { + UriOrDocumentId::Uri(uri) => { + if let Some(path) = uri.as_path() { + let path = if let Ok(stripped) = path.strip_prefix(&state.search_root) { + stripped + } else { + path + }; + path.to_string_lossy().into() + } else { + uri.to_string().into() + } + } + // This picker only uses `Id` for scratch buffers for better display. + UriOrDocumentId::Id(_) => SCRATCH_BUFFER_NAME.into(), + } + }), + ]; + + let get_tags = |query: &str, + editor: &mut Editor, + state: Arc<SearchState>, + injector: &Injector<_, _>| { + if query.len() < 3 { + return async { Ok(()) }.boxed(); + } + // Attempt to find the tag in any open documents. + let pattern = match state.rope_regex_builder.build(query) { + Ok(pattern) => pattern, + Err(err) => return async { Err(anyhow::anyhow!(err)) }.boxed(), + }; + let loader = editor.syn_loader.load(); + for doc in editor.documents() { + let Some(syntax) = doc.syntax() else { continue }; + let text = doc.text().slice(..); + let uri_or_id = doc + .uri() + .map(UriOrDocumentId::Uri) + .unwrap_or_else(|| UriOrDocumentId::Id(doc.id())); + for tag in references_iter(syntax, &loader, text.slice(..), uri_or_id, Some(&pattern)) { + if injector.push(tag).is_err() { + return async { Ok(()) }.boxed(); + } + } + } + if !state.search_root.exists() { + return async { Err(anyhow::anyhow!("Current working directory does not exist")) } + .boxed(); + } + let matcher = match state.regex_matcher_builder.build(query) { + Ok(matcher) => { + // Clear any "Failed to compile regex" errors out of the statusline.
+ editor.clear_status(); + matcher + } + Err(err) => { + log::info!( + "Failed to compile search pattern in workspace symbol search: {}", + err + ); + return async { Err(anyhow::anyhow!("Failed to compile regex")) }.boxed(); + } + }; + let pattern = Arc::new(pattern); + let injector = injector.clone(); + let loader = editor.syn_loader.load(); + let documents: HashSet<_> = editor + .documents() + .filter_map(Document::path) + .cloned() + .collect(); + async move { + let searcher = state.searcher_builder.build(); + state.walk_builder.build_parallel().run(|| { + let mut searcher = searcher.clone(); + let matcher = matcher.clone(); + let injector = injector.clone(); + let loader = loader.clone(); + let documents = &documents; + let pattern = pattern.clone(); + let syntax_cache = &state.syntax_cache; + Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState { + let entry = match entry { + Ok(entry) => entry, + Err(_) => return WalkState::Continue, + }; + match entry.file_type() { + Some(entry) if entry.is_file() => {} + // skip everything else + _ => return WalkState::Continue, + }; + let path = entry.path(); + // If this document is open, skip it because we've already processed it above. + if documents.contains(path) { + return WalkState::Continue; + }; + let mut quit = false; + let sink = sinks::UTF8(|_line, _content| { + if !syntax_cache.contains_key(path) { + // Read the file into a Rope and attempt to recognize the language + // and parse it with tree-sitter. Save the Rope and Syntax for future + // queries. + syntax_cache.insert(path.to_path_buf(), syntax_for_path(path, &loader)); + }; + let entry = syntax_cache.get(path).unwrap(); + let Some((text, syntax)) = entry.value() else { + // If the file couldn't be parsed, move on.
+ return Ok(false); + }; + let uri = Uri::from(path::normalize(path)); + for tag in references_iter( + syntax, + &loader, + text.slice(..), + UriOrDocumentId::Uri(uri), + Some(&pattern), + ) { + if injector.push(tag).is_err() { + quit = true; + break; + } + } + // Quit after seeing the first regex match. We only care to find files + // that contain the pattern and then we run the tags query within + // those. The location and contents of a match are irrelevant - it's + // only important _if_ a file matches. + Ok(false) + }); + if let Err(err) = searcher.search_path(&matcher, path, sink) { + log::info!("Workspace syntax search error: {}, {}", path.display(), err); + } + if quit { + WalkState::Quit + } else { + WalkState::Continue + } + }) + }); + Ok(()) + } + .boxed() + }; + let reg = cx.register.unwrap_or('/'); + let picker = Picker::new( + columns, + 1, // name + [], + state, + move |cx, tag, action| { + let doc_id = match &tag.doc { + UriOrDocumentId::Id(id) => *id, + UriOrDocumentId::Uri(uri) => match cx.editor.open(uri.as_path().expect(""), action) { + Ok(id) => id, + Err(e) => { + cx.editor + .set_error(format!("Failed to open file '{uri:?}': {e}")); + return; + } + } + }; + let doc = doc_mut!(cx.editor, &doc_id); + let view = view_mut!(cx.editor); + let len_chars = doc.text().len_chars(); + if tag.start >= len_chars || tag.end > len_chars { + cx.editor.set_error("The location you jumped to does not exist anymore because the file has changed."); + return; + } + doc.set_selection(view.id, Selection::single(tag.start, tag.end)); + if action.align_view(view, doc.id()) { + align_view(doc, view, Align::Center) + } + }, + ) + .with_dynamic_query(get_tags, Some(275)) + .with_preview(move |_editor, tag| { + Some(( + tag.doc.path_or_id()?, + Some((tag.start_line, tag.end_line)), + )) + }) + .truncate_start(false) + .with_history_register(Some(reg)); cx.push_layer(Box::new(overlaid(picker))); } diff --git a/runtime/queries/java/tags.scm b/runtime/queries/java/tags.scm 
index a84650226753..05cc599f3537 100644 --- a/runtime/queries/java/tags.scm +++ b/runtime/queries/java/tags.scm @@ -25,3 +25,23 @@ (enum_constant name: (identifier) @definition.constant) + +; References + +(superclass + (type_identifier) @reference.class) + +(super_interfaces + (type_list + (type_identifier) @reference.interface)) + +(object_creation_expression + type: (_) @reference.type) + +(type_identifier) @reference.type + +(method_invocation + name: (identifier) @reference.function) + +(field_access + field: (identifier) @reference.constant)