Commit 48bebea

support goto definition and find references

1 parent f2775ac commit 48bebea

4 files changed: +163 -45 lines changed

.gitignore

Lines changed: 2 additions & 0 deletions

@@ -11,3 +11,5 @@ generated_assists.adoc
 generated_features.adoc
 generated_diagnostic.adoc
 .DS_Store
+/out/
+/dump.lsif

crates/ide/src/lib.rs

Lines changed: 1 addition & 1 deletion

@@ -87,7 +87,7 @@ pub use crate::{
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},
-    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData, TokenId},
+    static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
     syntax_highlighting::{
         tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HlRange,

crates/ide/src/static_index.rs

Lines changed: 59 additions & 30 deletions

@@ -3,15 +3,17 @@
 
 use std::collections::HashMap;
 
+use hir::Semantics;
 use hir::{db::HirDatabase, Crate, Module};
-use ide_db::base_db::{FileId, SourceDatabaseExt};
-use ide_db::RootDatabase;
+use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt};
 use ide_db::defs::Definition;
+use ide_db::RootDatabase;
 use rustc_hash::FxHashSet;
-use syntax::TextRange;
 use syntax::{AstNode, SyntaxKind::*, T};
+use syntax::{SyntaxToken, TextRange};
 
-use crate::hover::{get_definition_of_token, hover_for_definition};
+use crate::display::TryToNav;
+use crate::hover::hover_for_definition;
 use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};
 
 /// A static representation of fully analyzed source code.
@@ -25,8 +27,15 @@ pub struct StaticIndex<'a> {
     def_map: HashMap<Definition, TokenId>,
 }
 
+pub struct ReferenceData {
+    pub range: FileRange,
+    pub is_definition: bool,
+}
+
 pub struct TokenStaticData {
     pub hover: Option<HoverResult>,
+    pub definition: Option<FileRange>,
+    pub references: Vec<ReferenceData>,
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash)]
@@ -42,14 +51,16 @@ impl TokenStore {
         id
     }
 
+    pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
+        self.0.get_mut(id.0)
+    }
+
     pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
         self.0.get(id.0)
     }
-
-    pub fn iter(self) -> impl Iterator<Item=(TokenId, TokenStaticData)> {
-        self.0.into_iter().enumerate().map(|(i, x)| {
-            (TokenId(i), x)
-        })
+
+    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+        self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
     }
 }
 
@@ -84,26 +95,15 @@ impl StaticIndex<'_> {
         });
         let hover_config =
             HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
-        let tokens = tokens
-            .filter(|token| match token.kind() {
-                IDENT
-                | INT_NUMBER
-                | LIFETIME_IDENT
-                | T![self]
-                | T![super]
-                | T![crate] => true,
-                _ => false,
-            });
-        let mut result = StaticIndexedFile {
-            file_id,
-            folds,
-            tokens: vec![],
-        };
+        let tokens = tokens.filter(|token| match token.kind() {
+            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true,
+            _ => false,
+        });
+        let mut result = StaticIndexedFile { file_id, folds, tokens: vec![] };
         for token in tokens {
             let range = token.text_range();
             let node = token.parent().unwrap();
-            let def = get_definition_of_token(self.db, &sema, &sema.descend_into_macros(token), file_id, range.start(), &mut None);
-            let def = if let Some(x) = def {
+            let def = if let Some(x) = get_definition(&sema, token.clone()) {
                 x
             } else {
                 continue;
@@ -112,18 +112,34 @@ impl StaticIndex<'_> {
                 *x
             } else {
                 let x = self.tokens.insert(TokenStaticData {
-                    hover: hover_for_definition(self.db, file_id, &sema, def, node, &hover_config),
+                    hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
+                    definition: def
+                        .try_to_nav(self.db)
+                        .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+                    references: vec![],
                 });
                 self.def_map.insert(def, x);
                 x
             };
+            let token = self.tokens.get_mut(id).unwrap();
+            token.references.push(ReferenceData {
+                range: FileRange { range, file_id },
+                is_definition: if let Some(x) = def.try_to_nav(self.db) {
+                    x.file_id == file_id && x.focus_or_full_range() == range
+                } else {
+                    false
+                },
+            });
             result.tokens.push((range, id));
         }
         self.files.push(result);
         Ok(())
     }
-
-    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> Cancellable<StaticIndex<'a>> {
+
+    pub fn compute<'a>(
+        db: &'a RootDatabase,
+        analysis: &'a Analysis,
+    ) -> Cancellable<StaticIndex<'a>> {
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source(db).file_id.original_file(db);
             let source_root = db.file_source_root(file_id);
@@ -133,7 +149,8 @@ impl StaticIndex<'_> {
         let mut this = StaticIndex {
             files: vec![],
             tokens: Default::default(),
-            analysis, db,
+            analysis,
+            db,
             def_map: Default::default(),
         };
         let mut visited_files = FxHashSet::default();
@@ -150,3 +167,15 @@ impl StaticIndex<'_> {
         Ok(this)
     }
 }
+
+fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
+    for token in sema.descend_into_macros_many(token) {
+        let def = Definition::from_token(&sema, &token);
+        if let [x] = def.as_slice() {
+            return Some(*x);
+        } else {
+            continue;
+        };
+    }
+    None
+}
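
The new fields on TokenStaticData (definition, references) and the get_definition helper are the data the LSIF emitter in the next file builds on. A minimal consumer-side sketch, not part of this commit: only the type and field names visible in the diff above are used, while the function itself (count_index) is hypothetical.

    // Hypothetical consumer of the new per-token data; not part of this commit.
    // `StaticIndex`, `TokenStaticData` and `ReferenceData` are the types from the
    // diff above; the counting logic is illustrative only.
    fn count_index(si: ide::StaticIndex<'_>) -> (usize, usize) {
        let mut tokens_with_definition = 0;
        let mut total_references = 0;
        // `TokenStore::iter` consumes the store and yields (TokenId, TokenStaticData).
        for (_id, token) in si.tokens.iter() {
            if token.definition.is_some() {
                tokens_with_definition += 1;
            }
            // Every occurrence of the token is recorded; `is_definition` marks
            // the defining occurrence among them.
            total_references += token.references.len();
        }
        (tokens_with_definition, total_references)
    }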

crates/rust-analyzer/src/cli/lsif.rs

Lines changed: 101 additions & 14 deletions

@@ -4,7 +4,10 @@ use std::collections::HashMap;
 use std::env;
 use std::time::Instant;
 
-use ide::{Analysis, Cancellable, RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData};
+use ide::{
+    Analysis, Cancellable, FileId, FileRange, RootDatabase, StaticIndex, StaticIndexedFile,
+    TokenId, TokenStaticData,
+};
 use ide_db::LineIndexDatabase;
 
 use ide_db::base_db::salsa::{self, ParallelDatabase};
@@ -31,6 +34,8 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
 struct LsifManager<'a> {
     count: i32,
     token_map: HashMap<TokenId, Id>,
+    range_map: HashMap<FileRange, Id>,
+    file_map: HashMap<FileId, Id>,
     analysis: &'a Analysis,
     db: &'a RootDatabase,
     vfs: &'a Vfs,
@@ -50,12 +55,14 @@ impl LsifManager<'_> {
         LsifManager {
             count: 0,
             token_map: HashMap::default(),
+            range_map: HashMap::default(),
+            file_map: HashMap::default(),
             analysis,
             db,
             vfs,
         }
     }
-
+
     fn add(&mut self, data: Element) -> Id {
         let id = Id(self.count);
         self.emit(&serde_json::to_string(&Entry { id: id.into(), data }).unwrap());
@@ -68,9 +75,54 @@ impl LsifManager<'_> {
         println!("{}", data);
     }
 
-    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
+    fn get_token_id(&mut self, id: TokenId) -> Id {
+        if let Some(x) = self.token_map.get(&id) {
+            return *x;
+        }
         let result_set_id = self.add(Element::Vertex(Vertex::ResultSet(ResultSet { key: None })));
         self.token_map.insert(id, result_set_id);
+        result_set_id
+    }
+
+    fn get_range_id(&mut self, id: FileRange) -> Cancellable<Id> {
+        if let Some(x) = self.range_map.get(&id) {
+            return Ok(*x);
+        }
+        let file_id = id.file_id;
+        let doc_id = self.get_file_id(file_id);
+        let line_index = self.db.line_index(file_id);
+        let line_index = LineIndex {
+            index: line_index.clone(),
+            encoding: OffsetEncoding::Utf16,
+            endings: LineEndings::Unix,
+        };
+        let range_id = self.add(Element::Vertex(Vertex::Range {
+            range: to_proto::range(&line_index, id.range),
+            tag: None,
+        }));
+        self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
+            in_vs: vec![range_id.into()],
+            out_v: doc_id.into(),
+        })));
+        Ok(range_id)
+    }
+
+    fn get_file_id(&mut self, id: FileId) -> Id {
+        if let Some(x) = self.file_map.get(&id) {
+            return *x;
+        }
+        let path = self.vfs.file_path(id);
+        let path = path.as_path().unwrap();
+        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
+            language_id: "rust".to_string(),
+            uri: lsp_types::Url::from_file_path(path).unwrap(),
+        })));
+        self.file_map.insert(id, doc_id);
+        doc_id
+    }
+
+    fn add_token(&mut self, id: TokenId, token: TokenStaticData) -> Cancellable<()> {
+        let result_set_id = self.get_token_id(id);
         if let Some(hover) = token.hover {
             let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
                 result: Hover {
@@ -83,16 +135,50 @@ impl LsifManager<'_> {
                 out_v: result_set_id.into(),
             })));
         }
+        if let Some(def) = token.definition {
+            let result_id = self.add(Element::Vertex(Vertex::DefinitionResult));
+            let def_vertex = self.get_range_id(def)?;
+            self.add(Element::Edge(Edge::Item(Item {
+                document: (*self.file_map.get(&def.file_id).unwrap()).into(),
+                property: None,
+                edge_data: EdgeDataMultiIn {
+                    in_vs: vec![def_vertex.into()],
+                    out_v: result_id.into(),
+                },
+            })));
+            self.add(Element::Edge(Edge::Definition(EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            })));
+        }
+        if !token.references.is_empty() {
+            let result_id = self.add(Element::Vertex(Vertex::ReferenceResult));
+            self.add(Element::Edge(Edge::References(EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            })));
+            for x in token.references {
+                let vertex = *self.range_map.get(&x.range).unwrap();
+                self.add(Element::Edge(Edge::Item(Item {
+                    document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
+                    property: Some(if x.is_definition {
+                        ItemKind::Definitions
+                    } else {
+                        ItemKind::References
+                    }),
+                    edge_data: EdgeDataMultiIn {
+                        in_vs: vec![vertex.into()],
+                        out_v: result_id.into(),
+                    },
                })));
+            }
+        }
+        Ok(())
     }
 
     fn add_file(&mut self, file: StaticIndexedFile) -> Cancellable<()> {
-        let StaticIndexedFile { file_id, tokens, folds} = file;
-        let path = self.vfs.file_path(file_id);
-        let path = path.as_path().unwrap();
-        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
-            language_id: "rust".to_string(),
-            uri: lsp_types::Url::from_file_path(path).unwrap(),
-        })));
+        let StaticIndexedFile { file_id, tokens, folds } = file;
+        let doc_id = self.get_file_id(file_id);
         let text = self.analysis.file_text(file_id)?;
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
@@ -116,7 +202,8 @@ impl LsifManager<'_> {
                 range: to_proto::range(&line_index, range),
                 tag: None,
             }));
-            let result_set_id = *self.token_map.get(&id).expect("token map doesn't contain id");
+            self.range_map.insert(FileRange { file_id, range }, range_id);
+            let result_set_id = self.get_token_id(id);
             self.add(Element::Edge(Edge::Next(EdgeData {
                 in_v: result_set_id.into(),
                 out_v: range_id.into(),
@@ -161,12 +248,12 @@ impl flags::Lsif {
             position_encoding: Encoding::Utf16,
             tool_info: None,
         })));
-        for (id, token) in si.tokens.iter() {
-            lsif.add_token(id, token);
-        }
         for file in si.files {
            lsif.add_file(file)?;
        }
+        for (id, token) in si.tokens.iter() {
+            lsif.add_token(id, token)?;
+        }
         eprintln!("Generating LSIF finished in {:?}", now.elapsed());
         Ok(())
     }
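
One detail worth spelling out: the driver loop in impl flags::Lsif was reordered so that files are emitted before tokens. add_token resolves each reference range through range_map, and range_map is only populated while add_file emits the per-range vertices, so the file pass has to run first. Below is a sketch of that flow with the invariant written out as comments; LsifManager is private to this module, so this is purely illustrative rather than a public API.

    // Illustrative only: the emission order this commit relies on.
    // Method names match the diff above.
    fn emit_index(lsif: &mut LsifManager<'_>, si: ide::StaticIndex<'_>) -> ide::Cancellable<()> {
        for file in si.files {
            // Emits the Document vertex (memoized in `file_map`), one Range vertex
            // per token occurrence plus its `next` edge, and records every
            // (file, range) -> vertex id in `range_map`.
            lsif.add_file(file)?;
        }
        for (id, token) in si.tokens.iter() {
            // Attaches hover, definition and reference results to the token's
            // memoized ResultSet vertex; the reference `item` edges look their
            // target ranges up in `range_map`, which the first loop populated.
            lsif.add_token(id, token)?;
        }
        Ok(())
    }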
