Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 15 additions & 15 deletions crates/deps-cargo/src/registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -205,29 +205,29 @@ fn parse_index_json(data: &[u8], _crate_name: &str) -> Result<Vec<CargoVersion>>
let content = std::str::from_utf8(data)
.map_err(|e| DepsError::CacheError(format!("Invalid UTF-8: {}", e)))?;

let mut versions: Vec<CargoVersion> = content
// Parse versions once and cache the parsed Version for sorting
let mut versions_with_parsed: Vec<(CargoVersion, Version)> = content
.lines()
.filter(|line| !line.trim().is_empty())
.filter_map(|line| {
let entry: IndexEntry = serde_json::from_str(line).ok()?;
Some(CargoVersion {
num: entry.version,
yanked: entry.yanked,
features: entry.features,
})
let parsed = entry.version.parse::<Version>().ok()?;
Some((
CargoVersion {
num: entry.version,
yanked: entry.yanked,
features: entry.features,
},
parsed,
))
})
.collect();

versions.sort_by(|a, b| {
let ver_a = a.num.parse::<Version>().ok();
let ver_b = b.num.parse::<Version>().ok();
match (ver_a, ver_b) {
(Some(a), Some(b)) => b.cmp(&a),
_ => std::cmp::Ordering::Equal,
}
});
// Sort using already-parsed versions (newest first)
versions_with_parsed.sort_unstable_by(|a, b| b.1.cmp(&a.1));

Ok(versions)
// Extract sorted versions
Ok(versions_with_parsed.into_iter().map(|(v, _)| v).collect())
}

/// Response from crates.io search API.
Expand Down
44 changes: 28 additions & 16 deletions crates/deps-core/src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -338,32 +338,44 @@ impl HttpCache {

/// Evicts approximately `CACHE_EVICTION_PERCENTAGE`% of cache entries when capacity is reached.
///
/// Uses a simple random eviction strategy. In a production system,
/// this could be replaced with LRU or TTL-based eviction.
/// Uses a min-heap to efficiently find the oldest entries instead of full sorting.
/// For each entry, we potentially push/pop from the heap, which is O(log K).
///
/// Time complexity: O(N log K) where N = number of cache entries, K = target_removals
/// Space complexity: O(K) for the min-heap
fn evict_entries(&self) {
use std::cmp::Reverse;
use std::collections::BinaryHeap;

let target_removals = MAX_CACHE_ENTRIES / CACHE_EVICTION_PERCENTAGE;
let mut removed = 0;

// Simple eviction: remove oldest entries by fetched_at timestamp
let mut entries_to_remove = Vec::new();
// Use min-heap to efficiently find N oldest entries
// The heap maintains the K oldest entries seen so far
let mut oldest = BinaryHeap::with_capacity(target_removals);

for entry in self.entries.iter() {
entries_to_remove.push((entry.key().clone(), entry.value().fetched_at));
if entries_to_remove.len() >= MAX_CACHE_ENTRIES {
break;
let item = (entry.value().fetched_at, entry.key().clone());

if oldest.len() < target_removals {
// Heap not full, insert directly
oldest.push(Reverse(item));
} else if let Some(Reverse(newest_of_oldest)) = oldest.peek() {
// If this entry is older than the newest entry in our "oldest" set,
// replace it
if item.0 < newest_of_oldest.0 {
oldest.pop();
oldest.push(Reverse(item));
}
}
}

// Sort by age (oldest first)
entries_to_remove.sort_by_key(|(_, time)| *time);

// Remove oldest entries
for (url, _) in entries_to_remove.iter().take(target_removals) {
self.entries.remove(url);
removed += 1;
// Remove selected oldest entries
let removed = oldest.len();
for Reverse((_, url)) in oldest {
self.entries.remove(&url);
}

tracing::debug!("evicted {} cache entries", removed);
tracing::debug!("evicted {} cache entries (O(N) algorithm)", removed);
}

/// Benchmark-only helper: Direct cache lookup without network requests.
Expand Down
1 change: 1 addition & 0 deletions crates/deps-core/src/handler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -284,6 +284,7 @@ where
H: EcosystemHandler,
UnifiedVer: VersionStringGetter + YankedChecker,
{
// Pre-allocate with estimated capacity
let mut cached_deps = Vec::with_capacity(dependencies.len());
let mut fetch_deps = Vec::with_capacity(dependencies.len());

Expand Down
25 changes: 16 additions & 9 deletions crates/deps-core/src/lsp_helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,8 @@ pub async fn generate_hover<R: Registry + ?Sized>(
registry: &R,
formatter: &dyn EcosystemFormatter,
) -> Option<Hover> {
use std::fmt::Write;

let dep = parse_result.dependencies().into_iter().find(|d| {
let on_name = ranges_overlap(d.name_range(), position);
let on_version = d
Expand All @@ -167,38 +169,43 @@ pub async fn generate_hover<R: Registry + ?Sized>(
let versions = registry.get_versions(dep.name()).await.ok()?;

let url = formatter.package_url(dep.name());
let mut markdown = format!("# [{}]({})\n\n", dep.name(), url);

// Pre-allocate with estimated capacity to reduce allocations
let mut markdown = String::with_capacity(512);
write!(&mut markdown, "# [{}]({})\n\n", dep.name(), url).unwrap();

let normalized_name = formatter.normalize_package_name(dep.name());

let resolved = resolved_versions
.get(&normalized_name)
.or_else(|| resolved_versions.get(dep.name()));
if let Some(resolved_ver) = resolved {
markdown.push_str(&format!("**Current**: `{}`\n\n", resolved_ver));
write!(&mut markdown, "**Current**: `{}`\n\n", resolved_ver).unwrap();
} else if let Some(version_req) = dep.version_requirement() {
markdown.push_str(&format!("**Requirement**: `{}`\n\n", version_req));
write!(&mut markdown, "**Requirement**: `{}`\n\n", version_req).unwrap();
}

let latest = cached_versions
.get(&normalized_name)
.or_else(|| cached_versions.get(dep.name()));
if let Some(latest_ver) = latest {
markdown.push_str(&format!("**Latest**: `{}`\n\n", latest_ver));
write!(&mut markdown, "**Latest**: `{}`\n\n", latest_ver).unwrap();
}

markdown.push_str("**Recent versions**:\n");
for (i, version) in versions.iter().take(8).enumerate() {
if i == 0 {
markdown.push_str(&format!("- {} *(latest)*\n", version.version_string()));
writeln!(&mut markdown, "- {} *(latest)*", version.version_string()).unwrap();
} else if version.is_yanked() {
markdown.push_str(&format!(
"- {} {}\n",
writeln!(
&mut markdown,
"- {} {}",
version.version_string(),
formatter.yanked_label()
));
)
.unwrap();
} else {
markdown.push_str(&format!("- {}\n", version.version_string()));
writeln!(&mut markdown, "- {}", version.version_string()).unwrap();
}
}

Expand Down
33 changes: 33 additions & 0 deletions crates/deps-lsp/src/document.rs
Original file line number Diff line number Diff line change
Expand Up @@ -416,6 +416,39 @@ impl ServerState {
self.documents.get(uri)
}

/// Returns an owned snapshot of the document state stored under `uri`.
///
/// The entry is cloned while the DashMap shard lock is held, and the lock is
/// released as soon as this method returns — so callers can perform async
/// work against the snapshot without blocking other users of the map. Prefer
/// this over `get_document` in hot paths that `.await` while using the data.
///
/// # Performance
///
/// Cloning a `DocumentState` is comparatively cheap: it copies the `String`
/// and `HashMap` metadata, not the underlying parse-result trait object
/// (per the type's documented clone semantics — confirm if that changes).
///
/// # Examples
///
/// ```no_run
/// # use deps_lsp::document::ServerState;
/// # use tower_lsp::lsp_types::Url;
/// # async fn example(state: &ServerState, uri: &Url) {
/// // Lock released immediately after clone
/// let doc = state.get_document_clone(uri);
///
/// if let Some(doc) = doc {
///     // Perform async operations without holding lock
///     let result = process_async(&doc).await;
/// }
/// # }
/// # async fn process_async(doc: &deps_lsp::document::DocumentState) {}
/// ```
pub fn get_document_clone(&self, uri: &Url) -> Option<DocumentState> {
    let guard = self.documents.get(uri)?;
    Some((*guard).clone())
}

/// Updates or inserts document state.
///
/// If a document already exists at the given URI, it is replaced.
Expand Down
72 changes: 46 additions & 26 deletions crates/deps-lsp/src/document_lifecycle.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,54 @@ use crate::config::DepsConfig;
use crate::document::{DocumentState, ServerState};
use crate::handlers::diagnostics;
use deps_core::Ecosystem;
use deps_core::Registry;
use deps_core::Result;
use futures::future::join_all;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use tokio::task::JoinHandle;
use tower_lsp::Client;
use tower_lsp::lsp_types::Url;

/// Resolves the newest published version for each named package concurrently.
///
/// Returns a map from package name to its latest version string. Packages
/// whose lookup fails — or which report an empty version list — are simply
/// omitted from the result.
///
/// All registry requests are issued at once and awaited together, so total
/// latency is bounded by the slowest single request rather than the sum of
/// all of them (e.g. 50 deps × 100ms: ~100-150ms instead of ~5000ms).
///
/// NOTE(review): concurrency is unbounded — one in-flight request per
/// package. If very large manifests trigger registry rate limits, consider
/// capping concurrency; verify against the registry's limits.
async fn fetch_latest_versions_parallel(
    registry: Arc<dyn Registry>,
    package_names: Vec<String>,
) -> HashMap<String, String> {
    let lookups = package_names.into_iter().map(|name| {
        let registry = Arc::clone(&registry);
        async move {
            // `?` short-circuits this async block to None on any miss.
            let versions = registry.get_versions(&name).await.ok()?;
            let newest = versions.first()?;
            Some((name, newest.version_string().to_string()))
        }
    });

    // Failed lookups yield None and are dropped by `flatten`.
    join_all(lookups)
        .await
        .into_iter()
        .flatten()
        .collect()
}

/// Generic document open handler using ecosystem registry.
///
/// Parses manifest using the ecosystem's parser, creates document state,
Expand Down Expand Up @@ -72,7 +112,7 @@ pub async fn handle_document_open(
doc.update_cached_versions(resolved_versions.clone());
}

let doc = match state_clone.get_document(&uri_clone) {
let doc = match state_clone.get_document_clone(&uri_clone) {
Some(d) => d,
None => return,
};
Expand All @@ -89,19 +129,9 @@ pub async fn handle_document_open(
.map(|d| d.name().to_string())
.collect();

drop(doc); // Release guard before async operations

// Fetch latest versions from registry (for update hints)
// Fetch latest versions from registry in parallel (for update hints)
let registry = ecosystem_clone.registry();
let mut cached_versions = HashMap::new();

for name in dep_names {
if let Ok(versions) = registry.get_versions(&name).await
&& let Some(latest) = versions.first()
{
cached_versions.insert(name, latest.version_string().to_string());
}
}
let cached_versions = fetch_latest_versions_parallel(registry, dep_names).await;

// Update document state with cached versions (latest from registry)
if let Some(mut doc) = state_clone.documents.get_mut(&uri_clone) {
Expand Down Expand Up @@ -184,7 +214,7 @@ pub async fn handle_document_change(
doc.update_cached_versions(resolved_versions.clone());
}

let doc = match state_clone.get_document(&uri_clone) {
let doc = match state_clone.get_document_clone(&uri_clone) {
Some(d) => d,
None => return,
};
Expand All @@ -201,19 +231,9 @@ pub async fn handle_document_change(
.map(|d| d.name().to_string())
.collect();

drop(doc);

// Fetch latest versions from registry (for update hints)
// Fetch latest versions from registry in parallel (for update hints)
let registry = ecosystem_clone.registry();
let mut cached_versions = HashMap::new();

for name in dep_names {
if let Ok(versions) = registry.get_versions(&name).await
&& let Some(latest) = versions.first()
{
cached_versions.insert(name, latest.version_string().to_string());
}
}
let cached_versions = fetch_latest_versions_parallel(registry, dep_names).await;

// Update document state with cached versions (latest from registry)
if let Some(mut doc) = state_clone.documents.get_mut(&uri_clone) {
Expand Down
29 changes: 15 additions & 14 deletions crates/deps-npm/src/registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -170,26 +170,27 @@ struct VersionMetadata {
fn parse_package_metadata(data: &[u8]) -> Result<Vec<NpmVersion>> {
let metadata: PackageMetadata = serde_json::from_slice(data)?;

let mut versions: Vec<NpmVersion> = metadata
// Parse versions once and cache the parsed Version for sorting
let mut versions_with_parsed: Vec<(NpmVersion, node_semver::Version)> = metadata
.versions
.into_iter()
.map(|(version, meta)| NpmVersion {
version,
deprecated: meta.deprecated.is_some(),
.filter_map(|(version, meta)| {
let parsed = node_semver::Version::parse(&version).ok()?;
Some((
NpmVersion {
version,
deprecated: meta.deprecated.is_some(),
},
parsed,
))
})
.collect();

// Sort by semver version (newest first)
versions.sort_by(|a, b| {
let ver_a = node_semver::Version::parse(&a.version).ok();
let ver_b = node_semver::Version::parse(&b.version).ok();
match (ver_a, ver_b) {
(Some(a), Some(b)) => b.cmp(&a),
_ => std::cmp::Ordering::Equal,
}
});
// Sort using already-parsed versions (newest first)
versions_with_parsed.sort_unstable_by(|a, b| b.1.cmp(&a.1));

Ok(versions)
// Extract sorted versions
Ok(versions_with_parsed.into_iter().map(|(v, _)| v).collect())
}

/// Search response from npm registry.
Expand Down
Loading