Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

26 changes: 24 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,12 @@ A universal Language Server Protocol (LSP) server for dependency management acro

- **Intelligent Autocomplete** — Package names, versions, and feature flags
- **Version Hints** — Inlay hints showing latest available versions
- **Loading Indicators** — Visual feedback during registry fetches with LSP progress support
- **Lock File Support** — Reads resolved versions from Cargo.lock, package-lock.json, poetry.lock, uv.lock, go.sum
- **Diagnostics** — Warnings for outdated, unknown, or yanked dependencies
- **Hover Information** — Package descriptions with resolved version from lock file
- **Code Actions** — Quick fixes to update dependencies
- **High Performance** — Parallel fetching, optimized caching, minimal latency
- **High Performance** — Parallel fetching with per-dependency timeouts, optimized caching

![deps-lsp in action](https://raw.githubusercontent.com/bug-ops/deps-zed/main/assets/img.png)

Expand Down Expand Up @@ -150,7 +151,15 @@ Configure via LSP initialization options:
"yanked_severity": "warning"
},
"cache": {
"refresh_interval_secs": 300
"enabled": true,
"refresh_interval_secs": 300,
"fetch_timeout_secs": 5,
"max_concurrent_fetches": 20
},
"loading_indicator": {
"enabled": true,
"fallback_to_hints": true,
"loading_text": "⏳"
},
"cold_start": {
"enabled": true,
Expand All @@ -159,6 +168,19 @@ Configure via LSP initialization options:
}
```

### Configuration Reference

| Section | Option | Default | Description |
|---------|--------|---------|-------------|
| `cache` | `fetch_timeout_secs` | `5` | Per-package fetch timeout (1-300 seconds) |
| `cache` | `max_concurrent_fetches` | `20` | Concurrent registry requests (1-100) |
| `loading_indicator` | `enabled` | `true` | Show loading feedback during fetches |
| `loading_indicator` | `fallback_to_hints` | `true` | Show loading state in inlay hints when the client does not support LSP progress |
| `loading_indicator` | `loading_text` | `"⏳"` | Text shown during loading (max 100 chars) |

> [!TIP]
> Increase `fetch_timeout_secs` for slower networks. The per-dependency timeout prevents slow packages from blocking others.

> [!NOTE]
> Cold start support ensures LSP features work immediately when your IDE restores previously opened files.

Expand Down
1 change: 1 addition & 0 deletions crates/deps-lsp/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ tracing = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter"] }

[dev-dependencies]
async-trait = { workspace = true }
criterion = { workspace = true }
insta = { workspace = true, features = ["json"] }
mockito = { workspace = true }
Expand Down
149 changes: 148 additions & 1 deletion crates/deps-lsp/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,8 @@ impl Default for DiagnosticsConfig {
///
/// - `enabled`: `true`
/// - `refresh_interval_secs`: `300` (5 minutes)
/// - `fetch_timeout_secs`: `5` (5 seconds per package)
/// - `max_concurrent_fetches`: `20` (20 concurrent requests)
///
/// # Examples
///
Expand All @@ -143,23 +145,39 @@ impl Default for DiagnosticsConfig {
/// let config = CacheConfig {
/// refresh_interval_secs: 600, // 10 minutes
/// enabled: true,
/// fetch_timeout_secs: 5,
/// max_concurrent_fetches: 20,
/// };
///
/// assert_eq!(config.refresh_interval_secs, 600);
/// ```
#[derive(Debug, Deserialize)]
#[derive(Debug, Clone, Deserialize)]
pub struct CacheConfig {
    /// Refresh interval in seconds (default: 300, i.e. 5 minutes)
    #[serde(default = "default_refresh_interval")]
    pub refresh_interval_secs: u64,
    /// Whether the cache is enabled (default: `true`)
    #[serde(default = "default_true")]
    pub enabled: bool,
    /// Timeout for fetching a single package's versions (default: 5 seconds)
    ///
    /// Out-of-range values are clamped to 1-300 seconds by
    /// `deserialize_fetch_timeout` (with a warning) rather than rejected.
    #[serde(
        default = "default_fetch_timeout_secs",
        deserialize_with = "deserialize_fetch_timeout"
    )]
    pub fetch_timeout_secs: u64,
    /// Maximum concurrent package fetches (default: 20)
    ///
    /// Out-of-range values are clamped to 1-100 by
    /// `deserialize_max_concurrent` (with a warning) rather than rejected.
    #[serde(
        default = "default_max_concurrent_fetches",
        deserialize_with = "deserialize_max_concurrent"
    )]
    pub max_concurrent_fetches: usize,
}

impl Default for CacheConfig {
fn default() -> Self {
Self {
refresh_interval_secs: default_refresh_interval(),
enabled: true,
fetch_timeout_secs: default_fetch_timeout_secs(),
max_concurrent_fetches: default_max_concurrent_fetches(),
}
}
}
Expand Down Expand Up @@ -262,6 +280,62 @@ const fn default_refresh_interval() -> u64 {
300 // 5 minutes
}

/// Serde default for `CacheConfig::fetch_timeout_secs`: 5 seconds per package.
const fn default_fetch_timeout_secs() -> u64 {
    5
}

/// Serde default for `CacheConfig::max_concurrent_fetches`: 20 parallel requests.
const fn default_max_concurrent_fetches() -> usize {
    20
}

// Bounds enforced (by clamping, not rejection) in the custom deserializers below.

/// Minimum timeout (seconds) to prevent zero-timeout edge case
const MIN_FETCH_TIMEOUT_SECS: u64 = 1;
/// Maximum timeout (seconds) - 5 minutes is generous
const MAX_FETCH_TIMEOUT_SECS: u64 = 300;

/// Minimum concurrent fetches (must be at least 1)
const MIN_CONCURRENT_FETCHES: usize = 1;
/// Maximum concurrent fetches allowed against the registry
const MAX_CONCURRENT_FETCHES: usize = 100;

/// Custom deserializer for fetch_timeout_secs that validates bounds
/// Deserializer for `fetch_timeout_secs` that forces the value into the
/// `MIN_FETCH_TIMEOUT_SECS..=MAX_FETCH_TIMEOUT_SECS` range.
///
/// Out-of-range values are silently accepted but clamped, with a warning
/// logged so the user can correct their configuration.
fn deserialize_fetch_timeout<'de, D>(deserializer: D) -> Result<u64, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let requested = u64::deserialize(deserializer)?;
    let effective = requested.clamp(MIN_FETCH_TIMEOUT_SECS, MAX_FETCH_TIMEOUT_SECS);
    if effective != requested {
        tracing::warn!(
            "fetch_timeout_secs {} clamped to {} (valid range: {}-{})",
            requested,
            effective,
            MIN_FETCH_TIMEOUT_SECS,
            MAX_FETCH_TIMEOUT_SECS
        );
    }
    Ok(effective)
}

/// Custom deserializer for max_concurrent_fetches that validates bounds
/// Deserializer for `max_concurrent_fetches` that forces the value into the
/// `MIN_CONCURRENT_FETCHES..=MAX_CONCURRENT_FETCHES` range.
///
/// Out-of-range values are silently accepted but clamped, with a warning
/// logged so the user can correct their configuration.
fn deserialize_max_concurrent<'de, D>(deserializer: D) -> Result<usize, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let requested = usize::deserialize(deserializer)?;
    let effective = requested.clamp(MIN_CONCURRENT_FETCHES, MAX_CONCURRENT_FETCHES);
    if effective != requested {
        tracing::warn!(
            "max_concurrent_fetches {} clamped to {} (valid range: {}-{})",
            requested,
            effective,
            MIN_CONCURRENT_FETCHES,
            MAX_CONCURRENT_FETCHES
        );
    }
    Ok(effective)
}

/// Configuration for cold start behavior.
///
/// Controls how the server handles loading documents from disk when
Expand Down Expand Up @@ -362,6 +436,31 @@ mod tests {
assert!(!config.enabled);
}

#[test]
fn test_cache_config_defaults() {
    // Default CacheConfig: cache on, 300s refresh, 5s timeout, 20 parallel fetches.
    let cfg = CacheConfig::default();
    assert_eq!(cfg.refresh_interval_secs, 300);
    assert_eq!(cfg.fetch_timeout_secs, 5);
    assert_eq!(cfg.max_concurrent_fetches, 20);
    assert!(cfg.enabled);
}

#[test]
fn test_cache_config_with_timeout_and_concurrency() {
    let json = r#"{
        "refresh_interval_secs": 600,
        "enabled": true,
        "fetch_timeout_secs": 10,
        "max_concurrent_fetches": 50
    }"#;

    // All four fields deserialize from JSON; in-range values are kept as-is.
    let parsed: CacheConfig = serde_json::from_str(json).unwrap();
    assert!(parsed.enabled);
    assert_eq!(parsed.refresh_interval_secs, 600);
    assert_eq!(parsed.fetch_timeout_secs, 10);
    assert_eq!(parsed.max_concurrent_fetches, 50);
}

#[test]
fn test_full_config_deserialization() {
let json = r#"{
Expand Down Expand Up @@ -529,4 +628,52 @@ mod tests {
let config: LoadingIndicatorConfig = serde_json::from_str(json).unwrap();
assert_eq!(config.loading_text, "⏳");
}

#[test]
fn test_cache_config_fetch_timeout_clamped_min() {
    // A zero timeout is invalid; the deserializer raises it to the minimum.
    let cfg: CacheConfig = serde_json::from_str(r#"{"fetch_timeout_secs": 0}"#).unwrap();
    assert_eq!(cfg.fetch_timeout_secs, 1, "Should clamp 0 to MIN");
}

#[test]
fn test_cache_config_fetch_timeout_clamped_max() {
    // Oversized timeouts are capped at the 300-second ceiling.
    let cfg: CacheConfig = serde_json::from_str(r#"{"fetch_timeout_secs": 999999}"#).unwrap();
    assert_eq!(cfg.fetch_timeout_secs, 300, "Should clamp to MAX");
}

#[test]
fn test_cache_config_fetch_timeout_valid_range() {
    // An in-range timeout passes through untouched.
    let cfg: CacheConfig = serde_json::from_str(r#"{"fetch_timeout_secs": 10}"#).unwrap();
    assert_eq!(cfg.fetch_timeout_secs, 10, "Valid value should not be clamped");
}

#[test]
fn test_cache_config_max_concurrent_clamped_min() {
    // Zero concurrency would stall all fetches; the deserializer raises it to 1.
    let cfg: CacheConfig = serde_json::from_str(r#"{"max_concurrent_fetches": 0}"#).unwrap();
    assert_eq!(cfg.max_concurrent_fetches, 1, "Should clamp 0 to MIN");
}

#[test]
fn test_cache_config_max_concurrent_clamped_max() {
    // Oversized concurrency is capped at the 100-request ceiling.
    let cfg: CacheConfig = serde_json::from_str(r#"{"max_concurrent_fetches": 100000}"#).unwrap();
    assert_eq!(cfg.max_concurrent_fetches, 100, "Should clamp to MAX");
}

#[test]
fn test_cache_config_max_concurrent_valid_range() {
    // An in-range concurrency limit passes through untouched.
    let cfg: CacheConfig = serde_json::from_str(r#"{"max_concurrent_fetches": 50}"#).unwrap();
    assert_eq!(cfg.max_concurrent_fetches, 50, "Valid value should not be clamped");
}
}
Loading