Skip to content

Commit 06bc52d

Browse files
authored
feat(lsp): add per-dependency timeout isolation for registry fetches (#46)
* feat(lsp): add per-dependency timeout isolation for registry fetches

Prevents slow packages from blocking the entire fetch queue by wrapping each package fetch in an individual timeout. Previously, a single slow package (30s+) would block a concurrency slot and delay all other packages.

Changes:
- Add configurable `fetch_timeout_secs` (default: 5s, range: 1-300s)
- Add configurable `max_concurrent_fetches` (default: 20, range: 1-100)
- Wrap each registry fetch in `tokio::time::timeout()`
- Add bounds validation with clamping for security
- Add comprehensive tests for timeout behavior and edge cases

Performance impact:
- Before: 50-dep manifest with 1 slow package → 30+ seconds
- After: 50-dep manifest with 1 slow package → ~5 seconds

User configuration:
```json
{
  "cache": {
    "fetch_timeout_secs": 10,
    "max_concurrent_fetches": 50
  }
}
```

* docs: add loading indicators and timeout configuration

- Add Loading Indicators feature to README
- Document fetch_timeout_secs and max_concurrent_fetches options
- Document loading_indicator configuration section
- Add Configuration Reference table with all new options
- Fix outdated ecosystem registration path in templates/README.md
1 parent 1e7cb46 commit 06bc52d

File tree

6 files changed

+612
-23
lines changed

6 files changed

+612
-23
lines changed

Cargo.lock

Lines changed: 1 addition & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

README.md

Lines changed: 24 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,11 +14,12 @@ A universal Language Server Protocol (LSP) server for dependency management acro
1414

1515
- **Intelligent Autocomplete** — Package names, versions, and feature flags
1616
- **Version Hints** — Inlay hints showing latest available versions
17+
- **Loading Indicators** — Visual feedback during registry fetches with LSP progress support
1718
- **Lock File Support** — Reads resolved versions from Cargo.lock, package-lock.json, poetry.lock, uv.lock, go.sum
1819
- **Diagnostics** — Warnings for outdated, unknown, or yanked dependencies
1920
- **Hover Information** — Package descriptions with resolved version from lock file
2021
- **Code Actions** — Quick fixes to update dependencies
21-
- **High Performance** — Parallel fetching, optimized caching, minimal latency
22+
- **High Performance** — Parallel fetching with per-dependency timeouts, optimized caching
2223

2324
![deps-lsp in action](https://raw.githubusercontent.com/bug-ops/deps-zed/main/assets/img.png)
2425

@@ -150,7 +151,15 @@ Configure via LSP initialization options:
150151
"yanked_severity": "warning"
151152
},
152153
"cache": {
153-
"refresh_interval_secs": 300
154+
"enabled": true,
155+
"refresh_interval_secs": 300,
156+
"fetch_timeout_secs": 5,
157+
"max_concurrent_fetches": 20
158+
},
159+
"loading_indicator": {
160+
"enabled": true,
161+
"fallback_to_hints": true,
162+
"loading_text": ""
154163
},
155164
"cold_start": {
156165
"enabled": true,
@@ -159,6 +168,19 @@ Configure via LSP initialization options:
159168
}
160169
```
161170

171+
### Configuration Reference
172+
173+
| Section | Option | Default | Description |
174+
|---------|--------|---------|-------------|
175+
| `cache` | `fetch_timeout_secs` | `5` | Per-package fetch timeout (1-300 seconds) |
176+
| `cache` | `max_concurrent_fetches` | `20` | Concurrent registry requests (1-100) |
177+
| `loading_indicator` | `enabled` | `true` | Show loading feedback during fetches |
178+
| `loading_indicator` | `fallback_to_hints` | `true` | Show loading in inlay hints if LSP progress unsupported |
179+
| `loading_indicator` | `loading_text` | `"⏳"` | Text shown during loading (max 100 chars) |
180+
181+
> [!TIP]
182+
> Increase `fetch_timeout_secs` for slower networks. The per-dependency timeout prevents slow packages from blocking others.
183+
162184
> [!NOTE]
163185
> Cold start support ensures LSP features work immediately when your IDE restores previously opened files.
164186

crates/deps-lsp/Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,7 @@ tracing = { workspace = true }
4747
tracing-subscriber = { workspace = true, features = ["env-filter"] }
4848

4949
[dev-dependencies]
50+
async-trait = { workspace = true }
5051
criterion = { workspace = true }
5152
insta = { workspace = true, features = ["json"] }
5253
mockito = { workspace = true }

crates/deps-lsp/src/config.rs

Lines changed: 148 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -134,6 +134,8 @@ impl Default for DiagnosticsConfig {
134134
///
135135
/// - `enabled`: `true`
136136
/// - `refresh_interval_secs`: `300` (5 minutes)
137+
/// - `fetch_timeout_secs`: `5` (5 seconds per package)
138+
/// - `max_concurrent_fetches`: `20` (20 concurrent requests)
137139
///
138140
/// # Examples
139141
///
@@ -143,23 +145,39 @@ impl Default for DiagnosticsConfig {
143145
/// let config = CacheConfig {
144146
/// refresh_interval_secs: 600, // 10 minutes
145147
/// enabled: true,
148+
/// fetch_timeout_secs: 5,
149+
/// max_concurrent_fetches: 20,
146150
/// };
147151
///
148152
/// assert_eq!(config.refresh_interval_secs, 600);
149153
/// ```
150-
#[derive(Debug, Deserialize)]
154+
#[derive(Debug, Clone, Deserialize)]
151155
pub struct CacheConfig {
152156
#[serde(default = "default_refresh_interval")]
153157
pub refresh_interval_secs: u64,
154158
#[serde(default = "default_true")]
155159
pub enabled: bool,
160+
/// Timeout for fetching a single package's versions (default: 5 seconds)
161+
#[serde(
162+
default = "default_fetch_timeout_secs",
163+
deserialize_with = "deserialize_fetch_timeout"
164+
)]
165+
pub fetch_timeout_secs: u64,
166+
/// Maximum concurrent package fetches (default: 20)
167+
#[serde(
168+
default = "default_max_concurrent_fetches",
169+
deserialize_with = "deserialize_max_concurrent"
170+
)]
171+
pub max_concurrent_fetches: usize,
156172
}
157173

158174
impl Default for CacheConfig {
159175
fn default() -> Self {
160176
Self {
161177
refresh_interval_secs: default_refresh_interval(),
162178
enabled: true,
179+
fetch_timeout_secs: default_fetch_timeout_secs(),
180+
max_concurrent_fetches: default_max_concurrent_fetches(),
163181
}
164182
}
165183
}
@@ -262,6 +280,62 @@ const fn default_refresh_interval() -> u64 {
262280
300 // 5 minutes
263281
}
264282

283+
const fn default_fetch_timeout_secs() -> u64 {
284+
5
285+
}
286+
287+
const fn default_max_concurrent_fetches() -> usize {
288+
20
289+
}
290+
291+
/// Minimum timeout (seconds) to prevent zero-timeout edge case
292+
const MIN_FETCH_TIMEOUT_SECS: u64 = 1;
293+
/// Maximum timeout (seconds) - 5 minutes is generous
294+
const MAX_FETCH_TIMEOUT_SECS: u64 = 300;
295+
296+
/// Minimum concurrent fetches (must be at least 1)
297+
const MIN_CONCURRENT_FETCHES: usize = 1;
298+
/// Maximum concurrent fetches
299+
const MAX_CONCURRENT_FETCHES: usize = 100;
300+
301+
/// Custom deserializer for fetch_timeout_secs that validates bounds
302+
fn deserialize_fetch_timeout<'de, D>(deserializer: D) -> Result<u64, D::Error>
303+
where
304+
D: serde::Deserializer<'de>,
305+
{
306+
let secs = u64::deserialize(deserializer)?;
307+
let clamped = secs.clamp(MIN_FETCH_TIMEOUT_SECS, MAX_FETCH_TIMEOUT_SECS);
308+
if clamped != secs {
309+
tracing::warn!(
310+
"fetch_timeout_secs {} clamped to {} (valid range: {}-{})",
311+
secs,
312+
clamped,
313+
MIN_FETCH_TIMEOUT_SECS,
314+
MAX_FETCH_TIMEOUT_SECS
315+
);
316+
}
317+
Ok(clamped)
318+
}
319+
320+
/// Custom deserializer for max_concurrent_fetches that validates bounds
321+
fn deserialize_max_concurrent<'de, D>(deserializer: D) -> Result<usize, D::Error>
322+
where
323+
D: serde::Deserializer<'de>,
324+
{
325+
let count = usize::deserialize(deserializer)?;
326+
let clamped = count.clamp(MIN_CONCURRENT_FETCHES, MAX_CONCURRENT_FETCHES);
327+
if clamped != count {
328+
tracing::warn!(
329+
"max_concurrent_fetches {} clamped to {} (valid range: {}-{})",
330+
count,
331+
clamped,
332+
MIN_CONCURRENT_FETCHES,
333+
MAX_CONCURRENT_FETCHES
334+
);
335+
}
336+
Ok(clamped)
337+
}
338+
265339
/// Configuration for cold start behavior.
266340
///
267341
/// Controls how the server handles loading documents from disk when
@@ -362,6 +436,31 @@ mod tests {
362436
assert!(!config.enabled);
363437
}
364438

439+
#[test]
440+
fn test_cache_config_defaults() {
441+
let config = CacheConfig::default();
442+
assert!(config.enabled);
443+
assert_eq!(config.refresh_interval_secs, 300);
444+
assert_eq!(config.fetch_timeout_secs, 5);
445+
assert_eq!(config.max_concurrent_fetches, 20);
446+
}
447+
448+
#[test]
449+
fn test_cache_config_with_timeout_and_concurrency() {
450+
let json = r#"{
451+
"refresh_interval_secs": 600,
452+
"enabled": true,
453+
"fetch_timeout_secs": 10,
454+
"max_concurrent_fetches": 50
455+
}"#;
456+
457+
let config: CacheConfig = serde_json::from_str(json).unwrap();
458+
assert_eq!(config.refresh_interval_secs, 600);
459+
assert!(config.enabled);
460+
assert_eq!(config.fetch_timeout_secs, 10);
461+
assert_eq!(config.max_concurrent_fetches, 50);
462+
}
463+
365464
#[test]
366465
fn test_full_config_deserialization() {
367466
let json = r#"{
@@ -529,4 +628,52 @@ mod tests {
529628
let config: LoadingIndicatorConfig = serde_json::from_str(json).unwrap();
530629
assert_eq!(config.loading_text, "⏳");
531630
}
631+
632+
#[test]
633+
fn test_cache_config_fetch_timeout_clamped_min() {
634+
let json = r#"{"fetch_timeout_secs": 0}"#;
635+
let config: CacheConfig = serde_json::from_str(json).unwrap();
636+
assert_eq!(config.fetch_timeout_secs, 1, "Should clamp 0 to MIN");
637+
}
638+
639+
#[test]
640+
fn test_cache_config_fetch_timeout_clamped_max() {
641+
let json = r#"{"fetch_timeout_secs": 999999}"#;
642+
let config: CacheConfig = serde_json::from_str(json).unwrap();
643+
assert_eq!(config.fetch_timeout_secs, 300, "Should clamp to MAX");
644+
}
645+
646+
#[test]
647+
fn test_cache_config_fetch_timeout_valid_range() {
648+
let json = r#"{"fetch_timeout_secs": 10}"#;
649+
let config: CacheConfig = serde_json::from_str(json).unwrap();
650+
assert_eq!(
651+
config.fetch_timeout_secs, 10,
652+
"Valid value should not be clamped"
653+
);
654+
}
655+
656+
#[test]
657+
fn test_cache_config_max_concurrent_clamped_min() {
658+
let json = r#"{"max_concurrent_fetches": 0}"#;
659+
let config: CacheConfig = serde_json::from_str(json).unwrap();
660+
assert_eq!(config.max_concurrent_fetches, 1, "Should clamp 0 to MIN");
661+
}
662+
663+
#[test]
664+
fn test_cache_config_max_concurrent_clamped_max() {
665+
let json = r#"{"max_concurrent_fetches": 100000}"#;
666+
let config: CacheConfig = serde_json::from_str(json).unwrap();
667+
assert_eq!(config.max_concurrent_fetches, 100, "Should clamp to MAX");
668+
}
669+
670+
#[test]
671+
fn test_cache_config_max_concurrent_valid_range() {
672+
let json = r#"{"max_concurrent_fetches": 50}"#;
673+
let config: CacheConfig = serde_json::from_str(json).unwrap();
674+
assert_eq!(
675+
config.max_concurrent_fetches, 50,
676+
"Valid value should not be clamped"
677+
);
678+
}
532679
}

0 commit comments

Comments
 (0)