Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion mcp/CWM.RoslynNavigator/src/CWM.RoslynNavigator.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

<!-- NuGet package metadata -->
<PackageId>CWM.RoslynNavigator</PackageId>
<Version>0.7.0</Version>
<Version>0.7.1</Version>
<Authors>Mukesh Murugan</Authors>
<Company>codewithmukesh</Company>
<Product>CWM.RoslynNavigator</Product>
Expand Down
123 changes: 70 additions & 53 deletions mcp/CWM.RoslynNavigator/src/WorkspaceManager.cs
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ namespace CWM.RoslynNavigator;

/// <summary>
/// Manages the MSBuildWorkspace lifecycle: loading, on-demand refresh, and compilation caching.
/// File watching is intentionally avoided — on Linux/WSL, recursive FileSystemWatcher creates
/// one inotify watch per subdirectory (including bin/obj/.git), quickly exhausting the kernel limit.
/// File watching is intentionally avoided — on Linux, recursive FileSystemWatcher creates
/// one inotify watch per subdirectory, quickly exhausting the kernel limit for large solutions.
/// Instead, documents are refreshed on demand when tools are invoked.
/// </summary>
public sealed class WorkspaceManager : IDisposable
Expand All @@ -24,13 +24,15 @@ public sealed class WorkspaceManager : IDisposable
private readonly SemaphoreSlim _writeLock = new(1, 1);
private readonly ConcurrentDictionary<ProjectId, Compilation> _compilationCache = new();
private readonly ConcurrentDictionary<ProjectId, long> _cacheAccessOrder = new();
private readonly ConcurrentDictionary<DocumentId, DateTime> _knownFileTimestamps = new();
private readonly ConcurrentDictionary<DocumentId, DocumentInfo> _knownDocuments = new();
private readonly ConcurrentDictionary<string, DateTime> _projectFileTimestamps = new();
private readonly ConcurrentDictionary<string, byte> _knownDocumentPaths = new(StringComparer.OrdinalIgnoreCase);
private long _accessCounter;
private int _rootsAttempted; // 0 = not tried, 1 = tried
private long _lastRefreshTicks;
private long _lastStructuralScanTicks;
private static readonly long RefreshCooldownTicks = TimeSpan.FromSeconds(5).Ticks;
private static readonly long StructuralScanCooldownTicks = TimeSpan.FromSeconds(60).Ticks;

private MSBuildWorkspace? _workspace;
private Solution? _solution;
Expand Down Expand Up @@ -199,7 +201,6 @@ public async Task<IReadOnlyList<Compilation>> GetAllCompilationsAsync(Cancellati
{
if (State == WorkspaceState.Ready)
{
// Refresh any source files that changed since the last tool call
await RefreshChangedDocumentsAsync(ct);
return null;
}
Expand Down Expand Up @@ -262,12 +263,14 @@ await Parallel.ForEachAsync(
/// <summary>
/// Records the last-write time of every document and project file in the solution.
/// Called once after solution load to establish a baseline for staleness detection.
/// Stores file paths and project IDs alongside timestamps to avoid Roslyn lookups
/// during the per-call refresh hot path.
/// </summary>
private void SnapshotFileTimestamps()
{
if (_solution is null) return;

_knownFileTimestamps.Clear();
_knownDocuments.Clear();
_projectFileTimestamps.Clear();
_knownDocumentPaths.Clear();

Expand All @@ -276,88 +279,98 @@ private void SnapshotFileTimestamps()
var project = _solution.GetProject(projectId);
if (project is null) continue;

if (project.FilePath is not null && File.Exists(project.FilePath))
if (project.FilePath is not null)
{
_projectFileTimestamps[project.FilePath] = File.GetLastWriteTimeUtc(project.FilePath);
}

foreach (var document in project.Documents)
{
if (document.FilePath is not null && File.Exists(document.FilePath))
{
_knownFileTimestamps[document.Id] = File.GetLastWriteTimeUtc(document.FilePath);
_knownDocumentPaths[document.FilePath] = 0;
}
if (document.FilePath is null) continue;

var writeTime = File.GetLastWriteTimeUtc(document.FilePath);
// GetLastWriteTimeUtc returns year 1601 for non-existent files
if (writeTime.Year < 1900) continue;

_knownDocuments[document.Id] = new DocumentInfo(document.FilePath, projectId, writeTime);
_knownDocumentPaths[document.FilePath] = 0;
}
}

_logger.LogInformation("Captured timestamps for {Count} documents across {ProjectCount} projects",
_knownFileTimestamps.Count, _projectFileTimestamps.Count);
_knownDocuments.Count, _projectFileTimestamps.Count);
}

/// <summary>
/// Immutable per-document snapshot used for staleness detection: the document's
/// on-disk path, its owning Roslyn <see cref="ProjectId"/>, and the last-write
/// timestamp (UTC) recorded when the snapshot was taken. Caching the path and
/// project ID here avoids Roslyn solution lookups on the per-call refresh hot path.
/// </summary>
private readonly record struct DocumentInfo(string FilePath, ProjectId ProjectId, DateTime LastWriteUtc);

/// <summary>
/// Refreshes the workspace to reflect on-disk changes. Skips if called within the
/// cooldown window (5s) to avoid expensive filesystem scans on rapid-fire tool calls.
/// Checks .csproj timestamps first (cheap), then scans for new files, then does
/// incremental text updates for modified documents.
/// Refreshes the workspace to reflect on-disk changes. Uses tiered cooldowns to
/// keep per-call overhead low: .csproj + document timestamps every 5s, full
/// directory scan for new files every 60s.
/// </summary>
public async Task<bool> RefreshChangedDocumentsAsync(CancellationToken ct = default)
public async Task RefreshChangedDocumentsAsync(CancellationToken ct = default)
{
if (_solution is null || _solutionPath is null) return false;
if (_solution is null || _solutionPath is null) return;

// Skip if we refreshed recently — MCP tool calls often come in bursts
var now = DateTime.UtcNow.Ticks;
var last = Interlocked.Read(ref _lastRefreshTicks);
if (now - last < RefreshCooldownTicks) return false;

var lastRefresh = Interlocked.Read(ref _lastRefreshTicks);
if (now - lastRefresh < RefreshCooldownTicks) return;
Interlocked.Exchange(ref _lastRefreshTicks, now);

// Phase 1: check .csproj timestamps (cheap — just a few stat calls)
// Phase 1: check .csproj timestamps (one stat per project)
if (HasProjectFileChanged())
{
_logger.LogInformation("Project file changed. Full reload needed.");
_compilationCache.Clear();
_cacheAccessOrder.Clear();
await LoadSolutionAsync(_solutionPath, ct);
return true;
return;
}

// Phase 2: check for new source files (uses EnumerationOptions to skip bin/obj at the OS level)
if (HasNewSourceFiles())
// Phase 2: scan for new source files (expensive directory walk, longer cooldown)
var lastStructural = Interlocked.Read(ref _lastStructuralScanTicks);
if (now - lastStructural >= StructuralScanCooldownTicks)
{
_logger.LogInformation("New source files detected. Full reload needed.");
_compilationCache.Clear();
_cacheAccessOrder.Clear();
await LoadSolutionAsync(_solutionPath, ct);
return true;
Interlocked.Exchange(ref _lastStructuralScanTicks, now);
if (HasNewSourceFiles())
{
_logger.LogInformation("New source files detected. Full reload needed.");
_compilationCache.Clear();
_cacheAccessOrder.Clear();
await LoadSolutionAsync(_solutionPath, ct);
return;
}
}

// Phase 3: incremental text updates for modified existing documents.
// Phase 3: collect changed documents without holding the lock (stat calls are
// the expensive part — keeping them lock-free avoids blocking concurrent tool calls).
var changed = new List<(DocumentId Id, DocumentInfo Info)>();
foreach (var (docId, info) in _knownDocuments)
{
var currentWriteTime = File.GetLastWriteTimeUtc(info.FilePath);
if (currentWriteTime > info.LastWriteUtc)
changed.Add((docId, info with { LastWriteUtc = currentWriteTime }));
}

if (changed.Count == 0) return;

// Apply mutations under lock.
await _writeLock.WaitAsync(ct);
try
{
var refreshed = false;

foreach (var (docId, lastKnown) in _knownFileTimestamps)
foreach (var (docId, info) in changed)
{
var document = _solution.GetDocument(docId);
if (document?.FilePath is null) continue;

var currentWriteTime = File.GetLastWriteTimeUtc(document.FilePath);
if (currentWriteTime <= lastKnown) continue;

var text = await File.ReadAllTextAsync(document.FilePath, ct);
var text = await File.ReadAllTextAsync(info.FilePath, ct);
var sourceText = Microsoft.CodeAnalysis.Text.SourceText.From(text);
_solution = _solution.WithDocumentText(docId, sourceText);
_knownFileTimestamps[docId] = currentWriteTime;
_knownDocuments[docId] = info;

_compilationCache.TryRemove(document.Project.Id, out _);
_cacheAccessOrder.TryRemove(document.Project.Id, out _);
_compilationCache.TryRemove(info.ProjectId, out _);
_cacheAccessOrder.TryRemove(info.ProjectId, out _);

refreshed = true;
_logger.LogDebug("Refreshed changed document: {Path}", document.FilePath);
_logger.LogDebug("Refreshed changed document: {Path}", info.FilePath);
}

return refreshed;
}
finally
{
Expand All @@ -369,7 +382,8 @@ private bool HasProjectFileChanged()
{
foreach (var (path, lastKnown) in _projectFileTimestamps)
{
if (File.Exists(path) && File.GetLastWriteTimeUtc(path) > lastKnown)
// GetLastWriteTimeUtc returns year 1601 for missing files — always < lastKnown
if (File.GetLastWriteTimeUtc(path) > lastKnown)
return true;
}
return false;
Expand All @@ -394,12 +408,15 @@ private bool HasNewSourceFiles()
var projectDir = Path.GetDirectoryName(project.FilePath);
if (projectDir is null || !Directory.Exists(projectDir)) continue;

// Pre-compute bin/obj prefixes to avoid per-file Path.GetRelativePath allocation
var sep = Path.DirectorySeparatorChar;
var binPrefix = $"{projectDir}{sep}bin{sep}";
var objPrefix = $"{projectDir}{sep}obj{sep}";

foreach (var file in Directory.EnumerateFiles(projectDir, "*.cs", options))
{
// Skip bin/obj — check cheaply via span to avoid allocation
var relative = Path.GetRelativePath(projectDir, file);
if (relative.StartsWith("bin", StringComparison.OrdinalIgnoreCase) ||
relative.StartsWith("obj", StringComparison.OrdinalIgnoreCase))
if (file.StartsWith(binPrefix, StringComparison.OrdinalIgnoreCase) ||
file.StartsWith(objPrefix, StringComparison.OrdinalIgnoreCase))
continue;

if (!_knownDocumentPaths.ContainsKey(file))
Expand Down
Loading