diff --git a/internal/collections/ordered_map.go b/internal/collections/ordered_map.go index 6c9d334c0f..02f0a56781 100644 --- a/internal/collections/ordered_map.go +++ b/internal/collections/ordered_map.go @@ -300,6 +300,11 @@ func DiffOrderedMaps[K comparable, V comparable](m1 *OrderedMap[K, V], m2 *Order } func DiffOrderedMapsFunc[K comparable, V any](m1 *OrderedMap[K, V], m2 *OrderedMap[K, V], equalValues func(a, b V) bool, onAdded func(key K, value V), onRemoved func(key K, value V), onModified func(key K, oldValue V, newValue V)) { + for k, v2 := range m2.Entries() { + if _, ok := m1.Get(k); !ok { + onAdded(k, v2) + } + } for k, v1 := range m1.Entries() { if v2, ok := m2.Get(k); ok { if !equalValues(v1, v2) { @@ -309,10 +314,4 @@ func DiffOrderedMapsFunc[K comparable, V any](m1 *OrderedMap[K, V], m2 *OrderedM onRemoved(k, v1) } } - - for k, v2 := range m2.Entries() { - if _, ok := m1.Get(k); !ok { - onAdded(k, v2) - } - } } diff --git a/internal/core/core.go b/internal/core/core.go index 3bd84b8075..f284826eda 100644 --- a/internal/core/core.go +++ b/internal/core/core.go @@ -606,6 +606,11 @@ func DiffMaps[K comparable, V comparable](m1 map[K]V, m2 map[K]V, onAdded func(K } func DiffMapsFunc[K comparable, V any](m1 map[K]V, m2 map[K]V, equalValues func(V, V) bool, onAdded func(K, V), onRemoved func(K, V), onChanged func(K, V, V)) { + for k, v2 := range m2 { + if _, ok := m1[k]; !ok { + onAdded(k, v2) + } + } for k, v1 := range m1 { if v2, ok := m2[k]; ok { if !equalValues(v1, v2) { @@ -615,12 +620,6 @@ func DiffMapsFunc[K comparable, V any](m1 map[K]V, m2 map[K]V, equalValues func( onRemoved(k, v1) } } - - for k, v2 := range m2 { - if _, ok := m1[k]; !ok { - onAdded(k, v2) - } - } } // CopyMapInto is maps.Copy, unless dst is nil, in which case it clones and returns src. 
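For reference, the two hunks above reorder DiffOrderedMapsFunc and DiffMapsFunc so that entries present only in the new map are reported via onAdded before modifications and removals are reported from the old map. A condensed standalone sketch of the resulting callback order follows (illustrative only, not part of the diff; the local name diffMapsSketch is hypothetical):

func diffMapsSketch[K comparable, V any](
	m1, m2 map[K]V,
	equal func(V, V) bool,
	onAdded, onRemoved func(K, V),
	onChanged func(K, V, V),
) {
	// Pass 1: keys only in m2 are additions (now reported first).
	for k, v2 := range m2 {
		if _, ok := m1[k]; !ok {
			onAdded(k, v2)
		}
	}
	// Pass 2: keys in both maps with differing values are changes;
	// keys only in m1 are removals.
	for k, v1 := range m1 {
		if v2, ok := m2[k]; ok {
			if !equal(v1, v2) {
				onChanged(k, v1, v2)
			}
		} else {
			onRemoved(k, v1)
		}
	}
}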
diff --git a/internal/lsp/server.go b/internal/lsp/server.go index 6c3478ae3d..54f78f2e16 100644 --- a/internal/lsp/server.go +++ b/internal/lsp/server.go @@ -23,6 +23,7 @@ import ( "github.com/microsoft/typescript-go/internal/project" "github.com/microsoft/typescript-go/internal/project/ata" "github.com/microsoft/typescript-go/internal/project/logging" + "github.com/microsoft/typescript-go/internal/tspath" "github.com/microsoft/typescript-go/internal/vfs" "golang.org/x/sync/errgroup" "golang.org/x/text/language" @@ -651,9 +652,27 @@ func (s *Server) handleInitialized(ctx context.Context, params *lsproto.Initiali s.watchEnabled = true } + cwd := s.cwd + if s.initializeParams.Capabilities != nil && + s.initializeParams.Capabilities.Workspace != nil && + s.initializeParams.Capabilities.Workspace.WorkspaceFolders != nil && + ptrIsTrue(s.initializeParams.Capabilities.Workspace.WorkspaceFolders) && + s.initializeParams.WorkspaceFolders != nil && + s.initializeParams.WorkspaceFolders.WorkspaceFolders != nil && + len(*s.initializeParams.WorkspaceFolders.WorkspaceFolders) == 1 { + cwd = lsproto.DocumentUri((*s.initializeParams.WorkspaceFolders.WorkspaceFolders)[0].Uri).FileName() + } else if s.initializeParams.RootUri.DocumentUri != nil { + cwd = s.initializeParams.RootUri.DocumentUri.FileName() + } else if s.initializeParams.RootPath != nil && s.initializeParams.RootPath.String != nil { + cwd = *s.initializeParams.RootPath.String + } + if !tspath.PathIsAbsolute(cwd) { + cwd = s.cwd + } + s.session = project.NewSession(&project.SessionInit{ Options: &project.SessionOptions{ - CurrentDirectory: s.cwd, + CurrentDirectory: cwd, DefaultLibraryPath: s.defaultLibraryPath, TypingsLocation: s.typingsLocation, PositionEncoding: s.positionEncoding, diff --git a/internal/module/resolver.go b/internal/module/resolver.go index 780f0deb67..e88a468cc0 100644 --- a/internal/module/resolver.go +++ b/internal/module/resolver.go @@ -1733,7 +1733,11 @@ func (r *resolutionState) readPackageJsonPeerDependencies(packageJsonInfo *packa r.tracer.write(diagnostics.X_package_json_has_a_peerDependencies_field.Message()) } packageDirectory := r.realPath(packageJsonInfo.PackageDirectory) - nodeModules := packageDirectory[:strings.LastIndex(packageDirectory, "/node_modules")+len("/node_modules")] + "/" + nodeModulesIndex := strings.LastIndex(packageDirectory, "/node_modules") + if nodeModulesIndex == -1 { + return "" + } + nodeModules := packageDirectory[:nodeModulesIndex+len("/node_modules")] + "/" builder := strings.Builder{} for name := range peerDependencies.Value { peerPackageJson := r.getPackageJsonInfo(nodeModules+name /*onlyRecordFailures*/, false) diff --git a/internal/project/configfileregistry.go b/internal/project/configfileregistry.go index bae60b66c9..801d249be7 100644 --- a/internal/project/configfileregistry.go +++ b/internal/project/configfileregistry.go @@ -41,7 +41,7 @@ type configFileEntry struct { // when this is set, no other fields will be used. retainingConfigs map[tspath.Path]struct{} // rootFilesWatch is a watch for the root files of this config file. 
- rootFilesWatch *WatchedFiles[[]string] + rootFilesWatch *WatchedFiles[patternsAndIgnored] } func newConfigFileEntry(fileName string) *configFileEntry { diff --git a/internal/project/configfileregistrybuilder.go b/internal/project/configfileregistrybuilder.go index 725bc343a8..a4b5a7ff6d 100644 --- a/internal/project/configfileregistrybuilder.go +++ b/internal/project/configfileregistrybuilder.go @@ -165,21 +165,61 @@ func (c *configFileRegistryBuilder) updateRootFilesWatch(fileName string, entry return } - wildcardGlobs := entry.commandLine.WildcardDirectories() - rootFileGlobs := make([]string, 0, len(wildcardGlobs)+1+len(entry.commandLine.ExtendedSourceFiles())) - rootFileGlobs = append(rootFileGlobs, fileName) - for _, extendedConfig := range entry.commandLine.ExtendedSourceFiles() { - rootFileGlobs = append(rootFileGlobs, extendedConfig) + var ignored map[string]struct{} + var globs []string + var externalDirectories []string + var includeWorkspace bool + var includeTsconfigDir bool + tsconfigDir := tspath.GetDirectoryPath(fileName) + wildcardDirectories := entry.commandLine.WildcardDirectories() + comparePathsOptions := tspath.ComparePathsOptions{ + CurrentDirectory: c.sessionOptions.CurrentDirectory, + UseCaseSensitiveFileNames: c.FS().UseCaseSensitiveFileNames(), } - for dir, recursive := range wildcardGlobs { - rootFileGlobs = append(rootFileGlobs, fmt.Sprintf("%s/%s", tspath.NormalizePath(dir), core.IfElse(recursive, recursiveFileGlobPattern, fileGlobPattern))) + for dir := range wildcardDirectories { + if tspath.ContainsPath(c.sessionOptions.CurrentDirectory, dir, comparePathsOptions) { + includeWorkspace = true + } else if tspath.ContainsPath(tsconfigDir, dir, comparePathsOptions) { + includeTsconfigDir = true + } else { + externalDirectories = append(externalDirectories, dir) + } } for _, fileName := range entry.commandLine.LiteralFileNames() { - rootFileGlobs = append(rootFileGlobs, fileName) + if tspath.ContainsPath(c.sessionOptions.CurrentDirectory, fileName, comparePathsOptions) { + includeWorkspace = true + } else if tspath.ContainsPath(tsconfigDir, fileName, comparePathsOptions) { + includeTsconfigDir = true + } else { + externalDirectories = append(externalDirectories, tspath.GetDirectoryPath(fileName)) + } } - slices.Sort(rootFileGlobs) - entry.rootFilesWatch = entry.rootFilesWatch.Clone(rootFileGlobs) + if includeWorkspace { + globs = append(globs, getRecursiveGlobPattern(c.sessionOptions.CurrentDirectory)) + } + if includeTsconfigDir { + globs = append(globs, getRecursiveGlobPattern(tsconfigDir)) + } + for _, fileName := range entry.commandLine.ExtendedSourceFiles() { + if includeWorkspace && tspath.ContainsPath(c.sessionOptions.CurrentDirectory, fileName, comparePathsOptions) { + continue + } + globs = append(globs, fileName) + } + if len(externalDirectories) > 0 { + commonParents, ignoredExternalDirs := tspath.GetCommonParents(externalDirectories, minWatchLocationDepth, getPathComponentsForWatching, comparePathsOptions) + for _, parent := range commonParents { + globs = append(globs, getRecursiveGlobPattern(parent)) + } + ignored = ignoredExternalDirs + } + + slices.Sort(globs) + entry.rootFilesWatch = entry.rootFilesWatch.Clone(patternsAndIgnored{ + patterns: globs, + ignored: ignored, + }) } // acquireConfigForProject loads a config file entry from the cache, or parses it if not already @@ -347,11 +387,8 @@ func (c *configFileRegistryBuilder) DidChangeFiles(summary FileChangeSummary, lo } logger.Logf("Checking if any of %d created files match root files for 
config %s", len(createdFiles), entry.Key()) for _, fileName := range createdFiles { - parsedGlobs := config.rootFilesWatch.ParsedGlobs() - for _, g := range parsedGlobs { - if g.Match(fileName) { - return true - } + if config.commandLine.PossiblyMatchesFileName(fileName) { + return true } } return false diff --git a/internal/project/project.go b/internal/project/project.go index 317621294b..386cc2f4be 100644 --- a/internal/project/project.go +++ b/internal/project/project.go @@ -69,10 +69,10 @@ type Project struct { // The ID of the snapshot that created the program stored in this project. ProgramLastUpdate uint64 + programFilesWatch *WatchedFiles[patternsAndIgnored] failedLookupsWatch *WatchedFiles[map[tspath.Path]string] affectingLocationsWatch *WatchedFiles[map[tspath.Path]string] - typingsFilesWatch *WatchedFiles[map[tspath.Path]string] - typingsDirectoryWatch *WatchedFiles[map[tspath.Path]string] + typingsWatch *WatchedFiles[patternsAndIgnored] checkerPool *checkerPool @@ -146,26 +146,26 @@ func NewProject( project.configFilePath = tspath.ToPath(configFileName, currentDirectory, builder.fs.fs.UseCaseSensitiveFileNames()) if builder.sessionOptions.WatchEnabled { + project.programFilesWatch = NewWatchedFiles( + "non-root program files for "+configFileName, + lsproto.WatchKindCreate|lsproto.WatchKindChange|lsproto.WatchKindDelete, + core.Identity, + ) project.failedLookupsWatch = NewWatchedFiles( "failed lookups for "+configFileName, lsproto.WatchKindCreate, - createResolutionLookupGlobMapper(project.currentDirectory, builder.fs.fs.UseCaseSensitiveFileNames()), + createResolutionLookupGlobMapper(builder.sessionOptions.CurrentDirectory, builder.sessionOptions.DefaultLibraryPath, project.currentDirectory, builder.fs.fs.UseCaseSensitiveFileNames()), ) project.affectingLocationsWatch = NewWatchedFiles( "affecting locations for "+configFileName, lsproto.WatchKindCreate|lsproto.WatchKindChange|lsproto.WatchKindDelete, - createResolutionLookupGlobMapper(project.currentDirectory, builder.fs.fs.UseCaseSensitiveFileNames()), + createResolutionLookupGlobMapper(builder.sessionOptions.CurrentDirectory, builder.sessionOptions.DefaultLibraryPath, project.currentDirectory, builder.fs.fs.UseCaseSensitiveFileNames()), ) if builder.sessionOptions.TypingsLocation != "" { - project.typingsFilesWatch = NewWatchedFiles( + project.typingsWatch = NewWatchedFiles( "typings installer files", lsproto.WatchKindCreate|lsproto.WatchKindChange|lsproto.WatchKindDelete, - globMapperForTypingsInstaller, - ) - project.typingsDirectoryWatch = NewWatchedFiles( - "typings installer directories", - lsproto.WatchKindCreate|lsproto.WatchKindDelete, - globMapperForTypingsInstaller, + core.Identity, ) } } @@ -221,10 +221,10 @@ func (p *Project) Clone() *Project { ProgramUpdateKind: ProgramUpdateKindNone, ProgramLastUpdate: p.ProgramLastUpdate, + programFilesWatch: p.programFilesWatch, failedLookupsWatch: p.failedLookupsWatch, affectingLocationsWatch: p.affectingLocationsWatch, - typingsFilesWatch: p.typingsFilesWatch, - typingsDirectoryWatch: p.typingsDirectoryWatch, + typingsWatch: p.typingsWatch, checkerPool: p.checkerPool, @@ -327,14 +327,19 @@ func (p *Project) CreateProgram() CreateProgramResult { } } -func (p *Project) CloneWatchers() (failedLookupsWatch *WatchedFiles[map[tspath.Path]string], affectingLocationsWatch *WatchedFiles[map[tspath.Path]string]) { +func (p *Project) CloneWatchers(workspaceDir string, libDir string) (programFilesWatch *WatchedFiles[patternsAndIgnored], failedLookupsWatch 
*WatchedFiles[map[tspath.Path]string], affectingLocationsWatch *WatchedFiles[map[tspath.Path]string]) { failedLookups := make(map[tspath.Path]string) affectingLocations := make(map[tspath.Path]string) + programFiles := getNonRootFileGlobs(workspaceDir, libDir, p.Program.GetSourceFiles(), p.CommandLine.FileNamesByPath(), tspath.ComparePathsOptions{ + UseCaseSensitiveFileNames: p.host.FS().UseCaseSensitiveFileNames(), + CurrentDirectory: p.currentDirectory, + }) extractLookups(p.toPath, failedLookups, affectingLocations, p.Program.GetResolvedModules()) extractLookups(p.toPath, failedLookups, affectingLocations, p.Program.GetResolvedTypeReferenceDirectives()) + programFilesWatch = p.programFilesWatch.Clone(programFiles) failedLookupsWatch = p.failedLookupsWatch.Clone(failedLookups) affectingLocationsWatch = p.affectingLocationsWatch.Clone(affectingLocations) - return failedLookupsWatch, affectingLocationsWatch + return programFilesWatch, failedLookupsWatch, affectingLocationsWatch } func (p *Project) log(msg string) { diff --git a/internal/project/projectcollectionbuilder.go b/internal/project/projectcollectionbuilder.go index 7717016908..c898292bea 100644 --- a/internal/project/projectcollectionbuilder.go +++ b/internal/project/projectcollectionbuilder.go @@ -343,14 +343,14 @@ func (b *projectCollectionBuilder) DidUpdateATAState(ataChanges map[tspath.Path] // the set of typings files is actually different. p.installedTypingsInfo = ataChange.TypingsInfo p.typingsFiles = ataChange.TypingsFiles - fileWatchGlobs, directoryWatchGlobs := getTypingsLocationsGlobs( + typingsWatchGlobs := getTypingsLocationsGlobs( ataChange.TypingsFilesToWatch, b.sessionOptions.TypingsLocation, + b.sessionOptions.CurrentDirectory, p.currentDirectory, b.fs.fs.UseCaseSensitiveFileNames(), ) - p.typingsFilesWatch = p.typingsFilesWatch.Clone(fileWatchGlobs) - p.typingsDirectoryWatch = p.typingsDirectoryWatch.Clone(directoryWatchGlobs) + p.typingsWatch = p.typingsWatch.Clone(typingsWatchGlobs) p.dirty = true p.dirtyFilePath = "" }, @@ -535,7 +535,7 @@ func (b *projectCollectionBuilder) findOrCreateDefaultConfiguredProjectWorker( // For composite projects, we can get an early negative result. // !!! what about declaration files in node_modules? wouldn't it be better to // check project inclusion if the project is already loaded? 
- if !config.MatchesFileName(fileName) { + if _, ok := config.FileNamesByPath()[path]; !ok { node.logger.Log("Project does not contain file (by composite config inclusion)") return false, false } @@ -793,7 +793,8 @@ func (b *projectCollectionBuilder) updateProgram(entry dirty.Value[*Project], lo if result.UpdateKind == ProgramUpdateKindNewFiles { filesChanged = true if b.sessionOptions.WatchEnabled { - failedLookupsWatch, affectingLocationsWatch := project.CloneWatchers() + programFilesWatch, failedLookupsWatch, affectingLocationsWatch := project.CloneWatchers(b.sessionOptions.CurrentDirectory, b.sessionOptions.DefaultLibraryPath) + project.programFilesWatch = programFilesWatch project.failedLookupsWatch = failedLookupsWatch project.affectingLocationsWatch = affectingLocationsWatch } diff --git a/internal/project/projectlifetime_test.go b/internal/project/projectlifetime_test.go index 0e824f2d96..5c7b8c4194 100644 --- a/internal/project/projectlifetime_test.go +++ b/internal/project/projectlifetime_test.go @@ -70,7 +70,7 @@ func TestProjectLifetime(t *testing.T) { assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2) assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil) assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p2/tsconfig.json")) != nil) - assert.Equal(t, len(utils.Client().WatchFilesCalls()), 2) + assert.Equal(t, len(utils.Client().WatchFilesCalls()), 1) assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil) assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p2/tsconfig.json")) != nil) @@ -89,8 +89,8 @@ func TestProjectLifetime(t *testing.T) { assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p1/tsconfig.json")) == nil) assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p2/tsconfig.json")) != nil) assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p3/tsconfig.json")) != nil) - assert.Equal(t, len(utils.Client().WatchFilesCalls()), 3) - assert.Equal(t, len(utils.Client().UnwatchFilesCalls()), 1) + assert.Equal(t, len(utils.Client().WatchFilesCalls()), 1) + assert.Equal(t, len(utils.Client().UnwatchFilesCalls()), 0) // Close p2 and p3 files, open p1 file again session.DidCloseFile(context.Background(), uri2) @@ -105,8 +105,8 @@ func TestProjectLifetime(t *testing.T) { assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil) assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p2/tsconfig.json")) == nil) assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p3/tsconfig.json")) == nil) - assert.Equal(t, len(utils.Client().WatchFilesCalls()), 4) - assert.Equal(t, len(utils.Client().UnwatchFilesCalls()), 3) + assert.Equal(t, len(utils.Client().WatchFilesCalls()), 1) + assert.Equal(t, len(utils.Client().UnwatchFilesCalls()), 0) }) t.Run("unrooted inferred projects", func(t *testing.T) { diff --git a/internal/project/session.go b/internal/project/session.go index 0b6e537d54..a51831ed21 100644 --- a/internal/project/session.go +++ b/internal/project/session.go @@ -108,6 +108,11 @@ type Session struct { // after file watch changes and ATA updates. 
diagnosticsRefreshCancel context.CancelFunc diagnosticsRefreshMu sync.Mutex + + // watches tracks the current watch globs and how many individual WatchedFiles + // are using each glob. + watches map[fileSystemWatcherKey]*fileSystemWatcherValue + watchesMu sync.Mutex } func NewSession(init *SessionInit) *Session { @@ -149,6 +154,7 @@ func NewSession(init *SessionInit) *Session { toPath, ), pendingATAChanges: make(map[tspath.Path]*ATAStateChange), + watches: make(map[fileSystemWatcherKey]*fileSystemWatcherValue), } if init.Options.TypingsLocation != "" && init.NpmExecutor != nil { @@ -410,33 +416,71 @@ func (s *Session) WaitForBackgroundTasks() { s.backgroundQueue.Wait() } -func updateWatch[T any](ctx context.Context, client Client, logger logging.Logger, oldWatcher, newWatcher *WatchedFiles[T]) []error { +func updateWatch[T any](ctx context.Context, session *Session, logger logging.Logger, oldWatcher, newWatcher *WatchedFiles[T]) []error { var errors []error + session.watchesMu.Lock() + defer session.watchesMu.Unlock() if newWatcher != nil { - if id, watchers := newWatcher.Watchers(); len(watchers) > 0 { - if err := client.WatchFiles(ctx, id, watchers); err != nil { - errors = append(errors, err) - } - if logger != nil { - if oldWatcher == nil { - logger.Log(fmt.Sprintf("Added new watch: %s", id)) - } else { - logger.Log(fmt.Sprintf("Updated watch: %s", id)) + if id, watchers, ignored := newWatcher.Watchers(); len(watchers) > 0 { + var newWatchers collections.OrderedMap[WatcherID, *lsproto.FileSystemWatcher] + for i, watcher := range watchers { + key := toFileSystemWatcherKey(watcher) + value := session.watches[key] + globId := WatcherID(fmt.Sprintf("%s.%d", id, i)) + if value == nil { + value = &fileSystemWatcherValue{id: globId} + session.watches[key] = value + } + value.count++ + if value.count == 1 { + newWatchers.Set(globId, watcher) } - for _, watcher := range watchers { + } + for id, watcher := range newWatchers.Entries() { + if err := session.client.WatchFiles(ctx, id, []*lsproto.FileSystemWatcher{watcher}); err != nil { + errors = append(errors, err) + } else if logger != nil { + if oldWatcher == nil { + logger.Log(fmt.Sprintf("Added new watch: %s", id)) + } else { + logger.Log(fmt.Sprintf("Updated watch: %s", id)) + } logger.Log("\t" + *watcher.GlobPattern.Pattern) + logger.Log("") + } + } + if len(ignored) > 0 { + logger.Logf("%d paths ineligible for watching", len(ignored)) + if logger.IsVerbose() { + for path := range ignored { + logger.Log("\t" + path) + } } - logger.Log("") } } } if oldWatcher != nil { - if id, watchers := oldWatcher.Watchers(); len(watchers) > 0 { - if err := client.UnwatchFiles(ctx, id); err != nil { - errors = append(errors, err) + if _, watchers, _ := oldWatcher.Watchers(); len(watchers) > 0 { + var removedWatchers []WatcherID + for _, watcher := range watchers { + key := toFileSystemWatcherKey(watcher) + value := session.watches[key] + if value == nil { + continue + } + if value.count <= 1 { + delete(session.watches, key) + removedWatchers = append(removedWatchers, value.id) + } else { + value.count-- + } } - if logger != nil && newWatcher == nil { - logger.Log(fmt.Sprintf("Removed watch: %s", id)) + for _, id := range removedWatchers { + if err := session.client.UnwatchFiles(ctx, id); err != nil { + errors = append(errors, err) + } else if logger != nil && newWatcher == nil { + logger.Log(fmt.Sprintf("Removed watch: %s", id)) + } } } } @@ -445,6 +489,7 @@ func updateWatch[T any](ctx context.Context, client Client, logger logging.Logge func (s *Session) 
updateWatches(oldSnapshot *Snapshot, newSnapshot *Snapshot) error { var errors []error + start := time.Now() ctx := context.Background() core.DiffMapsFunc( oldSnapshot.ConfigFileRegistry.configs, @@ -453,13 +498,13 @@ func (s *Session) updateWatches(oldSnapshot *Snapshot, newSnapshot *Snapshot) er return a.rootFilesWatch.ID() == b.rootFilesWatch.ID() }, func(_ tspath.Path, addedEntry *configFileEntry) { - errors = append(errors, updateWatch(ctx, s.client, s.logger, nil, addedEntry.rootFilesWatch)...) + errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedEntry.rootFilesWatch)...) }, func(_ tspath.Path, removedEntry *configFileEntry) { - errors = append(errors, updateWatch(ctx, s.client, s.logger, removedEntry.rootFilesWatch, nil)...) + errors = append(errors, updateWatch(ctx, s, s.logger, removedEntry.rootFilesWatch, nil)...) }, func(_ tspath.Path, oldEntry, newEntry *configFileEntry) { - errors = append(errors, updateWatch(ctx, s.client, s.logger, oldEntry.rootFilesWatch, newEntry.rootFilesWatch)...) + errors = append(errors, updateWatch(ctx, s, s.logger, oldEntry.rootFilesWatch, newEntry.rootFilesWatch)...) }, ) @@ -467,35 +512,37 @@ func (s *Session) updateWatches(oldSnapshot *Snapshot, newSnapshot *Snapshot) er oldSnapshot.ProjectCollection.ProjectsByPath(), newSnapshot.ProjectCollection.ProjectsByPath(), func(_ tspath.Path, addedProject *Project) { - errors = append(errors, updateWatch(ctx, s.client, s.logger, nil, addedProject.affectingLocationsWatch)...) - errors = append(errors, updateWatch(ctx, s.client, s.logger, nil, addedProject.failedLookupsWatch)...) - errors = append(errors, updateWatch(ctx, s.client, s.logger, nil, addedProject.typingsFilesWatch)...) - errors = append(errors, updateWatch(ctx, s.client, s.logger, nil, addedProject.typingsDirectoryWatch)...) + errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedProject.programFilesWatch)...) + errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedProject.affectingLocationsWatch)...) + errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedProject.failedLookupsWatch)...) + errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedProject.typingsWatch)...) }, func(_ tspath.Path, removedProject *Project) { - errors = append(errors, updateWatch(ctx, s.client, s.logger, removedProject.affectingLocationsWatch, nil)...) - errors = append(errors, updateWatch(ctx, s.client, s.logger, removedProject.failedLookupsWatch, nil)...) - errors = append(errors, updateWatch(ctx, s.client, s.logger, removedProject.typingsFilesWatch, nil)...) - errors = append(errors, updateWatch(ctx, s.client, s.logger, removedProject.typingsDirectoryWatch, nil)...) + errors = append(errors, updateWatch(ctx, s, s.logger, removedProject.programFilesWatch, nil)...) + errors = append(errors, updateWatch(ctx, s, s.logger, removedProject.affectingLocationsWatch, nil)...) + errors = append(errors, updateWatch(ctx, s, s.logger, removedProject.failedLookupsWatch, nil)...) + errors = append(errors, updateWatch(ctx, s, s.logger, removedProject.typingsWatch, nil)...) }, func(_ tspath.Path, oldProject, newProject *Project) { + if oldProject.programFilesWatch.ID() != newProject.programFilesWatch.ID() { + errors = append(errors, updateWatch(ctx, s, s.logger, oldProject.programFilesWatch, newProject.programFilesWatch)...) 
+ } if oldProject.affectingLocationsWatch.ID() != newProject.affectingLocationsWatch.ID() { - errors = append(errors, updateWatch(ctx, s.client, s.logger, oldProject.affectingLocationsWatch, newProject.affectingLocationsWatch)...) + errors = append(errors, updateWatch(ctx, s, s.logger, oldProject.affectingLocationsWatch, newProject.affectingLocationsWatch)...) } if oldProject.failedLookupsWatch.ID() != newProject.failedLookupsWatch.ID() { - errors = append(errors, updateWatch(ctx, s.client, s.logger, oldProject.failedLookupsWatch, newProject.failedLookupsWatch)...) - } - if oldProject.typingsFilesWatch.ID() != newProject.typingsFilesWatch.ID() { - errors = append(errors, updateWatch(ctx, s.client, s.logger, oldProject.typingsFilesWatch, newProject.typingsFilesWatch)...) + errors = append(errors, updateWatch(ctx, s, s.logger, oldProject.failedLookupsWatch, newProject.failedLookupsWatch)...) } - if oldProject.typingsDirectoryWatch.ID() != newProject.typingsDirectoryWatch.ID() { - errors = append(errors, updateWatch(ctx, s.client, s.logger, oldProject.typingsDirectoryWatch, newProject.typingsDirectoryWatch)...) + if oldProject.typingsWatch.ID() != newProject.typingsWatch.ID() { + errors = append(errors, updateWatch(ctx, s, s.logger, oldProject.typingsWatch, newProject.typingsWatch)...) } }, ) if len(errors) > 0 { return fmt.Errorf("errors updating watches: %v", errors) + } else if s.options.LoggingEnabled { + s.logger.Log(fmt.Sprintf("Updated watches in %v", time.Since(start))) } return nil } diff --git a/internal/project/session_test.go b/internal/project/session_test.go index 9da57316bd..164f7eea68 100644 --- a/internal/project/session_test.go +++ b/internal/project/session_test.go @@ -3,10 +3,14 @@ package project_test import ( "context" "maps" + "strings" "testing" "github.com/microsoft/typescript-go/internal/bundled" + "github.com/microsoft/typescript-go/internal/core" + "github.com/microsoft/typescript-go/internal/glob" "github.com/microsoft/typescript-go/internal/lsp/lsproto" + "github.com/microsoft/typescript-go/internal/project" "github.com/microsoft/typescript-go/internal/testutil/projecttestutil" "github.com/microsoft/typescript-go/internal/tspath" "gotest.tools/v3/assert" @@ -548,6 +552,67 @@ func TestSession(t *testing.T) { assert.Check(t, lsAfter.GetProgram() != programBefore) }) + t.Run("change program file not in tsconfig root files", func(t *testing.T) { + t.Parallel() + for _, workspaceDir := range []string{"/", "/home/projects/TS/p1", "/somewhere/else/entirely"} { + t.Run("workspaceDir="+strings.ReplaceAll(workspaceDir, "/", "_"), func(t *testing.T) { + t.Parallel() + files := map[string]any{ + "/home/projects/TS/p1/tsconfig.json": `{ + "compilerOptions": { + "noLib": true, + "module": "nodenext", + "strict": true + }, + "files": ["src/index.ts"] + }`, + "/home/projects/TS/p1/src/index.ts": `import { x } from "../../x";`, + "/home/projects/TS/x.ts": `export const x = 1;`, + } + + session, utils := projecttestutil.SetupWithOptions(files, &project.SessionOptions{ + CurrentDirectory: workspaceDir, + DefaultLibraryPath: bundled.LibPath(), + TypingsLocation: projecttestutil.TestTypingsLocation, + PositionEncoding: lsproto.PositionEncodingKindUTF8, + WatchEnabled: true, + LoggingEnabled: true, + }) + session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript) + lsBefore, err := session.GetLanguageService(context.Background(), 
"file:///home/projects/TS/p1/src/index.ts") + assert.NilError(t, err) + programBefore := lsBefore.GetProgram() + session.WaitForBackgroundTasks() + + var xWatched bool + outer: + for _, call := range utils.Client().WatchFilesCalls() { + for _, watcher := range call.Watchers { + if core.Must(glob.Parse(*watcher.GlobPattern.Pattern)).Match("/home/projects/TS/x.ts") { + xWatched = true + break outer + } + } + } + assert.Check(t, xWatched) + + err = utils.FS().WriteFile("/home/projects/TS/x.ts", `export const x = 2;`, false) + assert.NilError(t, err) + + session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{ + { + Type: lsproto.FileChangeTypeChanged, + Uri: "file:///home/projects/TS/x.ts", + }, + }) + + lsAfter, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts") + assert.NilError(t, err) + assert.Check(t, lsAfter.GetProgram() != programBefore) + }) + } + }) + t.Run("change config file", func(t *testing.T) { t.Parallel() files := map[string]any{ diff --git a/internal/project/watch.go b/internal/project/watch.go index 7d36ad9a43..2354040e3e 100644 --- a/internal/project/watch.go +++ b/internal/project/watch.go @@ -8,19 +8,44 @@ import ( "sync" "sync/atomic" + "github.com/microsoft/typescript-go/internal/ast" "github.com/microsoft/typescript-go/internal/collections" "github.com/microsoft/typescript-go/internal/core" - "github.com/microsoft/typescript-go/internal/glob" "github.com/microsoft/typescript-go/internal/lsp/lsproto" "github.com/microsoft/typescript-go/internal/module" "github.com/microsoft/typescript-go/internal/tspath" ) const ( - fileGlobPattern = "*.{js,jsx,mjs,cjs,ts,tsx,mts,cts,json}" - recursiveFileGlobPattern = "**/*.{js,jsx,mjs,cjs,ts,tsx,mts,cts,json}" + minWatchLocationDepth = 2 ) +type fileSystemWatcherKey struct { + pattern string + kind lsproto.WatchKind +} + +type fileSystemWatcherValue struct { + count int + id WatcherID +} + +type patternsAndIgnored struct { + patterns []string + ignored map[string]struct{} +} + +func toFileSystemWatcherKey(w *lsproto.FileSystemWatcher) fileSystemWatcherKey { + if w.GlobPattern.RelativePattern != nil { + panic("relative globs not implemented") + } + kind := w.Kind + if kind == nil { + kind = ptrTo(lsproto.WatchKindCreate | lsproto.WatchKindChange | lsproto.WatchKindDelete) + } + return fileSystemWatcherKey{pattern: *w.GlobPattern.Pattern, kind: *kind} +} + type WatcherID string var watcherID atomic.Uint64 @@ -28,17 +53,17 @@ var watcherID atomic.Uint64 type WatchedFiles[T any] struct { name string watchKind lsproto.WatchKind - computeGlobPatterns func(input T) []string - - input T - computeWatchersOnce sync.Once - watchers []*lsproto.FileSystemWatcher - computeParsedGlobsOnce sync.Once - parsedGlobs []*glob.Glob - id uint64 + computeGlobPatterns func(input T) patternsAndIgnored + + mu sync.RWMutex + input T + computeWatchersOnce sync.Once + watchers []*lsproto.FileSystemWatcher + ignored map[string]struct{} + id uint64 } -func NewWatchedFiles[T any](name string, watchKind lsproto.WatchKind, computeGlobPatterns func(input T) []string) *WatchedFiles[T] { +func NewWatchedFiles[T any](name string, watchKind lsproto.WatchKind, computeGlobPatterns func(input T) patternsAndIgnored) *WatchedFiles[T] { return &WatchedFiles[T]{ id: watcherID.Add(1), name: name, @@ -47,31 +72,40 @@ func NewWatchedFiles[T any](name string, watchKind lsproto.WatchKind, computeGlo } } -func (w *WatchedFiles[T]) Watchers() (WatcherID, []*lsproto.FileSystemWatcher) { +func (w *WatchedFiles[T]) Watchers() 
(WatcherID, []*lsproto.FileSystemWatcher, map[string]struct{}) { w.computeWatchersOnce.Do(func() { - newWatchers := core.Map(w.computeGlobPatterns(w.input), func(glob string) *lsproto.FileSystemWatcher { - return &lsproto.FileSystemWatcher{ - GlobPattern: lsproto.PatternOrRelativePattern{ - Pattern: &glob, - }, - Kind: &w.watchKind, - } - }) - if !slices.EqualFunc(w.watchers, newWatchers, func(a, b *lsproto.FileSystemWatcher) bool { - return *a.GlobPattern.Pattern == *b.GlobPattern.Pattern + w.mu.Lock() + defer w.mu.Unlock() + result := w.computeGlobPatterns(w.input) + globs := result.patterns + ignored := result.ignored + // ignored is only used for logging and doesn't affect watcher identity + w.ignored = ignored + if !slices.EqualFunc(w.watchers, globs, func(a *lsproto.FileSystemWatcher, b string) bool { + return *a.GlobPattern.Pattern == b }) { - w.watchers = newWatchers + w.watchers = core.Map(globs, func(glob string) *lsproto.FileSystemWatcher { + return &lsproto.FileSystemWatcher{ + GlobPattern: lsproto.PatternOrRelativePattern{ + Pattern: &glob, + }, + Kind: &w.watchKind, + } + }) w.id = watcherID.Add(1) } }) - return WatcherID(fmt.Sprintf("%s watcher %d", w.name, w.id)), w.watchers + + w.mu.RLock() + defer w.mu.RUnlock() + return WatcherID(fmt.Sprintf("%s watcher %d", w.name, w.id)), w.watchers, w.ignored } func (w *WatchedFiles[T]) ID() WatcherID { if w == nil { return "" } - id, _ := w.Watchers() + id, _, _ := w.Watchers() return id } @@ -83,44 +117,29 @@ func (w *WatchedFiles[T]) WatchKind() lsproto.WatchKind { return w.watchKind } -func (w *WatchedFiles[T]) ParsedGlobs() []*glob.Glob { - w.computeParsedGlobsOnce.Do(func() { - patterns := w.computeGlobPatterns(w.input) - w.parsedGlobs = make([]*glob.Glob, 0, len(patterns)) - for _, pattern := range patterns { - if g, err := glob.Parse(pattern); err == nil { - w.parsedGlobs = append(w.parsedGlobs, g) - } else { - panic("failed to parse glob pattern: " + pattern) - } - } - }) - return w.parsedGlobs -} - func (w *WatchedFiles[T]) Clone(input T) *WatchedFiles[T] { + w.mu.RLock() + defer w.mu.RUnlock() return &WatchedFiles[T]{ name: w.name, watchKind: w.watchKind, computeGlobPatterns: w.computeGlobPatterns, + watchers: w.watchers, input: input, - parsedGlobs: w.parsedGlobs, } } -func globMapperForTypingsInstaller(data map[tspath.Path]string) []string { - return slices.AppendSeq(make([]string, 0, len(data)), maps.Values(data)) -} - -func createResolutionLookupGlobMapper(currentDirectory string, useCaseSensitiveFileNames bool) func(data map[tspath.Path]string) []string { - rootPath := tspath.ToPath(currentDirectory, "", useCaseSensitiveFileNames) - rootPathComponents := tspath.GetPathComponents(string(rootPath), "") - isRootWatchable := canWatchDirectoryOrFile(rootPathComponents) +func createResolutionLookupGlobMapper(workspaceDirectory string, libDirectory string, currentDirectory string, useCaseSensitiveFileNames bool) func(data map[tspath.Path]string) patternsAndIgnored { + comparePathsOptions := tspath.ComparePathsOptions{ + CurrentDirectory: currentDirectory, + UseCaseSensitiveFileNames: useCaseSensitiveFileNames, + } - return func(data map[tspath.Path]string) []string { - // dir -> recursive - globSet := make(map[string]bool) + return func(data map[tspath.Path]string) patternsAndIgnored { + var ignored map[string]struct{} var seenDirs collections.Set[string] + var includeWorkspace, includeRoot, includeLib bool + var nodeModulesDirectories, externalDirectories map[tspath.Path]string for path, fileName := range data { // Assuming 
all of the input paths are filenames, we can avoid @@ -130,277 +149,138 @@ func createResolutionLookupGlobMapper(currentDirectory string, useCaseSensitiveF continue } - w := getDirectoryToWatchFailedLookupLocation( - fileName, - path, - currentDirectory, - rootPath, - rootPathComponents, - isRootWatchable, - true, - ) - if w == nil { - continue + if tspath.ContainsPath(workspaceDirectory, fileName, comparePathsOptions) { + includeWorkspace = true + } else if tspath.ContainsPath(currentDirectory, fileName, comparePathsOptions) { + includeRoot = true + } else if tspath.ContainsPath(libDirectory, fileName, comparePathsOptions) { + includeLib = true + } else if idx := strings.Index(fileName, "/node_modules/"); idx != -1 { + if nodeModulesDirectories == nil { + nodeModulesDirectories = make(map[tspath.Path]string) + } + dir := fileName[:idx+len("/node_modules")] + nodeModulesDirectories[tspath.ToPath(dir, currentDirectory, useCaseSensitiveFileNames)] = dir + } else { + if externalDirectories == nil { + externalDirectories = make(map[tspath.Path]string) + } + externalDirectories[path.GetDirectoryPath()] = tspath.GetDirectoryPath(fileName) } - globSet[w.dir] = globSet[w.dir] || !w.nonRecursive } - globs := make([]string, 0, len(globSet)) - for dir, recursive := range globSet { - if recursive { - globs = append(globs, dir+"/"+recursiveFileGlobPattern) - } else { - globs = append(globs, dir+"/"+fileGlobPattern) + var globs []string + if includeWorkspace { + globs = append(globs, getRecursiveGlobPattern(workspaceDirectory)) + } + if includeRoot { + globs = append(globs, getRecursiveGlobPattern(currentDirectory)) + } + if includeLib { + globs = append(globs, getRecursiveGlobPattern(libDirectory)) + } + for _, dir := range nodeModulesDirectories { + globs = append(globs, getRecursiveGlobPattern(dir)) + } + if len(externalDirectories) > 0 { + externalDirectoryParents, ignoredExternalDirs := tspath.GetCommonParents( + slices.Collect(maps.Values(externalDirectories)), + minWatchLocationDepth, + getPathComponentsForWatching, + comparePathsOptions, + ) + slices.Sort(externalDirectoryParents) + ignored = ignoredExternalDirs + for _, dir := range externalDirectoryParents { + globs = append(globs, getRecursiveGlobPattern(dir)) } } - slices.Sort(globs) - return globs + return patternsAndIgnored{ + patterns: globs, + ignored: ignored, + } } } -func getTypingsLocationsGlobs(typingsFiles []string, typingsLocation string, currentDirectory string, useCaseSensitiveFileNames bool) (fileGlobs map[tspath.Path]string, directoryGlobs map[tspath.Path]string) { +func getTypingsLocationsGlobs( + typingsFiles []string, + typingsLocation string, + workspaceDirectory string, + currentDirectory string, + useCaseSensitiveFileNames bool, +) patternsAndIgnored { + var includeTypingsLocation, includeWorkspace bool + externalDirectories := make(map[tspath.Path]string) + globs := make(map[tspath.Path]string) comparePathsOptions := tspath.ComparePathsOptions{ CurrentDirectory: currentDirectory, UseCaseSensitiveFileNames: useCaseSensitiveFileNames, } for _, file := range typingsFiles { - basename := tspath.GetBaseFileName(file) - if basename == "package.json" || basename == "bower.json" { - // package.json or bower.json exists, watch the file to detect changes and update typings - if fileGlobs == nil { - fileGlobs = map[tspath.Path]string{} - } - fileGlobs[tspath.ToPath(file, currentDirectory, useCaseSensitiveFileNames)] = file - } else { - var globLocation string - // path in projectRoot, watch project root - if 
tspath.ContainsPath(currentDirectory, file, comparePathsOptions) { - currentDirectoryLen := len(currentDirectory) + 1 - subDirectory := strings.IndexRune(file[currentDirectoryLen:], tspath.DirectorySeparator) - if subDirectory != -1 { - // Watch subDirectory - globLocation = file[0 : currentDirectoryLen+subDirectory] - } else { - // Watch the directory itself - globLocation = file - } - } else { - // path in global cache, watch global cache - // else watch node_modules or bower_components - globLocation = core.IfElse(tspath.ContainsPath(typingsLocation, file, comparePathsOptions), typingsLocation, file) - } - // package.json or bower.json exists, watch the file to detect changes and update typings - if directoryGlobs == nil { - directoryGlobs = map[tspath.Path]string{} - } - directoryGlobs[tspath.ToPath(globLocation, currentDirectory, useCaseSensitiveFileNames)] = fmt.Sprintf("%s/%s", globLocation, recursiveFileGlobPattern) - } - } - return fileGlobs, directoryGlobs -} - -type directoryOfFailedLookupWatch struct { - dir string - dirPath tspath.Path - nonRecursive bool - packageDir *string - packageDirPath *tspath.Path -} - -func getDirectoryToWatchFailedLookupLocation( - failedLookupLocation string, - failedLookupLocationPath tspath.Path, - rootDir string, - rootPath tspath.Path, - rootPathComponents []string, - isRootWatchable bool, - preferNonRecursiveWatch bool, -) *directoryOfFailedLookupWatch { - failedLookupPathComponents := tspath.GetPathComponents(string(failedLookupLocationPath), "") - failedLookupComponents := tspath.GetPathComponents(failedLookupLocation, "") - perceivedOsRootLength := perceivedOsRootLengthForWatching(failedLookupPathComponents, len(failedLookupPathComponents)) - if len(failedLookupPathComponents) <= perceivedOsRootLength+1 { - return nil - } - // If directory path contains node module, get the most parent node_modules directory for watching - nodeModulesIndex := slices.Index(failedLookupPathComponents, "node_modules") - if nodeModulesIndex != -1 && nodeModulesIndex+1 <= perceivedOsRootLength+1 { - return nil - } - lastNodeModulesIndex := lastIndex(failedLookupPathComponents, "node_modules") - if isRootWatchable && isInDirectoryPath(rootPathComponents, failedLookupPathComponents) { - if len(failedLookupPathComponents) > len(rootPathComponents)+1 { - // Instead of watching root, watch directory in root to avoid watching excluded directories not needed for module resolution - return getDirectoryOfFailedLookupWatch( - failedLookupComponents, - failedLookupPathComponents, - max(len(rootPathComponents)+1, perceivedOsRootLength+1), - lastNodeModulesIndex, - false, - ) + if tspath.ContainsPath(typingsLocation, file, comparePathsOptions) { + includeTypingsLocation = true + } else if !tspath.ContainsPath(workspaceDirectory, file, comparePathsOptions) { + directory := tspath.GetDirectoryPath(file) + externalDirectories[tspath.ToPath(directory, currentDirectory, useCaseSensitiveFileNames)] = directory } else { - // Always watch root directory non recursively - return &directoryOfFailedLookupWatch{ - dir: rootDir, - dirPath: rootPath, - nonRecursive: true, - } + includeWorkspace = true } } - - return getDirectoryToWatchFromFailedLookupLocationDirectory( - failedLookupComponents, - failedLookupPathComponents, - len(failedLookupPathComponents)-1, - perceivedOsRootLength, - nodeModulesIndex, - rootPathComponents, - lastNodeModulesIndex, - preferNonRecursiveWatch, + externalDirectoryParents, ignored := tspath.GetCommonParents( + slices.Collect(maps.Values(externalDirectories)), 
+ minWatchLocationDepth, + getPathComponentsForWatching, + comparePathsOptions, ) -} - -func getDirectoryToWatchFromFailedLookupLocationDirectory( - dirComponents []string, - dirPathComponents []string, - dirPathComponentsLength int, - perceivedOsRootLength int, - nodeModulesIndex int, - rootPathComponents []string, - lastNodeModulesIndex int, - preferNonRecursiveWatch bool, -) *directoryOfFailedLookupWatch { - // If directory path contains node module, get the most parent node_modules directory for watching - if nodeModulesIndex != -1 { - // If the directory is node_modules use it to watch, always watch it recursively - return getDirectoryOfFailedLookupWatch( - dirComponents, - dirPathComponents, - nodeModulesIndex+1, - lastNodeModulesIndex, - false, - ) + slices.Sort(externalDirectoryParents) + if includeWorkspace { + globs[tspath.ToPath(workspaceDirectory, currentDirectory, useCaseSensitiveFileNames)] = getRecursiveGlobPattern(workspaceDirectory) } - - // Use some ancestor of the root directory - nonRecursive := true - length := dirPathComponentsLength - if !preferNonRecursiveWatch { - for i := range dirPathComponentsLength { - if dirPathComponents[i] != rootPathComponents[i] { - nonRecursive = false - length = max(i+1, perceivedOsRootLength+1) - break - } - } + if includeTypingsLocation { + globs[tspath.ToPath(typingsLocation, currentDirectory, useCaseSensitiveFileNames)] = getRecursiveGlobPattern(typingsLocation) } - return getDirectoryOfFailedLookupWatch( - dirComponents, - dirPathComponents, - length, - lastNodeModulesIndex, - nonRecursive, - ) -} - -func getDirectoryOfFailedLookupWatch( - dirComponents []string, - dirPathComponents []string, - length int, - lastNodeModulesIndex int, - nonRecursive bool, -) *directoryOfFailedLookupWatch { - packageDirLength := -1 - if lastNodeModulesIndex != -1 && lastNodeModulesIndex+1 >= length && lastNodeModulesIndex+2 < len(dirPathComponents) { - if !strings.HasPrefix(dirPathComponents[lastNodeModulesIndex+1], "@") { - packageDirLength = lastNodeModulesIndex + 2 - } else if lastNodeModulesIndex+3 < len(dirPathComponents) { - packageDirLength = lastNodeModulesIndex + 3 - } - } - var packageDir *string - var packageDirPath *tspath.Path - if packageDirLength != -1 { - packageDir = ptrTo(tspath.GetPathFromPathComponents(dirPathComponents[:packageDirLength])) - packageDirPath = ptrTo(tspath.Path(tspath.GetPathFromPathComponents(dirComponents[:packageDirLength]))) + for _, dir := range externalDirectoryParents { + globs[tspath.ToPath(dir, currentDirectory, useCaseSensitiveFileNames)] = getRecursiveGlobPattern(dir) } - - return &directoryOfFailedLookupWatch{ - dir: tspath.GetPathFromPathComponents(dirComponents[:length]), - dirPath: tspath.Path(tspath.GetPathFromPathComponents(dirPathComponents[:length])), - nonRecursive: nonRecursive, - packageDir: packageDir, - packageDirPath: packageDirPath, + return patternsAndIgnored{ + patterns: slices.Collect(maps.Values(globs)), + ignored: ignored, } } -func perceivedOsRootLengthForWatching(pathComponents []string, length int) int { - // Ignore "/", "c:/" - if length <= 1 { - return 1 - } - indexAfterOsRoot := 1 - firstComponent := pathComponents[0] - isDosStyle := len(firstComponent) >= 2 && tspath.IsVolumeCharacter(firstComponent[0]) && firstComponent[1] == ':' - if firstComponent != "/" && !isDosStyle && isDosStyleNextPart(pathComponents[1]) { - // ignore "//vda1cs4850/c$/folderAtRoot" - if length == 2 { - return 2 - } - indexAfterOsRoot = 2 - isDosStyle = true - } - - afterOsRoot := 
pathComponents[indexAfterOsRoot] - if isDosStyle && !strings.EqualFold(afterOsRoot, "users") { - // Paths like c:/notUsers - return indexAfterOsRoot +func getPathComponentsForWatching(path string, currentDirectory string) []string { + components := tspath.GetPathComponents(path, currentDirectory) + rootLength := perceivedOsRootLengthForWatching(components) + if rootLength <= 1 { + return components } - - if strings.EqualFold(afterOsRoot, "workspaces") { - // Paths like: /workspaces as codespaces hoist the repos in /workspaces so we have to exempt these from "2" level from root rule - return indexAfterOsRoot + 1 - } - - // Paths like: c:/users/username or /home/username - return indexAfterOsRoot + 2 + newRoot := tspath.CombinePaths(components[0], components[1:rootLength]...) + return append([]string{newRoot}, components[rootLength:]...) } -func canWatchDirectoryOrFile(pathComponents []string) bool { +func perceivedOsRootLengthForWatching(pathComponents []string) int { length := len(pathComponents) - // Ignore "/", "c:/" - // ignore "/user", "c:/users" or "c:/folderAtRoot" - if length < 2 { - return false + if length <= 1 { + return length } - perceivedOsRootLength := perceivedOsRootLengthForWatching(pathComponents, length) - return length > perceivedOsRootLength+1 -} - -func isDosStyleNextPart(part string) bool { - return len(part) == 2 && tspath.IsVolumeCharacter(part[0]) && part[1] == '$' -} - -func lastIndex[T comparable](s []T, v T) int { - for i := len(s) - 1; i >= 0; i-- { - if s[i] == v { - return i - } + if strings.HasPrefix(pathComponents[0], "//") { + // Group UNC roots (//server/share) into a single component + return 2 } - return -1 -} - -func isInDirectoryPath(dirComponents []string, fileOrDirComponents []string) bool { - if len(fileOrDirComponents) < len(dirComponents) { - return false - } - for i := range dirComponents { - if dirComponents[i] != fileOrDirComponents[i] { - return false + if len(pathComponents[0]) == 3 && tspath.IsVolumeCharacter(pathComponents[0][0]) && pathComponents[0][1] == ':' && pathComponents[0][2] == '/' { + // Windows-style volume + if strings.EqualFold(pathComponents[1], "users") { + // Group C:/Users/username into a single component + return min(3, length) } + return 1 } - return true + if pathComponents[1] == "home" { + // Group /home/username into a single component + return min(3, length) + } + return 1 } func ptrTo[T any](v T) *T { @@ -434,3 +314,48 @@ func extractLookups[T resolutionWithLookupLocations]( } } } + +func getNonRootFileGlobs(workspaceDir string, libDirectory string, sourceFiles []*ast.SourceFile, rootFiles map[tspath.Path]string, comparePathsOptions tspath.ComparePathsOptions) patternsAndIgnored { + var globs []string + var includeWorkspace, includeLib bool + var ignored map[string]struct{} + externalDirectories := make([]string, 0, max(0, len(sourceFiles)-len(rootFiles))) + for _, sourceFile := range sourceFiles { + if _, ok := rootFiles[sourceFile.Path()]; !ok { + if tspath.ContainsPath(workspaceDir, sourceFile.FileName(), comparePathsOptions) { + includeWorkspace = true + } else if tspath.ContainsPath(libDirectory, sourceFile.FileName(), comparePathsOptions) { + includeLib = true + } else { + externalDirectories = append(externalDirectories, tspath.GetDirectoryPath(sourceFile.FileName())) + } + } + } + + if includeWorkspace { + globs = append(globs, getRecursiveGlobPattern(workspaceDir)) + } + if includeLib { + globs = append(globs, getRecursiveGlobPattern(libDirectory)) + } + if len(externalDirectories) > 0 { + commonParents, 
ignoredDirs := tspath.GetCommonParents( + externalDirectories, + minWatchLocationDepth, + getPathComponentsForWatching, + comparePathsOptions, + ) + globs = append(globs, core.Map(commonParents, func(dir string) string { + return getRecursiveGlobPattern(dir) + })...) + ignored = ignoredDirs + } + return patternsAndIgnored{ + patterns: globs, + ignored: ignored, + } +} + +func getRecursiveGlobPattern(directory string) string { + return fmt.Sprintf("%s/%s", tspath.RemoveTrailingDirectorySeparator(directory), "**/*.{js,jsx,mjs,cjs,ts,tsx,mts,cts,json}") +} diff --git a/internal/project/watch_test.go b/internal/project/watch_test.go new file mode 100644 index 0000000000..ed3159a2ed --- /dev/null +++ b/internal/project/watch_test.go @@ -0,0 +1,20 @@ +package project + +import ( + "testing" + + "gotest.tools/v3/assert" +) + +func TestGetPathComponentsForWatching(t *testing.T) { + t.Parallel() + + assert.DeepEqual(t, getPathComponentsForWatching("/project", ""), []string{"/", "project"}) + assert.DeepEqual(t, getPathComponentsForWatching("C:\\project", ""), []string{"C:/", "project"}) + assert.DeepEqual(t, getPathComponentsForWatching("//server/share/project/tsconfig.json", ""), []string{"//server/share", "project", "tsconfig.json"}) + assert.DeepEqual(t, getPathComponentsForWatching(`\\server\share\project\tsconfig.json`, ""), []string{"//server/share", "project", "tsconfig.json"}) + assert.DeepEqual(t, getPathComponentsForWatching("C:\\Users", ""), []string{"C:/Users"}) + assert.DeepEqual(t, getPathComponentsForWatching("C:\\Users\\andrew\\project", ""), []string{"C:/Users/andrew", "project"}) + assert.DeepEqual(t, getPathComponentsForWatching("/home", ""), []string{"/home"}) + assert.DeepEqual(t, getPathComponentsForWatching("/home/andrew/project", ""), []string{"/home/andrew", "project"}) +} diff --git a/internal/tsoptions/parsedcommandline.go b/internal/tsoptions/parsedcommandline.go index 198f9cafb4..e60649300b 100644 --- a/internal/tsoptions/parsedcommandline.go +++ b/internal/tsoptions/parsedcommandline.go @@ -1,19 +1,26 @@ package tsoptions import ( + "fmt" "iter" "slices" + "strings" "sync" "github.com/microsoft/typescript-go/internal/ast" - "github.com/microsoft/typescript-go/internal/collections" "github.com/microsoft/typescript-go/internal/core" + "github.com/microsoft/typescript-go/internal/glob" "github.com/microsoft/typescript-go/internal/module" "github.com/microsoft/typescript-go/internal/outputpaths" "github.com/microsoft/typescript-go/internal/tspath" "github.com/microsoft/typescript-go/internal/vfs" ) +const ( + fileGlobPattern = "*.{js,jsx,mjs,cjs,ts,tsx,mts,cts,json}" + recursiveFileGlobPattern = "**/*.{js,jsx,mjs,cjs,ts,tsx,mts,cts,json}" +) + type ParsedCommandLine struct { ParsedConfig *core.ParsedOptions `json:"parsedConfig"` @@ -25,6 +32,8 @@ type ParsedCommandLine struct { comparePathsOptions tspath.ComparePathsOptions wildcardDirectoriesOnce sync.Once wildcardDirectories map[string]bool + includeGlobsOnce sync.Once + includeGlobs []*glob.Glob extraFileExtensions []FileExtensionInfo sourceAndOutputMapsOnce sync.Once @@ -197,21 +206,40 @@ func (p *ParsedCommandLine) WildcardDirectories() map[string]bool { return nil } - if p.wildcardDirectories != nil { - return p.wildcardDirectories - } - p.wildcardDirectoriesOnce.Do(func() { - p.wildcardDirectories = getWildcardDirectories( - p.ConfigFile.configFileSpecs.validatedIncludeSpecs, - p.ConfigFile.configFileSpecs.validatedExcludeSpecs, - p.comparePathsOptions, - ) + if p.wildcardDirectories == nil { + p.wildcardDirectories 
= getWildcardDirectories( + p.ConfigFile.configFileSpecs.validatedIncludeSpecs, + p.ConfigFile.configFileSpecs.validatedExcludeSpecs, + p.comparePathsOptions, + ) + } }) return p.wildcardDirectories } +func (p *ParsedCommandLine) WildcardDirectoryGlobs() []*glob.Glob { + wildcardDirectories := p.WildcardDirectories() + if wildcardDirectories == nil { + return nil + } + + p.includeGlobsOnce.Do(func() { + if p.includeGlobs == nil { + globs := make([]*glob.Glob, 0, len(wildcardDirectories)) + for dir, recursive := range wildcardDirectories { + if parsed, err := glob.Parse(fmt.Sprintf("%s/%s", tspath.NormalizePath(dir), core.IfElse(recursive, recursiveFileGlobPattern, fileGlobPattern))); err == nil { + globs = append(globs, parsed) + } + } + p.includeGlobs = globs + } + }) + + return p.includeGlobs +} + // Normalized file names explicitly specified in `files` func (p *ParsedCommandLine) LiteralFileNames() []string { if p != nil && p.ConfigFile != nil { @@ -285,48 +313,30 @@ func (p *ParsedCommandLine) GetConfigFileParsingDiagnostics() []*ast.Diagnostic return p.Errors } -// Porting reference: ProjectService.isMatchedByConfig -func (p *ParsedCommandLine) MatchesFileName(fileName string) bool { +// PossiblyMatchesFileName is a fast check to see if a file is currently included by a config +// or would be included if the file were to be created. It may return false positives. +func (p *ParsedCommandLine) PossiblyMatchesFileName(fileName string) bool { path := tspath.ToPath(fileName, p.GetCurrentDirectory(), p.UseCaseSensitiveFileNames()) - if slices.ContainsFunc(p.FileNames(), func(f string) bool { - return path == tspath.ToPath(f, p.GetCurrentDirectory(), p.UseCaseSensitiveFileNames()) - }) { + if _, ok := p.FileNamesByPath()[path]; ok { return true } - if p.ConfigFile == nil { - return false - } - - if len(p.ConfigFile.configFileSpecs.validatedIncludeSpecs) == 0 { - return false - } - - supportedExtensions := GetSupportedExtensionsWithJsonIfResolveJsonModule( - p.CompilerOptions(), - GetSupportedExtensions(p.CompilerOptions(), p.extraFileExtensions), - ) - - if !tspath.FileExtensionIsOneOf(fileName, core.Flatten(supportedExtensions)) { - return false - } - - if p.ConfigFile.configFileSpecs.matchesExclude(fileName, p.comparePathsOptions) { - return false - } - - var allFileNames collections.Set[tspath.Path] - for _, fileName := range p.FileNames() { - allFileNames.Add(tspath.ToPath(fileName, p.GetCurrentDirectory(), p.UseCaseSensitiveFileNames())) + for _, include := range p.ConfigFile.configFileSpecs.validatedIncludeSpecs { + if !strings.ContainsAny(include, "*?") && !vfs.IsImplicitGlob(include) { + includePath := tspath.ToPath(include, p.GetCurrentDirectory(), p.UseCaseSensitiveFileNames()) + if includePath == path { + return true + } + } } - - if hasFileWithHigherPriorityExtension(string(path), supportedExtensions, func(fileName string) bool { - return allFileNames.Has(tspath.Path(fileName)) - }) { - return false + if wildcardDirectoryGlobs := p.WildcardDirectoryGlobs(); len(wildcardDirectoryGlobs) > 0 { + for _, glob := range wildcardDirectoryGlobs { + if glob.Match(fileName) { + return true + } + } } - - return p.ConfigFile.configFileSpecs.getMatchedIncludeSpec(fileName, p.comparePathsOptions) != "" + return false } func (p *ParsedCommandLine) GetMatchedFileSpec(fileName string) string { @@ -363,6 +373,7 @@ func (p *ParsedCommandLine) ReloadFileNamesOfParsedCommandLine(fs vfs.FS) *Parse CompileOnSave: p.CompileOnSave, comparePathsOptions: p.comparePathsOptions, wildcardDirectories: 
p.wildcardDirectories, + includeGlobs: p.includeGlobs, extraFileExtensions: p.extraFileExtensions, literalFileNamesLen: literalFileNamesLen, } diff --git a/internal/tsoptions/parsedcommandline_test.go b/internal/tsoptions/parsedcommandline_test.go index 18c83ba633..8015a820a8 100644 --- a/internal/tsoptions/parsedcommandline_test.go +++ b/internal/tsoptions/parsedcommandline_test.go @@ -12,7 +12,7 @@ import ( func TestParsedCommandLine(t *testing.T) { t.Parallel() - t.Run("MatchesFileName", func(t *testing.T) { + t.Run("PossiblyMatchesFileName", func(t *testing.T) { t.Parallel() noFiles := map[string]string{} @@ -47,13 +47,13 @@ func TestParsedCommandLine(t *testing.T) { assertMatches := func(t *testing.T, parsedCommandLine *tsoptions.ParsedCommandLine, files map[string]string, matches []string) { t.Helper() for fileName := range files { - actual := parsedCommandLine.MatchesFileName(fileName) + actual := parsedCommandLine.PossiblyMatchesFileName(fileName) expected := slices.Contains(matches, fileName) assert.Equal(t, actual, expected, "fileName: %s", fileName) } for _, fileName := range matches { if _, ok := files[fileName]; !ok { - actual := parsedCommandLine.MatchesFileName(fileName) + actual := parsedCommandLine.PossiblyMatchesFileName(fileName) assert.Equal(t, actual, true, "fileName: %s", fileName) } } @@ -163,229 +163,6 @@ func TestParsedCommandLine(t *testing.T) { "/dev/b.ts", }) }) - - t.Run("with non .ts file extensions", func(t *testing.T) { - t.Parallel() - parsedCommandLine := tsoptionstest.GetParsedCommandLine( - t, - `{ - "include": [ - "a.js", - "b.js" - ] - }`, - files, - "/dev", - /*useCaseSensitiveFileNames*/ true, - ) - - assertMatches(t, parsedCommandLine, files, []string{}) - - emptyParsedCommandLine := parsedCommandLine.ReloadFileNamesOfParsedCommandLine(noFilesFS) - assertMatches(t, emptyParsedCommandLine, noFiles, []string{}) - }) - - t.Run("with literal excludes", func(t *testing.T) { - t.Parallel() - parsedCommandLine := tsoptionstest.GetParsedCommandLine( - t, - `{ - "include": [ - "a.ts", - "b.ts" - ], - "exclude": [ - "b.ts" - ] - }`, - files, - "/dev", - /*useCaseSensitiveFileNames*/ true, - ) - - assertMatches(t, parsedCommandLine, files, []string{ - "/dev/a.ts", - }) - - emptyParsedCommandLine := parsedCommandLine.ReloadFileNamesOfParsedCommandLine(noFilesFS) - assertMatches(t, emptyParsedCommandLine, noFiles, []string{ - "/dev/a.ts", - }) - }) - - t.Run("with wildcard excludes", func(t *testing.T) { - t.Parallel() - parsedCommandLine := tsoptionstest.GetParsedCommandLine( - t, - `{ - "include": [ - "a.ts", - "b.ts", - "z/a.ts", - "z/abz.ts", - "z/aba.ts", - "x/b.ts" - ], - "exclude": [ - "*.ts", - "z/??z.ts", - "*/b.ts" - ] - }`, - files, - "/dev", - /*useCaseSensitiveFileNames*/ true, - ) - - assertMatches(t, parsedCommandLine, files, []string{ - "/dev/z/a.ts", - "/dev/z/aba.ts", - }) - - emptyParsedCommandLine := parsedCommandLine.ReloadFileNamesOfParsedCommandLine(noFilesFS) - assertMatches(t, emptyParsedCommandLine, noFiles, []string{ - "/dev/z/a.ts", - "/dev/z/aba.ts", - }) - }) - - t.Run("with wildcard include list", func(t *testing.T) { - t.Parallel() - - t.Run("star matches only ts files", func(t *testing.T) { - t.Parallel() - parsedCommandLine := tsoptionstest.GetParsedCommandLine( - t, - `{ - "include": [ - "*" - ] - }`, - files, - "/dev", - /*useCaseSensitiveFileNames*/ true, - ) - - assertMatches(t, parsedCommandLine, files, []string{ - "/dev/a.ts", - "/dev/b.ts", - "/dev/c.d.ts", - }) - - // a.d.ts matches if a.ts is not already included - 
emptyParsedCommandLine := parsedCommandLine.ReloadFileNamesOfParsedCommandLine(noFilesFS) - assertMatches(t, emptyParsedCommandLine, noFiles, []string{ - "/dev/a.ts", - "/dev/a.d.ts", - "/dev/b.ts", - "/dev/c.d.ts", - }) - }) - - t.Run("question matches only a single character", func(t *testing.T) { - t.Parallel() - parsedCommandLine := tsoptionstest.GetParsedCommandLine( - t, - `{ - "include": [ - "x/?.ts" - ] - }`, - files, - "/dev", - /*useCaseSensitiveFileNames*/ true, - ) - - assertMatches(t, parsedCommandLine, files, []string{ - "/dev/x/a.ts", - "/dev/x/b.ts", - }) - - emptyParsedCommandLine := parsedCommandLine.ReloadFileNamesOfParsedCommandLine(noFilesFS) - assertMatches(t, emptyParsedCommandLine, noFiles, []string{ - "/dev/x/a.ts", - "/dev/x/b.ts", - }) - }) - - t.Run("exclude .js files when allowJs=false", func(t *testing.T) { - t.Parallel() - parsedCommandLine := tsoptionstest.GetParsedCommandLine( - t, - `{ - "include": [ - "js/*" - ] - }`, - files, - "/dev", - /*useCaseSensitiveFileNames*/ true, - ) - - assertMatches(t, parsedCommandLine, files, []string{}) - - emptyParsedCommandLine := parsedCommandLine.ReloadFileNamesOfParsedCommandLine(noFilesFS) - assertMatches(t, emptyParsedCommandLine, noFiles, []string{}) - }) - - t.Run("include .js files when allowJs=true", func(t *testing.T) { - t.Parallel() - parsedCommandLine := tsoptionstest.GetParsedCommandLine( - t, - `{ - "compilerOptions": { - "allowJs": true - }, - "include": [ - "js/*" - ] - }`, - files, - "/dev", - /*useCaseSensitiveFileNames*/ true, - ) - - assertMatches(t, parsedCommandLine, files, []string{ - "/dev/js/a.js", - "/dev/js/b.js", - }) - - emptyParsedCommandLine := parsedCommandLine.ReloadFileNamesOfParsedCommandLine(noFilesFS) - assertMatches(t, emptyParsedCommandLine, noFiles, []string{ - "/dev/js/a.js", - "/dev/js/b.js", - }) - }) - - t.Run("include explicitly listed .min.js files when allowJs=true", func(t *testing.T) { - t.Parallel() - parsedCommandLine := tsoptionstest.GetParsedCommandLine( - t, - `{ - "compilerOptions": { - "allowJs": true - }, - "include": [ - "js/*.min.js" - ] - }`, - files, - "/dev", - /*useCaseSensitiveFileNames*/ true, - ) - - assertMatches(t, parsedCommandLine, files, []string{ - "/dev/js/d.min.js", - "/dev/js/ab.min.js", - }) - - emptyParsedCommandLine := parsedCommandLine.ReloadFileNamesOfParsedCommandLine(noFilesFS) - assertMatches(t, emptyParsedCommandLine, noFiles, []string{ - "/dev/js/d.min.js", - "/dev/js/ab.min.js", - }) - }) - }) }) }) } diff --git a/internal/tspath/path.go b/internal/tspath/path.go index 7e726dfa00..fae3423721 100644 --- a/internal/tspath/path.go +++ b/internal/tspath/path.go @@ -2,6 +2,7 @@ package tspath import ( "cmp" + "slices" "strings" "unicode" @@ -1023,3 +1024,106 @@ func SplitVolumePath(path string) (volume string, rest string, ok bool) { } return "", path, false } + +// GetCommonParents returns the smallest set of directories that are parents of all paths with +// at least `minComponents` directory components. Any path that has fewer than `minComponents` directory components +// will be returned in the second return value. 
Examples: +// +// /a/b/c/d, /a/b/c/e, /a/b/f/g => /a/b +// /a/b/c/d, /a/b/c/e, /a/b/f/g, /x/y => / +// /a/b/c/d, /a/b/c/e, /a/b/f/g, /x/y (minComponents: 2) => /a/b, /x/y +// c:/a/b/c/d, d:/a/b/c/d => c:/a/b/c/d, d:/a/b/c/d +func GetCommonParents( + paths []string, + minComponents int, + getPathComponents func(path string, currentDirectory string) []string, + options ComparePathsOptions, +) (parents []string, ignored map[string]struct{}) { + if minComponents < 1 { + panic("minComponents must be at least 1") + } + if len(paths) == 0 { + return nil, nil + } + if len(paths) == 1 { + if len(reducePathComponents(getPathComponents(paths[0], options.CurrentDirectory))) < minComponents { + return nil, map[string]struct{}{paths[0]: {}} + } + return paths, nil + } + + ignored = make(map[string]struct{}) + pathComponents := make([][]string, 0, len(paths)) + for _, path := range paths { + components := reducePathComponents(getPathComponents(path, options.CurrentDirectory)) + if len(components) < minComponents { + ignored[path] = struct{}{} + } else { + pathComponents = append(pathComponents, components) + } + } + + results := getCommonParentsWorker(pathComponents, minComponents, options) + resultPaths := make([]string, len(results)) + for i, comps := range results { + resultPaths[i] = GetPathFromPathComponents(comps) + } + + return resultPaths, ignored +} + +func getCommonParentsWorker(componentGroups [][]string, minComponents int, options ComparePathsOptions) [][]string { + if len(componentGroups) == 0 { + return nil + } + // Determine the maximum depth we can consider + maxDepth := len(componentGroups[0]) + for _, comps := range componentGroups[1:] { + if l := len(comps); l < maxDepth { + maxDepth = l + } + } + + equality := options.getEqualityComparer() + for lastCommonIndex := range maxDepth { + candidate := componentGroups[0][lastCommonIndex] + for j, comps := range componentGroups[1:] { + if !equality(candidate, comps[lastCommonIndex]) { // divergence + if lastCommonIndex < minComponents { + // Not enough components, we need to fan out + orderedGroups := make([]Path, 0, len(componentGroups)-j) + newGroups := make(map[Path]struct { + head []string + tails [][]string + }) + for _, g := range componentGroups { + key := ToPath(g[lastCommonIndex], options.CurrentDirectory, options.UseCaseSensitiveFileNames) + if _, ok := newGroups[key]; !ok { + orderedGroups = append(orderedGroups, key) + } + newGroups[key] = struct { + head []string + tails [][]string + }{ + head: g[:lastCommonIndex+1], + tails: append(newGroups[key].tails, g[lastCommonIndex+1:]), + } + } + slices.Sort(orderedGroups) + result := make([][]string, 0, len(newGroups)) + for _, key := range orderedGroups { + group := newGroups[key] + subResults := getCommonParentsWorker(group.tails, minComponents-(lastCommonIndex+1), options) + for _, sr := range subResults { + result = append(result, append(group.head, sr...)) + } + } + return result + } + return [][]string{componentGroups[0][:lastCommonIndex]} + } + } + } + + return [][]string{componentGroups[0][:maxDepth]} +} diff --git a/internal/tspath/path_test.go b/internal/tspath/path_test.go index 07683b8825..85d7829de6 100644 --- a/internal/tspath/path_test.go +++ b/internal/tspath/path_test.go @@ -704,3 +704,107 @@ func normalizePath_old(path string) string { func getNormalizedAbsolutePath_old(fileName string, currentDirectory string) string { return GetPathFromPathComponents(GetNormalizedPathComponents(fileName, currentDirectory)) } + +func TestGetCommonParents(t *testing.T) { + t.Parallel() 
+ + opts := ComparePathsOptions{} + + t.Run("empty input", func(t *testing.T) { + t.Parallel() + var paths []string + got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + assert.DeepEqual(t, got, ([]string)(nil)) + }) + + t.Run("single path returns itself", func(t *testing.T) { + t.Parallel() + paths := []string{"/a/b/c/d"} + got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + expected := []string{paths[0]} + assert.DeepEqual(t, got, expected) + }) + + t.Run("paths shorter than minComponents are ignored", func(t *testing.T) { + t.Parallel() + paths := []string{"/a/b/c/d", "/a/b/c/e", "/a/b/f/g", "/x/y"} + got, ignored := GetCommonParents(paths, 4, GetPathComponents, opts) + assert.DeepEqual(t, ignored, map[string]struct{}{"/x/y": {}}) + expected := []string{"/a/b/c", "/a/b/f/g"} + assert.DeepEqual(t, got, expected) + }) + + t.Run("three paths share /a/b", func(t *testing.T) { + t.Parallel() + paths := []string{"/a/b/c/d", "/a/b/c/e", "/a/b/f/g"} + got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + expected := []string{"/a/b"} + assert.DeepEqual(t, got, expected) + }) + + t.Run("mixed with short path collapses to root when minComponents=1", func(t *testing.T) { + t.Parallel() + paths := []string{"/a/b/c/d", "/a/b/c/e", "/a/b/f/g", "/x/y/z"} + got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + expected := []string{"/"} + assert.DeepEqual(t, got, expected) + }) + + t.Run("mixed with short path preserves both when minComponents=3", func(t *testing.T) { + t.Parallel() + paths := []string{"/a/b/c/d", "/a/b/c/e", "/a/b/f/g", "/x/y/z"} + got, ignored := GetCommonParents(paths, 3, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + expected := []string{"/a/b", "/x/y/z"} + assert.DeepEqual(t, got, expected) + }) + + t.Run("different volumes are returned individually", func(t *testing.T) { + t.Parallel() + paths := []string{"c:/a/b/c/d", "d:/a/b/c/d"} + got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + expected := []string{paths[0], paths[1]} + assert.DeepEqual(t, got, expected) + }) + + t.Run("duplicate paths deduplicate result", func(t *testing.T) { + t.Parallel() + paths := []string{"/a/b/c/d", "/a/b/c/d"} + got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + expected := []string{paths[0]} + assert.DeepEqual(t, got, expected) + }) + + t.Run("paths with few components are returned as-is when minComponents met", func(t *testing.T) { + t.Parallel() + paths := []string{"/a/b/c/d", "/x/y"} + got, ignored := GetCommonParents(paths, 2, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + expected := []string{"/a/b/c/d", "/x/y"} + assert.DeepEqual(t, got, expected) + }) + + t.Run("minComponents=2", func(t *testing.T) { + t.Parallel() + paths := []string{"/a/b/c/d", "/a/z/c/e", "/a/aaa/f/g", "/x/y/z"} + got, ignored := GetCommonParents(paths, 2, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + expected := []string{"/a", "/x/y/z"} + assert.DeepEqual(t, got, expected) + }) + + t.Run("trailing separators are handled", func(t *testing.T) { + t.Parallel() + paths := []string{"/a/b/", "/a/b/c"} + got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts) + assert.Equal(t, len(ignored), 0) + expected := []string{"/a/b"} + assert.DeepEqual(t, got, expected) + 
}) +}
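
As a usage sketch (not part of this diff): the new tspath.GetCommonParents collapses a set of file paths into the smallest set of parent directories while keeping shallow paths separate, which is useful for deriving a minimal set of watch roots. The test below is illustrative only; the file paths and the test name are invented, and it assumes the repo's gotest.tools assert helpers as used by the tests added in internal/tspath/path_test.go.

package tspath

import (
	"testing"

	"gotest.tools/v3/assert"
)

// Illustrative sketch only (not part of this diff): derive a minimal set of
// "watch roots" from a handful of open files. The paths are invented.
func TestGetCommonParentsWatchRootsSketch(t *testing.T) {
	t.Parallel()

	paths := []string{
		"/home/user/project/src/a.ts",
		"/home/user/project/src/lib/b.ts",
		"/home/user/project/test/c.ts",
		"/tmp/scratch.ts",
	}

	// minComponents=3 keeps the shallow /tmp/scratch.ts as its own root instead
	// of dragging the common parent of everything up to "/".
	parents, ignored := GetCommonParents(paths, 3, GetPathComponents, ComparePathsOptions{})

	assert.Equal(t, len(ignored), 0)
	assert.DeepEqual(t, parents, []string{"/home/user/project", "/tmp/scratch.ts"})
}

Raising minComponents trades a single broad watch root for several deeper ones: with minComponents=1 the same inputs would collapse to just "/", while minComponents=3 keeps /home/user/project and /tmp/scratch.ts separate.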