diff --git a/.github/actions/setup-build/action.yml b/.github/actions/setup-build/action.yml index da1ece3140..11f32c09db 100644 --- a/.github/actions/setup-build/action.yml +++ b/.github/actions/setup-build/action.yml @@ -31,7 +31,7 @@ runs: sudo chown -R $USER /usr/local/.ghcup shell: bash - - uses: haskell-actions/setup@v2.7.10 + - uses: haskell-actions/setup@v2.8.1 id: HaskEnvSetup with: ghc-version : ${{ inputs.ghc }} diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 82a50589e4..ba39a21058 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -127,7 +127,7 @@ jobs: example: ['cabal', 'lsp-types'] steps: - - uses: haskell-actions/setup@v2.8.0 + - uses: haskell-actions/setup@v2.8.1 with: ghc-version : ${{ matrix.ghc }} cabal-version: ${{ matrix.cabal }} diff --git a/CODEOWNERS b/CODEOWNERS index 7d66f7805e..820661ceeb 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -4,7 +4,6 @@ /hls-graph @wz1000 /hls-plugin-api @michaelpj @fendor /hls-test-utils @fendor -/hie-compat @wz1000 # HLS main /src @fendor diff --git a/RELEASING.md b/RELEASING.md index a48b32cb93..74da125d86 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -9,7 +9,6 @@ - [ ] bump package versions in all `*.cabal` files (same version as hls) - HLS uses lockstep versioning. The core packages and all plugins use the same version number, and only support exactly this version. - Exceptions: - - `hie-compat` requires no automatic version bump. - `shake-bench` is an internal testing tool, not exposed to the outside world. Thus, no version bump required for releases. - For updating cabal files, the following script can be used: - ```sh diff --git a/cabal.project b/cabal.project index 3d43dff2f4..8d8bd080af 100644 --- a/cabal.project +++ b/cabal.project @@ -1,6 +1,5 @@ packages: ./ - ./hie-compat ./shake-bench ./hls-graph ./ghcide @@ -8,7 +7,7 @@ packages: ./hls-test-utils -index-state: 2025-06-07T14:57:40Z +index-state: 2025-08-08T12:31:54Z tests: True test-show-details: direct diff --git a/docs/contributing/contributing.md b/docs/contributing/contributing.md index 134a03b89c..08ad21f12e 100644 --- a/docs/contributing/contributing.md +++ b/docs/contributing/contributing.md @@ -197,7 +197,6 @@ pre-commit install #### Why are some components excluded from automatic formatting? - `test/testdata` and `test/data` are excluded because we want to test formatting plugins. -- `hie-compat` is excluded because we want to keep its code as close to GHC as possible. ## Plugin tutorial diff --git a/exe/Wrapper.hs b/exe/Wrapper.hs index 2c2401ab6a..2fd885ffb3 100644 --- a/exe/Wrapper.hs +++ b/exe/Wrapper.hs @@ -38,7 +38,8 @@ import Control.Monad.Trans.Except (ExceptT, runExceptT, import Data.Maybe import qualified Data.Text as T import qualified Data.Text.IO as T -import Development.IDE.LSP.LanguageServer (runLanguageServer) +import Development.IDE.LSP.LanguageServer (Setup (..), + runLanguageServer) import qualified Development.IDE.Main as Main import Ide.Logger (Doc, Pretty (pretty), Recorder, WithPriority, @@ -300,7 +301,12 @@ launchErrorLSP recorder errorMsg = do [ exitHandler exit ] let interpretHandler (env, _st) = LSP.Iso (LSP.runLspT env . 
unErrorLSPM) liftIO - pure (doInitialize, asyncHandlers, interpretHandler) + pure MkSetup + { doInitialize + , staticHandlers = asyncHandlers + , interpretHandler + , onExit = [exit] + } runLanguageServer (cmapWithPrio pretty recorder) (Main.argsLspOptions defaultArguments) diff --git a/ghcide-test/data/multi-unit/a-1.0.0-inplace b/ghcide-test/data/multi-unit/a-1.0.0-inplace index a54ea9bc4b..cab2b716ff 100644 --- a/ghcide-test/data/multi-unit/a-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/a-1.0.0-inplace @@ -16,3 +16,6 @@ base text -XHaskell98 A ++RTS +-A32M +-RTS diff --git a/ghcide-test/data/multi-unit/c-1.0.0-inplace b/ghcide-test/data/multi-unit/c-1.0.0-inplace index 7201a40de4..7421d59279 100644 --- a/ghcide-test/data/multi-unit/c-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/c-1.0.0-inplace @@ -17,3 +17,5 @@ a-1.0.0-inplace base -XHaskell98 C ++RTS +-A32M diff --git a/ghcide-test/data/references/Fields.hs b/ghcide-test/data/references/Fields.hs new file mode 100644 index 0000000000..1b935f31c9 --- /dev/null +++ b/ghcide-test/data/references/Fields.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE RecordWildCards #-} +module Fields where + +data Foo = MkFoo + { + barr :: String, + bazz :: String + } + +fooUse0 :: Foo -> String +fooUse0 MkFoo{barr} = "5" + +fooUse1 :: Foo -> String +fooUse1 MkFoo{..} = "6" + +fooUse2 :: String -> String -> Foo +fooUse2 bar baz = + MkFoo{..} diff --git a/ghcide-test/data/references/Main.hs b/ghcide-test/data/references/Main.hs index 4a976f3fd0..aae14355d4 100644 --- a/ghcide-test/data/references/Main.hs +++ b/ghcide-test/data/references/Main.hs @@ -1,7 +1,7 @@ module Main where import References - +import Fields main :: IO () main = return () @@ -12,3 +12,6 @@ b = a + 1 acc :: Account acc = Savings + +fooUse3 :: String -> String -> Foo +fooUse3 bar baz = MkFoo{barr = bar, bazz = baz} diff --git a/ghcide-test/data/references/hie.yaml b/ghcide-test/data/references/hie.yaml index db42bad0c0..9e68765ba1 100644 --- a/ghcide-test/data/references/hie.yaml +++ b/ghcide-test/data/references/hie.yaml @@ -1 +1 @@ -cradle: {direct: {arguments: ["Main","OtherModule","OtherOtherModule","References"]}} +cradle: {direct: {arguments: ["Main","OtherModule","OtherOtherModule","References", "Fields"]}} diff --git a/ghcide-test/exe/CradleTests.hs b/ghcide-test/exe/CradleTests.hs index 046b8bbf2f..d79b90c835 100644 --- a/ghcide-test/exe/CradleTests.hs +++ b/ghcide-test/exe/CradleTests.hs @@ -117,7 +117,11 @@ simpleSubDirectoryTest = multiTests :: FilePath -> [TestTree] multiTests dir = - [simpleMultiTest dir, simpleMultiTest2 dir, simpleMultiTest3 dir, simpleMultiDefTest dir] + [ simpleMultiTest dir + , simpleMultiTest2 dir + , simpleMultiTest3 dir + , simpleMultiDefTest dir + ] multiTestName :: FilePath -> String -> String multiTestName dir name = "simple-" ++ dir ++ "-" ++ name diff --git a/ghcide-test/exe/FindDefinitionAndHoverTests.hs b/ghcide-test/exe/FindDefinitionAndHoverTests.hs index 7920ff4949..e4c0958f58 100644 --- a/ghcide-test/exe/FindDefinitionAndHoverTests.hs +++ b/ghcide-test/exe/FindDefinitionAndHoverTests.hs @@ -187,7 +187,8 @@ tests = let holeL65 = Position 65 8 ; hleInfo2 = [ExpectHoverText ["_ :: a -> Maybe a"]] cccL17 = Position 17 16 ; docLink = [ExpectHoverTextRegex "\\*Defined in 'GHC.Types'\\* \\*\\(ghc-prim-[0-9.]+\\)\\*\n\n"] imported = Position 56 13 ; importedSig = getDocUri "Foo.hs" >>= \foo -> return [ExpectHoverText ["foo", "Foo", "Haddock"], mkL foo 5 0 5 3] - reexported = Position 55 14 ; reexportedSig = getDocUri "Bar.hs" >>= \bar -> return 
[ExpectHoverText ["Bar", "Bar", "Haddock"], if ghcVersion < GHC910 then mkL bar 3 5 3 8 else mkL bar 3 0 3 14] + reexported = Position 55 14 + reexportedSig = getDocUri "Bar.hs" >>= \bar -> return [ExpectHoverText ["Bar", "Bar", "Haddock"], if ghcVersion < GHC910 || not isWindows then mkL bar 3 5 3 8 else mkL bar 3 0 3 14] thLocL57 = Position 59 10 ; thLoc = [ExpectHoverText ["Identity"]] cmtL68 = Position 67 0 ; lackOfdEq = [ExpectHoverExcludeText ["$dEq"]] import310 = Position 3 10; pkgTxt = [ExpectHoverText ["Data.Text\n\ntext-"]] @@ -237,9 +238,9 @@ tests = let , testM yes yes imported importedSig "Imported symbol" , if isWindows then -- Flaky on Windows: https://github.com/haskell/haskell-language-server/issues/2997 - testM no yes reexported reexportedSig "Imported symbol (reexported)" + testM no yes reexported reexportedSig "Imported symbol reexported" else - testM yes yes reexported reexportedSig "Imported symbol (reexported)" + testM yes yes reexported reexportedSig "Imported symbol reexported" , test no yes thLocL57 thLoc "TH Splice Hover" , test yes yes import310 pkgTxt "show package name and its version" ] diff --git a/ghcide-test/exe/ReferenceTests.hs b/ghcide-test/exe/ReferenceTests.hs index cdbf8e472d..758506e54d 100644 --- a/ghcide-test/exe/ReferenceTests.hs +++ b/ghcide-test/exe/ReferenceTests.hs @@ -156,6 +156,28 @@ tests = testGroup "references" , ("References.hs", 16, 0) ] ] + -- Fields.hs does not depend on Main.hs + -- so we can only find references in Fields.hs + , testGroup "references to record fields" + [ referenceTest "references record fields in the same file" + ("Fields.hs", 5, 4) + YesIncludeDeclaration + [ ("Fields.hs", 5, 4) + , ("Fields.hs", 10, 14) + , ("Fields.hs", 13, 14) + ] + + -- Main.hs depends on Fields.hs, so we can find references + -- from Main.hs to Fields.hs + , referenceTest "references record fields cross modules" + ("Main.hs", 16, 24) + YesIncludeDeclaration + [ ("Fields.hs", 5, 4) + , ("Fields.hs", 10, 14) + , ("Fields.hs", 13, 14) + , ("Main.hs", 16, 24) + ] + ] ] -- | When we ask for all references to symbol "foo", should the declaration "foo diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 4d4b481c14..d1c6d907a3 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -73,9 +73,8 @@ library , Glob , haddock-library >=1.8 && <1.12 , hashable - , hie-bios ^>=0.15.0 - , hie-compat ^>=0.3.0.0 - , hiedb ^>= 0.6.0.2 + , hie-bios ^>=0.17.0 + , hiedb ^>= 0.7.0.0 , hls-graph == 2.11.0.0 , hls-plugin-api == 2.11.0.0 , implicit-hie >= 0.1.4.0 && < 0.1.5 @@ -180,7 +179,9 @@ library Development.IDE.Plugin.Test Development.IDE.Plugin.TypeLenses Development.IDE.Session + Development.IDE.Session.Dependency Development.IDE.Session.Diagnostics + Development.IDE.Session.Ghc Development.IDE.Session.Implicit Development.IDE.Spans.AtPoint Development.IDE.Spans.Common @@ -203,6 +204,7 @@ library Development.IDE.GHC.CPP Development.IDE.GHC.Warnings Development.IDE.Types.Action + Development.IDE.Session.OrderedSet if flag(pedantic) ghc-options: diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 78bfb798af..b1f0b63d21 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -1,4 +1,3 @@ -{-# LANGUAGE CPP #-} {-# LANGUAGE TypeFamilies #-} {-| @@ -25,47 +24,37 @@ import Control.Exception.Safe as Safe import Control.Monad import Control.Monad.Extra as Extra import Control.Monad.IO.Class +import Control.Monad.Trans.Maybe 
(MaybeT (MaybeT, runMaybeT)) import qualified Crypto.Hash.SHA1 as H -import Data.Aeson hiding (Error) -import Data.Bifunctor +import Data.Aeson hiding (Error, Key) import qualified Data.ByteString.Base16 as B16 import qualified Data.ByteString.Char8 as B import Data.Default -import Data.Either.Extra -import Data.Function import Data.Hashable hiding (hash) import qualified Data.HashMap.Strict as HM -import Data.IORef import Data.List import Data.List.Extra as L -import Data.List.NonEmpty (NonEmpty (..)) -import qualified Data.List.NonEmpty as NE import qualified Data.Map.Strict as Map import Data.Maybe import Data.Proxy import qualified Data.Text as T -import Data.Time.Clock import Data.Version import Development.IDE.Core.RuleTypes import Development.IDE.Core.Shake hiding (Log, knownTargets, withHieDb) import qualified Development.IDE.GHC.Compat as Compat -import Development.IDE.GHC.Compat.CmdLine import Development.IDE.GHC.Compat.Core hiding (Target, TargetFile, TargetModule, Var, Warning, getOptions) -import qualified Development.IDE.GHC.Compat.Core as GHC import Development.IDE.GHC.Compat.Env hiding (Logger) -import Development.IDE.GHC.Compat.Units (UnitId) import Development.IDE.GHC.Util -import Development.IDE.Graph (Action) +import Development.IDE.Graph (Action, Key) import qualified Development.IDE.Session.Implicit as GhcIde import Development.IDE.Types.Diagnostics import Development.IDE.Types.Exports -import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) +import Development.IDE.Types.HscEnvEq (HscEnvEq) import Development.IDE.Types.Location import Development.IDE.Types.Options -import GHC.ResponseFile import qualified HIE.Bios as HieBios import HIE.Bios.Environment hiding (getCacheDir) import HIE.Bios.Types hiding (Log) @@ -77,7 +66,8 @@ import Ide.Logger (Pretty (pretty), nest, toCologActionWithPrio, vcat, viaShow, (<+>)) -import Ide.Types (SessionLoadingPreferenceConfig (..), +import Ide.Types (Config, + SessionLoadingPreferenceConfig (..), sessionLoading) import Language.LSP.Protocol.Message import Language.LSP.Server @@ -92,25 +82,20 @@ import Data.Void import Control.Concurrent.STM.Stats (atomically, modifyTVar', readTVar, writeTVar) import Control.Concurrent.STM.TQueue -import Control.DeepSeq -import Control.Exception (evaluate) -import Control.Monad.IO.Unlift (MonadUnliftIO) import Control.Monad.Trans.Cont (ContT (ContT, runContT)) import Data.Foldable (for_) import Data.HashMap.Strict (HashMap) import Data.HashSet (HashSet) import qualified Data.HashSet as Set -import qualified Data.Set as OS import Database.SQLite.Simple import Development.IDE.Core.Tracing (withTrace) -import Development.IDE.Core.WorkerThread (awaitRunInThread, - withWorkerQueue) -import qualified Development.IDE.GHC.Compat.Util as Compat +import Development.IDE.Core.WorkerThread (withWorkerQueue) +import Development.IDE.Session.Dependency import Development.IDE.Session.Diagnostics (renderCradleError) +import Development.IDE.Session.Ghc hiding (Log) import Development.IDE.Types.Shake (WithHieDb, WithHieDbShield (..), toNoFileKey) -import GHC.Data.Graph.Directed import HieDb.Create import HieDb.Types import Ide.PluginUtils (toAbsolute) @@ -118,15 +103,13 @@ import qualified System.Random as Random import System.Random (RandomGen) import Text.ParserCombinators.ReadP (readP_to_S) -import GHC.Driver.Env (hsc_all_home_unit_ids) -import GHC.Driver.Errors.Types -import GHC.Types.Error (errMsgDiagnostic, - singleMessage) -import GHC.Unit.State - -#if MIN_VERSION_ghc(9,13,0) -import GHC.Driver.Make 
(checkHomeUnitsClosed) -#endif +import Control.Concurrent.STM (STM, TVar) +import qualified Control.Monad.STM as STM +import Control.Monad.Trans.Reader +import qualified Development.IDE.Session.Ghc as Ghc +import qualified Development.IDE.Session.OrderedSet as S +import qualified Focus +import qualified StmContainers.Map as STM data Log = LogSettingInitialDynFlags @@ -136,22 +119,31 @@ data Log | LogHieDbRetriesExhausted !Int !Int !Int !SomeException | LogHieDbWriterThreadSQLiteError !SQLError | LogHieDbWriterThreadException !SomeException - | LogInterfaceFilesCacheDir !FilePath | LogKnownFilesUpdated !(HashMap Target (HashSet NormalizedFilePath)) - | LogMakingNewHscEnv ![UnitId] - | LogDLLLoadError !String | LogCradlePath !FilePath | LogCradleNotFound !FilePath | LogSessionLoadingResult !(Either [CradleError] (ComponentOptions, FilePath, String)) | LogCradle !(Cradle Void) | LogNoneCradleFound FilePath - | LogNewComponentCache !(([FileDiagnostic], Maybe HscEnvEq), DependencyInfo) | LogHieBios HieBios.Log | LogSessionLoadingChanged + | LogSessionNewLoadedFiles ![FilePath] + | LogSessionReloadOnError FilePath ![FilePath] + | LogGetOptionsLoop !FilePath + | LogLookupSessionCache !FilePath + | LogTime !String + | LogSessionGhc Ghc.Log deriving instance Show Log instance Pretty Log where pretty = \case + LogTime s -> "Time:" <+> pretty s + LogLookupSessionCache path -> "Looking up session cache for" <+> pretty path + LogGetOptionsLoop fp -> "Loop: getOptions for" <+> pretty fp + LogSessionReloadOnError path files -> + "Reloading file due to error in" <+> pretty path <+> "with files:" <+> pretty files + LogSessionNewLoadedFiles files -> + "New loaded files:" <+> pretty files LogNoneCradleFound path -> "None cradle found for" <+> pretty path <+> ", ignoring the file" LogSettingInitialDynFlags -> @@ -192,18 +184,12 @@ instance Pretty Log where vcat [ "HieDb writer thread exception:" , pretty (displayException e) ] - LogInterfaceFilesCacheDir path -> - "Interface files cache directory:" <+> pretty path LogKnownFilesUpdated targetToPathsMap -> nest 2 $ vcat [ "Known files updated:" , viaShow $ (HM.map . Set.map) fromNormalizedFilePath targetToPathsMap ] - LogMakingNewHscEnv inPlaceUnitIds -> - "Making new HscEnv. In-place unit ids:" <+> pretty (map show inPlaceUnitIds) - LogDLLLoadError errorString -> - "Error dynamically loading libm.so.6:" <+> pretty errorString LogCradlePath path -> "Cradle path:" <+> pretty path LogCradleNotFound path -> @@ -215,18 +201,14 @@ instance Pretty Log where "Session loading result:" <+> viaShow e LogCradle cradle -> "Cradle:" <+> viaShow cradle - LogNewComponentCache componentCache -> - "New component cache HscEnvEq:" <+> viaShow componentCache LogHieBios msg -> pretty msg + LogSessionGhc msg -> pretty msg LogSessionLoadingChanged -> "Session Loading config changed, reloading the full session." -- | Bump this version number when making changes to the format of the data stored in hiedb hiedbDataVersion :: String -hiedbDataVersion = "1" - -data CacheDirs = CacheDirs - { hiCacheDir, hieCacheDir, oCacheDir :: Maybe FilePath} +hiedbDataVersion = "2" data SessionLoadingOptions = SessionLoadingOptions { findCradle :: FilePath -> IO (Maybe FilePath) @@ -400,6 +382,199 @@ getHieDbLoc dir = do createDirectoryIfMissing True cDir pure (cDir db) +{- Note [SessionState and batch load] +SessionState manages the state for batch loading files in the session loader. + +- When a new file needs to be loaded, it is added to the pendingFiles set. 
+- The loader processes files from pendingFiles, attempting to load them in batches.
+- (SBL1) If a file is already in failedFiles, it is loaded individually (single-file mode).
+- (SBL2) Otherwise, the loader tries to load as many files as possible together (batch mode).
+
+On success:
+  - (SBL3) All successfully loaded files are removed from pendingFiles and failedFiles,
+    and added to loadedFiles.
+
+On failure:
+  - (SBL4) If loading a single file fails, it is added to failedFiles and removed from loadedFiles and pendingFiles.
+  - (SBL5) If batch loading fails, all files attempted are added to failedFiles.
+
+This approach ensures efficient batch loading while isolating problematic files for individual handling.
+-}
+
+-- SBL3
+handleBatchLoadSuccess :: Foldable t => Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> t TargetDetails -> IO ()
+handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets = do
+    pendings <- getPendingFiles sessionState
+    -- this_flags_map might contain files that are not in pendingFiles, so take the intersection
+    let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map)
+    atomically $ do
+        STM.insert this_flags_map hieYaml (fileToFlags sessionState)
+        insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets
+    logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded
+    atomically $ forM_ (Set.toList newLoaded) $ flip S.delete (pendingFiles sessionState)
+    mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded)
+    addCradleFiles sessionState newLoaded
+
+-- SBL5
+handleBatchLoadFailure :: SessionState -> [FilePath] -> IO ()
+handleBatchLoadFailure sessionState files = do
+    mapM_ (addErrorLoadingFile sessionState) files
+
+-- SBL4
+handleSingleLoadFailure :: SessionState -> FilePath -> IO ()
+handleSingleLoadFailure sessionState file = do
+    addErrorLoadingFile sessionState file
+    atomically $ S.delete file (pendingFiles sessionState)
+    removeCradleFile sessionState file
+
+data SessionState = SessionState
+  { loadedFiles :: !(Var (HashSet FilePath)),
+    failedFiles :: !(Var (HashSet FilePath)),
+    pendingFiles :: !(S.OrderedSet FilePath),
+    hscEnvs :: !(Var HieMap),
+    fileToFlags :: !FlagsMap,
+    filesMap :: !FilesMap,
+    version :: !(Var Int),
+    sessionLoadingPreferenceConfig :: !(Var (Maybe SessionLoadingPreferenceConfig))
+  }
+
+-- | Helper functions for SessionState management
+-- These functions encapsulate common operations on the SessionState
+
+-- | Add a file to the set of files with errors during loading
+addErrorLoadingFile :: MonadIO m => SessionState -> FilePath -> m ()
+addErrorLoadingFile state file =
+    liftIO $ modifyVar_' (failedFiles state) (\xs -> return $ Set.insert file xs)
+
+-- | Remove a file from the set of files with errors during loading
+removeErrorLoadingFile :: MonadIO m => SessionState -> FilePath -> m ()
+removeErrorLoadingFile state file =
+    liftIO $ modifyVar_' (failedFiles state) (\xs -> return $ Set.delete file xs)
+
+-- | Add files to the set of loaded files
+addCradleFiles :: MonadIO m => SessionState -> HashSet FilePath -> m ()
+addCradleFiles state files =
+    liftIO $ modifyVar_' (loadedFiles state) (\xs -> return $ files <> xs)
+
+-- | Remove a file from the cradle files set
+removeCradleFile :: MonadIO m => SessionState -> FilePath -> m ()
+removeCradleFile state file =
+    liftIO $ modifyVar_' (loadedFiles state) (\xs -> 
return $ Set.delete file xs) + +-- | Clear error loading files and reset to empty set +clearErrorLoadingFiles :: MonadIO m => SessionState -> m () +clearErrorLoadingFiles state = + liftIO $ modifyVar_' (failedFiles state) (const $ return Set.empty) + +-- | Clear cradle files and reset to empty set +clearCradleFiles :: MonadIO m => SessionState -> m () +clearCradleFiles state = + liftIO $ modifyVar_' (loadedFiles state) (const $ return Set.empty) + +-- | Reset the file maps in the session state +resetFileMaps :: SessionState -> STM () +resetFileMaps state = do + STM.reset (filesMap state) + STM.reset (fileToFlags state) + +-- | Insert or update file flags for a specific hieYaml and normalized file path +insertFileFlags :: SessionState -> Maybe FilePath -> NormalizedFilePath -> (IdeResult HscEnvEq, DependencyInfo) -> STM () +insertFileFlags state hieYaml ncfp flags = + STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp flags)) hieYaml (fileToFlags state) + +-- | Insert a file mapping from normalized path to hieYaml location +insertFileMapping :: SessionState -> Maybe FilePath -> NormalizedFilePath -> STM () +insertFileMapping state hieYaml ncfp = + STM.insert hieYaml ncfp (filesMap state) + +-- | Remove a file from the pending file set +removeFromPending :: SessionState -> FilePath -> STM () +removeFromPending state file = + S.delete file (pendingFiles state) + +-- | Add a file to the pending file set +addToPending :: SessionState -> FilePath -> STM () +addToPending state file = + S.insert file (pendingFiles state) + +-- | Insert multiple file mappings at once +insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () +insertAllFileMappings state mappings = + mapM_ (\(yaml, path) -> insertFileMapping state yaml path) mappings + +-- | Increment the version counter +incrementVersion :: SessionState -> IO Int +incrementVersion state = modifyVar' (version state) succ + +-- | Get files from the pending file set +getPendingFiles :: SessionState -> IO (HashSet FilePath) +getPendingFiles state = atomically $ S.toHashSet (pendingFiles state) + +-- | Handle errors during session loading by recording file as having error and removing from pending +handleSingleFileProcessingError' :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> SessionM () +handleSingleFileProcessingError' state hieYaml file e = do + handleSingleFileProcessingError state hieYaml file [renderPackageSetupException file e] mempty + +-- | Common pattern: Insert file flags, insert file mapping, and remove from pending +handleSingleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> SessionM () +handleSingleFileProcessingError state hieYaml file diags extraDepFiles = liftIO $ do + dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles + let ncfp = toNormalizedFilePath' file + let flags = ((diags, Nothing), dep) + handleSingleLoadFailure state file + atomically $ do + insertFileFlags state hieYaml ncfp flags + insertFileMapping state hieYaml ncfp + +-- | Get the set of extra files to load based on the current file path +-- If the current file is in error loading files, we fallback to single loading mode (empty set) +-- Otherwise, we remove error files from pending files and also exclude the current file +getExtraFilesToLoad :: SessionState -> FilePath -> IO [FilePath] +getExtraFilesToLoad state cfp = do + pendingFiles <- getPendingFiles state + errorFiles <- readVar (failedFiles state) + old_files <- readVar 
(loadedFiles state) + -- if the file is in error loading files, we fall back to single loading mode + return $ + Set.toList $ + if cfp `Set.member` errorFiles + then Set.empty + -- remove error files from pending files since error loading need to load one by one + else (Set.delete cfp $ pendingFiles `Set.difference` errorFiles) <> old_files + +-- | We allow users to specify a loading strategy. +-- Check whether this config was changed since the last time we have loaded +-- a session. +-- +-- If the loading configuration changed, we likely should restart the session +-- in its entirety. +didSessionLoadingPreferenceConfigChange :: SessionState -> SessionM Bool +didSessionLoadingPreferenceConfigChange s = do + clientConfig <- asks sessionClientConfig + let biosSessionLoadingVar = sessionLoadingPreferenceConfig s + mLoadingConfig <- liftIO $ readVar biosSessionLoadingVar + case mLoadingConfig of + Nothing -> do + liftIO $ writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + pure False + Just loadingConfig -> do + liftIO $ writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + pure (loadingConfig /= sessionLoading clientConfig) + +newSessionState :: IO SessionState +newSessionState = do + -- Initialize SessionState + sessionState <- SessionState + <$> newVar (Set.fromList []) -- loadedFiles + <*> newVar (Set.fromList []) -- failedFiles + <*> S.newIO -- pendingFiles + <*> newVar Map.empty -- hscEnvs + <*> STM.newIO -- fileToFlags + <*> STM.newIO -- filesMap + <*> newVar 0 -- version + <*> newVar Nothing -- sessionLoadingPreferenceConfig + return sessionState + -- | Given a root directory, return a Shake 'Action' which setups an -- 'IdeGhcSession' given a file. -- Some of the many things this does: @@ -417,20 +592,10 @@ getHieDbLoc dir = do loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions -> FilePath -> TQueue (IO ()) -> IO (Action IdeGhcSession) loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] - cradle_files <- newIORef [] - -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file - hscEnvs <- newVar Map.empty :: IO (Var HieMap) - -- Mapping from a Filepath to HscEnv - fileToFlags <- newVar Map.empty :: IO (Var FlagsMap) - -- Mapping from a Filepath to its 'hie.yaml' location. - -- Should hold the same Filepaths as 'fileToFlags', otherwise - -- they are inconsistent. So, everywhere you modify 'fileToFlags', - -- you have to modify 'filesMap' as well. 
- filesMap <- newVar HM.empty :: IO (Var FilesMap) - -- Version of the mappings above - version <- newVar 0 - biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) - let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar version) + + sessionState <- newSessionState + let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar (version sessionState)) + -- This caches the mapping from Mod.hs -> hie.yaml cradleLoc <- liftIO $ memoIO $ \v -> do res <- findCradle v @@ -442,276 +607,350 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return $ do clientConfig <- getClientConfigAction - extras@ShakeExtras{restartShakeSession, ideNc, knownTargetsVar, lspEnv + extras@ShakeExtras{ideNc, knownTargetsVar } <- getShakeExtras let invalidateShakeCache = do - void $ modifyVar' version succ + void $ incrementVersion sessionState return $ toNoFileKey GhcSessionIO - IdeOptions{ optTesting = IdeTesting optTesting - , optCheckProject = getCheckProject - , optExtensions - } <- getIdeOptions - - -- populate the knownTargetsVar with all the - -- files in the project so that `knownFiles` can learn about them and - -- we can generate a complete module graph - let extendKnownTargets newTargets = do - knownTargets <- concatForM newTargets $ \TargetDetails{..} -> - case targetTarget of - TargetFile f -> do - -- If a target file has multiple possible locations, then we - -- assume they are all separate file targets. - -- This happens with '.hs-boot' files if they are in the root directory of the project. - -- GHC reports options such as '-i. A' as 'TargetFile A.hs' instead of 'TargetModule A'. - -- In 'fromTargetId', we dutifully look for '.hs-boot' files and add them to the - -- targetLocations of the TargetDetails. Then we add everything to the 'knownTargetsVar'. - -- However, when we look for a 'Foo.hs-boot' file in 'FindImports.hs', we look for either - -- - -- * TargetFile Foo.hs-boot - -- * TargetModule Foo - -- - -- If we don't generate a TargetFile for each potential location, we will only have - -- 'TargetFile Foo.hs' in the 'knownTargetsVar', thus not find 'TargetFile Foo.hs-boot' - -- and also not find 'TargetModule Foo'. - fs <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations - pure $ map (\fp -> (TargetFile fp, Set.singleton fp)) (nubOrd (f:fs)) - TargetModule _ -> do - found <- filterM (IO.doesFileExist . 
fromNormalizedFilePath) targetLocations - return [(targetTarget, Set.fromList found)] - hasUpdate <- atomically $ do - known <- readTVar knownTargetsVar - let known' = flip mapHashed known $ \k -> unionKnownTargets k (mkKnownTargets knownTargets) - hasUpdate = if known /= known' then Just (unhashed known') else Nothing - writeTVar knownTargetsVar known' - pure hasUpdate - for_ hasUpdate $ \x -> - logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) - return $ toNoFileKey GetKnownTargets - - -- Create a new HscEnv from a hieYaml root and a set of options - let packageSetup :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO ([ComponentInfo], [ComponentInfo]) - packageSetup (hieYaml, cfp, opts, libDir) = do - -- Parse DynFlags for the newly discovered component - hscEnv <- emptyHscEnv ideNc libDir - newTargetDfs <- evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir - let deps = componentDependencies opts ++ maybeToList hieYaml - dep_info <- getDependencyInfo deps - -- Now lookup to see whether we are combining with an existing HscEnv - -- or making a new one. The lookup returns the HscEnv and a list of - -- information about other components loaded into the HscEnv - -- (unitId, DynFlag, Targets) - modifyVar hscEnvs $ \m -> do - -- Just deps if there's already an HscEnv - -- Nothing is it's the first time we are making an HscEnv - let oldDeps = Map.lookup hieYaml m - let -- Add the raw information about this component to the list - -- We will modify the unitId and DynFlags used for - -- compilation but these are the true source of - -- information. - new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newTargetDfs - all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps - -- Get all the unit-ids for things in this component - _inplace = map rawComponentUnitId $ NE.toList all_deps - - all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do - let prefix = show rawComponentUnitId - -- See Note [Avoiding bad interface files] - let cacheDirOpts = componentOptions opts - cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts - processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags - -- The final component information, mostly the same but the DynFlags don't - -- contain any packages which are also loaded - -- into the same component. - pure $ ComponentInfo - { componentUnitId = rawComponentUnitId - , componentDynFlags = processed_df - , componentTargets = rawComponentTargets - , componentFP = rawComponentFP - , componentCOptions = rawComponentCOptions - , componentDependencyInfo = rawComponentDependencyInfo - } - -- Modify the map so the hieYaml now maps to the newly updated - -- ComponentInfos - -- Returns - -- . The information for the new component which caused this cache miss - -- . 
The modified information (without -inplace flags) for - -- existing packages - let (new,old) = NE.splitAt (NE.length new_deps) all_deps' - pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) - - - let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO (IdeResult HscEnvEq,[FilePath]) - session args@(hieYaml, _cfp, _opts, _libDir) = do - (new_deps, old_deps) <- packageSetup args - - -- For each component, now make a new HscEnvEq which contains the - -- HscEnv for the hie.yaml file but the DynFlags for that component - -- For GHC's supporting multi component sessions, we create a shared - -- HscEnv but set the active component accordingly - hscEnv <- emptyHscEnv ideNc _libDir - let new_cache = newComponentCache recorder optExtensions _cfp hscEnv - all_target_details <- new_cache old_deps new_deps - - this_dep_info <- getDependencyInfo $ maybeToList hieYaml - let (all_targets, this_flags_map, this_options) - = case HM.lookup _cfp flags_map' of - Just this -> (all_targets', flags_map', this) - Nothing -> (this_target_details : all_targets', HM.insert _cfp this_flags flags_map', this_flags) - where all_targets' = concat all_target_details - flags_map' = HM.fromList (concatMap toFlagsMap all_targets') - this_target_details = TargetDetails (TargetFile _cfp) this_error_env this_dep_info [_cfp] - this_flags = (this_error_env, this_dep_info) - this_error_env = ([this_error], Nothing) - this_error = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) _cfp - (T.unlines - [ "No cradle target found. Is this file listed in the targets of your cradle?" - , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" - ]) - Nothing - - void $ modifyVar' fileToFlags $ Map.insert hieYaml this_flags_map - void $ modifyVar' filesMap $ flip HM.union (HM.fromList (map ((,hieYaml) . fst) $ concatMap toFlagsMap all_targets)) - -- The VFS doesn't change on cradle edits, re-use the old one. - -- Invalidate all the existing GhcSession build nodes by restarting the Shake session - keys2 <- invalidateShakeCache - restartShakeSession VFSUnmodified "new component" [] $ do - keys1 <- extendKnownTargets all_targets - return [keys1, keys2] - - -- Typecheck all files in the project on startup - checkProject <- getCheckProject - unless (null new_deps || not checkProject) $ do - cfps' <- liftIO $ filterM (IO.doesFileExist . 
fromNormalizedFilePath) (concatMap targetLocations all_targets) - void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do - mmt <- uses GetModificationTime cfps' - let cs_exist = catMaybes (zipWith (<$) cfps' mmt) - modIfaces <- uses GetModIface cs_exist - -- update exports map - shakeExtras <- getShakeExtras - let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces - liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) - - return $ second Map.keys this_options - - let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) - consultCradle hieYaml cfp = do - let lfpLog = makeRelative rootDir cfp - logWith recorder Info $ LogCradlePath lfpLog - when (isNothing hieYaml) $ - logWith recorder Warning $ LogCradleNotFound lfpLog - cradle <- loadCradle recorder hieYaml rootDir - when optTesting $ mRunLspT lspEnv $ - sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) - - -- Display a user friendly progress message here: They probably don't know what a cradle is - let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) - <> " (for " <> T.pack lfpLog <> ")" - eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ - withTrace "Load cradle" $ \addTag -> do - addTag "file" lfpLog - old_files <- readIORef cradle_files - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp old_files - addTag "result" (show res) - return res - - logWith recorder Debug $ LogSessionLoadingResult eopts - case eopts of - -- The cradle gave us some options so get to work turning them - -- into and HscEnv. - Right (opts, libDir, version) -> do - let compileTime = fullCompilerVersion - case reverse $ readP_to_S parseVersion version of - [] -> error $ "GHC version could not be parsed: " <> version - ((runTime, _):_) - | compileTime == runTime -> do - atomicModifyIORef' cradle_files (\xs -> (cfp:xs,())) - session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) - | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) - -- Failure case, either a cradle error or the none cradle - Left err -> do - dep_info <- getDependencyInfo (maybeToList hieYaml) - let ncfp = toNormalizedFilePath' cfp - let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - void $ modifyVar' fileToFlags $ - Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) - void $ modifyVar' filesMap $ HM.insert ncfp hieYaml - return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) - - let - -- | We allow users to specify a loading strategy. - -- Check whether this config was changed since the last time we have loaded - -- a session. + ideOptions <- getIdeOptions + + -- see Note [Serializing runs in separate thread] + -- Start the getOptionsLoop if the queue is empty + liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ do + let newSessionLoadingOptions = SessionLoadingOptions + { findCradle = cradleLoc + , .. 
+ } + sessionShake = SessionShake + { restartSession = restartShakeSession extras + , invalidateCache = invalidateShakeCache + , enqueueActions = shakeEnqueue extras + } + sessionEnv = SessionEnv + { sessionLspContext = lspEnv extras + , sessionRootDir = rootDir + , sessionIdeOptions = ideOptions + , sessionClientConfig = clientConfig + , sessionSharedNameCache = ideNc + , sessionLoadingOptions = newSessionLoadingOptions + } + + writeTQueue que (runReaderT (getOptionsLoop recorder sessionShake sessionState knownTargetsVar) sessionEnv) + + -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action + -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. + -- The GlobPattern of a FileSystemWatcher can be absolute or relative. + -- We use the absolute one because it is supported by more LSP clients. + -- Here we make sure deps are absolute and later we use those absolute deps as GlobPattern. + let absolutePathsCradleDeps (eq, deps) = (eq, fmap toAbsolutePath $ Map.keys deps) + returnWithVersion $ \file -> do + let absFile = toAbsolutePath file + absolutePathsCradleDeps <$> lookupOrWaitCache recorder sessionState absFile + +-- | Given a file, this function will return the HscEnv and the dependencies +-- it would look up the cache first, if the cache is not available, it would +-- submit a request to the getOptionsLoop to get the options for the file +-- and wait until the options are available +lookupOrWaitCache :: Recorder (WithPriority Log) -> SessionState -> FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) +lookupOrWaitCache recorder sessionState absFile = do + let ncfp = toNormalizedFilePath' absFile + res <- atomically $ do + -- wait until target file is not in pendingFiles + Extra.whenM (S.lookup absFile (pendingFiles sessionState)) STM.retry + -- check if in the cache + checkInCache sessionState ncfp + logWith recorder Debug $ LogLookupSessionCache absFile + updateDateRes <- case res of + Just r -> do + depOk <- checkDependencyInfo (snd r) + if depOk + then return $ Just r + else return Nothing + _ -> return Nothing + case updateDateRes of + Just r -> return r + Nothing -> do + -- if not ok, we need to reload the session + atomically $ addToPending sessionState absFile + lookupOrWaitCache recorder sessionState absFile + +checkInCache :: SessionState -> NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) +checkInCache sessionState ncfp = runMaybeT $ do + cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp (filesMap sessionState) + m <- MaybeT $ STM.lookup cachedHieYamlLocation (fileToFlags sessionState) + MaybeT $ pure $ HM.lookup ncfp m + +data SessionShake = SessionShake + { restartSession :: VFSModified -> String -> [DelayedAction ()] -> IO [Key] -> IO () + , invalidateCache :: IO Key + , enqueueActions :: DelayedAction () -> IO (IO ()) + } + +data SessionEnv = SessionEnv + { sessionLspContext :: Maybe (LanguageContextEnv Config) + , sessionRootDir :: FilePath + , sessionIdeOptions :: IdeOptions + , sessionClientConfig :: Config + , sessionSharedNameCache :: NameCache + , sessionLoadingOptions :: SessionLoadingOptions + } + +type SessionM = ReaderT SessionEnv IO + +-- | The main function which gets options for a file. +-- +-- The general approach is as follows: +-- 1. Find the 'hie.yaml' for the next file target, if there is any. +-- 2. Check in the cache, whether the given 'hie.yaml' was already loaded before +-- 3.1. If it wasn't, initialise a new session and continue with step 4. +-- 3.2. 
If it is loaded, check whether we need to reload the session, e.g. because the `.cabal` file was modified +-- 3.2.1. If we need to reload, remove the +getOptionsLoop :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> SessionM () +getOptionsLoop recorder sessionShake sessionState knownTargetsVar = forever $ do + -- Get the next file to load + file <- liftIO $ atomically $ S.readQueue (pendingFiles sessionState) + logWith recorder Debug (LogGetOptionsLoop file) + let ncfp = toNormalizedFilePath' file + cachedHieYamlLocation <- join <$> liftIO (atomically (STM.lookup ncfp (filesMap sessionState))) + sessionLoadingOptions <- asks sessionLoadingOptions + hieYaml <- liftIO $ findCradle sessionLoadingOptions file + let hieLoc = cachedHieYamlLocation <|> hieYaml + sessionOpts recorder sessionShake sessionState knownTargetsVar (hieLoc, file) + `Safe.catch` handleSingleFileProcessingError' sessionState hieLoc file + +-- | This caches the mapping from hie.yaml + Mod.hs -> [String] +-- Returns the Ghc session and the cradle dependencies +sessionOpts :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> (Maybe FilePath, FilePath) -> SessionM () +sessionOpts recorder sessionShake sessionState knownTargetsVar (hieYaml, file) = do + Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState) $ do + logWith recorder Info LogSessionLoadingChanged + liftIO $ atomically $ resetFileMaps sessionState + -- Don't even keep the name cache, we start from scratch here! + liftIO $ modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) + -- cleanup error loading files and cradle files + clearErrorLoadingFiles sessionState + clearCradleFiles sessionState + cacheKey <- liftIO $ invalidateCache sessionShake + liftIO $ restartSession sessionShake VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) + + v <- liftIO $ atomically $ STM.lookup hieYaml (fileToFlags sessionState) + case v >>= HM.lookup (toNormalizedFilePath' file) of + Just (_opts, old_di) -> do + deps_ok <- liftIO $ checkDependencyInfo old_di + if not deps_ok + then do + -- if deps are old, we can try to load the error files again + removeErrorLoadingFile sessionState file + removeCradleFile sessionState file + -- If the dependencies are out of date then clear both caches and start + -- again. + liftIO $ atomically $ resetFileMaps sessionState + -- Keep the same name cache + liftIO $ modifyVar_ (hscEnvs sessionState) (return . Map.adjust (const []) hieYaml) + consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file + else do + -- if deps are ok, we can just remove the file from pending files + liftIO $ atomically $ removeFromPending sessionState file + Nothing -> + consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file + +consultCradle :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> Maybe FilePath -> FilePath -> SessionM () +consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml cfp = do + loadingOptions <- asks sessionLoadingOptions + (cradle, eopts) <- loadCradleWithNotifications recorder + sessionState + (loadCradle loadingOptions recorder) + hieYaml cfp + logWith recorder Debug $ LogSessionLoadingResult eopts + let ncfp = toNormalizedFilePath' cfp + case eopts of + -- The cradle gave us some options so get to work turning them + -- into and HscEnv. 
+    Right (opts, libDir, version) -> do
+      let compileTime = fullCompilerVersion
+      case reverse $ readP_to_S parseVersion version of
+        [] -> error $ "GHC version could not be parsed: " <> version
+        ((runTime, _):_)
+          | compileTime == runTime -> session recorder sessionShake sessionState knownTargetsVar (hieYaml, ncfp, opts, libDir)
+          | otherwise -> handleSingleFileProcessingError' sessionState hieYaml cfp (GhcVersionMismatch{..})
+    -- Failure case, either a cradle error or the none cradle
+    Left err -> do
+      -- What if the file that failed to load is one of old_files?
+      let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err
+      old_files <- liftIO $ readVar (loadedFiles sessionState)
+      let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files)
+      if length errorToLoadNewFiles > 1
+        then do
+          -- We tried to load several files at once and failed, so we retry,
+          -- marking as few files as failed as possible.
+          -- Limitation: if the dependencies of old_files change while we are loading,
+          -- old_files are no longer valid, but they stay in old_files and are not
+          -- moved to failedFiles, which can make other files fail to load in batch mode.
+          liftIO $ handleBatchLoadFailure sessionState errorToLoadNewFiles
+          -- retry without other files
+          logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles)
+          consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml cfp
+        else do
+          -- we are only loading this file and it failed
+          let res = map (\err' -> renderCradleError err' cradle ncfp) err
+          handleSingleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err
+
+session ::
+  Recorder (WithPriority Log) ->
+  SessionShake ->
+  SessionState ->
+  TVar (Hashed KnownTargets) ->
+  (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) ->
+  SessionM ()
+session recorder sessionShake sessionState knownTargetsVar (hieYaml, cfp, opts, libDir) = do
+  let initEmptyHscEnv = emptyHscEnvM libDir
+  (new_deps, old_deps) <- packageSetup recorder sessionState initEmptyHscEnv (hieYaml, cfp, opts)
+
+  -- For each component, now make a new HscEnvEq which contains the
+  -- HscEnv for the hie.yaml file but the DynFlags for that component
+  -- For GHC's supporting multi component sessions, we create a shared
+  -- HscEnv but set the active component accordingly
+  hscEnv <- initEmptyHscEnv
+  ideOptions <- asks sessionIdeOptions
+  let new_cache = newComponentCache (cmapWithPrio LogSessionGhc recorder) (optExtensions ideOptions) cfp hscEnv
+  all_target_details <- liftIO $ new_cache old_deps new_deps
+  (all_targets, this_flags_map) <- liftIO $ addErrorTargetIfUnknown all_target_details hieYaml cfp
+
+  liftIO $ handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets
+  -- Typecheck all files in the project on startup
+  liftIO $ loadKnownTargets recorder sessionShake (optCheckProject ideOptions) knownTargetsVar new_deps all_targets
+
+-- | Create a new HscEnv from a hieYaml root and a set of options
+packageSetup :: Recorder (WithPriority Log) -> SessionState -> SessionM HscEnv -> (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> SessionM ([ComponentInfo], [ComponentInfo])
+packageSetup recorder sessionState newEmptyHscEnv (hieYaml, cfp, opts) = do
+  getCacheDirs <- asks (getCacheDirs . sessionLoadingOptions)
+  haddockparse <- asks (optHaddockParse . 
sessionIdeOptions) + rootDir <- asks sessionRootDir + -- Parse DynFlags for the newly discovered component + hscEnv <- newEmptyHscEnv + newTargetDfs <- liftIO $ evalGhcEnv hscEnv $ setOptions haddockparse cfp opts (hsc_dflags hscEnv) rootDir + let deps = componentDependencies opts ++ maybeToList hieYaml + dep_info <- liftIO $ getDependencyInfo (fmap (toAbsolute rootDir) deps) + -- Now lookup to see whether we are combining with an existing HscEnv + -- or making a new one. The lookup returns the HscEnv and a list of + -- information about other components loaded into the HscEnv + -- (unitId, DynFlag, Targets) + liftIO $ modifyVar (hscEnvs sessionState) $ + addComponentInfo (cmapWithPrio LogSessionGhc recorder) getCacheDirs dep_info newTargetDfs (hieYaml, cfp, opts) + +addErrorTargetIfUnknown :: Foldable t => t [TargetDetails] -> Maybe FilePath -> NormalizedFilePath -> IO ([TargetDetails], HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) +addErrorTargetIfUnknown all_target_details hieYaml cfp = do + let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') + all_targets' = concat all_target_details + this_dep_info <- getDependencyInfo $ maybeToList hieYaml + let (all_targets, this_flags_map) = case HM.lookup cfp flags_map' of + Just _ -> (all_targets', flags_map') + Nothing -> (this_target_details : all_targets', HM.insert cfp this_flags flags_map') + where + this_target_details = TargetDetails (TargetFile cfp) this_error_env this_dep_info [cfp] + this_flags = (this_error_env, this_dep_info) + this_error_env = ([this_error], Nothing) + this_error = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) cfp + (T.unlines + [ "No cradle target found. Is this file listed in the targets of your cradle?" + , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" + ]) + Nothing + pure (all_targets, this_flags_map) + +-- | Populate the knownTargetsVar with all the +-- files in the project so that `knownFiles` can learn about them and +-- we can generate a complete module graph +extendKnownTargets :: Recorder (WithPriority Log) -> TVar (Hashed KnownTargets) -> [TargetDetails] -> IO Key +extendKnownTargets recorder knownTargetsVar newTargets = do + knownTargets <- concatForM newTargets $ \TargetDetails{..} -> + case targetTarget of + TargetFile f -> do + -- If a target file has multiple possible locations, then we + -- assume they are all separate file targets. + -- This happens with '.hs-boot' files if they are in the root directory of the project. + -- GHC reports options such as '-i. A' as 'TargetFile A.hs' instead of 'TargetModule A'. + -- In 'fromTargetId', we dutifully look for '.hs-boot' files and add them to the + -- targetLocations of the TargetDetails. Then we add everything to the 'knownTargetsVar'. + -- However, when we look for a 'Foo.hs-boot' file in 'FindImports.hs', we look for either -- - -- If the loading configuration changed, we likely should restart the session - -- in its entirety. 
- didSessionLoadingPreferenceConfigChange :: IO Bool - didSessionLoadingPreferenceConfigChange = do - mLoadingConfig <- readVar biosSessionLoadingVar - case mLoadingConfig of - Nothing -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) - pure False - Just loadingConfig -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) - pure (loadingConfig /= sessionLoading clientConfig) - - -- This caches the mapping from hie.yaml + Mod.hs -> [String] - -- Returns the Ghc session and the cradle dependencies - let sessionOpts :: (Maybe FilePath, FilePath) - -> IO (IdeResult HscEnvEq, [FilePath]) - sessionOpts (hieYaml, file) = do - Extra.whenM didSessionLoadingPreferenceConfigChange $ do - logWith recorder Info LogSessionLoadingChanged - -- If the dependencies are out of date then clear both caches and start - -- again. - modifyVar_ fileToFlags (const (return Map.empty)) - modifyVar_ filesMap (const (return HM.empty)) - -- Don't even keep the name cache, we start from scratch here! - modifyVar_ hscEnvs (const (return Map.empty)) - - v <- Map.findWithDefault HM.empty hieYaml <$> readVar fileToFlags - let cfp = toAbsolutePath file - case HM.lookup (toNormalizedFilePath' cfp) v of - Just (opts, old_di) -> do - deps_ok <- checkDependencyInfo old_di - if not deps_ok - then do - -- If the dependencies are out of date then clear both caches and start - -- again. - modifyVar_ fileToFlags (const (return Map.empty)) - modifyVar_ filesMap (const (return HM.empty)) - -- Keep the same name cache - modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) - consultCradle hieYaml cfp - else return (opts, Map.keys old_di) - Nothing -> consultCradle hieYaml cfp - - -- The main function which gets options for a file. We only want one of these running - -- at a time. Therefore the IORef contains the currently running cradle, if we try - -- to get some more options then we wait for the currently running action to finish - -- before attempting to do so. - let getOptions :: FilePath -> IO (IdeResult HscEnvEq, [FilePath]) - getOptions file = do - let ncfp = toNormalizedFilePath' (toAbsolutePath file) - cachedHieYamlLocation <- HM.lookup ncfp <$> readVar filesMap - hieYaml <- cradleLoc file - let - -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action - -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. - -- The GlobPattern of a FileSystemWatcher can be absolute or relative. - -- We use the absolute one because it is supported by more LSP clients. - -- Here we make sure deps are absolute and later we use those absolute deps as GlobPattern. - absolutePathsCradleDeps (eq, deps) - = (eq, fmap toAbsolutePath deps) - (absolutePathsCradleDeps <$> sessionOpts (join cachedHieYamlLocation <|> hieYaml, file)) `Safe.catch` \e -> - return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml) + -- * TargetFile Foo.hs-boot + -- * TargetModule Foo + -- + -- If we don't generate a TargetFile for each potential location, we will only have + -- 'TargetFile Foo.hs' in the 'knownTargetsVar', thus not find 'TargetFile Foo.hs-boot' + -- and also not find 'TargetModule Foo'. + fs <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations + pure $ map (\fp -> (TargetFile fp, Set.singleton fp)) (nubOrd (f:fs)) + TargetModule _ -> do + found <- filterM (IO.doesFileExist . 
fromNormalizedFilePath) targetLocations + return [(targetTarget, Set.fromList found)] + hasUpdate <- atomically $ do + known <- readTVar knownTargetsVar + let known' = flip mapHashed known $ \k -> unionKnownTargets k (mkKnownTargets knownTargets) + hasUpdate = if known /= known' then Just (unhashed known') else Nothing + writeTVar knownTargetsVar known' + pure hasUpdate + for_ hasUpdate $ \x -> + logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) + return $ toNoFileKey GetKnownTargets + +loadKnownTargets :: Recorder (WithPriority Log) -> SessionShake -> IO Bool -> TVar (Hashed KnownTargets) -> [ComponentInfo] -> [TargetDetails] -> IO () +loadKnownTargets recorder sessionShake getCheckProject knownTargetsVar new_deps targets = do + checkProject <- getCheckProject + + -- The VFS doesn't change on cradle edits, re-use the old one. + -- Invalidate all the existing GhcSession build nodes by restarting the Shake session + restartSession sessionShake VFSUnmodified "new component" [] $ do + keys2 <- invalidateCache sessionShake + keys1 <- extendKnownTargets recorder knownTargetsVar targets + unless (null new_deps || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations targets) + void $ enqueueActions sessionShake $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + shakeExtras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) + return [keys1, keys2] + +loadCradleWithNotifications :: + Recorder (WithPriority Log) -> + SessionState -> + (Maybe FilePath -> FilePath -> IO (Cradle Void)) -> + Maybe FilePath -> + FilePath -> + SessionM (Cradle Void, Either [CradleError] (ComponentOptions, FilePath, String)) +loadCradleWithNotifications recorder sessionState loadCradle hieYaml cfp = do + IdeTesting isTesting <- asks (optTesting . sessionIdeOptions) + sessionPref <- asks (sessionLoading . sessionClientConfig) + lspEnv <- asks sessionLspContext + rootDir <- asks sessionRootDir + let lfpLog = makeRelative rootDir cfp + logWith recorder Info $ LogCradlePath lfpLog + when (isNothing hieYaml) $ + logWith recorder Warning $ LogCradleNotFound lfpLog + cradle <- liftIO $ loadCradle hieYaml rootDir + when (isTesting) $ mRunLspT lspEnv $ + sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) + + -- Display a user friendly progress message here: They probably don't know what a cradle is + let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) + <> " (for " <> T.pack lfpLog <> ")" + + extraToLoads <- liftIO $ getExtraFilesToLoad sessionState cfp + eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ + withTrace "Load cradle" $ \addTag -> do + addTag "file" lfpLog + res <- liftIO $ cradleToOptsAndLibDir recorder sessionPref cradle cfp extraToLoads + addTag "result" (show res) + return res + pure (cradle, eopts) - returnWithVersion $ \file -> do - -- see Note [Serializing runs in separate thread] - awaitRunInThread que $ getOptions file -- | Run the specific cradle on a specific FilePath via hie-bios. 
-- This then builds dependencies or whatever based on the cradle, gets the @@ -747,338 +986,26 @@ cradleToOptsAndLibDir recorder loadConfig cradle file old_fps = do PreferSingleComponentLoading -> LoadFile PreferMultiComponentLoading -> LoadWithContext old_fps -emptyHscEnv :: NameCache -> FilePath -> IO HscEnv -emptyHscEnv nc libDir = do - -- We call setSessionDynFlags so that the loader is initialised - -- We need to do this before we call initUnits. - env <- runGhc (Just libDir) $ - getSessionDynFlags >>= setSessionDynFlags >> getSession - pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env) - -data TargetDetails = TargetDetails - { - targetTarget :: !Target, - targetEnv :: !(IdeResult HscEnvEq), - targetDepends :: !DependencyInfo, - targetLocations :: ![NormalizedFilePath] - } +-- ---------------------------------------------------------------------------- +-- Utilities +-- ---------------------------------------------------------------------------- -fromTargetId :: [FilePath] -- ^ import paths - -> [String] -- ^ extensions to consider - -> TargetId - -> IdeResult HscEnvEq - -> DependencyInfo - -> IO [TargetDetails] --- For a target module we consider all the import paths -fromTargetId is exts (GHC.TargetModule modName) env dep = do - let fps = [i moduleNameSlashes modName -<.> ext <> boot - | ext <- exts - , i <- is - , boot <- ["", "-boot"] - ] - let locs = fmap toNormalizedFilePath' fps - return [TargetDetails (TargetModule modName) env dep locs] --- For a 'TargetFile' we consider all the possible module names -fromTargetId _ _ (GHC.TargetFile f _) env deps = do - let nf = toNormalizedFilePath' f - let other - | "-boot" `isSuffixOf` f = toNormalizedFilePath' (L.dropEnd 5 $ fromNormalizedFilePath nf) - | otherwise = toNormalizedFilePath' (fromNormalizedFilePath nf ++ "-boot") - return [TargetDetails (TargetFile nf) env deps [nf, other]] +emptyHscEnvM :: FilePath -> SessionM HscEnv +emptyHscEnvM libDir = do + nc <- asks sessionSharedNameCache + liftIO $ Ghc.emptyHscEnv nc libDir toFlagsMap :: TargetDetails -> [(NormalizedFilePath, (IdeResult HscEnvEq, DependencyInfo))] toFlagsMap TargetDetails{..} = [ (l, (targetEnv, targetDepends)) | l <- targetLocations] - -setNameCache :: NameCache -> HscEnv -> HscEnv -setNameCache nc hsc = hsc { hsc_NC = nc } - -#if MIN_VERSION_ghc(9,13,0) --- Moved back to implementation in GHC. -checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages] -checkHomeUnitsClosed' ue _ = checkHomeUnitsClosed ue -#else --- This function checks the important property that if both p and q are home units --- then any dependency of p, which transitively depends on q is also a home unit. 
--- GHC had an implementation of this function, but it was horribly inefficient --- We should move back to the GHC implementation on compilers where --- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12162 is included -checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> Maybe (Compat.MsgEnvelope DriverMessage) -checkHomeUnitsClosed' ue home_id_set - | OS.null bad_unit_ids = Nothing - | otherwise = Just (GHC.mkPlainErrorMsgEnvelope rootLoc $ DriverHomePackagesNotClosed (OS.toList bad_unit_ids)) - where - bad_unit_ids = upwards_closure OS.\\ home_id_set - rootLoc = mkGeneralSrcSpan (Compat.fsLit "") - - graph :: Graph (Node UnitId UnitId) - graph = graphFromEdgedVerticesUniq graphNodes - - -- downwards closure of graph - downwards_closure - = graphFromEdgedVerticesUniq [ DigraphNode uid uid (OS.toList deps) - | (uid, deps) <- Map.toList (allReachable graph node_key)] - - inverse_closure = transposeG downwards_closure - - upwards_closure = OS.fromList $ map node_key $ reachablesG inverse_closure [DigraphNode uid uid [] | uid <- OS.toList home_id_set] - - all_unit_direct_deps :: UniqMap UnitId (OS.Set UnitId) - all_unit_direct_deps - = unitEnv_foldWithKey go emptyUniqMap $ ue_home_unit_graph ue - where - go rest this this_uis = - plusUniqMap_C OS.union - (addToUniqMap_C OS.union external_depends this (OS.fromList this_deps)) - rest - where - external_depends = mapUniqMap (OS.fromList . unitDepends) -#if !MIN_VERSION_ghc(9,7,0) - $ listToUniqMap $ Map.toList -#endif - - $ unitInfoMap this_units - this_units = homeUnitEnv_units this_uis - this_deps = [ Compat.toUnitId unit | (unit,Just _) <- explicitUnits this_units] - - graphNodes :: [Node UnitId UnitId] - graphNodes = go OS.empty home_id_set - where - go done todo - = case OS.minView todo of - Nothing -> [] - Just (uid, todo') - | OS.member uid done -> go done todo' - | otherwise -> case lookupUniqMap all_unit_direct_deps uid of - Nothing -> pprPanic "uid not found" (Compat.ppr (uid, all_unit_direct_deps)) - Just depends -> - let todo'' = (depends OS.\\ done) `OS.union` todo' - in DigraphNode uid uid (OS.toList depends) : go (OS.insert uid done) todo'' -#endif - --- | Create a mapping from FilePaths to HscEnvEqs --- This combines all the components we know about into --- an appropriate session, which is a multi component --- session on GHC 9.4+ -newComponentCache - :: Recorder (WithPriority Log) - -> [String] -- ^ File extensions to consider - -> NormalizedFilePath -- ^ Path to file that caused the creation of this component - -> HscEnv -- ^ An empty HscEnv - -> [ComponentInfo] -- ^ New components to be loaded - -> [ComponentInfo] -- ^ old, already existing components - -> IO [ [TargetDetails] ] -newComponentCache recorder exts _cfp hsc_env old_cis new_cis = do - let cis = Map.unionWith unionCIs (mkMap new_cis) (mkMap old_cis) - -- When we have multiple components with the same uid, - -- prefer the new one over the old. - -- However, we might have added some targets to the old unit - -- (see special target), so preserve those - unionCIs new_ci old_ci = new_ci { componentTargets = componentTargets new_ci ++ componentTargets old_ci } - mkMap = Map.fromListWith unionCIs . 
map (\ci -> (componentUnitId ci, ci)) - let dfs = map componentDynFlags $ Map.elems cis - uids = Map.keys cis - logWith recorder Info $ LogMakingNewHscEnv uids - hscEnv' <- -- Set up a multi component session with the other units on GHC 9.4 - Compat.initUnits dfs hsc_env - - let closure_errs = maybeToList $ checkHomeUnitsClosed' (hsc_unit_env hscEnv') (hsc_all_home_unit_ids hscEnv') - closure_err_to_multi_err err = - ideErrorWithSource - (Just "cradle") (Just DiagnosticSeverity_Warning) _cfp - (T.pack (Compat.printWithoutUniques (singleMessage err))) - (Just (fmap GhcDriverMessage err)) - multi_errs = map closure_err_to_multi_err closure_errs - bad_units = OS.fromList $ concat $ do - x <- map errMsgDiagnostic closure_errs - DriverHomePackagesNotClosed us <- pure x - pure us - isBad ci = (homeUnitId_ (componentDynFlags ci)) `OS.member` bad_units - -- Whenever we spin up a session on Linux, dynamically load libm.so.6 - -- in. We need this in case the binary is statically linked, in which - -- case the interactive session will fail when trying to load - -- ghc-prim, which happens whenever Template Haskell is being - -- evaluated or haskell-language-server's eval plugin tries to run - -- some code. If the binary is dynamically linked, then this will have - -- no effect. - -- See https://github.com/haskell/haskell-language-server/issues/221 - -- We need to do this after the call to setSessionDynFlags initialises - -- the loader - when (os == "linux") $ do - initObjLinker hscEnv' - res <- loadDLL hscEnv' "libm.so.6" - case res of - Nothing -> pure () - Just err -> logWith recorder Error $ LogDLLLoadError err - - forM (Map.elems cis) $ \ci -> do - let df = componentDynFlags ci - thisEnv <- do - -- In GHC 9.4 we have multi component support, and we have initialised all the units - -- above. - -- We just need to set the current unit here - pure $ hscSetActiveUnitId (homeUnitId_ df) hscEnv' - henv <- newHscEnvEq thisEnv - let targetEnv = (if isBad ci then multi_errs else [], Just henv) - targetDepends = componentDependencyInfo ci - logWith recorder Debug $ LogNewComponentCache (targetEnv, targetDepends) - evaluate $ liftRnf rwhnf $ componentTargets ci - - let mk t = fromTargetId (importPaths df) exts (targetId t) targetEnv targetDepends - ctargets <- concatMapM mk (componentTargets ci) - - return (L.nubOrdOn targetTarget ctargets) - -{- Note [Avoiding bad interface files] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Originally, we set the cache directory for the various components once -on the first occurrence of the component. -This works fine if these components have no references to each other, -but you have components that depend on each other, the interface files are -updated for each component. -After restarting the session and only opening the component that depended -on the other, suddenly the interface files of this component are stale. -However, from the point of view of `ghcide`, they do not look stale, -thus, not regenerated and the IDE shows weird errors such as: -``` -typecheckIface -Declaration for Rep_ClientRunFlags -Axiom branches Rep_ClientRunFlags: - Failed to load interface for ‘Distribution.Simple.Flag’ - Use -v to see a list of the files searched for. 
-``` -and -``` -expectJust checkFamInstConsistency -CallStack (from HasCallStack): - error, called at compiler\\utils\\Maybes.hs:55:27 in ghc:Maybes - expectJust, called at compiler\\typecheck\\FamInst.hs:461:30 in ghc:FamInst -``` - -To mitigate this, we set the cache directory for each component dependent -on the components of the current `HscEnv`, additionally to the component options -of the respective components. -Assume two components, c1, c2, where c2 depends on c1, and the options of the -respective components are co1, co2. -If we want to load component c2, followed by c1, we set the cache directory for -each component in this way: - - * Load component c2 - * (Cache Directory State) - - name of c2 + co2 - * Load component c1 - * (Cache Directory State) - - name of c2 + name of c1 + co2 - - name of c2 + name of c1 + co1 - -Overall, we created three cache directories. If we opened c1 first, then we -create a fourth cache directory. -This makes sure that interface files are always correctly updated. - -Since this causes a lot of recompilation, we only update the cache-directory, -if the dependencies of a component have really changed. -E.g. when you load two executables, they can not depend on each other. They -should be filtered out, such that we dont have to re-compile everything. --} - --- | Set the cache-directory based on the ComponentOptions and a list of --- internal packages. --- For the exact reason, see Note [Avoiding bad interface files]. -setCacheDirs :: MonadUnliftIO m => Recorder (WithPriority Log) -> CacheDirs -> DynFlags -> m DynFlags -setCacheDirs recorder CacheDirs{..} dflags = do - logWith recorder Info $ LogInterfaceFilesCacheDir (fromMaybe cacheDir hiCacheDir) - pure $ dflags - & maybe id setHiDir hiCacheDir - & maybe id setHieDir hieCacheDir - & maybe id setODir oCacheDir - -- See Note [Multi Cradle Dependency Info] -type DependencyInfo = Map.Map FilePath (Maybe UTCTime) type HieMap = Map.Map (Maybe FilePath) [RawComponentInfo] -- | Maps a "hie.yaml" location to all its Target Filepaths and options. -type FlagsMap = Map.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) +type FlagsMap = STM.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) -- | Maps a Filepath to its respective "hie.yaml" location. -- It aims to be the reverse of 'FlagsMap'. -type FilesMap = HM.HashMap NormalizedFilePath (Maybe FilePath) - --- This is pristine information about a component -data RawComponentInfo = RawComponentInfo - { rawComponentUnitId :: UnitId - -- | Unprocessed DynFlags. Contains inplace packages such as libraries. - -- We do not want to use them unprocessed. - , rawComponentDynFlags :: DynFlags - -- | All targets of this components. - , rawComponentTargets :: [GHC.Target] - -- | Filepath which caused the creation of this component - , rawComponentFP :: NormalizedFilePath - -- | Component Options used to load the component. - , rawComponentCOptions :: ComponentOptions - -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file - -- to last modification time. See Note [Multi Cradle Dependency Info]. - , rawComponentDependencyInfo :: DependencyInfo - } - --- This is processed information about the component, in particular the dynflags will be modified. -data ComponentInfo = ComponentInfo - { componentUnitId :: UnitId - -- | Processed DynFlags. Does not contain inplace packages such as local - -- libraries. Can be used to actually load this Component. 
- , componentDynFlags :: DynFlags - -- | All targets of this components. - , componentTargets :: [GHC.Target] - -- | Filepath which caused the creation of this component - , componentFP :: NormalizedFilePath - -- | Component Options used to load the component. - , componentCOptions :: ComponentOptions - -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file - -- to last modification time. See Note [Multi Cradle Dependency Info] - , componentDependencyInfo :: DependencyInfo - } - --- | Check if any dependency has been modified lately. -checkDependencyInfo :: DependencyInfo -> IO Bool -checkDependencyInfo old_di = do - di <- getDependencyInfo (Map.keys old_di) - return (di == old_di) - --- Note [Multi Cradle Dependency Info] --- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ --- Why do we implement our own file modification tracking here? --- The primary reason is that the custom caching logic is quite complicated and going into shake --- adds even more complexity and more indirection. I did try for about 5 hours to work out how to --- use shake rules rather than IO but eventually gave up. - --- | Computes a mapping from a filepath to its latest modification date. --- See Note [Multi Cradle Dependency Info] why we do this ourselves instead --- of letting shake take care of it. -getDependencyInfo :: [FilePath] -> IO DependencyInfo -getDependencyInfo fs = Map.fromList <$> mapM do_one fs - - where - safeTryIO :: IO a -> IO (Either IOException a) - safeTryIO = Safe.try - - do_one :: FilePath -> IO (FilePath, Maybe UTCTime) - do_one fp = (fp,) . eitherToMaybe <$> safeTryIO (getModificationTime fp) - --- | This function removes all the -package flags which refer to packages we --- are going to deal with ourselves. For example, if a executable depends --- on a library component, then this function will remove the library flag --- from the package flags for the executable --- --- There are several places in GHC (for example the call to hptInstances in --- tcRnImports) which assume that all modules in the HPT have the same unit --- ID. Therefore we create a fake one and give them all the same unit id. -_removeInplacePackages --Only used in ghc < 9.4 - :: UnitId -- ^ fake uid to use for our internal component - -> [UnitId] - -> DynFlags - -> (DynFlags, [UnitId]) -_removeInplacePackages fake_uid us df = (setHomeUnitId_ fake_uid $ - df { packageFlags = ps }, uids) - where - (uids, ps) = Compat.filterInplaceUnits us (packageFlags df) +type FilesMap = STM.Map NormalizedFilePath (Maybe FilePath) -- | Memoize an IO function, with the characteristics: -- @@ -1097,118 +1024,6 @@ memoIO op = do return (Map.insert k res mp, res) Just res -> return (mp, res) -unit_flags :: [Flag (CmdLineP [String])] -unit_flags = [defFlag "unit" (SepArg addUnit)] - -addUnit :: String -> EwM (CmdLineP [String]) () -addUnit unit_str = liftEwM $ do - units <- getCmdLineState - putCmdLineState (unit_str : units) - --- | Throws if package flags are unsatisfiable -setOptions :: GhcMonad m - => NormalizedFilePath - -> ComponentOptions - -> DynFlags - -> FilePath -- ^ root dir, see Note [Root Directory] - -> m (NonEmpty (DynFlags, [GHC.Target])) -setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do - ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) - case NE.nonEmpty units of - Just us -> initMulti us - Nothing -> do - (df, targets) <- initOne (map unLoc theOpts') - -- A special target for the file which caused this wonderful - -- component to be created. 
In case the cradle doesn't list all the targets for - -- the component, in which case things will be horribly broken anyway. - -- - -- When we have a singleComponent that is caused to be loaded due to a - -- file, we assume the file is part of that component. This is useful - -- for bare GHC sessions, such as many of the ones used in the testsuite - -- - -- We don't do this when we have multiple components, because each - -- component better list all targets or there will be anarchy. - -- It is difficult to know which component to add our file to in - -- that case. - -- Multi unit arguments are likely to come from cabal, which - -- does list all targets. - -- - -- If we don't end up with a target for the current file in the end, then - -- we will report it as an error for that file - let abs_fp = toAbsolute rootDir (fromNormalizedFilePath cfp) - let special_target = Compat.mkSimpleTarget df abs_fp - pure $ (df, special_target : targets) :| [] - where - initMulti unitArgFiles = - forM unitArgFiles $ \f -> do - args <- liftIO $ expandResponse [f] - initOne args - initOne this_opts = do - (dflags', targets') <- addCmdOpts this_opts dflags - let dflags'' = - case unitIdString (homeUnitId_ dflags') of - -- cabal uses main for the unit id of all executable packages - -- This makes multi-component sessions confused about what - -- options to use for that component. - -- Solution: hash the options and use that as part of the unit id - -- This works because there won't be any dependencies on the - -- executable unit. - "main" -> - let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts) - hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash)) - in setHomeUnitId_ hashed_uid dflags' - _ -> dflags' - - let targets = makeTargetsAbsolute root targets' - root = case workingDirectory dflags'' of - Nothing -> compRoot - Just wdir -> compRoot wdir - let dflags''' = - setWorkingDirectory root $ - disableWarningsAsErrors $ - -- disabled, generated directly by ghcide instead - flip gopt_unset Opt_WriteInterface $ - -- disabled, generated directly by ghcide instead - -- also, it can confuse the interface stale check - dontWriteHieFiles $ - setIgnoreInterfacePragmas $ - setBytecodeLinkerOptions $ - disableOptimisation $ - Compat.setUpTypedHoles $ - makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory - dflags'' - return (dflags''', targets) - -setIgnoreInterfacePragmas :: DynFlags -> DynFlags -setIgnoreInterfacePragmas df = - gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges - -disableOptimisation :: DynFlags -> DynFlags -disableOptimisation df = updOptLevel 0 df - -setHiDir :: FilePath -> DynFlags -> DynFlags -setHiDir f d = - -- override user settings to avoid conflicts leading to recompilation - d { hiDir = Just f} - -setODir :: FilePath -> DynFlags -> DynFlags -setODir f d = - -- override user settings to avoid conflicts leading to recompilation - d { objectDir = Just f} - -getCacheDirsDefault :: String -> [String] -> IO CacheDirs -getCacheDirsDefault prefix opts = do - dir <- Just <$> getXdgDirectory XdgCache (cacheDir prefix ++ "-" ++ opts_hash) - return $ CacheDirs dir dir dir - where - -- Create a unique folder per set of different GHC options, assuming that each different set of - -- GHC options will create incompatible interface files. 
- opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts) - --- | Sub directory for the cache path -cacheDir :: String -cacheDir = "ghcide" - ---------------------------------------------------------------------------------------------------- data PackageSetupException diff --git a/ghcide/session-loader/Development/IDE/Session/Dependency.hs b/ghcide/session-loader/Development/IDE/Session/Dependency.hs new file mode 100644 index 0000000000..deedf809b8 --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/Dependency.hs @@ -0,0 +1,35 @@ +module Development.IDE.Session.Dependency where + +import Control.Exception.Safe as Safe +import Data.Either.Extra +import qualified Data.Map.Strict as Map +import Data.Time.Clock +import System.Directory + +type DependencyInfo = Map.Map FilePath (Maybe UTCTime) + +-- | Check if any dependency has been modified lately. +checkDependencyInfo :: DependencyInfo -> IO Bool +checkDependencyInfo old_di = do + di <- getDependencyInfo (Map.keys old_di) + return (di == old_di) + +-- Note [Multi Cradle Dependency Info] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Why do we implement our own file modification tracking here? +-- The primary reason is that the custom caching logic is quite complicated and going into shake +-- adds even more complexity and more indirection. I did try for about 5 hours to work out how to +-- use shake rules rather than IO but eventually gave up. + +-- | Computes a mapping from a filepath to its latest modification date. +-- See Note [Multi Cradle Dependency Info] why we do this ourselves instead +-- of letting shake take care of it. +getDependencyInfo :: [FilePath] -> IO DependencyInfo +getDependencyInfo fs = Map.fromList <$> mapM do_one fs + + where + safeTryIO :: IO a -> IO (Either IOException a) + safeTryIO = Safe.try + + do_one :: FilePath -> IO (FilePath, Maybe UTCTime) + do_one fp = (fp,) . 
eitherToMaybe <$> safeTryIO (getModificationTime fp) diff --git a/ghcide/session-loader/Development/IDE/Session/Ghc.hs b/ghcide/session-loader/Development/IDE/Session/Ghc.hs new file mode 100644 index 0000000000..7a84263ec9 --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/Ghc.hs @@ -0,0 +1,540 @@ +{-# LANGUAGE CPP #-} +module Development.IDE.Session.Ghc where + +import Control.Monad +import Control.Monad.Extra as Extra +import Control.Monad.IO.Class +import qualified Crypto.Hash.SHA1 as H +import qualified Data.ByteString.Base16 as B16 +import qualified Data.ByteString.Char8 as B +import Data.Function +import Data.List +import Data.List.Extra as L +import Data.List.NonEmpty (NonEmpty (..)) +import qualified Data.List.NonEmpty as NE +import qualified Data.Map.Strict as Map +import Data.Maybe +import qualified Data.Text as T +import Development.IDE.Core.Shake hiding (Log, knownTargets, + withHieDb) +import qualified Development.IDE.GHC.Compat as Compat +import Development.IDE.GHC.Compat.CmdLine +import Development.IDE.GHC.Compat.Core hiding (Target, TargetFile, + TargetModule, Var, Warning, + getOptions) +import qualified Development.IDE.GHC.Compat.Core as GHC +import Development.IDE.GHC.Compat.Env hiding (Logger) +import Development.IDE.GHC.Compat.Units (UnitId) +import Development.IDE.GHC.Util +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) +import Development.IDE.Types.Location +import GHC.ResponseFile +import qualified HIE.Bios.Cradle.Utils as HieBios +import HIE.Bios.Environment hiding (getCacheDir) +import HIE.Bios.Types hiding (Log) +import Ide.Logger (Pretty (pretty), + Priority (Debug, Error, Info), + Recorder, WithPriority, + logWith, viaShow, (<+>)) +import System.Directory +import System.FilePath +import System.Info + + +import Control.DeepSeq +import Control.Exception (evaluate) +import Control.Monad.IO.Unlift (MonadUnliftIO) +import qualified Data.Set as OS +import qualified Development.IDE.GHC.Compat.Util as Compat +import Development.IDE.Session.Dependency +import Development.IDE.Types.Options +import GHC.Data.Graph.Directed +import Ide.PluginUtils (toAbsolute) + +import GHC.Driver.Env (hsc_all_home_unit_ids) +import GHC.Driver.Errors.Types +import GHC.Types.Error (errMsgDiagnostic, + singleMessage) +import GHC.Unit.State + +data Log + = LogInterfaceFilesCacheDir !FilePath + | LogMakingNewHscEnv ![UnitId] + | LogNewComponentCache !(([FileDiagnostic], Maybe HscEnvEq), DependencyInfo) + | LogDLLLoadError !String +deriving instance Show Log + +instance Pretty Log where + pretty = \case + LogInterfaceFilesCacheDir path -> + "Interface files cache directory:" <+> pretty path + LogMakingNewHscEnv inPlaceUnitIds -> + "Making new HscEnv. In-place unit ids:" <+> pretty (map show inPlaceUnitIds) + LogNewComponentCache componentCache -> + "New component cache HscEnvEq:" <+> viaShow componentCache + LogDLLLoadError errorString -> + "Error dynamically loading libm.so.6:" <+> pretty errorString + +-- This is pristine information about a component +data RawComponentInfo = RawComponentInfo + { rawComponentUnitId :: UnitId + -- | Unprocessed DynFlags. Contains inplace packages such as libraries. + -- We do not want to use them unprocessed. + , rawComponentDynFlags :: DynFlags + -- | All targets of this components. + , rawComponentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , rawComponentFP :: NormalizedFilePath + -- | Component Options used to load the component. 
+ , rawComponentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. See Note [Multi Cradle Dependency Info]. + , rawComponentDependencyInfo :: DependencyInfo + } + +-- This is processed information about the component, in particular the dynflags will be modified. +data ComponentInfo = ComponentInfo + { componentUnitId :: UnitId + -- | Processed DynFlags. Does not contain inplace packages such as local + -- libraries. Can be used to actually load this Component. + , componentDynFlags :: DynFlags + -- | All targets of this components. + , componentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , componentFP :: NormalizedFilePath + -- | Component Options used to load the component. + , componentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. See Note [Multi Cradle Dependency Info] + , componentDependencyInfo :: DependencyInfo + } + + +unit_flags :: [Flag (CmdLineP [String])] +unit_flags = [defFlag "unit" (SepArg addUnit)] + +addUnit :: String -> EwM (CmdLineP [String]) () +addUnit unit_str = liftEwM $ do + units <- getCmdLineState + putCmdLineState (unit_str : units) + + +-- | Create a mapping from FilePaths to HscEnvEqs +-- This combines all the components we know about into +-- an appropriate session, which is a multi component +-- session on GHC 9.4+ +newComponentCache + :: Recorder (WithPriority Log) + -> [String] -- ^ File extensions to consider + -> NormalizedFilePath -- ^ Path to file that caused the creation of this component + -> HscEnv -- ^ An empty HscEnv + -> [ComponentInfo] -- ^ New components to be loaded + -> [ComponentInfo] -- ^ old, already existing components + -> IO [ [TargetDetails] ] +newComponentCache recorder exts _cfp hsc_env old_cis new_cis = do + let cis = Map.unionWith unionCIs (mkMap new_cis) (mkMap old_cis) + -- When we have multiple components with the same uid, + -- prefer the new one over the old. + -- However, we might have added some targets to the old unit + -- (see special target), so preserve those + unionCIs new_ci old_ci = new_ci { componentTargets = componentTargets new_ci ++ componentTargets old_ci } + mkMap = Map.fromListWith unionCIs . map (\ci -> (componentUnitId ci, ci)) + let dfs = map componentDynFlags $ Map.elems cis + uids = Map.keys cis + logWith recorder Info $ LogMakingNewHscEnv uids + hscEnv' <- -- Set up a multi component session with the other units on GHC 9.4 + Compat.initUnits dfs hsc_env + + let closure_errs = maybeToList $ checkHomeUnitsClosed' (hsc_unit_env hscEnv') (hsc_all_home_unit_ids hscEnv') + closure_err_to_multi_err err = + ideErrorWithSource + (Just "cradle") (Just DiagnosticSeverity_Warning) _cfp + (T.pack (Compat.printWithoutUniques (singleMessage err))) + (Just (fmap GhcDriverMessage err)) + multi_errs = map closure_err_to_multi_err closure_errs + bad_units = OS.fromList $ concat $ do + x <- map errMsgDiagnostic closure_errs + DriverHomePackagesNotClosed us <- pure x + pure us + isBad ci = (homeUnitId_ (componentDynFlags ci)) `OS.member` bad_units + -- Whenever we spin up a session on Linux, dynamically load libm.so.6 + -- in. We need this in case the binary is statically linked, in which + -- case the interactive session will fail when trying to load + -- ghc-prim, which happens whenever Template Haskell is being + -- evaluated or haskell-language-server's eval plugin tries to run + -- some code. 
If the binary is dynamically linked, then this will have + -- no effect. + -- See https://github.com/haskell/haskell-language-server/issues/221 + -- We need to do this after the call to setSessionDynFlags initialises + -- the loader + when (os == "linux") $ do + initObjLinker hscEnv' + res <- loadDLL hscEnv' "libm.so.6" + case res of + Nothing -> pure () + Just err -> logWith recorder Error $ LogDLLLoadError err + + forM (Map.elems cis) $ \ci -> do + let df = componentDynFlags ci + thisEnv <- do + -- In GHC 9.4 we have multi component support, and we have initialised all the units + -- above. + -- We just need to set the current unit here + pure $ hscSetActiveUnitId (homeUnitId_ df) hscEnv' + henv <- newHscEnvEq thisEnv + let targetEnv = (if isBad ci then multi_errs else [], Just henv) + targetDepends = componentDependencyInfo ci + logWith recorder Debug $ LogNewComponentCache (targetEnv, targetDepends) + evaluate $ liftRnf rwhnf $ componentTargets ci + + let mk t = fromTargetId (importPaths df) exts (targetId t) targetEnv targetDepends + ctargets <- concatMapM mk (componentTargets ci) + + return (L.nubOrdOn targetTarget ctargets) + +-- | Throws if package flags are unsatisfiable +setOptions :: GhcMonad m + => OptHaddockParse + -> NormalizedFilePath + -> ComponentOptions + -> DynFlags + -> FilePath -- ^ root dir, see Note [Root Directory] + -> m (NonEmpty (DynFlags, [GHC.Target])) +setOptions haddockOpt cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do + ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) + case NE.nonEmpty units of + Just us -> initMulti us + Nothing -> do + (df, targets) <- initOne (map unLoc theOpts') + -- A special target for the file which caused this wonderful + -- component to be created. In case the cradle doesn't list all the targets for + -- the component, in which case things will be horribly broken anyway. + -- + -- When we have a singleComponent that is caused to be loaded due to a + -- file, we assume the file is part of that component. This is useful + -- for bare GHC sessions, such as many of the ones used in the testsuite + -- + -- We don't do this when we have multiple components, because each + -- component better list all targets or there will be anarchy. + -- It is difficult to know which component to add our file to in + -- that case. + -- Multi unit arguments are likely to come from cabal, which + -- does list all targets. + -- + -- If we don't end up with a target for the current file in the end, then + -- we will report it as an error for that file + let abs_fp = toAbsolute rootDir (fromNormalizedFilePath cfp) + let special_target = Compat.mkSimpleTarget df abs_fp + pure $ (df, special_target : targets) :| [] + where + initMulti unitArgFiles = + forM unitArgFiles $ \f -> do + args <- liftIO $ expandResponse [f] + -- The reponse files may contain arguments like "+RTS", + -- and hie-bios doesn't expand the response files of @-unit@ arguments. + -- Thus, we need to do the stripping here. + initOne $ HieBios.removeRTS $ HieBios.removeVerbosityOpts args + initOne this_opts = do + (dflags', targets') <- addCmdOpts this_opts dflags + let dflags'' = + case unitIdString (homeUnitId_ dflags') of + -- cabal uses main for the unit id of all executable packages + -- This makes multi-component sessions confused about what + -- options to use for that component. + -- Solution: hash the options and use that as part of the unit id + -- This works because there won't be any dependencies on the + -- executable unit. 
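+                 -- For illustration (hypothetical hashes): two executables loaded with
+                 -- different option sets end up with distinct unit ids such as
+                 -- "main-3f1c..." and "main-9a07...", so the session can tell their
+                 -- options apart.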
+ "main" -> + let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts) + hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash)) + in setHomeUnitId_ hashed_uid dflags' + _ -> dflags' + + let targets = makeTargetsAbsolute root targets' + root = case workingDirectory dflags'' of + Nothing -> compRoot + Just wdir -> compRoot wdir + let dflags''' = + setWorkingDirectory root $ + disableWarningsAsErrors $ + -- disabled, generated directly by ghcide instead + flip gopt_unset Opt_WriteInterface $ + -- disabled, generated directly by ghcide instead + -- also, it can confuse the interface stale check + dontWriteHieFiles $ + setIgnoreInterfacePragmas $ + setBytecodeLinkerOptions $ + enableOptHaddock haddockOpt $ + disableOptimisation $ + Compat.setUpTypedHoles $ + makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory + dflags'' + return (dflags''', targets) + +addComponentInfo :: + MonadUnliftIO m => + Recorder (WithPriority Log) -> + (String -> [String] -> IO CacheDirs) -> + DependencyInfo -> + NonEmpty (DynFlags, [GHC.Target]) -> + (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> + Map.Map (Maybe FilePath) [RawComponentInfo] -> + m (Map.Map (Maybe FilePath) [RawComponentInfo], ([ComponentInfo], [ComponentInfo])) +addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) m = do + -- Just deps if there's already an HscEnv + -- Nothing is it's the first time we are making an HscEnv + let oldDeps = Map.lookup hieYaml m + let -- Add the raw information about this component to the list + -- We will modify the unitId and DynFlags used for + -- compilation but these are the true source of + -- information. + new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newDynFlags + all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps + -- Get all the unit-ids for things in this component + + all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do + let prefix = show rawComponentUnitId + -- See Note [Avoiding bad interface files] + let cacheDirOpts = componentOptions opts + cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts + processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags + -- The final component information, mostly the same but the DynFlags don't + -- contain any packages which are also loaded + -- into the same component. + pure $ ComponentInfo + { componentUnitId = rawComponentUnitId + , componentDynFlags = processed_df + , componentTargets = rawComponentTargets + , componentFP = rawComponentFP + , componentCOptions = rawComponentCOptions + , componentDependencyInfo = rawComponentDependencyInfo + } + -- Modify the map so the hieYaml now maps to the newly updated + -- ComponentInfos + -- Returns + -- . The information for the new component which caused this cache miss + -- . The modified information (without -inplace flags) for + -- existing packages + let (new,old) = NE.splitAt (NE.length new_deps) all_deps' + pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) + +setIgnoreInterfacePragmas :: DynFlags -> DynFlags +setIgnoreInterfacePragmas df = + gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges + +disableOptimisation :: DynFlags -> DynFlags +disableOptimisation df = updOptLevel 0 df + +-- | We always compile with '-haddock' unless explicitly disabled. 
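+-- ('enableOptHaddock HaddockParse' turns 'Opt_Haddock' on; 'NoHaddockParse' leaves
+-- the flags unchanged.)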
+-- +-- This avoids inconsistencies when doing recompilation checking which was +-- observed in https://github.com/haskell/haskell-language-server/issues/4511 +enableOptHaddock :: OptHaddockParse -> DynFlags -> DynFlags +enableOptHaddock HaddockParse d = gopt_set d Opt_Haddock +enableOptHaddock NoHaddockParse d = d + +setHiDir :: FilePath -> DynFlags -> DynFlags +setHiDir f d = + -- override user settings to avoid conflicts leading to recompilation + d { hiDir = Just f} + +setODir :: FilePath -> DynFlags -> DynFlags +setODir f d = + -- override user settings to avoid conflicts leading to recompilation + d { objectDir = Just f} + +data CacheDirs = CacheDirs + { hiCacheDir, hieCacheDir, oCacheDir :: Maybe FilePath} + +{- Note [Avoiding bad interface files] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Originally, we set the cache directory for the various components once +on the first occurrence of the component. +This works fine if these components have no references to each other, +but you have components that depend on each other, the interface files are +updated for each component. +After restarting the session and only opening the component that depended +on the other, suddenly the interface files of this component are stale. +However, from the point of view of `ghcide`, they do not look stale, +thus, not regenerated and the IDE shows weird errors such as: +``` +typecheckIface +Declaration for Rep_ClientRunFlags +Axiom branches Rep_ClientRunFlags: + Failed to load interface for ‘Distribution.Simple.Flag’ + Use -v to see a list of the files searched for. +``` +and +``` +expectJust checkFamInstConsistency +CallStack (from HasCallStack): + error, called at compiler\\utils\\Maybes.hs:55:27 in ghc:Maybes + expectJust, called at compiler\\typecheck\\FamInst.hs:461:30 in ghc:FamInst +``` + +and many more. + +To mitigate this, we set the cache directory for each component dependent +on the components of the current `HscEnv`, additionally to the component options +of the respective components. +Assume two components, c1, c2, where c2 depends on c1, and the options of the +respective components are co1, co2. +If we want to load component c2, followed by c1, we set the cache directory for +each component in this way: + + * Load component c2 + * (Cache Directory State) + - name of c2 + co2 + * Load component c1 + * (Cache Directory State) + - name of c2 + name of c1 + co2 + - name of c2 + name of c1 + co1 + +Overall, we created three cache directories. If we opened c1 first, then we +create a fourth cache directory. +This makes sure that interface files are always correctly updated. + +Since this causes a lot of recompilation, we only update the cache-directory, +if the dependencies of a component have really changed. +E.g. when you load two executables, they can not depend on each other. They +should be filtered out, such that we dont have to re-compile everything. +-} + + +-- | Set the cache-directory based on the ComponentOptions and a list of +-- internal packages. +-- For the exact reason, see Note [Avoiding bad interface files]. 
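+--
+-- A rough usage sketch (mirroring 'addComponentInfo'; the names here are illustrative):
+--
+-- > cacheDirs <- liftIO $ getCacheDirsDefault prefix (componentOptions opts)
+-- > dflags'   <- setCacheDirs recorder cacheDirs dflags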
+setCacheDirs :: MonadUnliftIO m => Recorder (WithPriority Log) -> CacheDirs -> DynFlags -> m DynFlags +setCacheDirs recorder CacheDirs{..} dflags = do + logWith recorder Info $ LogInterfaceFilesCacheDir (fromMaybe cacheDir hiCacheDir) + pure $ dflags + & maybe id setHiDir hiCacheDir + & maybe id setHieDir hieCacheDir + & maybe id setODir oCacheDir + +getCacheDirsDefault :: String -> [String] -> IO CacheDirs +getCacheDirsDefault prefix opts = do + dir <- Just <$> getXdgDirectory XdgCache (cacheDir prefix ++ "-" ++ opts_hash) + return $ CacheDirs dir dir dir + where + -- Create a unique folder per set of different GHC options, assuming that each different set of + -- GHC options will create incompatible interface files. + opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts) + +setNameCache :: NameCache -> HscEnv -> HscEnv +setNameCache nc hsc = hsc { hsc_NC = nc } + +-- | Sub directory for the cache path +cacheDir :: String +cacheDir = "ghcide" + +emptyHscEnv :: NameCache -> FilePath -> IO HscEnv +emptyHscEnv nc libDir = do + -- We call setSessionDynFlags so that the loader is initialised + -- We need to do this before we call initUnits. + env <- liftIO $ runGhc (Just libDir) $ + getSessionDynFlags >>= setSessionDynFlags >> getSession + pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env) + +-- ---------------------------------------------------------------------------- +-- Target Details +-- ---------------------------------------------------------------------------- + +data TargetDetails = TargetDetails + { + targetTarget :: !Target, + targetEnv :: !(IdeResult HscEnvEq), + targetDepends :: !DependencyInfo, + targetLocations :: ![NormalizedFilePath] + } + +fromTargetId :: [FilePath] -- ^ import paths + -> [String] -- ^ extensions to consider + -> TargetId + -> IdeResult HscEnvEq + -> DependencyInfo + -> IO [TargetDetails] +-- For a target module we consider all the import paths +fromTargetId is exts (GHC.TargetModule modName) env dep = do + let fps = [i moduleNameSlashes modName -<.> ext <> boot + | ext <- exts + , i <- is + , boot <- ["", "-boot"] + ] + let locs = fmap toNormalizedFilePath' fps + return [TargetDetails (TargetModule modName) env dep locs] +-- For a 'TargetFile' we consider all the possible module names +fromTargetId _ _ (GHC.TargetFile f _) env deps = do + let nf = toNormalizedFilePath' f + let other + | "-boot" `isSuffixOf` f = toNormalizedFilePath' (L.dropEnd 5 $ fromNormalizedFilePath nf) + | otherwise = toNormalizedFilePath' (fromNormalizedFilePath nf ++ "-boot") + return [TargetDetails (TargetFile nf) env deps [nf, other]] + +-- ---------------------------------------------------------------------------- +-- Backwards compatibility +-- ---------------------------------------------------------------------------- + +#if MIN_VERSION_ghc(9,13,0) +-- Moved back to implementation in GHC. +checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages] +checkHomeUnitsClosed' ue _ = checkHomeUnitsClosed ue +#else +-- This function checks the important property that if both p and q are home units +-- then any dependency of p, which transitively depends on q is also a home unit. 
+-- GHC had an implementation of this function, but it was horribly inefficient +-- We should move back to the GHC implementation on compilers where +-- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12162 is included +checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> Maybe (Compat.MsgEnvelope DriverMessage) +checkHomeUnitsClosed' ue home_id_set + | OS.null bad_unit_ids = Nothing + | otherwise = Just (GHC.mkPlainErrorMsgEnvelope rootLoc $ DriverHomePackagesNotClosed (OS.toList bad_unit_ids)) + where + bad_unit_ids = upwards_closure OS.\\ home_id_set + rootLoc = mkGeneralSrcSpan (Compat.fsLit "") + + graph :: Graph (Node UnitId UnitId) + graph = graphFromEdgedVerticesUniq graphNodes + + -- downwards closure of graph + downwards_closure + = graphFromEdgedVerticesUniq [ DigraphNode uid uid (OS.toList deps) + | (uid, deps) <- Map.toList (allReachable graph node_key)] + + inverse_closure = transposeG downwards_closure + + upwards_closure = OS.fromList $ map node_key $ reachablesG inverse_closure [DigraphNode uid uid [] | uid <- OS.toList home_id_set] + + all_unit_direct_deps :: UniqMap UnitId (OS.Set UnitId) + all_unit_direct_deps + = unitEnv_foldWithKey go emptyUniqMap $ ue_home_unit_graph ue + where + go rest this this_uis = + plusUniqMap_C OS.union + (addToUniqMap_C OS.union external_depends this (OS.fromList this_deps)) + rest + where + external_depends = mapUniqMap (OS.fromList . unitDepends) +#if !MIN_VERSION_ghc(9,7,0) + $ listToUniqMap $ Map.toList +#endif + + $ unitInfoMap this_units + this_units = homeUnitEnv_units this_uis + this_deps = [ Compat.toUnitId unit | (unit,Just _) <- explicitUnits this_units] + + graphNodes :: [Node UnitId UnitId] + graphNodes = go OS.empty home_id_set + where + go done todo + = case OS.minView todo of + Nothing -> [] + Just (uid, todo') + | OS.member uid done -> go done todo' + | otherwise -> case lookupUniqMap all_unit_direct_deps uid of + Nothing -> pprPanic "uid not found" (Compat.ppr (uid, all_unit_direct_deps)) + Just depends -> + let todo'' = (depends OS.\\ done) `OS.union` todo' + in DigraphNode uid uid (OS.toList depends) : go (OS.insert uid done) todo'' +#endif diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs new file mode 100644 index 0000000000..630f1dc4fc --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -0,0 +1,54 @@ +module Development.IDE.Session.OrderedSet where + +import Control.Concurrent.STM (STM, TQueue, newTQueueIO) +import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) +import Control.Monad (when) +import Data.Hashable (Hashable) +import qualified Data.HashSet +import qualified Focus +import qualified ListT as LT +import qualified StmContainers.Set as S +import StmContainers.Set (Set) + + +data OrderedSet a = OrderedSet + { insertionOrder :: TQueue a + , elements :: Set a + } + +-- | Insert an element into the ordered set. +-- If the element is not already present, it is added to both the queue and set. +-- If the element already exists, ignore it +insert :: Hashable a => a -> OrderedSet a -> STM () +insert a (OrderedSet que s) = do + (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s + -- if already in the set + when inserted $ writeTQueue que a + +newIO :: Hashable a => IO (OrderedSet a) +newIO = do + que <- newTQueueIO + s <- S.newIO + return (OrderedSet que s) + +-- | Read the first element from the queue. 
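+-- Blocks (via STM retry) while the queue is empty.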
+-- If an element is not in the set, it means it has been deleted, +-- so we retry until we find a valid element that exists in the set. +readQueue :: Hashable a => OrderedSet a -> STM a +readQueue rs@(OrderedSet que s) = do + f <- readTQueue que + b <- S.lookup f s + -- retry if no files are left in the queue + if b then return f else readQueue rs + +lookup :: Hashable a => a -> OrderedSet a -> STM Bool +lookup a (OrderedSet _ s) = S.lookup a s + +-- | Delete an element from the set. +-- The queue is not modified directly; stale entries are filtered out lazily +-- during reading operations (see 'readQueue'). +delete :: Hashable a => a -> OrderedSet a -> STM () +delete a (OrderedSet _ s) = S.delete a s + +toHashSet :: Hashable a => OrderedSet a -> STM (Data.HashSet.HashSet a) +toHashSet (OrderedSet _ s) = Data.HashSet.fromList <$> LT.toList (S.listT s) diff --git a/ghcide/src/Development/IDE/Core/Actions.hs b/ghcide/src/Development/IDE/Core/Actions.hs index 0d55a73120..61614cb0ca 100644 --- a/ghcide/src/Development/IDE/Core/Actions.hs +++ b/ghcide/src/Development/IDE/Core/Actions.hs @@ -28,6 +28,7 @@ import Development.IDE.Graph import qualified Development.IDE.Spans.AtPoint as AtPoint import Development.IDE.Types.HscEnvEq (hscEnv) import Development.IDE.Types.Location +import GHC.Iface.Ext.Types (Identifier) import qualified HieDb import Language.LSP.Protocol.Types (DocumentHighlight (..), SymbolInformation (..), diff --git a/ghcide/src/Development/IDE/Core/Compile.hs b/ghcide/src/Development/IDE/Core/Compile.hs index 552409fbba..48439e2ff3 100644 --- a/ghcide/src/Development/IDE/Core/Compile.hs +++ b/ghcide/src/Development/IDE/Core/Compile.hs @@ -109,6 +109,7 @@ import qualified Data.Set as Set import qualified GHC as G import GHC.Core.Lint.Interactive import GHC.Driver.Config.CoreToStg.Prep +import GHC.Iface.Ext.Types (HieASTs) import qualified GHC.Runtime.Loader as Loader import GHC.Tc.Gen.Splice import GHC.Types.Error @@ -145,6 +146,7 @@ import Development.IDE.GHC.Compat hiding import qualified Data.List.NonEmpty as NE import Data.Time (getCurrentTime) import GHC.Driver.Env (hsc_all_home_unit_ids) +import GHC.Iface.Ext.Types (NameEntityInfo) #endif #if MIN_VERSION_ghc(9,12,0) diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs index 7dad386ece..e545ec7b14 100644 --- a/ghcide/src/Development/IDE/Core/FileStore.hs +++ b/ghcide/src/Development/IDE/Core/FileStore.hs @@ -78,7 +78,6 @@ import System.FilePath import System.IO.Error import System.IO.Unsafe - data Log = LogCouldNotIdentifyReverseDeps !NormalizedFilePath | LogTypeCheckingReverseDeps !NormalizedFilePath !(Maybe [NormalizedFilePath]) @@ -147,6 +146,29 @@ getModificationTimeImpl missingFileDiags file = do then return (Nothing, ([], Nothing)) else return (Nothing, ([diag], Nothing)) + +getPhysicalModificationTimeRule :: Recorder (WithPriority Log) -> Rules () +getPhysicalModificationTimeRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetPhysicalModificationTime file -> + getPhysicalModificationTimeImpl file + +getPhysicalModificationTimeImpl + :: NormalizedFilePath + -> Action (Maybe BS.ByteString, ([FileDiagnostic], Maybe FileVersion)) +getPhysicalModificationTimeImpl file = do + let file' = fromNormalizedFilePath file + let wrap time = (Just $ LBS.toStrict $ B.encode $ toRational time, ([], Just $ ModificationTime time)) + + alwaysRerun + + liftIO $ fmap wrap (getModTime file') + `catch` \(e :: IOException) -> do + let err | isDoesNotExistError e = 
"File does not exist: " ++ file' + | otherwise = "IO error while reading " ++ file' ++ ", " ++ displayException e + diag = ideErrorText file (T.pack err) + if isDoesNotExistError e + then return (Nothing, ([], Nothing)) + else return (Nothing, ([diag], Nothing)) + -- | Interface files cannot be watched, since they live outside the workspace. -- But interface files are private, in that only HLS writes them. -- So we implement watching ourselves, and bypass the need for alwaysRerun. @@ -170,7 +192,11 @@ resetFileStore ideState changes = mask $ \_ -> do case c of LSP.FileChangeType_Changed -- already checked elsewhere | not $ HM.member nfp fois - -> atomically $ deleteValue (shakeExtras ideState) GetModificationTime nfp + -> + atomically $ do + ks <- deleteValue (shakeExtras ideState) GetModificationTime nfp + vs <- deleteValue (shakeExtras ideState) GetPhysicalModificationTime nfp + pure $ ks ++ vs _ -> pure [] @@ -233,6 +259,7 @@ getVersionedTextDoc doc = do fileStoreRules :: Recorder (WithPriority Log) -> (NormalizedFilePath -> Action Bool) -> Rules () fileStoreRules recorder isWatched = do getModificationTimeRule recorder + getPhysicalModificationTimeRule recorder getFileContentsRule recorder addWatchedFileRule recorder isWatched diff --git a/ghcide/src/Development/IDE/Core/RuleTypes.hs b/ghcide/src/Development/IDE/Core/RuleTypes.hs index 43b80be119..a13e6de14c 100644 --- a/ghcide/src/Development/IDE/Core/RuleTypes.hs +++ b/ghcide/src/Development/IDE/Core/RuleTypes.hs @@ -1,6 +1,7 @@ -- Copyright (c) 2019 The DAML Authors. All rights reserved. -- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE PatternSynonyms #-} @@ -34,6 +35,9 @@ import Development.IDE.Import.DependencyInformation import Development.IDE.Types.HscEnvEq (HscEnvEq) import Development.IDE.Types.KnownTargets import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (HieASTs, + TypeIndex) +import GHC.Iface.Ext.Utils (RefMap) import Data.ByteString (ByteString) import Data.Text.Utf16.Rope.Mixed (Rope) @@ -316,6 +320,13 @@ instance Hashable GetModificationTime where instance NFData GetModificationTime +data GetPhysicalModificationTime = GetPhysicalModificationTime + deriving (Generic, Show, Eq) + deriving anyclass (Hashable, NFData) + +-- | Get the modification time of a file on disk, ignoring any version in the VFS. 
+type instance RuleResult GetPhysicalModificationTime = FileVersion + pattern GetModificationTime :: GetModificationTime pattern GetModificationTime = GetModificationTime_ {missingFileDiagnostics=True} diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs index f1b11d971b..c123c9d4a8 100644 --- a/ghcide/src/Development/IDE/Core/Rules.hs +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -138,6 +138,8 @@ import Development.IDE.Types.HscEnvEq import Development.IDE.Types.Location import Development.IDE.Types.Options import qualified Development.IDE.Types.Shake as Shake +import GHC.Iface.Ext.Types (HieASTs (..)) +import GHC.Iface.Ext.Utils (generateReferencesMap) import qualified GHC.LanguageExtensions as LangExt import HIE.Bios.Ghc.Gap (hostIsDynamic) import qualified HieDb @@ -181,6 +183,7 @@ data Log | LogLoadingHieFileFail !FilePath !SomeException | LogLoadingHieFileSuccess !FilePath | LogTypecheckedFOI !NormalizedFilePath + | LogDependencies !NormalizedFilePath [FilePath] deriving Show instance Pretty Log where @@ -205,6 +208,11 @@ instance Pretty Log where <+> "the HLS version being used, the plugins enabled, and if possible the codebase and file which" <+> "triggered this warning." ] + LogDependencies nfp deps -> + vcat + [ "Add dependency" <+> pretty (fromNormalizedFilePath nfp) + , nest 2 $ pretty deps + ] templateHaskellInstructions :: T.Text templateHaskellInstructions = "https://haskell-language-server.readthedocs.io/en/latest/troubleshooting.html#static-binaries" @@ -260,12 +268,10 @@ getParsedModuleRule recorder = let ms = ms' { ms_hspp_opts = modify_dflags $ ms_hspp_opts ms' } reset_ms pm = pm { pm_mod_summary = ms' } - -- We still parse with Haddocks whether Opt_Haddock is True or False to collect information - -- but we no longer need to parse with and without Haddocks separately for above GHC90. - liftIO $ (fmap.fmap.fmap) reset_ms $ getParsedModuleDefinition hsc opt file (withOptHaddock ms) + liftIO $ (fmap.fmap.fmap) reset_ms $ getParsedModuleDefinition hsc opt file ms -withOptHaddock :: ModSummary -> ModSummary -withOptHaddock = withOption Opt_Haddock +withoutOptHaddock :: ModSummary -> ModSummary +withoutOptHaddock = withoutOption Opt_Haddock withOption :: GeneralFlag -> ModSummary -> ModSummary withOption opt ms = ms{ms_hspp_opts= gopt_set (ms_hspp_opts ms) opt} @@ -284,7 +290,7 @@ getParsedModuleWithCommentsRule recorder = ModSummaryResult{msrModSummary = ms, msrHscEnv = hsc} <- use_ GetModSummary file opt <- getIdeOptions - let ms' = withoutOption Opt_Haddock $ withOption Opt_KeepRawTokenStream ms + let ms' = withoutOptHaddock $ withOption Opt_KeepRawTokenStream ms modify_dflags <- getModifyDynFlags dynFlagsModifyParser let ms'' = ms' { ms_hspp_opts = modify_dflags $ ms_hspp_opts ms' } reset_ms pm = pm { pm_mod_summary = ms' } @@ -512,7 +518,7 @@ persistentHieFileRule recorder = addPersistentRule GetHieAst $ \file -> runMaybe (currentSource, ver) <- liftIO $ case M.lookup (filePathToUri' file) vfsData of Nothing -> (,Nothing) . T.decodeUtf8 <$> BS.readFile (fromNormalizedFilePath file) Just vf -> pure (virtualFileText vf, Just $ virtualFileVersion vf) - let refmap = Compat.generateReferencesMap . Compat.getAsts . Compat.hie_asts $ res + let refmap = generateReferencesMap . getAsts . 
Compat.hie_asts $ res del = deltaFromDiff (T.decodeUtf8 $ Compat.hie_hs_src res) currentSource pure (HAR (Compat.hie_module res) (Compat.hie_asts res) refmap mempty (HieFromDisk res),del,ver) @@ -540,8 +546,8 @@ getHieAstRuleDefinition f hsc tmr = do liftIO $ writeAndIndexHieFile hsc se modSummary f exports asts source _ -> pure [] - let refmap = Compat.generateReferencesMap . Compat.getAsts <$> masts - typemap = AtPoint.computeTypeReferences . Compat.getAsts <$> masts + let refmap = generateReferencesMap . getAsts <$> masts + typemap = AtPoint.computeTypeReferences . getAsts <$> masts pure (diags <> diagsWrite, HAR (ms_mod $ tmrModSummary tmr) <$> masts <*> refmap <*> typemap <*> pure HieFresh) getImportMapRule :: Recorder (WithPriority Log) -> Rules () @@ -715,7 +721,8 @@ loadGhcSession recorder ghcSessionDepsConfig = do let nfp = toNormalizedFilePath' fp itExists <- getFileExists nfp when itExists $ void $ do - use_ GetModificationTime nfp + use_ GetPhysicalModificationTime nfp + logWith recorder Logger.Info $ LogDependencies file deps mapM_ addDependency deps let cutoffHash = LBS.toStrict $ B.encode (hash (snd val)) @@ -972,8 +979,8 @@ regenerateHiFile sess f ms compNeeded = do hsc <- setFileCacheHook (hscEnv sess) opt <- getIdeOptions - -- Embed haddocks in the interface file - (diags, mb_pm) <- liftIO $ getParsedModuleDefinition hsc opt f (withOptHaddock ms) + -- By default, we parse with `-haddock` unless 'OptHaddockParse' is overwritten. + (diags, mb_pm) <- liftIO $ getParsedModuleDefinition hsc opt f ms case mb_pm of Nothing -> return (diags, Nothing) Just pm -> do diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs index ddf01c61c5..befd22c8de 100644 --- a/ghcide/src/Development/IDE/GHC/Compat.hs +++ b/ghcide/src/Development/IDE/GHC/Compat.hs @@ -45,8 +45,6 @@ module Development.IDE.GHC.Compat( readHieFile, setHieDir, dontWriteHieFiles, - module Compat.HieTypes, - module Compat.HieUtils, -- * Compat modules module Development.IDE.GHC.Compat.Core, module Development.IDE.GHC.Compat.Env, @@ -112,14 +110,8 @@ module Development.IDE.GHC.Compat( #if MIN_VERSION_ghc(9,7,0) tcInitTidyEnv, #endif - ) where -import Compat.HieAst (enrichHie) -import Compat.HieBin -import Compat.HieTypes hiding - (nodeAnnotations) -import qualified Compat.HieTypes as GHC (nodeAnnotations) -import Compat.HieUtils + ) where import Control.Applicative ((<|>)) import qualified Data.ByteString as BS import Data.Coerce (coerce) @@ -146,12 +138,18 @@ import GHC.Core.Tidy (tidyExpr) import GHC.CoreToStg.Prep (corePrepPgm) import qualified GHC.CoreToStg.Prep as GHC import GHC.Driver.Hooks (hscCompileCoreExprHook) +import GHC.Iface.Ext.Types hiding + (nodeAnnotations) +import qualified GHC.Iface.Ext.Types as GHC (nodeAnnotations) +import GHC.Iface.Ext.Utils import GHC.ByteCode.Asm (bcoFreeNames) import GHC.Core import GHC.Data.FastString import GHC.Data.StringBuffer import GHC.Driver.Session hiding (ExposePackage) +import GHC.Iface.Ext.Ast (enrichHie) +import GHC.Iface.Ext.Binary import GHC.Iface.Make (mkIfaceExports) import GHC.SysTools.Tasks (runPp, runUnlit) import GHC.Types.Annotations (AnnTarget (ModuleTarget), diff --git a/ghcide/src/Development/IDE/GHC/Compat/Error.hs b/ghcide/src/Development/IDE/GHC/Compat/Error.hs index 0255886726..de59afa146 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Error.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Error.hs @@ -8,6 +8,7 @@ module Development.IDE.GHC.Compat.Error ( -- * Error messages for the typechecking and renamer 
phase TcRnMessage (..), TcRnMessageDetailed (..), + Hole(..), stripTcRnMessageContext, -- * Parsing error message PsMessage(..), @@ -17,19 +18,33 @@ module Development.IDE.GHC.Compat.Error ( DriverMessage (..), -- * General Diagnostics Diagnostic(..), - -- * Prisms for error selection + -- * Prisms and lenses for error selection _TcRnMessage, _TcRnMessageWithCtx, _GhcPsMessage, _GhcDsMessage, _GhcDriverMessage, + _ReportHoleError, + _TcRnIllegalWildcardInType, + _TcRnPartialTypeSignatures, _TcRnMissingSignature, + _TcRnSolverReport, + _TcRnMessageWithInfo, + _TypeHole, + _ConstraintHole, + reportContextL, + reportContentL, + _MismatchMessage, + _TypeEqMismatchActual, + _TypeEqMismatchExpected, ) where import Control.Lens +import Development.IDE.GHC.Compat (Type) import GHC.Driver.Errors.Types import GHC.HsToCore.Errors.Types import GHC.Tc.Errors.Types +import GHC.Tc.Types.Constraint (Hole (..), HoleSort) import GHC.Types.Error -- | Some 'TcRnMessage's are nested in other constructors for additional context. @@ -82,3 +97,40 @@ msgEnvelopeErrorL :: Lens' (MsgEnvelope e) e msgEnvelopeErrorL = lens errMsgDiagnostic (\envelope e -> envelope { errMsgDiagnostic = e } ) makePrisms ''TcRnMessage + +makeLensesWith + (lensRules & lensField .~ mappingNamer (pure . (++ "L"))) + ''SolverReportWithCtxt + +makePrisms ''TcSolverReportMsg + +makePrisms ''HoleSort + +-- | Focus 'MismatchMsg' from 'TcSolverReportMsg'. Currently, 'MismatchMsg' can be +-- extracted from 'CannotUnifyVariable' and 'Mismatch' constructors. +_MismatchMessage :: Traversal' TcSolverReportMsg MismatchMsg +_MismatchMessage focus (Mismatch msg t a c) = (\msg' -> Mismatch msg' t a c) <$> focus msg +_MismatchMessage focus (CannotUnifyVariable msg a) = flip CannotUnifyVariable a <$> focus msg +_MismatchMessage _ report = pure report + +-- | Focus 'teq_mismatch_expected' from 'TypeEqMismatch'. +_TypeEqMismatchExpected :: Traversal' MismatchMsg Type +#if MIN_VERSION_ghc(9,10,2) +_TypeEqMismatchExpected focus mismatch@(TypeEqMismatch _ _ _ expected _ _ _) = + (\expected' -> mismatch { teq_mismatch_expected = expected' }) <$> focus expected +#else +_TypeEqMismatchExpected focus mismatch@(TypeEqMismatch _ _ _ _ expected _ _ _) = + (\expected' -> mismatch { teq_mismatch_expected = expected' }) <$> focus expected +#endif +_TypeEqMismatchExpected _ mismatch = pure mismatch + +-- | Focus 'teq_mismatch_actual' from 'TypeEqMismatch'. 
+_TypeEqMismatchActual :: Traversal' MismatchMsg Type +#if MIN_VERSION_ghc(9,10,2) +_TypeEqMismatchActual focus mismatch@(TypeEqMismatch _ _ _ _ actual _ _) = + (\actual' -> mismatch { teq_mismatch_actual = actual' }) <$> focus actual +#else +_TypeEqMismatchActual focus mismatch@(TypeEqMismatch _ _ _ _ _ actual _ _) = + (\actual' -> mismatch { teq_mismatch_actual = actual' }) <$> focus actual +#endif +_TypeEqMismatchActual _ mismatch = pure mismatch diff --git a/ghcide/src/Development/IDE/GHC/CoreFile.hs b/ghcide/src/Development/IDE/GHC/CoreFile.hs index 9977ad573b..99b7328770 100644 --- a/ghcide/src/Development/IDE/GHC/CoreFile.hs +++ b/ghcide/src/Development/IDE/GHC/CoreFile.hs @@ -18,7 +18,6 @@ import Data.Foldable import Data.IORef import Data.List (isPrefixOf) import Data.Maybe -import qualified Data.Text as T import Development.IDE.GHC.Compat import qualified Development.IDE.GHC.Compat.Util as Util import GHC.Core diff --git a/ghcide/src/Development/IDE/GHC/Orphans.hs b/ghcide/src/Development/IDE/GHC/Orphans.hs index 543c6f4387..068ca6a78a 100644 --- a/ghcide/src/Development/IDE/GHC/Orphans.hs +++ b/ghcide/src/Development/IDE/GHC/Orphans.hs @@ -7,9 +7,7 @@ -- | Orphan instances for GHC. -- Note that the 'NFData' instances may not be law abiding. module Development.IDE.GHC.Orphans() where -import Development.IDE.GHC.Compat hiding - (DuplicateRecordFields, - FieldSelectors) +import Development.IDE.GHC.Compat import Development.IDE.GHC.Util import Control.DeepSeq @@ -24,9 +22,8 @@ import GHC.ByteCode.Types import GHC.Data.Bag import GHC.Data.FastString import qualified GHC.Data.StringBuffer as SB +import GHC.Iface.Ext.Types import GHC.Parser.Annotation -import GHC.Types.FieldLabel (DuplicateRecordFields (DuplicateRecordFields, NoDuplicateRecordFields), - FieldSelectors (FieldSelectors, NoFieldSelectors)) import GHC.Types.PkgQual import GHC.Types.SrcLoc diff --git a/ghcide/src/Development/IDE/LSP/LanguageServer.hs b/ghcide/src/Development/IDE/LSP/LanguageServer.hs index cf7845ce08..918e024a4f 100644 --- a/ghcide/src/Development/IDE/LSP/LanguageServer.hs +++ b/ghcide/src/Development/IDE/LSP/LanguageServer.hs @@ -11,6 +11,7 @@ module Development.IDE.LSP.LanguageServer , Log(..) , ThreadQueue , runWithWorkerThreads + , Setup (..) ) where import Control.Concurrent.STM @@ -81,6 +82,17 @@ instance Pretty Log where LogLspServer msg -> pretty msg LogServerShutdownMessage -> "Received shutdown message" +data Setup config m a + = MkSetup + { doInitialize :: LSP.LanguageContextEnv config -> TRequestMessage Method_Initialize -> IO (Either (TResponseError Method_Initialize) (LSP.LanguageContextEnv config, a)) + -- ^ the callback invoked when the language server receives the 'Method_Initialize' request + , staticHandlers :: LSP.Handlers m + -- ^ the statically known handlers of the LSP server + , interpretHandler :: (LanguageContextEnv config, a) -> m <~> IO + -- ^ how to interpret @m@ to 'IO' and how to lift 'IO' into @m@ + , onExit :: [IO ()] + -- ^ a list of 'IO' actions that clean up resources and must be run when the server shuts down + } runLanguageServer :: forall config a m.
(Show config) @@ -90,18 +102,16 @@ runLanguageServer -> Handle -- output -> config -> (config -> Value -> Either T.Text config) - -> (config -> m config ()) - -> (MVar () - -> IO (LSP.LanguageContextEnv config -> TRequestMessage Method_Initialize -> IO (Either (TResponseError Method_Initialize) (LSP.LanguageContextEnv config, a)), - LSP.Handlers (m config), - (LanguageContextEnv config, a) -> m config <~> IO)) + -> (config -> m ()) + -> (MVar () -> IO (Setup config m a)) -> IO () runLanguageServer recorder options inH outH defaultConfig parseConfig onConfigChange setup = do -- This MVar becomes full when the server thread exits or we receive exit message from client. -- LSP server will be canceled when it's full. clientMsgVar <- newEmptyMVar - (doInitialize, staticHandlers, interpretHandler) <- setup clientMsgVar + MkSetup + { doInitialize, staticHandlers, interpretHandler, onExit } <- setup clientMsgVar let serverDefinition = LSP.ServerDefinition { LSP.parseConfig = parseConfig @@ -115,28 +125,29 @@ runLanguageServer recorder options inH outH defaultConfig parseConfig onConfigCh , LSP.options = modifyOptions options } - let lspCologAction :: MonadIO m2 => Colog.LogAction m2 (Colog.WithSeverity LspServerLog) + let lspCologAction :: forall io. MonadIO io => Colog.LogAction io (Colog.WithSeverity LspServerLog) lspCologAction = toCologActionWithPrio (cmapWithPrio LogLspServer recorder) - void $ untilMVar clientMsgVar $ - void $ LSP.runServerWithHandles + let runServer = + LSP.runServerWithHandles lspCologAction lspCologAction inH outH serverDefinition + untilMVar clientMsgVar $ + runServer `finally` sequence_ onExit + setupLSP :: - forall config err. + forall config. Recorder (WithPriority Log) -> FilePath -- ^ root directory, see Note [Root Directory] -> (FilePath -> IO FilePath) -- ^ Map root paths to the location of the hiedb for the project -> LSP.Handlers (ServerM config) -> (LSP.LanguageContextEnv config -> FilePath -> WithHieDb -> ThreadQueue -> IO IdeState) -> MVar () - -> IO (LSP.LanguageContextEnv config -> TRequestMessage Method_Initialize -> IO (Either err (LSP.LanguageContextEnv config, IdeState)), - LSP.Handlers (ServerM config), - (LanguageContextEnv config, IdeState) -> ServerM config <~> IO) + -> IO (Setup config (ServerM config) IdeState) setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar = do -- Send everything over a channel, since you need to wait until after initialise before -- LspFuncs is available @@ -171,7 +182,7 @@ setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar cancelled <- readTVar cancelledRequests unless (reqId `Set.member` cancelled) retry - let asyncHandlers = mconcat + let staticHandlers = mconcat [ userHandlers , cancelHandler cancelRequest , exitHandler exit @@ -184,7 +195,9 @@ setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar let interpretHandler (env, st) = LSP.Iso (LSP.runLspT env . flip (runReaderT . unServerM) (clientMsgChan,st)) liftIO - pure (doInitialize, asyncHandlers, interpretHandler) + let onExit = [stopReactorLoop, exit] + + pure MkSetup {doInitialize, staticHandlers, interpretHandler, onExit} handleInit @@ -266,10 +279,12 @@ runWithWorkerThreads recorder dbLoc f = evalContT $ do liftIO $ f hiedb (ThreadQueue threadQueue sessionRestartTQueue sessionLoaderTQueue) -- | Runs the action until it ends or until the given MVar is put. +-- It is important that the thread that puts the 'MVar' is not dropped before it puts the 'MVar', i.e.
it should +-- occur as the final action in a 'finally' or 'bracket', because otherwise this thread will finish early (as soon +-- as the thread receives the BlockedIndefinitelyOnMVar exception) -- Rethrows any exceptions. -untilMVar :: MonadUnliftIO m => MVar () -> m () -> m () -untilMVar mvar io = void $ - waitAnyCancel =<< traverse async [ io , readMVar mvar ] +untilMVar :: MonadUnliftIO m => MVar () -> m a -> m () +untilMVar mvar io = race_ (readMVar mvar) io cancelHandler :: (SomeLspId -> IO ()) -> LSP.Handlers (ServerM c) cancelHandler cancelRequest = LSP.notificationHandler SMethod_CancelRequest $ \TNotificationMessage{_params=CancelParams{_id}} -> diff --git a/ghcide/src/Development/IDE/Main.hs b/ghcide/src/Development/IDE/Main.hs index 872e957364..ad4a36327a 100644 --- a/ghcide/src/Development/IDE/Main.hs +++ b/ghcide/src/Development/IDE/Main.hs @@ -12,7 +12,7 @@ module Development.IDE.Main ) where import Control.Concurrent.Extra (withNumCapabilities) -import Control.Concurrent.MVar (newEmptyMVar, +import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar, tryReadMVar) import Control.Concurrent.STM.Stats (dumpSTMStats) import Control.Monad.Extra (concatMapM, unless, @@ -318,9 +318,8 @@ defaultMain recorder Arguments{..} = withHeapStats (cmapWithPrio LogHeapStats re ioT <- offsetTime logWith recorder Info $ LogLspStart (pluginId <$> ipMap argsHlsPlugins) - ideStateVar <- newEmptyMVar - let getIdeState :: LSP.LanguageContextEnv Config -> FilePath -> WithHieDb -> Shake.ThreadQueue -> IO IdeState - getIdeState env rootPath withHieDb threadQueue = do + let getIdeState :: MVar IdeState -> LSP.LanguageContextEnv Config -> FilePath -> WithHieDb -> Shake.ThreadQueue -> IO IdeState + getIdeState ideStateVar env rootPath withHieDb threadQueue = do t <- ioT logWith recorder Info $ LogLspStartDuration t sessionLoader <- loadSessionWithOptions (cmapWithPrio LogSession recorder) argsSessionLoadingOptions rootPath (tLoaderQueue threadQueue) @@ -353,9 +352,9 @@ defaultMain recorder Arguments{..} = withHeapStats (cmapWithPrio LogHeapStats re putMVar ideStateVar ide pure ide - let setup = setupLSP (cmapWithPrio LogLanguageServer recorder) argsProjectRoot argsGetHieDbLoc (pluginHandlers plugins) getIdeState + let setup ideStateVar = setupLSP (cmapWithPrio LogLanguageServer recorder) argsProjectRoot argsGetHieDbLoc (pluginHandlers plugins) (getIdeState ideStateVar) -- See Note [Client configuration in Rules] - onConfigChange cfg = do + onConfigChange ideStateVar cfg = do -- TODO: this is nuts, we're converting back to JSON just to get a fingerprint let cfgObj = J.toJSON cfg mide <- liftIO $ tryReadMVar ideStateVar @@ -368,7 +367,9 @@ defaultMain recorder Arguments{..} = withHeapStats (cmapWithPrio LogHeapStats re modifyClientSettings ide (const $ Just cfgObj) return [toNoFileKey Rules.GetClientSettings] - runLanguageServer (cmapWithPrio LogLanguageServer recorder) options inH outH argsDefaultHlsConfig argsParseConfig onConfigChange setup + do + ideStateVar <- newEmptyMVar + runLanguageServer (cmapWithPrio LogLanguageServer recorder) options inH outH argsDefaultHlsConfig argsParseConfig (onConfigChange ideStateVar) (setup ideStateVar) dumpSTMStats Check argFiles -> do let dir = argsProjectRoot diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs index a00705ba39..0a5cecaca8 100644 --- a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs +++ b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs @@ -49,6 +49,9 @@ import 
Development.IDE.Plugin.Completions.Types import Development.IDE.Spans.LocalBindings import Development.IDE.Types.Exports import Development.IDE.Types.Options +import GHC.Iface.Ext.Types (HieAST, + NodeInfo (..)) +import GHC.Iface.Ext.Utils (nodeInfo) import Ide.PluginUtils (mkLspCommand) import Ide.Types (CommandId (..), IdePlugins (..), diff --git a/ghcide/src/Development/IDE/Spans/AtPoint.hs b/ghcide/src/Development/IDE/Spans/AtPoint.hs index a577cae32e..50df0f5ba5 100644 --- a/ghcide/src/Development/IDE/Spans/AtPoint.hs +++ b/ghcide/src/Development/IDE/Spans/AtPoint.hs @@ -67,6 +67,23 @@ import Data.Tree import qualified Data.Tree as T import Data.Version (showVersion) import Development.IDE.Types.Shake (WithHieDb) +import GHC.Iface.Ext.Types (EvVarSource (..), + HieAST (..), + HieASTs (..), + HieArgs (..), + HieType (..), Identifier, + IdentifierDetails (..), + NodeInfo (..), Scope, + Span) +import GHC.Iface.Ext.Utils (EvidenceInfo (..), + RefMap, getEvidenceTree, + getScopeFromContext, + hieTypeToIface, + isEvidenceContext, + isEvidenceUse, + isOccurrence, nodeInfo, + recoverFullType, + selectSmallestContaining) import HieDb hiding (pointCommand, withHieDb) import System.Directory (doesFileExist) @@ -113,7 +130,7 @@ foiReferencesAtPoint file pos (FOIReferences asts) = getNamesAtPoint :: HieASTs a -> Position -> PositionMapping -> [Name] getNamesAtPoint hf pos mapping = - concat $ pointCommand hf posFile (rights . M.keys . getNodeIds) + concat $ pointCommand hf posFile (rights . M.keys . getSourceNodeIds) where posFile = fromMaybe pos $ fromCurrentPosition mapping pos @@ -488,7 +505,7 @@ instanceLocationsAtPoint instanceLocationsAtPoint withHieDb lookupModule _ideOptions pos (HAR _ ast _rm _ _) = let ns = concat $ pointCommand ast pos (M.keys . getNodeIds) evTrees = mapMaybe (eitherToMaybe >=> getEvidenceTree _rm) ns - evNs = concatMap (map (evidenceVar) . T.flatten) evTrees + evNs = concatMap (map evidenceVar . T.flatten) evTrees in fmap (nubOrd . 
concat) $ mapMaybeM (nameToLocation withHieDb lookupModule) evNs diff --git a/ghcide/src/Development/IDE/Spans/Common.hs b/ghcide/src/Development/IDE/Spans/Common.hs index f3e86d792d..996e55ef1a 100644 --- a/ghcide/src/Development/IDE/Spans/Common.hs +++ b/ghcide/src/Development/IDE/Spans/Common.hs @@ -190,11 +190,10 @@ haddockToMarkdown (H.DocOrderedList things) = haddockToMarkdown (H.DocDefList things) = '\n' : (unlines $ map (\(term, defn) -> "+ **" ++ haddockToMarkdown term ++ "**: " ++ haddockToMarkdown defn) things) --- we cannot render math by default -haddockToMarkdown (H.DocMathInline _) - = "*cannot render inline math formula*" -haddockToMarkdown (H.DocMathDisplay _) - = "\n\n*cannot render display math formula*\n\n" +haddockToMarkdown (H.DocMathInline s) + = "`" ++ s ++ "`" +haddockToMarkdown (H.DocMathDisplay s) + = "\n```latex\n" ++ s ++ "\n```\n" -- TODO: render tables haddockToMarkdown (H.DocTable _t) diff --git a/ghcide/src/Development/IDE/Spans/Documentation.hs b/ghcide/src/Development/IDE/Spans/Documentation.hs index 85f2ef1037..dcf7778de3 100644 --- a/ghcide/src/Development/IDE/Spans/Documentation.hs +++ b/ghcide/src/Development/IDE/Spans/Documentation.hs @@ -28,6 +28,7 @@ import Development.IDE.GHC.Compat.Util import Development.IDE.GHC.Error import Development.IDE.GHC.Util (printOutputable) import Development.IDE.Spans.Common +import GHC.Iface.Ext.Utils (RefMap) import Language.LSP.Protocol.Types (filePathToUri, getUri) import Prelude hiding (mod) import System.Directory diff --git a/ghcide/src/Development/IDE/Spans/LocalBindings.hs b/ghcide/src/Development/IDE/Spans/LocalBindings.hs index 8ca811eaa0..8806ed8ab3 100644 --- a/ghcide/src/Development/IDE/Spans/LocalBindings.hs +++ b/ghcide/src/Development/IDE/Spans/LocalBindings.hs @@ -17,15 +17,16 @@ import qualified Data.IntervalMap.FingerTree as IM import qualified Data.List as L import qualified Data.Map as M import qualified Data.Set as S +import GHC.Iface.Ext.Types (IdentifierDetails (..), + Scope (..)) +import GHC.Iface.Ext.Utils (RefMap, getBindSiteFromContext, + getScopeFromContext) + import Development.IDE.GHC.Compat (Name, NameEnv, RealSrcSpan, - RefMap, Scope (..), Type, - getBindSiteFromContext, - getScopeFromContext, identInfo, - identType, isSystemName, + Type, isSystemName, nonDetNameEnvElts, realSrcSpanEnd, realSrcSpanStart, unitNameEnv) - import Development.IDE.GHC.Error import Development.IDE.Types.Location diff --git a/ghcide/src/Development/IDE/Types/Options.hs b/ghcide/src/Development/IDE/Types/Options.hs index be3ea20932..8d4d91e166 100644 --- a/ghcide/src/Development/IDE/Types/Options.hs +++ b/ghcide/src/Development/IDE/Types/Options.hs @@ -68,10 +68,12 @@ data IdeOptions = IdeOptions , optCheckParents :: IO CheckParents -- ^ When to typecheck reverse dependencies of a file , optHaddockParse :: OptHaddockParse - -- ^ Whether to return result of parsing module with Opt_Haddock. - -- Otherwise, return the result of parsing without Opt_Haddock, so - -- that the parsed module contains the result of Opt_KeepRawTokenStream, - -- which might be necessary for hlint. + -- ^ Whether to parse modules with '-haddock' by default. + -- If 'HaddockParse' is given, we parse local Haskell modules with the + -- '-haddock' flag enabled. + -- If a plugin requires the parsed sources *without* '-haddock', it needs + -- to use rules that explicitly disable the '-haddock' flag. + -- See call sites of 'withoutOptHaddock' for rules that parse without '-haddock'.
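For illustration, a minimal sketch of what this contract means for a rule author, reusing names that appear elsewhere in this patch ('withoutOptHaddock', 'getParsedModuleDefinition', 'Opt_Haddock'); the helper name 'parseWithoutHaddock' is hypothetical and not part of the change:

    -- Hypothetical helper: parse one module without '-haddock', even though
    -- HLS parses with '-haddock' by default when optHaddockParse is 'HaddockParse'.
    parseWithoutHaddock hsc opt file ms =
      let ms' = withoutOptHaddock ms  -- drops Opt_Haddock from ms_hspp_opts
      in liftIO $ getParsedModuleDefinition hsc opt file ms'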
, optModifyDynFlags :: Config -> DynFlagsModifications -- ^ Will be called right after setting up a new cradle, -- allowing to customize the Ghc options used diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index f49c619ec1..50d4b869ba 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -14,7 +14,7 @@ copyright: The Haskell IDE Team license: Apache-2.0 license-file: LICENSE build-type: Simple -tested-with: GHC == {9.12.2, 9.10.1, 9.8.4, 9.6.7} +tested-with: GHC == {9.12.2, 9.10.2, 9.8.4, 9.6.7} extra-source-files: README.md ChangeLog.md @@ -254,8 +254,13 @@ library hls-cabal-plugin Ide.Plugin.Cabal.Completion.Types Ide.Plugin.Cabal.Definition Ide.Plugin.Cabal.FieldSuggest + Ide.Plugin.Cabal.Files + Ide.Plugin.Cabal.OfInterest Ide.Plugin.Cabal.LicenseSuggest - Ide.Plugin.Cabal.CabalAdd + Ide.Plugin.Cabal.Rules + Ide.Plugin.Cabal.CabalAdd.Command + Ide.Plugin.Cabal.CabalAdd.CodeAction + Ide.Plugin.Cabal.CabalAdd.Types Ide.Plugin.Cabal.Orphans Ide.Plugin.Cabal.Outline Ide.Plugin.Cabal.Parse @@ -276,14 +281,14 @@ library hls-cabal-plugin , lens , lsp ^>=2.7 , lsp-types ^>=2.3 + , mtl , regex-tdfa ^>=1.3.1 , text , text-rope , transformers , unordered-containers >=0.2.10.0 , containers - , cabal-add - , process + , cabal-add ^>=0.2 , aeson , Cabal , pretty @@ -313,9 +318,9 @@ test-suite hls-cabal-plugin-tests , haskell-language-server:hls-cabal-plugin , hls-test-utils == 2.11.0.0 , lens + , lsp , lsp-types , text - , hls-plugin-api ----------------------------- -- class plugin @@ -406,8 +411,9 @@ library hls-call-hierarchy-plugin , aeson , containers , extra + , ghc , ghcide == 2.11.0.0 - , hiedb ^>= 0.6.0.2 + , hiedb ^>= 0.7.0.0 , hls-plugin-api == 2.11.0.0 , lens , lsp >=2.7 @@ -592,10 +598,10 @@ library hls-rename-plugin hs-source-dirs: plugins/hls-rename-plugin/src build-depends: , containers + , ghc , ghcide == 2.11.0.0 , hashable - , hiedb ^>= 0.6.0.2 - , hie-compat + , hiedb ^>= 0.7.0.0 , hls-plugin-api == 2.11.0.0 , haskell-language-server:hls-refactor-plugin , lens @@ -800,7 +806,6 @@ library hls-stan-plugin build-depends: , deepseq , hashable - , hie-compat , hls-plugin-api , ghcide , lsp-types @@ -1066,6 +1071,7 @@ library hls-qualify-imported-names-plugin hs-source-dirs: plugins/hls-qualify-imported-names-plugin/src build-depends: , containers + , ghc , ghcide == 2.11.0.0 , hls-plugin-api == 2.11.0.0 , lens @@ -1119,6 +1125,7 @@ library hls-code-range-plugin , containers , deepseq , extra + , ghc , ghcide == 2.11.0.0 , hashable , hls-plugin-api == 2.11.0.0 @@ -1173,12 +1180,14 @@ library hls-change-type-signature-plugin build-depends: , ghcide == 2.11.0.0 , hls-plugin-api == 2.11.0.0 + , lens , lsp-types , regex-tdfa , syb , text , transformers , containers + , ghc default-extensions: DataKinds ExplicitNamespaces @@ -1196,6 +1205,7 @@ test-suite hls-change-type-signature-plugin-tests build-depends: , filepath , haskell-language-server:hls-change-type-signature-plugin + , hls-plugin-api , hls-test-utils == 2.11.0.0 , regex-tdfa , text @@ -1319,6 +1329,7 @@ library hls-explicit-record-fields-plugin buildable: False exposed-modules: Ide.Plugin.ExplicitFields build-depends: + , ghc , ghcide == 2.11.0.0 , hls-plugin-api == 2.11.0.0 , lsp @@ -1727,6 +1738,7 @@ library hls-semantic-tokens-plugin , extra , text-rope , mtl >= 2.2 + , ghc , ghcide == 2.11.0.0 , hls-plugin-api == 2.11.0.0 , lens diff --git a/hie-compat/CHANGELOG.md b/hie-compat/CHANGELOG.md deleted file mode 100644 index 82d590f7ab..0000000000 --- 
a/hie-compat/CHANGELOG.md +++ /dev/null @@ -1,5 +0,0 @@ -# Revision history for hie-compat - -## 0.1.0.0 -- 2020-10-19 - -* Initial Release diff --git a/hie-compat/LICENSE b/hie-compat/LICENSE deleted file mode 100644 index 8775cb7967..0000000000 --- a/hie-compat/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. 
- - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2019 Zubin Duggal - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/hie-compat/README.md b/hie-compat/README.md deleted file mode 100644 index 7ac08b305a..0000000000 --- a/hie-compat/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# hie-compat - -Mainly a backport of [HIE -Files](https://gitlab.haskell.org/ghc/ghc/-/wikis/hie-files) for ghc 8.8, along -with a few other backports of fixes useful for `ghcide` - -Also includes backport of record-dot-syntax support to 9.2.x - -Fully compatible with `.hie` files natively produced by versions of GHC that support -them. - -**THIS DOES NOT LET YOU READ HIE FILES WITH MISMATCHED VERSIONS OF GHC** - -Backports included: - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/8589 - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/4037 - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/4068 - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/3199 - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/2578 diff --git a/hie-compat/hie-compat.cabal b/hie-compat/hie-compat.cabal deleted file mode 100644 index 2b361df887..0000000000 --- a/hie-compat/hie-compat.cabal +++ /dev/null @@ -1,39 +0,0 @@ -cabal-version: 1.22 -name: hie-compat -version: 0.3.1.2 -synopsis: HIE files for GHC 8.8 and other HIE file backports -license: Apache-2.0 -description: - Backports for HIE files to GHC 8.8, along with a few other backports - of HIE file related fixes for ghcide. - - THIS DOES NOT LET YOU READ HIE FILES WITH MISMATCHED VERSIONS OF GHC -license-file: LICENSE -author: Zubin Duggal -maintainer: zubin.duggal@gmail.com -build-type: Simple -extra-source-files: CHANGELOG.md README.md -category: Development -homepage: https://github.com/haskell/haskell-language-server/tree/master/hie-compat#readme -bug-reports: https://github.com/haskell/haskell-language-server/issues - -source-repository head - type: git - location: https://github.com/haskell/haskell-language-server.git - -library - default-language: GHC2021 - build-depends: - base < 4.22, array, bytestring, containers, directory, filepath, transformers - build-depends: ghc >= 8.10, ghc-boot - ghc-options: -Wall -Wno-name-shadowing - - exposed-modules: - Compat.HieAst - Compat.HieBin - Compat.HieTypes - Compat.HieDebug - Compat.HieUtils - - if (impl(ghc >= 9.4)) - hs-source-dirs: src-reexport-ghc92 diff --git a/hie-compat/src-ghc92/Compat/HieAst.hs b/hie-compat/src-ghc92/Compat/HieAst.hs deleted file mode 100644 index 3445ff6213..0000000000 --- a/hie-compat/src-ghc92/Compat/HieAst.hs +++ /dev/null @@ -1,2132 +0,0 @@ -{-# LANGUAGE AllowAmbiguousTypes #-} -{-# LANGUAGE CPP #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE UndecidableInstances #-} -{-# LANGUAGE UndecidableSuperClasses #-} -{- HLINT ignore -} -{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} - -{- -Forked from GHC v9.2.3 to include record-dot-syntax type information in .hie files. 
- -Changes are marked with "CHANGED:" - -Main functions for .hie file generation --} - --- CHANGED: removed this include and updated the module declaration --- #include "HsVersions.h" --- --- module GHC.Iface.Ext.Ast ( mkHieFile, mkHieFileWithSource, getCompressedAsts, enrichHie) where - -module Compat.HieAst ( enrichHie ) where - -import GHC.Utils.Outputable(ppr) - -import GHC.Prelude - -import GHC.Types.Avail ( Avails ) -import GHC.Data.Bag ( Bag, bagToList ) -import GHC.Types.Basic -import GHC.Data.BooleanFormula -import GHC.Core.Class ( className, classSCSelIds ) -import GHC.Core.Utils ( exprType ) -import GHC.Core.ConLike ( conLikeName, ConLike(RealDataCon) ) -import GHC.Core.TyCon ( TyCon, tyConClass_maybe ) -import GHC.Core.FVs -import GHC.Core.DataCon ( dataConNonlinearType ) -import GHC.Types.FieldLabel -import GHC.Hs -import GHC.Driver.Env -import GHC.Utils.Monad ( concatMapM, liftIO ) -import GHC.Types.Id ( isDataConId_maybe ) -import GHC.Types.Name ( Name, nameSrcSpan, nameUnique ) -import GHC.Types.Name.Env ( NameEnv, emptyNameEnv, extendNameEnv, lookupNameEnv ) -import GHC.Types.SrcLoc -import GHC.Tc.Utils.Zonk ( hsLitType, hsPatType ) -import GHC.Core.Type ( mkVisFunTys, Type ) -import GHC.Core.Predicate -import GHC.Core.InstEnv -import GHC.Builtin.Types ( mkListTy, mkSumTy ) -import GHC.Tc.Types -import GHC.Tc.Types.Evidence -import GHC.Types.Var ( Id, Var, EvId, varName, varType, varUnique ) -import GHC.Types.Var.Env -import GHC.Builtin.Uniques -import GHC.Iface.Make ( mkIfaceExports ) -import GHC.Utils.Panic -import GHC.Utils.Misc -import GHC.Data.Maybe -import GHC.Data.FastString - -import GHC.Iface.Ext.Types -import GHC.Iface.Ext.Utils - -import GHC.Unit.Module ( ModuleName, ml_hs_file ) -import GHC.Unit.Module.ModSummary - -import qualified Data.Array as A -import qualified Data.ByteString as BS -import qualified Data.Map as M -import qualified Data.Set as S -import Data.Data ( Data ) -import Data.Void ( Void, absurd ) -import Control.Monad ( forM_ ) -import Control.Monad.Trans.State.Strict -import Control.Monad.Trans.Reader -import Control.Monad.Trans.Class ( lift ) -import GHC.HsToCore.Types -import GHC.HsToCore.Expr -import GHC.HsToCore.Monad - -{- Note [Updating HieAst for changes in the GHC AST] - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -When updating the code in this file for changes in the GHC AST, you -need to pay attention to the following things: - -1) Symbols (Names/Vars/Modules) in the following categories: - - a) Symbols that appear in the source file that directly correspond to - something the user typed - b) Symbols that don't appear in the source, but should be in some sense - "visible" to a user, particularly via IDE tooling or the like. This - includes things like the names introduced by RecordWildcards (We record - all the names introduced by a (..) in HIE files), and will include implicit - parameters and evidence variables after one of my pending MRs lands. - -2) Subtrees that may contain such symbols, or correspond to a SrcSpan in - the file. This includes all `Located` things - -For 1), you need to call `toHie` for one of the following instances - -instance ToHie (Context (Located Name)) where ... -instance ToHie (Context (Located Var)) where ... -instance ToHie (IEContext (Located ModuleName)) where ... 
- -`Context` is a data type that looks like: - -data Context a = C ContextInfo a -- Used for names and bindings - -`ContextInfo` is defined in `GHC.Iface.Ext.Types`, and looks like - -data ContextInfo - = Use -- ^ regular variable - | MatchBind - | IEThing IEType -- ^ import/export - | TyDecl - -- | Value binding - | ValBind - BindType -- ^ whether or not the binding is in an instance - Scope -- ^ scope over which the value is bound - (Maybe Span) -- ^ span of entire binding - ... - -It is used to annotate symbols in the .hie files with some extra information on -the context in which they occur and should be fairly self explanatory. You need -to select one that looks appropriate for the symbol usage. In very rare cases, -you might need to extend this sum type if none of the cases seem appropriate. - -So, given a `Located Name` that is just being "used", and not defined at a -particular location, you would do the following: - - toHie $ C Use located_name - -If you select one that corresponds to a binding site, you will need to -provide a `Scope` and a `Span` for your binding. Both of these are basically -`SrcSpans`. - -The `SrcSpan` in the `Scope` is supposed to span over the part of the source -where the symbol can be legally allowed to occur. For more details on how to -calculate this, see Note [Capturing Scopes and other non local information] -in GHC.Iface.Ext.Ast. - -The binding `Span` is supposed to be the span of the entire binding for -the name. - -For a function definition `foo`: - -foo x = x + y - where y = x^2 - -The binding `Span` is the span of the entire function definition from `foo x` -to `x^2`. For a class definition, this is the span of the entire class, and -so on. If this isn't well defined for your bit of syntax (like a variable -bound by a lambda), then you can just supply a `Nothing` - -There is a test that checks that all symbols in the resulting HIE file -occur inside their stated `Scope`. This can be turned on by passing the --fvalidate-ide-info flag to ghc along with -fwrite-ide-info to generate the -.hie file. - -You may also want to provide a test in testsuite/test/hiefile that includes -a file containing your new construction, and tests that the calculated scope -is valid (by using -fvalidate-ide-info) - -For subtrees in the AST that may contain symbols, the procedure is fairly -straightforward. If you are extending the GHC AST, you will need to provide a -`ToHie` instance for any new types you may have introduced in the AST. - -Here is an extract from the `ToHie` instance for (LHsExpr (GhcPass p)): - - toHie e@(L mspan oexpr) = concatM $ getTypeNode e : case oexpr of - HsVar _ (L _ var) -> - [ toHie $ C Use (L mspan var) - -- Patch up var location since typechecker removes it - ] - HsConLikeOut _ con -> - [ toHie $ C Use $ L mspan $ conLikeName con - ] - ... - HsApp _ a b -> - [ toHie a - , toHie b - ] - -If your subtree is `Located` or has a `SrcSpan` available, the output list -should contain a HieAst `Node` corresponding to the subtree. You can use -either `makeNode` or `getTypeNode` for this purpose, depending on whether it -makes sense to assign a `Type` to the subtree. After this, you just need -to concatenate the result of calling `toHie` on all subexpressions and -appropriately annotated symbols contained in the subtree. - -The code above from the ToHie instance of `LhsExpr (GhcPass p)` is supposed -to work for both the renamed and typechecked source. 
`getTypeNode` is from -the `HasType` class defined in this file, and it has different instances -for `GhcTc` and `GhcRn` that allow it to access the type of the expression -when given a typechecked AST: - -class Data a => HasType a where - getTypeNode :: a -> HieM [HieAST Type] -instance HasType (LHsExpr GhcTc) where - getTypeNode e@(L spn e') = ... -- Actually get the type for this expression -instance HasType (LHsExpr GhcRn) where - getTypeNode (L spn e) = makeNode e spn -- Fallback to a regular `makeNode` without recording the type - -If your subtree doesn't have a span available, you can omit the `makeNode` -call and just recurse directly in to the subexpressions. - --} - --- These synonyms match those defined in compiler/GHC.hs -type RenamedSource = ( HsGroup GhcRn, [LImportDecl GhcRn] - , Maybe [(LIE GhcRn, Avails)] - , Maybe LHsDocString ) -type TypecheckedSource = LHsBinds GhcTc - - -{- Note [Name Remapping] - ~~~~~~~~~~~~~~~~~~~~~ -The Typechecker introduces new names for mono names in AbsBinds. -We don't care about the distinction between mono and poly bindings, -so we replace all occurrences of the mono name with the poly name. --} -type VarMap a = DVarEnv (Var,a) -data HieState = HieState - { name_remapping :: NameEnv Id - , unlocated_ev_binds :: VarMap (S.Set ContextInfo) - -- These contain evidence bindings that we don't have a location for - -- These are placed at the top level Node in the HieAST after everything - -- else has been generated - -- This includes things like top level evidence bindings. - } - -addUnlocatedEvBind :: Var -> ContextInfo -> HieM () -addUnlocatedEvBind var ci = do - let go (a,b) (_,c) = (a,S.union b c) - lift $ modify' $ \s -> - s { unlocated_ev_binds = - extendDVarEnv_C go (unlocated_ev_binds s) - var (var,S.singleton ci) - } - -getUnlocatedEvBinds :: FastString -> HieM (NodeIdentifiers Type,[HieAST Type]) -getUnlocatedEvBinds file = do - binds <- lift $ gets unlocated_ev_binds - org <- ask - let elts = dVarEnvElts binds - - mkNodeInfo (n,ci) = (Right (varName n), IdentifierDetails (Just $ varType n) ci) - - go e@(v,_) (xs,ys) = case nameSrcSpan $ varName v of - RealSrcSpan spn _ - | srcSpanFile spn == file -> - let node = Node (mkSourcedNodeInfo org ni) spn [] - ni = NodeInfo mempty [] $ M.fromList [mkNodeInfo e] - in (xs,node:ys) - _ -> (mkNodeInfo e : xs,ys) - - (nis,asts) = foldr go ([],[]) elts - - pure $ (M.fromList nis, asts) - -initState :: HieState -initState = HieState emptyNameEnv emptyDVarEnv - -class ModifyState a where -- See Note [Name Remapping] - addSubstitution :: a -> a -> HieState -> HieState - -instance ModifyState Name where - addSubstitution _ _ hs = hs - -instance ModifyState Id where - addSubstitution mono poly hs = - hs{name_remapping = extendNameEnv (name_remapping hs) (varName mono) poly} - -modifyState :: ModifyState (IdP p) => [ABExport p] -> HieState -> HieState -modifyState = foldr go id - where - go ABE{abe_poly=poly,abe_mono=mono} f - = addSubstitution mono poly . f - go _ f = f - -type HieM = ReaderT NodeOrigin (StateT HieState DsM) - --- | Construct an 'HieFile' from the outputs of the typechecker. -mkHieFile :: ModSummary - -> TcGblEnv - -> RenamedSource -> Hsc HieFile -mkHieFile ms ts rs = do - let src_file = expectJust "mkHieFile" (ml_hs_file $ ms_location ms) - src <- liftIO $ BS.readFile src_file - mkHieFileWithSource src_file src ms ts rs - --- | Construct an 'HieFile' from the outputs of the typechecker but don't --- read the source file again from disk. 
-mkHieFileWithSource :: FilePath - -> BS.ByteString - -> ModSummary - -> TcGblEnv - -> RenamedSource -> Hsc HieFile -mkHieFileWithSource src_file src ms ts rs = do - let tc_binds = tcg_binds ts - top_ev_binds = tcg_ev_binds ts - insts = tcg_insts ts - tcs = tcg_tcs ts - hsc_env <- Hsc $ \e w -> return (e, w) - (_msgs, res) <- liftIO $ initDs hsc_env ts $ getCompressedAsts tc_binds rs top_ev_binds insts tcs - let (asts',arr) = expectJust "mkHieFileWithSource" res - return $ HieFile - { hie_hs_file = src_file - , hie_module = ms_mod ms - , hie_types = arr - , hie_asts = asts' - -- mkIfaceExports sorts the AvailInfos for stability - , hie_exports = mkIfaceExports (tcg_exports ts) - , hie_hs_src = src - } - -getCompressedAsts :: TypecheckedSource -> RenamedSource -> Bag EvBind -> [ClsInst] -> [TyCon] - -> DsM (HieASTs TypeIndex, A.Array TypeIndex HieTypeFlat) -getCompressedAsts ts rs top_ev_binds insts tcs = do - asts <- enrichHie ts rs top_ev_binds insts tcs - return $ compressTypes asts - -enrichHie :: TypecheckedSource -> RenamedSource -> Bag EvBind -> [ClsInst] -> [TyCon] - -> DsM (HieASTs Type) -enrichHie ts (hsGrp, imports, exports, _) ev_bs insts tcs = - flip evalStateT initState $ flip runReaderT SourceInfo $ do - tasts <- toHie $ fmap (BC RegularBind ModuleScope) ts - rasts <- processGrp hsGrp - imps <- toHie $ filter (not . ideclImplicit . unLoc) imports - exps <- toHie $ fmap (map $ IEC Export . fst) exports - -- Add Instance bindings - forM_ insts $ \i -> - addUnlocatedEvBind (is_dfun i) (EvidenceVarBind (EvInstBind False (is_cls_nm i)) ModuleScope Nothing) - -- Add class parent bindings - forM_ tcs $ \tc -> - case tyConClass_maybe tc of - Nothing -> pure () - Just c -> forM_ (classSCSelIds c) $ \v -> - addUnlocatedEvBind v (EvidenceVarBind (EvInstBind True (className c)) ModuleScope Nothing) - let spanFile file children = case children of - [] -> realSrcLocSpan (mkRealSrcLoc file 1 1) - _ -> mkRealSrcSpan (realSrcSpanStart $ nodeSpan $ head children) - (realSrcSpanEnd $ nodeSpan $ last children) - - flat_asts = concat - [ tasts - , rasts - , imps - , exps - ] - - modulify (HiePath file) xs' = do - - top_ev_asts :: [HieAST Type] <- do - let - l :: SrcSpanAnnA - l = noAnnSrcSpan (RealSrcSpan (realSrcLocSpan $ mkRealSrcLoc file 1 1) Nothing) - toHie $ EvBindContext ModuleScope Nothing - $ L l (EvBinds ev_bs) - - (uloc_evs,more_ev_asts) <- getUnlocatedEvBinds file - - let xs = mergeSortAsts $ xs' ++ top_ev_asts ++ more_ev_asts - span = spanFile file xs - - moduleInfo = SourcedNodeInfo - $ M.singleton SourceInfo - $ (simpleNodeInfo "Module" "Module") - {nodeIdentifiers = uloc_evs} - - moduleNode = Node moduleInfo span [] - - case mergeSortAsts $ moduleNode : xs of - [x] -> return x - xs -> panicDoc "enrichHie: mergeSortAsts retur:ed more than one result" (ppr $ map nodeSpan xs) - - asts' <- sequence - $ M.mapWithKey modulify - $ M.fromListWith (++) - $ map (\x -> (HiePath (srcSpanFile (nodeSpan x)),[x])) flat_asts - - let asts = HieASTs $ resolveTyVarScopes asts' - return asts - where - processGrp grp = concatM - [ toHie $ fmap (RS ModuleScope ) hs_valds grp - , toHie $ hs_splcds grp - , toHie $ hs_tyclds grp - , toHie $ hs_derivds grp - , toHie $ hs_fixds grp - , toHie $ hs_defds grp - , toHie $ hs_fords grp - , toHie $ hs_warnds grp - , toHie $ hs_annds grp - , toHie $ hs_ruleds grp - ] - -getRealSpanA :: SrcSpanAnn' ann -> Maybe Span -getRealSpanA la = getRealSpan (locA la) - -getRealSpan :: SrcSpan -> Maybe Span -getRealSpan (RealSrcSpan sp _) = Just sp -getRealSpan _ = Nothing - 
-grhss_span :: (Anno (GRHS (GhcPass p) (LocatedA (body (GhcPass p)))) ~ SrcSpan - , Data (HsLocalBinds (GhcPass p))) - => GRHSs (GhcPass p) (LocatedA (body (GhcPass p))) -> SrcSpan -grhss_span (GRHSs _ xs bs) = foldl' combineSrcSpans (spanHsLocaLBinds bs) (map getLoc xs) - -bindingsOnly :: [Context Name] -> HieM [HieAST a] -bindingsOnly [] = pure [] -bindingsOnly (C c n : xs) = do - org <- ask - rest <- bindingsOnly xs - pure $ case nameSrcSpan n of - RealSrcSpan span _ -> Node (mkSourcedNodeInfo org nodeinfo) span [] : rest - where nodeinfo = NodeInfo S.empty [] (M.singleton (Right n) info) - info = mempty{identInfo = S.singleton c} - _ -> rest - -concatM :: Monad m => [m [a]] -> m [a] -concatM xs = concat <$> sequence xs - -{- Note [Capturing Scopes and other non local information] - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -toHie is a local transformation, but scopes of bindings cannot be known locally, -hence we have to push the relevant info down into the binding nodes. -We use the following types (*Context and *Scoped) to wrap things and -carry the required info -(Maybe Span) always carries the span of the entire binding, including rhs --} -data Context a = C ContextInfo a -- Used for names and bindings - -data RContext a = RC RecFieldContext a -data RFContext a = RFC RecFieldContext (Maybe Span) a --- ^ context for record fields - -data IEContext a = IEC IEType a --- ^ context for imports/exports - -data BindContext a = BC BindType Scope a --- ^ context for imports/exports - -data PatSynFieldContext a = PSC (Maybe Span) a --- ^ context for pattern synonym fields. - -data SigContext a = SC SigInfo a --- ^ context for type signatures - -data SigInfo = SI SigType (Maybe Span) - -data SigType = BindSig | ClassSig | InstSig - -data EvBindContext a = EvBindContext Scope (Maybe Span) a - -data RScoped a = RS Scope a --- ^ Scope spans over everything to the right of a, (mostly) not --- including a itself --- (Includes a in a few special cases like recursive do bindings) or --- let/where bindings - --- | Pattern scope -data PScoped a = PS (Maybe Span) - Scope -- ^ use site of the pattern - Scope -- ^ pattern to the right of a, not including a - a - deriving (Data) -- Pattern Scope - -{- Note [TyVar Scopes] - ~~~~~~~~~~~~~~~~~~~ -Due to -XScopedTypeVariables, type variables can be in scope quite far from -their original binding. 
We resolve the scope of these type variables -in a separate pass --} -data TScoped a = TS TyVarScope a -- TyVarScope - -data TVScoped a = TVS TyVarScope Scope a -- TyVarScope --- ^ First scope remains constant --- Second scope is used to build up the scope of a tyvar over --- things to its right, ala RScoped - --- | Each element scopes over the elements to the right -listScopes :: Scope -> [LocatedA a] -> [RScoped (LocatedA a)] -listScopes _ [] = [] -listScopes rhsScope [pat] = [RS rhsScope pat] -listScopes rhsScope (pat : pats) = RS sc pat : pats' - where - pats'@((RS scope p):_) = listScopes rhsScope pats - sc = combineScopes scope $ mkScope $ getLocA p - --- | 'listScopes' specialised to 'PScoped' things -patScopes - :: Maybe Span - -> Scope - -> Scope - -> [LPat (GhcPass p)] - -> [PScoped (LPat (GhcPass p))] -patScopes rsp useScope patScope xs = - map (\(RS sc a) -> PS rsp useScope sc a) $ - listScopes patScope xs - --- | 'listScopes' specialised to 'HsPatSigType' -tScopes - :: Scope - -> Scope - -> [HsPatSigType (GhcPass a)] - -> [TScoped (HsPatSigType (GhcPass a))] -tScopes scope rhsScope xs = - map (\(RS sc a) -> TS (ResolvedScopes [scope, sc]) (unLoc a)) $ - listScopes rhsScope (map (\hsps -> L (getLoc $ hsps_body hsps) hsps) xs) - -- We make the HsPatSigType into a Located one by using the location of the underlying LHsType. - -- We then strip off the redundant location information afterward, and take the union of the given scope and those to the right when forming the TS. - --- | 'listScopes' specialised to 'TVScoped' things -tvScopes - :: TyVarScope - -> Scope - -> [LHsTyVarBndr flag (GhcPass a)] - -> [TVScoped (LHsTyVarBndr flag (GhcPass a))] -tvScopes tvScope rhsScope xs = - map (\(RS sc a)-> TVS tvScope sc a) $ listScopes rhsScope xs - -{- Note [Scoping Rules for SigPat] - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Explicitly quantified variables in pattern type signatures are not -brought into scope in the rhs, but implicitly quantified variables -are (HsWC and HsIB). -This is unlike other signatures, where explicitly quantified variables -are brought into the RHS Scope -For example -foo :: forall a. ...; -foo = ... -- a is in scope here - -bar (x :: forall a. a -> a) = ... -- a is not in scope here --- ^ a is in scope here (pattern body) - -bax (x :: a) = ... 
-- a is in scope here - -This case in handled in the instance for HsPatSigType --} - -class HasLoc a where - -- ^ conveniently calculate locations for things without locations attached - loc :: a -> SrcSpan - -instance HasLoc thing => HasLoc (PScoped thing) where - loc (PS _ _ _ a) = loc a - -instance HasLoc (Located a) where - loc (L l _) = l - -instance HasLoc (LocatedA a) where - loc (L la _) = locA la - -instance HasLoc (LocatedN a) where - loc (L la _) = locA la - -instance HasLoc a => HasLoc [a] where - loc [] = noSrcSpan - loc xs = foldl1' combineSrcSpans $ map loc xs - -instance (HasLoc a, HiePass p) => HasLoc (FamEqn (GhcPass p) a) where - loc (FamEqn _ a outer_bndrs b _ c) = case outer_bndrs of - HsOuterImplicit{} -> - foldl1' combineSrcSpans [loc a, loc b, loc c] - HsOuterExplicit{hso_bndrs = tvs} -> - foldl1' combineSrcSpans [loc a, loc tvs, loc b, loc c] - -instance (HasLoc tm, HasLoc ty) => HasLoc (HsArg tm ty) where - loc (HsValArg tm) = loc tm - loc (HsTypeArg _ ty) = loc ty - loc (HsArgPar sp) = sp - -instance HasLoc (HsDataDefn GhcRn) where - loc def@(HsDataDefn{}) = loc $ dd_cons def - -- Only used for data family instances, so we only need rhs - -- Most probably the rest will be unhelpful anyway - --- | The main worker class --- See Note [Updating HieAst for changes in the GHC AST] for more information --- on how to add/modify instances for this. -class ToHie a where - toHie :: a -> HieM [HieAST Type] - --- | Used to collect type info -class HasType a where - getTypeNode :: a -> HieM [HieAST Type] - -instance ToHie Void where - toHie v = absurd v - -instance (ToHie a) => ToHie [a] where - toHie = concatMapM toHie - -instance (ToHie a) => ToHie (Bag a) where - toHie = toHie . bagToList - -instance (ToHie a) => ToHie (Maybe a) where - toHie = maybe (pure []) toHie - -instance ToHie (IEContext (LocatedA ModuleName)) where - toHie (IEC c (L (SrcSpanAnn _ (RealSrcSpan span _)) mname)) = do - org <- ask - pure $ [Node (mkSourcedNodeInfo org $ NodeInfo S.empty [] idents) span []] - where details = mempty{identInfo = S.singleton (IEThing c)} - idents = M.singleton (Left mname) details - toHie _ = pure [] - -instance ToHie (Context (Located a)) => ToHie (Context (LocatedN a)) where - toHie (C c (L l a)) = toHie (C c (L (locA l) a)) - -instance ToHie (Context (Located a)) => ToHie (Context (LocatedA a)) where - toHie (C c (L l a)) = toHie (C c (L (locA l) a)) - -instance ToHie (Context (Located Var)) where - toHie c = case c of - C context (L (RealSrcSpan span _) name') - | varUnique name' == mkBuiltinUnique 1 -> pure [] - -- `mkOneRecordSelector` makes a field var using this unique, which we ignore - | otherwise -> do - m <- lift $ gets name_remapping - org <- ask - let name = case lookupNameEnv m (varName name') of - Just var -> var - Nothing-> name' - ty = case isDataConId_maybe name' of - Nothing -> varType name' - Just dc -> dataConNonlinearType dc - pure - [Node - (mkSourcedNodeInfo org $ NodeInfo S.empty [] $ - M.singleton (Right $ varName name) - (IdentifierDetails (Just ty) - (S.singleton context))) - span - []] - C (EvidenceVarBind i _ sp) (L _ name) -> do - addUnlocatedEvBind name (EvidenceVarBind i ModuleScope sp) - pure [] - _ -> pure [] - -instance ToHie (Context (Located Name)) where - toHie c = case c of - C context (L (RealSrcSpan span _) name') - | nameUnique name' == mkBuiltinUnique 1 -> pure [] - -- `mkOneRecordSelector` makes a field var using this unique, which we ignore - | otherwise -> do - m <- lift $ gets name_remapping - org <- ask - let name = case 
lookupNameEnv m name' of - Just var -> varName var - Nothing -> name' - pure - [Node - (mkSourcedNodeInfo org $ NodeInfo S.empty [] $ - M.singleton (Right name) - (IdentifierDetails Nothing - (S.singleton context))) - span - []] - _ -> pure [] - -evVarsOfTermList :: EvTerm -> [EvId] -evVarsOfTermList (EvExpr e) = exprSomeFreeVarsList isEvVar e -evVarsOfTermList (EvTypeable _ ev) = - case ev of - EvTypeableTyCon _ e -> concatMap evVarsOfTermList e - EvTypeableTyApp e1 e2 -> concatMap evVarsOfTermList [e1,e2] - EvTypeableTrFun e1 e2 e3 -> concatMap evVarsOfTermList [e1,e2,e3] - EvTypeableTyLit e -> evVarsOfTermList e -evVarsOfTermList (EvFun{}) = [] - -instance ToHie (EvBindContext (LocatedA TcEvBinds)) where - toHie (EvBindContext sc sp (L span (EvBinds bs))) - = concatMapM go $ bagToList bs - where - go evbind = do - let evDeps = evVarsOfTermList $ eb_rhs evbind - depNames = EvBindDeps $ map varName evDeps - concatM $ - [ toHie (C (EvidenceVarBind (EvLetBind depNames) (combineScopes sc (mkScopeA span)) sp) - (L span $ eb_lhs evbind)) - , toHie $ map (C EvidenceVarUse . L span) $ evDeps - ] - toHie _ = pure [] - -instance ToHie (LocatedA HsWrapper) where - toHie (L osp wrap) - = case wrap of - (WpLet bs) -> toHie $ EvBindContext (mkScopeA osp) (getRealSpanA osp) (L osp bs) - (WpCompose a b) -> concatM $ - [toHie (L osp a), toHie (L osp b)] - (WpFun a b _ _) -> concatM $ - [toHie (L osp a), toHie (L osp b)] - (WpEvLam a) -> - toHie $ C (EvidenceVarBind EvWrapperBind (mkScopeA osp) (getRealSpanA osp)) - $ L osp a - (WpEvApp a) -> - concatMapM (toHie . C EvidenceVarUse . L osp) $ evVarsOfTermList a - _ -> pure [] - -instance HiePass p => HasType (LocatedA (HsBind (GhcPass p))) where - getTypeNode (L spn bind) = - case hiePass @p of - HieRn -> makeNode bind (locA spn) - HieTc -> case bind of - FunBind{fun_id = name} -> makeTypeNode bind (locA spn) (varType $ unLoc name) - _ -> makeNode bind (locA spn) - -instance HiePass p => HasType (LocatedA (Pat (GhcPass p))) where - getTypeNode (L spn pat) = - case hiePass @p of - HieRn -> makeNodeA pat spn - HieTc -> makeTypeNodeA pat spn (hsPatType pat) - --- | This instance tries to construct 'HieAST' nodes which include the type of --- the expression. It is not yet possible to do this efficiently for all --- expression forms, so we skip filling in the type for those inputs. --- --- 'HsApp', for example, doesn't have any type information available directly on --- the node. Our next recourse would be to desugar it into a 'CoreExpr' then --- query the type of that. Yet both the desugaring call and the type query both --- involve recursive calls to the function and argument! This is particularly --- problematic when you realize that the HIE traversal will eventually visit --- those nodes too and ask for their types again. --- --- Since the above is quite costly, we just skip cases where computing the --- expression's type is going to be expensive. 
--- --- See #16233 -instance HiePass p => HasType (LocatedA (HsExpr (GhcPass p))) where - getTypeNode e@(L spn e') = - case hiePass @p of - HieRn -> makeNodeA e' spn - HieTc -> - -- Some expression forms have their type immediately available - let tyOpt = case e' of - HsUnboundVar (HER _ ty _) _ -> Just ty - HsLit _ l -> Just (hsLitType l) - HsOverLit _ o -> Just (overLitType o) - - HsConLikeOut _ (RealDataCon con) -> Just (dataConNonlinearType con) - - HsLam _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) - HsLamCase _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) - HsCase _ _ (MG { mg_ext = groupTy }) -> Just (mg_res_ty groupTy) - - ExplicitList ty _ -> Just (mkListTy ty) - ExplicitSum ty _ _ _ -> Just (mkSumTy ty) - HsDo ty _ _ -> Just ty - HsMultiIf ty _ -> Just ty - - _ -> Nothing - - in - case tyOpt of - Just t -> makeTypeNodeA e' spn t - Nothing - | skipDesugaring e' -> fallback - | otherwise -> do - (e, no_errs) <- lift $ lift $ discardWarningsDs $ askNoErrsDs $ dsLExpr e - if no_errs - then makeTypeNodeA e' spn . exprType $ e - else fallback - where - fallback = makeNodeA e' spn - - matchGroupType :: MatchGroupTc -> Type - matchGroupType (MatchGroupTc args res) = mkVisFunTys args res - - -- | Skip desugaring of these expressions for performance reasons. - -- - -- See impact on Haddock output (esp. missing type annotations or links) - -- before marking more things here as 'False'. See impact on Haddock - -- performance before marking more things as 'True'. - skipDesugaring :: HsExpr GhcTc -> Bool - skipDesugaring e = case e of - HsVar{} -> False - HsConLikeOut{} -> False - HsRecFld{} -> False - HsOverLabel{} -> False - HsIPVar{} -> False - XExpr (WrapExpr {}) -> False - -- CHANGED: the line below makes record-dot-syntax types work - XExpr (ExpansionExpr {}) -> False - _ -> True - -data HiePassEv p where - HieRn :: HiePassEv 'Renamed - HieTc :: HiePassEv 'Typechecked - -class ( IsPass p - , HiePass (NoGhcTcPass p) - , ModifyState (IdGhcP p) - , Data (GRHS (GhcPass p) (LocatedA (HsExpr (GhcPass p)))) - , Data (Match (GhcPass p) (LocatedA (HsExpr (GhcPass p)))) - , Data (Match (GhcPass p) (LocatedA (HsCmd (GhcPass p)))) - , Data (Stmt (GhcPass p) (LocatedA (HsExpr (GhcPass p)))) - , Data (Stmt (GhcPass p) (LocatedA (HsCmd (GhcPass p)))) - , Data (HsExpr (GhcPass p)) - , Data (HsCmd (GhcPass p)) - , Data (AmbiguousFieldOcc (GhcPass p)) - , Data (HsCmdTop (GhcPass p)) - , Data (GRHS (GhcPass p) (LocatedA (HsCmd (GhcPass p)))) - , Data (HsSplice (GhcPass p)) - , Data (HsLocalBinds (GhcPass p)) - , Data (FieldOcc (GhcPass p)) - , Data (HsTupArg (GhcPass p)) - , Data (IPBind (GhcPass p)) - , ToHie (Context (Located (IdGhcP p))) - , ToHie (RFContext (Located (AmbiguousFieldOcc (GhcPass p)))) - , ToHie (RFContext (Located (FieldOcc (GhcPass p)))) - , ToHie (TScoped (LHsWcType (GhcPass (NoGhcTcPass p)))) - , ToHie (TScoped (LHsSigWcType (GhcPass (NoGhcTcPass p)))) - , Anno (IdGhcP p) ~ SrcSpanAnnN - ) - => HiePass p where - hiePass :: HiePassEv p - -instance HiePass 'Renamed where - hiePass = HieRn -instance HiePass 'Typechecked where - hiePass = HieTc - -instance ToHie (Context (Located NoExtField)) where - toHie _ = pure [] - -type AnnoBody p body - = ( Anno (Match (GhcPass p) (LocatedA (body (GhcPass p)))) - ~ SrcSpanAnnA - , Anno [LocatedA (Match (GhcPass p) (LocatedA (body (GhcPass p))))] - ~ SrcSpanAnnL - , Anno (GRHS (GhcPass p) (LocatedA (body (GhcPass p)))) - ~ SrcSpan - , Anno (StmtLR (GhcPass p) (GhcPass p) (LocatedA (body (GhcPass p)))) ~ SrcSpanAnnA - - 
, Data (body (GhcPass p)) - , Data (Match (GhcPass p) (LocatedA (body (GhcPass p)))) - , Data (GRHS (GhcPass p) (LocatedA (body (GhcPass p)))) - , Data (Stmt (GhcPass p) (LocatedA (body (GhcPass p)))) - - , IsPass p - ) - -instance HiePass p => ToHie (BindContext (LocatedA (HsBind (GhcPass p)))) where - toHie (BC context scope b@(L span bind)) = - concatM $ getTypeNode b : case bind of - FunBind{fun_id = name, fun_matches = matches, fun_ext = wrap} -> - [ toHie $ C (ValBind context scope $ getRealSpanA span) name - , toHie matches - , case hiePass @p of - HieTc -> toHie $ L span wrap - _ -> pure [] - ] - PatBind{pat_lhs = lhs, pat_rhs = rhs} -> - [ toHie $ PS (getRealSpan (locA span)) scope NoScope lhs - , toHie rhs - ] - VarBind{var_rhs = expr} -> - [ toHie expr - ] - AbsBinds{ abs_exports = xs, abs_binds = binds - , abs_ev_binds = ev_binds - , abs_ev_vars = ev_vars } -> - [ lift (modify (modifyState xs)) >> -- Note [Name Remapping] - (toHie $ fmap (BC context scope) binds) - , toHie $ map (L span . abe_wrap) xs - , toHie $ - map (EvBindContext (mkScopeA span) (getRealSpanA span) - . L span) ev_binds - , toHie $ - map (C (EvidenceVarBind EvSigBind - (mkScopeA span) - (getRealSpanA span)) - . L span) ev_vars - ] - PatSynBind _ psb -> - [ toHie $ L (locA span) psb -- PatSynBinds only occur at the top level - ] - -instance ( HiePass p - , AnnoBody p body - , ToHie (LocatedA (body (GhcPass p))) - ) => ToHie (MatchGroup (GhcPass p) (LocatedA (body (GhcPass p)))) where - toHie mg = case mg of - MG{ mg_alts = (L span alts) , mg_origin = origin} -> - local (setOrigin origin) $ concatM - [ locOnly (locA span) - , toHie alts - ] - -setOrigin :: Origin -> NodeOrigin -> NodeOrigin -setOrigin FromSource _ = SourceInfo -setOrigin Generated _ = GeneratedInfo - -instance HiePass p => ToHie (Located (PatSynBind (GhcPass p) (GhcPass p))) where - toHie (L sp psb) = concatM $ case psb of - PSB{psb_id=var, psb_args=dets, psb_def=pat, psb_dir=dir} -> - [ toHie $ C (Decl PatSynDec $ getRealSpan sp) var - , toHie $ toBind dets - , toHie $ PS Nothing lhsScope patScope pat - , toHie dir - ] - where - lhsScope = combineScopes varScope detScope - varScope = mkLScopeN var - patScope = mkScopeA $ getLoc pat - detScope = case dets of - (PrefixCon _ args) -> foldr combineScopes NoScope $ map mkLScopeN args - (InfixCon a b) -> combineScopes (mkLScopeN a) (mkLScopeN b) - (RecCon r) -> foldr go NoScope r - go (RecordPatSynField a b) c = combineScopes c - $ combineScopes (mkLScopeN (rdrNameFieldOcc a)) (mkLScopeN b) - detSpan = case detScope of - LocalScope a -> Just a - _ -> Nothing - -- CHANGED: removed ASSERT - -- toBind (PrefixCon ts args) = ASSERT(null ts) PrefixCon ts $ map (C Use) args - toBind (PrefixCon ts args) = PrefixCon ts $ map (C Use) args - toBind (InfixCon a b) = InfixCon (C Use a) (C Use b) - toBind (RecCon r) = RecCon $ map (PSC detSpan) r - -instance HiePass p => ToHie (HsPatSynDir (GhcPass p)) where - toHie dir = case dir of - ExplicitBidirectional mg -> toHie mg - _ -> pure [] - -instance ( HiePass p - , Data (body (GhcPass p)) - , AnnoBody p body - , ToHie (LocatedA (body (GhcPass p))) - ) => ToHie (LocatedA (Match (GhcPass p) (LocatedA (body (GhcPass p))))) where - toHie (L span m ) = concatM $ node : case m of - Match{m_ctxt=mctx, m_pats = pats, m_grhss = grhss } -> - [ toHie mctx - , let rhsScope = mkScope $ grhss_span grhss - in toHie $ patScopes Nothing rhsScope NoScope pats - , toHie grhss - ] - where - node = case hiePass @p of - HieTc -> makeNodeA m span - HieRn -> makeNodeA m span - -instance 
HiePass p => ToHie (HsMatchContext (GhcPass p)) where - toHie (FunRhs{mc_fun=name}) = toHie $ C MatchBind name - toHie (StmtCtxt a) = toHie a - toHie _ = pure [] - -instance HiePass p => ToHie (HsStmtContext (GhcPass p)) where - toHie (PatGuard a) = toHie a - toHie (ParStmtCtxt a) = toHie a - toHie (TransStmtCtxt a) = toHie a - toHie _ = pure [] - -instance HiePass p => ToHie (PScoped (LocatedA (Pat (GhcPass p)))) where - toHie (PS rsp scope pscope lpat@(L ospan opat)) = - concatM $ getTypeNode lpat : case opat of - WildPat _ -> - [] - VarPat _ lname -> - [ toHie $ C (PatternBind scope pscope rsp) lname - ] - LazyPat _ p -> - [ toHie $ PS rsp scope pscope p - ] - AsPat _ lname pat -> - [ toHie $ C (PatternBind scope - (combineScopes (mkLScopeA pat) pscope) - rsp) - lname - , toHie $ PS rsp scope pscope pat - ] - ParPat _ pat -> - [ toHie $ PS rsp scope pscope pat - ] - BangPat _ pat -> - [ toHie $ PS rsp scope pscope pat - ] - ListPat _ pats -> - [ toHie $ patScopes rsp scope pscope pats - ] - TuplePat _ pats _ -> - [ toHie $ patScopes rsp scope pscope pats - ] - SumPat _ pat _ _ -> - [ toHie $ PS rsp scope pscope pat - ] - ConPat {pat_con = con, pat_args = dets, pat_con_ext = ext} -> - case hiePass @p of - HieTc -> - [ toHie $ C Use $ fmap conLikeName con - , toHie $ contextify dets - , let ev_binds = cpt_binds ext - ev_vars = cpt_dicts ext - wrap = cpt_wrap ext - evscope = mkScopeA ospan `combineScopes` scope `combineScopes` pscope - in concatM [ toHie $ EvBindContext scope rsp $ L ospan ev_binds - , toHie $ L ospan wrap - , toHie $ map (C (EvidenceVarBind EvPatternBind evscope rsp) - . L ospan) ev_vars - ] - ] - HieRn -> - [ toHie $ C Use con - , toHie $ contextify dets - ] - ViewPat _ expr pat -> - [ toHie expr - , toHie $ PS rsp scope pscope pat - ] - SplicePat _ sp -> - [ toHie $ L ospan sp - ] - LitPat _ _ -> - [] - NPat _ _ _ _ -> - [] - NPlusKPat _ n _ _ _ _ -> - [ toHie $ C (PatternBind scope pscope rsp) n - ] - SigPat _ pat sig -> - [ toHie $ PS rsp scope pscope pat - , case hiePass @p of - HieTc -> - let cscope = mkLScopeA pat in - toHie $ TS (ResolvedScopes [cscope, scope, pscope]) - sig - HieRn -> pure [] - ] - XPat e -> - case hiePass @p of - HieTc -> - let CoPat wrap pat _ = e - in [ toHie $ L ospan wrap - , toHie $ PS rsp scope pscope $ (L ospan pat) - ] - where - contextify :: a ~ LPat (GhcPass p) => HsConDetails (HsPatSigType (NoGhcTc (GhcPass p))) a (HsRecFields (GhcPass p) a) - -> HsConDetails (TScoped (HsPatSigType (NoGhcTc (GhcPass p)))) (PScoped a) (RContext (HsRecFields (GhcPass p) (PScoped a))) - contextify (PrefixCon tyargs args) = PrefixCon (tScopes scope argscope tyargs) (patScopes rsp scope pscope args) - where argscope = foldr combineScopes NoScope $ map mkLScopeA args - contextify (InfixCon a b) = InfixCon a' b' - where [a', b'] = patScopes rsp scope pscope [a,b] - contextify (RecCon r) = RecCon $ RC RecFieldMatch $ contextify_rec r - contextify_rec (HsRecFields fds a) = HsRecFields (map go scoped_fds) a - where - go :: RScoped (LocatedA (HsRecField' id a1)) - -> LocatedA (HsRecField' id (PScoped a1)) -- AZ - go (RS fscope (L spn (HsRecField x lbl pat pun))) = - L spn $ HsRecField x lbl (PS rsp scope fscope pat) pun - scoped_fds = listScopes pscope fds - -instance ToHie (TScoped (HsPatSigType GhcRn)) where - toHie (TS sc (HsPS (HsPSRn wcs tvs) body@(L span _))) = concatM $ - [ bindingsOnly $ map (C $ TyVarBind (mkScopeA span) sc) (wcs++tvs) - , toHie body - ] - -- See Note [Scoping Rules for SigPat] - -instance ( ToHie (LocatedA (body (GhcPass p))) - , 
HiePass p - , AnnoBody p body - ) => ToHie (GRHSs (GhcPass p) (LocatedA (body (GhcPass p)))) where - toHie grhs = concatM $ case grhs of - GRHSs _ grhss binds -> - [ toHie grhss - , toHie $ RS (mkScope $ grhss_span grhs) binds - ] - -instance ( ToHie (LocatedA (body (GhcPass p))) - , HiePass p - , AnnoBody p body - ) => ToHie (Located (GRHS (GhcPass p) (LocatedA (body (GhcPass p))))) where - toHie (L span g) = concatM $ node : case g of - GRHS _ guards body -> - [ toHie $ listScopes (mkLScopeA body) guards - , toHie body - ] - where - node = case hiePass @p of - HieRn -> makeNode g span - HieTc -> makeNode g span - -instance HiePass p => ToHie (LocatedA (HsExpr (GhcPass p))) where - toHie e@(L mspan oexpr) = concatM $ getTypeNode e : case oexpr of - HsVar _ (L _ var) -> - [ toHie $ C Use (L mspan var) - -- Patch up var location since typechecker removes it - ] - HsUnboundVar _ _ -> [] -- there is an unbound name here, but that causes trouble - HsConLikeOut _ con -> - [ toHie $ C Use $ L mspan $ conLikeName con - ] - HsRecFld _ fld -> - [ toHie $ RFC RecFieldOcc Nothing (L (locA mspan) fld) - ] - HsOverLabel {} -> [] - HsIPVar _ _ -> [] - HsOverLit _ _ -> [] - HsLit _ _ -> [] - HsLam _ mg -> - [ toHie mg - ] - HsLamCase _ mg -> - [ toHie mg - ] - HsApp _ a b -> - [ toHie a - , toHie b - ] - HsAppType _ expr sig -> - [ toHie expr - , toHie $ TS (ResolvedScopes []) sig - ] - OpApp _ a b c -> - [ toHie a - , toHie b - , toHie c - ] - NegApp _ a _ -> - [ toHie a - ] - HsPar _ a -> - [ toHie a - ] - SectionL _ a b -> - [ toHie a - , toHie b - ] - SectionR _ a b -> - [ toHie a - , toHie b - ] - ExplicitTuple _ args _ -> - [ toHie args - ] - ExplicitSum _ _ _ expr -> - [ toHie expr - ] - HsCase _ expr matches -> - [ toHie expr - , toHie matches - ] - HsIf _ a b c -> - [ toHie a - , toHie b - , toHie c - ] - HsMultiIf _ grhss -> - [ toHie grhss - ] - HsLet _ binds expr -> - [ toHie $ RS (mkLScopeA expr) binds - , toHie expr - ] - HsDo _ _ (L ispan stmts) -> - [ locOnly (locA ispan) - , toHie $ listScopes NoScope stmts - ] - ExplicitList _ exprs -> - [ toHie exprs - ] - RecordCon { rcon_con = con, rcon_flds = binds} -> - [ toHie $ C Use $ con_name - , toHie $ RC RecFieldAssign $ binds - ] - where - con_name :: LocatedN Name - con_name = case hiePass @p of -- Like ConPat - HieRn -> con - HieTc -> fmap conLikeName con - RecordUpd {rupd_expr = expr, rupd_flds = Left upds}-> - [ toHie expr - , toHie $ map (RC RecFieldAssign) upds - ] - RecordUpd {rupd_expr = expr, rupd_flds = Right _}-> - [ toHie expr - ] - ExprWithTySig _ expr sig -> - [ toHie expr - , toHie $ TS (ResolvedScopes [mkLScopeA expr]) sig - ] - ArithSeq _ _ info -> - [ toHie info - ] - HsPragE _ _ expr -> - [ toHie expr - ] - HsProc _ pat cmdtop -> - [ toHie $ PS Nothing (mkLScope cmdtop) NoScope pat - , toHie cmdtop - ] - HsStatic _ expr -> - [ toHie expr - ] - HsTick _ _ expr -> - [ toHie expr - ] - HsBinTick _ _ _ expr -> - [ toHie expr - ] - HsBracket _ b -> - [ toHie b - ] - HsRnBracketOut _ b p -> - [ toHie b - , toHie p - ] - HsTcBracketOut _ _wrap b p -> - [ toHie b - , toHie p - ] - HsSpliceE _ x -> - [ toHie $ L mspan x - ] - HsGetField {} -> [] - HsProjection {} -> [] - XExpr x - | GhcTc <- ghcPass @p - , WrapExpr (HsWrap w a) <- x - -> [ toHie $ L mspan a - , toHie (L mspan w) - ] - | GhcTc <- ghcPass @p - , ExpansionExpr (HsExpanded _ b) <- x - -> [ toHie (L mspan b) - ] - | otherwise -> [] - --- NOTE: no longer have the location -instance HiePass p => ToHie (HsTupArg (GhcPass p)) where - toHie arg = concatM $ case arg of - 
Present _ expr -> - [ toHie expr - ] - Missing _ -> [] - -instance ( ToHie (LocatedA (body (GhcPass p))) - , AnnoBody p body - , HiePass p - ) => ToHie (RScoped (LocatedA (Stmt (GhcPass p) (LocatedA (body (GhcPass p)))))) where - toHie (RS scope (L span stmt)) = concatM $ node : case stmt of - LastStmt _ body _ _ -> - [ toHie body - ] - BindStmt _ pat body -> - [ toHie $ PS (getRealSpan $ getLocA body) scope NoScope pat - , toHie body - ] - ApplicativeStmt _ stmts _ -> - [ concatMapM (toHie . RS scope . snd) stmts - ] - BodyStmt _ body _ _ -> - [ toHie body - ] - LetStmt _ binds -> - [ toHie $ RS scope binds - ] - ParStmt _ parstmts _ _ -> - [ concatMapM (\(ParStmtBlock _ stmts _ _) -> - toHie $ listScopes NoScope stmts) - parstmts - ] - TransStmt {trS_stmts = stmts, trS_using = using, trS_by = by} -> - [ toHie $ listScopes scope stmts - , toHie using - , toHie by - ] - RecStmt {recS_stmts = L _ stmts} -> - [ toHie $ map (RS $ combineScopes scope (mkScope (locA span))) stmts - ] - where - node = case hiePass @p of - HieTc -> makeNodeA stmt span - HieRn -> makeNodeA stmt span - -instance HiePass p => ToHie (RScoped (HsLocalBinds (GhcPass p))) where - toHie (RS scope binds) = concatM $ makeNode binds (spanHsLocaLBinds binds) : case binds of - EmptyLocalBinds _ -> [] - HsIPBinds _ ipbinds -> case ipbinds of - IPBinds evbinds xs -> let sc = combineScopes scope $ scopeHsLocaLBinds binds - sp :: SrcSpanAnnA - sp = noAnnSrcSpan $ spanHsLocaLBinds binds in - [ - case hiePass @p of - HieTc -> toHie $ EvBindContext sc (getRealSpan $ locA sp) $ L sp evbinds - HieRn -> pure [] - , toHie $ map (RS sc) xs - ] - HsValBinds _ valBinds -> - [ - toHie $ RS (combineScopes scope (scopeHsLocaLBinds binds)) - valBinds - ] - - -scopeHsLocaLBinds :: HsLocalBinds (GhcPass p) -> Scope -scopeHsLocaLBinds (HsValBinds _ (ValBinds _ bs sigs)) - = foldr combineScopes NoScope (bsScope ++ sigsScope) - where - bsScope :: [Scope] - bsScope = map (mkScopeA . getLoc) $ bagToList bs - sigsScope :: [Scope] - sigsScope = map (mkScope . getLocA) sigs -scopeHsLocaLBinds (HsValBinds _ (XValBindsLR (NValBinds bs sigs))) - = foldr combineScopes NoScope (bsScope ++ sigsScope) - where - bsScope :: [Scope] - bsScope = map (mkScopeA . getLoc) $ concatMap (bagToList . snd) bs - sigsScope :: [Scope] - sigsScope = map (mkScope . getLocA) sigs - -scopeHsLocaLBinds (HsIPBinds _ (IPBinds _ bs)) - = foldr combineScopes NoScope (map (mkScopeA . getLoc) bs) -scopeHsLocaLBinds (EmptyLocalBinds _) = NoScope - - -instance HiePass p => ToHie (RScoped (LocatedA (IPBind (GhcPass p)))) where - toHie (RS scope (L sp bind)) = concatM $ makeNodeA bind sp : case bind of - IPBind _ (Left _) expr -> [toHie expr] - IPBind _ (Right v) expr -> - [ toHie $ C (EvidenceVarBind EvImplicitBind scope (getRealSpanA sp)) - $ L sp v - , toHie expr - ] - -instance HiePass p => ToHie (RScoped (HsValBindsLR (GhcPass p) (GhcPass p))) where - toHie (RS sc v) = concatM $ case v of - ValBinds _ binds sigs -> - [ toHie $ fmap (BC RegularBind sc) binds - , toHie $ fmap (SC (SI BindSig Nothing)) sigs - ] - XValBindsLR x -> [ toHie $ RS sc x ] - -instance HiePass p => ToHie (RScoped (NHsValBindsLR (GhcPass p))) where - toHie (RS sc (NValBinds binds sigs)) = concatM $ - [ toHie (concatMap (map (BC RegularBind sc) . bagToList . 
snd) binds) - , toHie $ fmap (SC (SI BindSig Nothing)) sigs - ] - -instance ( ToHie arg , HasLoc arg , Data arg - , HiePass p ) => ToHie (RContext (HsRecFields (GhcPass p) arg)) where - toHie (RC c (HsRecFields fields _)) = toHie $ map (RC c) fields - -instance ( ToHie (RFContext (Located label)) - , ToHie arg, HasLoc arg, Data arg - , Data label - ) => ToHie (RContext (LocatedA (HsRecField' label arg))) where - toHie (RC c (L span recfld)) = concatM $ makeNode recfld (locA span) : case recfld of - HsRecField _ label expr _ -> - [ toHie $ RFC c (getRealSpan $ loc expr) label - , toHie expr - ] - -instance ToHie (RFContext (Located (FieldOcc GhcRn))) where - toHie (RFC c rhs (L nspan f)) = concatM $ case f of - FieldOcc name _ -> - [ toHie $ C (RecField c rhs) (L nspan name) - ] - -instance ToHie (RFContext (Located (FieldOcc GhcTc))) where - toHie (RFC c rhs (L nspan f)) = concatM $ case f of - FieldOcc var _ -> - [ toHie $ C (RecField c rhs) (L nspan var) - ] - -instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcRn))) where - toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of - Unambiguous name _ -> - [ toHie $ C (RecField c rhs) $ L nspan name - ] - Ambiguous _name _ -> - [ ] - -instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcTc))) where - toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of - Unambiguous var _ -> - [ toHie $ C (RecField c rhs) (L nspan var) - ] - Ambiguous var _ -> - [ toHie $ C (RecField c rhs) (L nspan var) - ] - -instance HiePass p => ToHie (RScoped (ApplicativeArg (GhcPass p))) where - toHie (RS sc (ApplicativeArgOne _ pat expr _)) = concatM - [ toHie $ PS Nothing sc NoScope pat - , toHie expr - ] - toHie (RS sc (ApplicativeArgMany _ stmts _ pat _)) = concatM - [ toHie $ listScopes NoScope stmts - , toHie $ PS Nothing sc NoScope pat - ] - -instance (ToHie tyarg, ToHie arg, ToHie rec) => ToHie (HsConDetails tyarg arg rec) where - toHie (PrefixCon tyargs args) = concatM [ toHie tyargs, toHie args ] - toHie (RecCon rec) = toHie rec - toHie (InfixCon a b) = concatM [ toHie a, toHie b] - -instance ToHie (HsConDeclGADTDetails GhcRn) where - toHie (PrefixConGADT args) = toHie args - toHie (RecConGADT rec) = toHie rec - -instance HiePass p => ToHie (Located (HsCmdTop (GhcPass p))) where - toHie (L span top) = concatM $ makeNode top span : case top of - HsCmdTop _ cmd -> - [ toHie cmd - ] - -instance HiePass p => ToHie (LocatedA (HsCmd (GhcPass p))) where - toHie (L span cmd) = concatM $ makeNodeA cmd span : case cmd of - HsCmdArrApp _ a b _ _ -> - [ toHie a - , toHie b - ] - HsCmdArrForm _ a _ _ cmdtops -> - [ toHie a - , toHie cmdtops - ] - HsCmdApp _ a b -> - [ toHie a - , toHie b - ] - HsCmdLam _ mg -> - [ toHie mg - ] - HsCmdPar _ a -> - [ toHie a - ] - HsCmdCase _ expr alts -> - [ toHie expr - , toHie alts - ] - HsCmdLamCase _ alts -> - [ toHie alts - ] - HsCmdIf _ _ a b c -> - [ toHie a - , toHie b - , toHie c - ] - HsCmdLet _ binds cmd' -> - [ toHie $ RS (mkLScopeA cmd') binds - , toHie cmd' - ] - HsCmdDo _ (L ispan stmts) -> - [ locOnly (locA ispan) - , toHie $ listScopes NoScope stmts - ] - XCmd _ -> [] - -instance ToHie (TyClGroup GhcRn) where - toHie TyClGroup{ group_tyclds = classes - , group_roles = roles - , group_kisigs = sigs - , group_instds = instances } = - concatM - [ toHie classes - , toHie sigs - , toHie roles - , toHie instances - ] - -instance ToHie (LocatedA (TyClDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - FamDecl {tcdFam = fdecl} -> - [ toHie ((L span fdecl) :: LFamilyDecl GhcRn) 
- ] - SynDecl {tcdLName = name, tcdTyVars = vars, tcdRhs = typ} -> - [ toHie $ C (Decl SynDec $ getRealSpanA span) name - , toHie $ TS (ResolvedScopes [mkScope $ getLocA typ]) vars - , toHie typ - ] - DataDecl {tcdLName = name, tcdTyVars = vars, tcdDataDefn = defn} -> - [ toHie $ C (Decl DataDec $ getRealSpanA span) name - , toHie $ TS (ResolvedScopes [quant_scope, rhs_scope]) vars - , toHie defn - ] - where - quant_scope = mkLScopeA $ fromMaybe (noLocA []) $ dd_ctxt defn - rhs_scope = sig_sc `combineScopes` con_sc `combineScopes` deriv_sc - sig_sc = maybe NoScope mkLScopeA $ dd_kindSig defn - con_sc = foldr combineScopes NoScope $ map mkLScopeA $ dd_cons defn - deriv_sc = foldr combineScopes NoScope $ map mkLScope $ dd_derivs defn - ClassDecl { tcdCtxt = context - , tcdLName = name - , tcdTyVars = vars - , tcdFDs = deps - , tcdSigs = sigs - , tcdMeths = meths - , tcdATs = typs - , tcdATDefs = deftyps - } -> - [ toHie $ C (Decl ClassDec $ getRealSpanA span) name - , toHie context - , toHie $ TS (ResolvedScopes [context_scope, rhs_scope]) vars - , toHie deps - , toHie $ map (SC $ SI ClassSig $ getRealSpanA span) sigs - , toHie $ fmap (BC InstanceBind ModuleScope) meths - , toHie typs - , concatMapM (locOnly . getLocA) deftyps - , toHie deftyps - ] - where - context_scope = mkLScopeA $ fromMaybe (noLocA []) context - rhs_scope = foldl1' combineScopes $ map mkScope - [ loc deps, loc sigs, loc (bagToList meths), loc typs, loc deftyps] - -instance ToHie (LocatedA (FamilyDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - FamilyDecl _ info _ name vars _ sig inj -> - [ toHie $ C (Decl FamDec $ getRealSpanA span) name - , toHie $ TS (ResolvedScopes [rhsSpan]) vars - , toHie info - , toHie $ RS injSpan sig - , toHie inj - ] - where - rhsSpan = sigSpan `combineScopes` injSpan - sigSpan = mkScope $ getLoc sig - injSpan = maybe NoScope (mkScope . getLoc) inj - -instance ToHie (FamilyInfo GhcRn) where - toHie (ClosedTypeFamily (Just eqns)) = concatM $ - [ concatMapM (locOnly . 
getLocA) eqns - , toHie $ map go eqns - ] - where - go (L l ib) = TS (ResolvedScopes [mkScopeA l]) ib - toHie _ = pure [] - -instance ToHie (RScoped (Located (FamilyResultSig GhcRn))) where - toHie (RS sc (L span sig)) = concatM $ makeNode sig span : case sig of - NoSig _ -> - [] - KindSig _ k -> - [ toHie k - ] - TyVarSig _ bndr -> - [ toHie $ TVS (ResolvedScopes [sc]) NoScope bndr - ] - -instance ToHie (LocatedA (FunDep GhcRn)) where - toHie (L span fd@(FunDep _ lhs rhs)) = concatM $ - [ makeNode fd (locA span) - , toHie $ map (C Use) lhs - , toHie $ map (C Use) rhs - ] - - -instance ToHie (TScoped (FamEqn GhcRn (HsDataDefn GhcRn))) where - toHie (TS _ f) = toHie f - -instance ToHie (TScoped (FamEqn GhcRn (LocatedA (HsType GhcRn)))) where - toHie (TS _ f) = toHie f - -instance (ToHie rhs, HasLoc rhs) - => ToHie (FamEqn GhcRn rhs) where - toHie fe@(FamEqn _ var outer_bndrs pats _ rhs) = concatM $ - [ toHie $ C (Decl InstDec $ getRealSpan $ loc fe) var - , toHie $ TVS (ResolvedScopes []) scope outer_bndrs - , toHie pats - , toHie rhs - ] - where scope = combineScopes patsScope rhsScope - patsScope = mkScope (loc pats) - rhsScope = mkScope (loc rhs) - -instance ToHie (Located (InjectivityAnn GhcRn)) where - toHie (L span ann) = concatM $ makeNode ann span : case ann of - InjectivityAnn _ lhs rhs -> - [ toHie $ C Use lhs - , toHie $ map (C Use) rhs - ] - -instance ToHie (HsDataDefn GhcRn) where - toHie (HsDataDefn _ _ ctx _ mkind cons derivs) = concatM - [ toHie ctx - , toHie mkind - , toHie cons - , toHie derivs - ] - -instance ToHie (Located [Located (HsDerivingClause GhcRn)]) where - toHie (L span clauses) = concatM - [ locOnly span - , toHie clauses - ] - -instance ToHie (Located (HsDerivingClause GhcRn)) where - toHie (L span cl) = concatM $ makeNode cl span : case cl of - HsDerivingClause _ strat dct -> - [ toHie strat - , toHie dct - ] - -instance ToHie (LocatedC (DerivClauseTys GhcRn)) where - toHie (L span dct) = concatM $ makeNodeA dct span : case dct of - DctSingle _ ty -> [ toHie $ TS (ResolvedScopes []) ty ] - DctMulti _ tys -> [ toHie $ map (TS (ResolvedScopes [])) tys ] - -instance ToHie (Located (DerivStrategy GhcRn)) where - toHie (L span strat) = concatM $ makeNode strat span : case strat of - StockStrategy _ -> [] - AnyclassStrategy _ -> [] - NewtypeStrategy _ -> [] - ViaStrategy s -> [ toHie (TS (ResolvedScopes []) s) ] - -instance ToHie (LocatedP OverlapMode) where - toHie (L span _) = locOnly (locA span) - -instance ToHie a => ToHie (HsScaled GhcRn a) where - toHie (HsScaled w t) = concatM [toHie (arrowToHsType w), toHie t] - -instance ToHie (LocatedA (ConDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNode decl (locA span) : case decl of - ConDeclGADT { con_names = names, con_bndrs = L outer_bndrs_loc outer_bndrs - , con_mb_cxt = ctx, con_g_args = args, con_res_ty = typ } -> - [ toHie $ map (C (Decl ConDec $ getRealSpanA span)) names - , case outer_bndrs of - HsOuterImplicit{hso_ximplicit = imp_vars} -> - bindingsOnly $ map (C $ TyVarBind (mkScopeA outer_bndrs_loc) resScope) - imp_vars - HsOuterExplicit{hso_bndrs = exp_bndrs} -> - toHie $ tvScopes resScope NoScope exp_bndrs - , toHie ctx - , toHie args - , toHie typ - ] - where - rhsScope = combineScopes argsScope tyScope - ctxScope = maybe NoScope mkLScopeA ctx - argsScope = case args of - PrefixConGADT xs -> scaled_args_scope xs - RecConGADT x -> mkLScopeA x - tyScope = mkLScopeA typ - resScope = ResolvedScopes [ctxScope, rhsScope] - ConDeclH98 { con_name = name, con_ex_tvs = qvars - , con_mb_cxt = ctx, 
con_args = dets } -> - [ toHie $ C (Decl ConDec $ getRealSpan (locA span)) name - , toHie $ tvScopes (ResolvedScopes []) rhsScope qvars - , toHie ctx - , toHie dets - ] - where - rhsScope = combineScopes ctxScope argsScope - ctxScope = maybe NoScope mkLScopeA ctx - argsScope = case dets of - PrefixCon _ xs -> scaled_args_scope xs - InfixCon a b -> scaled_args_scope [a, b] - RecCon x -> mkLScopeA x - where scaled_args_scope :: [HsScaled GhcRn (LHsType GhcRn)] -> Scope - scaled_args_scope = foldr combineScopes NoScope . map (mkLScopeA . hsScaledThing) - -instance ToHie (LocatedL [LocatedA (ConDeclField GhcRn)]) where - toHie (L span decls) = concatM $ - [ locOnly (locA span) - , toHie decls - ] - -instance ToHie (TScoped (HsWildCardBndrs GhcRn (LocatedA (HsSigType GhcRn)))) where - toHie (TS sc (HsWC names a)) = concatM $ - [ bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) names - , toHie $ TS sc a - ] - where span = loc a - -instance ToHie (TScoped (HsWildCardBndrs GhcRn (LocatedA (HsType GhcRn)))) where - toHie (TS sc (HsWC names a)) = concatM $ - [ bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) names - , toHie a - ] - where span = loc a - -instance ToHie (LocatedA (StandaloneKindSig GhcRn)) where - toHie (L sp sig) = concatM [makeNodeA sig sp, toHie sig] - -instance ToHie (StandaloneKindSig GhcRn) where - toHie sig = concatM $ case sig of - StandaloneKindSig _ name typ -> - [ toHie $ C TyDecl name - , toHie $ TS (ResolvedScopes []) typ - ] - -instance HiePass p => ToHie (SigContext (LocatedA (Sig (GhcPass p)))) where - toHie (SC (SI styp msp) (L sp sig)) = - case hiePass @p of - HieTc -> pure [] - HieRn -> concatM $ makeNodeA sig sp : case sig of - TypeSig _ names typ -> - [ toHie $ map (C TyDecl) names - , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ - ] - PatSynSig _ names typ -> - [ toHie $ map (C TyDecl) names - , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ - ] - ClassOpSig _ _ names typ -> - [ case styp of - ClassSig -> toHie $ map (C $ ClassTyDecl $ getRealSpanA sp) names - _ -> toHie $ map (C $ TyDecl) names - , toHie $ TS (UnresolvedScope (map unLoc names) msp) typ - ] - IdSig _ _ -> [] - FixSig _ fsig -> - [ toHie $ L sp fsig - ] - InlineSig _ name _ -> - [ toHie $ (C Use) name - ] - SpecSig _ name typs _ -> - [ toHie $ (C Use) name - , toHie $ map (TS (ResolvedScopes [])) typs - ] - SpecInstSig _ _ typ -> - [ toHie $ TS (ResolvedScopes []) typ - ] - MinimalSig _ _ form -> - [ toHie form - ] - SCCFunSig _ _ name mtxt -> - [ toHie $ (C Use) name - , maybe (pure []) (locOnly . 
getLoc) mtxt - ] - CompleteMatchSig _ _ (L ispan names) typ -> - [ locOnly ispan - , toHie $ map (C Use) names - , toHie $ fmap (C Use) typ - ] - -instance ToHie (TScoped (LocatedA (HsSigType GhcRn))) where - toHie (TS tsc (L span t@HsSig{sig_bndrs=bndrs,sig_body=body})) = concatM $ makeNodeA t span : - [ toHie (TVS tsc (mkScopeA span) bndrs) - , toHie body - ] - --- Check this -instance Data flag => ToHie (TVScoped (HsOuterTyVarBndrs flag GhcRn)) where - toHie (TVS tsc sc bndrs) = case bndrs of - HsOuterImplicit xs -> bindingsOnly $ map (C $ TyVarBind sc tsc) xs - HsOuterExplicit _ xs -> toHie $ tvScopes tsc sc xs - -instance ToHie (LocatedA (HsType GhcRn)) where - toHie (L span t) = concatM $ makeNode t (locA span) : case t of - HsForAllTy _ tele body -> - let scope = mkScope $ getLocA body in - [ case tele of - HsForAllVis { hsf_vis_bndrs = bndrs } -> - toHie $ tvScopes (ResolvedScopes []) scope bndrs - HsForAllInvis { hsf_invis_bndrs = bndrs } -> - toHie $ tvScopes (ResolvedScopes []) scope bndrs - , toHie body - ] - HsQualTy _ ctx body -> - [ toHie ctx - , toHie body - ] - HsTyVar _ _ var -> - [ toHie $ C Use var - ] - HsAppTy _ a b -> - [ toHie a - , toHie b - ] - HsAppKindTy _ ty ki -> - [ toHie ty - , toHie ki - ] - HsFunTy _ w a b -> - [ toHie (arrowToHsType w) - , toHie a - , toHie b - ] - HsListTy _ a -> - [ toHie a - ] - HsTupleTy _ _ tys -> - [ toHie tys - ] - HsSumTy _ tys -> - [ toHie tys - ] - HsOpTy _ a op b -> - [ toHie a - , toHie $ C Use op - , toHie b - ] - HsParTy _ a -> - [ toHie a - ] - HsIParamTy _ ip ty -> - [ toHie ip - , toHie ty - ] - HsKindSig _ a b -> - [ toHie a - , toHie b - ] - HsSpliceTy _ a -> - [ toHie $ L span a - ] - HsDocTy _ a _ -> - [ toHie a - ] - HsBangTy _ _ ty -> - [ toHie ty - ] - HsRecTy _ fields -> - [ toHie fields - ] - HsExplicitListTy _ _ tys -> - [ toHie tys - ] - HsExplicitTupleTy _ tys -> - [ toHie tys - ] - HsTyLit _ _ -> [] - HsWildCardTy _ -> [] - HsStarTy _ _ -> [] - XHsType _ -> [] - -instance (ToHie tm, ToHie ty) => ToHie (HsArg tm ty) where - toHie (HsValArg tm) = toHie tm - toHie (HsTypeArg _ ty) = toHie ty - toHie (HsArgPar sp) = locOnly sp - -instance Data flag => ToHie (TVScoped (LocatedA (HsTyVarBndr flag GhcRn))) where - toHie (TVS tsc sc (L span bndr)) = concatM $ makeNodeA bndr span : case bndr of - UserTyVar _ _ var -> - [ toHie $ C (TyVarBind sc tsc) var - ] - KindedTyVar _ _ var kind -> - [ toHie $ C (TyVarBind sc tsc) var - , toHie kind - ] - -instance ToHie (TScoped (LHsQTyVars GhcRn)) where - toHie (TS sc (HsQTvs implicits vars)) = concatM $ - [ bindingsOnly bindings - , toHie $ tvScopes sc NoScope vars - ] - where - varLoc = loc vars - bindings = map (C $ TyVarBind (mkScope varLoc) sc) implicits - -instance ToHie (LocatedC [LocatedA (HsType GhcRn)]) where - toHie (L span tys) = concatM $ - [ locOnly (locA span) - , toHie tys - ] - -instance ToHie (LocatedA (ConDeclField GhcRn)) where - toHie (L span field) = concatM $ makeNode field (locA span) : case field of - ConDeclField _ fields typ _ -> - [ toHie $ map (RFC RecFieldDecl (getRealSpan $ loc typ)) fields - , toHie typ - ] - -instance ToHie (LHsExpr a) => ToHie (ArithSeqInfo a) where - toHie (From expr) = toHie expr - toHie (FromThen a b) = concatM $ - [ toHie a - , toHie b - ] - toHie (FromTo a b) = concatM $ - [ toHie a - , toHie b - ] - toHie (FromThenTo a b c) = concatM $ - [ toHie a - , toHie b - , toHie c - ] - -instance ToHie (LocatedA (SpliceDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - SpliceDecl _ splice _ -> 
- [ toHie splice - ] - -instance ToHie (HsBracket a) where - toHie _ = pure [] - -instance ToHie PendingRnSplice where - toHie _ = pure [] - -instance ToHie PendingTcSplice where - toHie _ = pure [] - -instance ToHie (LBooleanFormula (LocatedN Name)) where - toHie (L span form) = concatM $ makeNode form (locA span) : case form of - Var a -> - [ toHie $ C Use a - ] - And forms -> - [ toHie forms - ] - Or forms -> - [ toHie forms - ] - Parens f -> - [ toHie f - ] - -instance ToHie (Located HsIPName) where - toHie (L span e) = makeNode e span - -instance HiePass p => ToHie (LocatedA (HsSplice (GhcPass p))) where - toHie (L span sp) = concatM $ makeNodeA sp span : case sp of - HsTypedSplice _ _ _ expr -> - [ toHie expr - ] - HsUntypedSplice _ _ _ expr -> - [ toHie expr - ] - HsQuasiQuote _ _ _ ispan _ -> - [ locOnly ispan - ] - HsSpliced _ _ _ -> - [] - XSplice x -> case ghcPass @p of - GhcTc -> case x of - HsSplicedT _ -> [] - -instance ToHie (LocatedA (RoleAnnotDecl GhcRn)) where - toHie (L span annot) = concatM $ makeNodeA annot span : case annot of - RoleAnnotDecl _ var roles -> - [ toHie $ C Use var - , concatMapM (locOnly . getLoc) roles - ] - -instance ToHie (LocatedA (InstDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - ClsInstD _ d -> - [ toHie $ L span d - ] - DataFamInstD _ d -> - [ toHie $ L span d - ] - TyFamInstD _ d -> - [ toHie $ L span d - ] - -instance ToHie (LocatedA (ClsInstDecl GhcRn)) where - toHie (L span decl) = concatM - [ toHie $ TS (ResolvedScopes [mkScopeA span]) $ cid_poly_ty decl - , toHie $ fmap (BC InstanceBind ModuleScope) $ cid_binds decl - , toHie $ map (SC $ SI InstSig $ getRealSpanA span) $ cid_sigs decl - , concatMapM (locOnly . getLocA) $ cid_tyfam_insts decl - , toHie $ cid_tyfam_insts decl - , concatMapM (locOnly . 
getLocA) $ cid_datafam_insts decl - , toHie $ cid_datafam_insts decl - , toHie $ cid_overlap_mode decl - ] - -instance ToHie (LocatedA (DataFamInstDecl GhcRn)) where - toHie (L sp (DataFamInstDecl d)) = toHie $ TS (ResolvedScopes [mkScopeA sp]) d - -instance ToHie (LocatedA (TyFamInstDecl GhcRn)) where - toHie (L sp (TyFamInstDecl _ d)) = toHie $ TS (ResolvedScopes [mkScopeA sp]) d - -instance HiePass p => ToHie (Context (FieldOcc (GhcPass p))) where - toHie (C c (FieldOcc n (L l _))) = case hiePass @p of - HieTc -> toHie (C c (L l n)) - HieRn -> toHie (C c (L l n)) - -instance HiePass p => ToHie (PatSynFieldContext (RecordPatSynField (GhcPass p))) where - toHie (PSC sp (RecordPatSynField a b)) = concatM $ - [ toHie $ C (RecField RecFieldDecl sp) a - , toHie $ C Use b - ] - -instance ToHie (LocatedA (DerivDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - DerivDecl _ typ strat overlap -> - [ toHie $ TS (ResolvedScopes []) typ - , toHie strat - , toHie overlap - ] - -instance ToHie (LocatedA (FixitySig GhcRn)) where - toHie (L span sig) = concatM $ makeNodeA sig span : case sig of - FixitySig _ vars _ -> - [ toHie $ map (C Use) vars - ] - -instance ToHie (LocatedA (DefaultDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - DefaultDecl _ typs -> - [ toHie typs - ] - -instance ToHie (LocatedA (ForeignDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - ForeignImport {fd_name = name, fd_sig_ty = sig, fd_fi = fi} -> - [ toHie $ C (ValBind RegularBind ModuleScope $ getRealSpanA span) name - , toHie $ TS (ResolvedScopes []) sig - , toHie fi - ] - ForeignExport {fd_name = name, fd_sig_ty = sig, fd_fe = fe} -> - [ toHie $ C Use name - , toHie $ TS (ResolvedScopes []) sig - , toHie fe - ] - -instance ToHie ForeignImport where - toHie (CImport (L a _) (L b _) _ _ (L c _)) = concatM $ - [ locOnly a - , locOnly b - , locOnly c - ] - -instance ToHie ForeignExport where - toHie (CExport (L a _) (L b _)) = concatM $ - [ locOnly a - , locOnly b - ] - -instance ToHie (LocatedA (WarnDecls GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - Warnings _ _ warnings -> - [ toHie warnings - ] - -instance ToHie (LocatedA (WarnDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNode decl (locA span) : case decl of - Warning _ vars _ -> - [ toHie $ map (C Use) vars - ] - -instance ToHie (LocatedA (AnnDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - HsAnnotation _ _ prov expr -> - [ toHie prov - , toHie expr - ] - -instance ToHie (AnnProvenance GhcRn) where - toHie (ValueAnnProvenance a) = toHie $ C Use a - toHie (TypeAnnProvenance a) = toHie $ C Use a - toHie ModuleAnnProvenance = pure [] - -instance ToHie (LocatedA (RuleDecls GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - HsRules _ _ rules -> - [ toHie rules - ] - -instance ToHie (LocatedA (RuleDecl GhcRn)) where - toHie (L span r@(HsRule _ rname _ tybndrs bndrs exprA exprB)) = concatM - [ makeNodeA r span - , locOnly $ getLoc rname - , toHie $ fmap (tvScopes (ResolvedScopes []) scope) tybndrs - , toHie $ map (RS $ mkScope (locA span)) bndrs - , toHie exprA - , toHie exprB - ] - where scope = bndrs_sc `combineScopes` exprA_sc `combineScopes` exprB_sc - bndrs_sc = maybe NoScope mkLScope (listToMaybe bndrs) - exprA_sc = mkLScopeA exprA - exprB_sc = mkLScopeA exprB - -instance ToHie (RScoped (Located (RuleBndr GhcRn))) where - toHie (RS sc (L 
span bndr)) = concatM $ makeNode bndr span : case bndr of - RuleBndr _ var -> - [ toHie $ C (ValBind RegularBind sc Nothing) var - ] - RuleBndrSig _ var typ -> - [ toHie $ C (ValBind RegularBind sc Nothing) var - , toHie $ TS (ResolvedScopes [sc]) typ - ] - -instance ToHie (LocatedA (ImportDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNode decl (locA span) : case decl of - ImportDecl { ideclName = name, ideclAs = as, ideclHiding = hidden } -> - [ toHie $ IEC Import name - , toHie $ fmap (IEC ImportAs) as - , maybe (pure []) goIE hidden - ] - where - goIE (hiding, (L sp liens)) = concatM $ - [ locOnly (locA sp) - , toHie $ map (IEC c) liens - ] - where - c = if hiding then ImportHiding else Import - -instance ToHie (IEContext (LocatedA (IE GhcRn))) where - toHie (IEC c (L span ie)) = concatM $ makeNode ie (locA span) : case ie of - IEVar _ n -> - [ toHie $ IEC c n - ] - IEThingAbs _ n -> - [ toHie $ IEC c n - ] - IEThingAll _ n -> - [ toHie $ IEC c n - ] - IEThingWith flds n _ ns -> - [ toHie $ IEC c n - , toHie $ map (IEC c) ns - , toHie $ map (IEC c) flds - ] - IEModuleContents _ n -> - [ toHie $ IEC c n - ] - IEGroup _ _ _ -> [] - IEDoc _ _ -> [] - IEDocNamed _ _ -> [] - -instance ToHie (IEContext (LIEWrappedName Name)) where - toHie (IEC c (L span iewn)) = concatM $ makeNodeA iewn span : case iewn of - IEName n -> - [ toHie $ C (IEThing c) n - ] - IEPattern _ p -> - [ toHie $ C (IEThing c) p - ] - IEType _ n -> - [ toHie $ C (IEThing c) n - ] - -instance ToHie (IEContext (Located FieldLabel)) where - toHie (IEC c (L span lbl)) = concatM - [ makeNode lbl span - , toHie $ C (IEThing c) $ L span (flSelector lbl) - ] diff --git a/hie-compat/src-reexport-ghc9/Compat/HieBin.hs b/hie-compat/src-reexport-ghc9/Compat/HieBin.hs deleted file mode 100644 index 254e1db6d3..0000000000 --- a/hie-compat/src-reexport-ghc9/Compat/HieBin.hs +++ /dev/null @@ -1,8 +0,0 @@ -{- -Binary serialization for .hie files. 
--} - -module Compat.HieBin ( module GHC.Iface.Ext.Binary) -where - -import GHC.Iface.Ext.Binary diff --git a/hie-compat/src-reexport-ghc9/Compat/HieDebug.hs b/hie-compat/src-reexport-ghc9/Compat/HieDebug.hs deleted file mode 100644 index 872da67c2b..0000000000 --- a/hie-compat/src-reexport-ghc9/Compat/HieDebug.hs +++ /dev/null @@ -1,10 +0,0 @@ -module Compat.HieDebug - ( module GHC.Iface.Ext.Debug - , ppHie ) where -import GHC.Iface.Ext.Debug - -import GHC.Iface.Ext.Types (HieAST) -import GHC.Utils.Outputable (Outputable(ppr), SDoc) - -ppHie :: Outputable a => HieAST a -> SDoc -ppHie = ppr diff --git a/hie-compat/src-reexport-ghc9/Compat/HieTypes.hs b/hie-compat/src-reexport-ghc9/Compat/HieTypes.hs deleted file mode 100644 index 36bb86abeb..0000000000 --- a/hie-compat/src-reexport-ghc9/Compat/HieTypes.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieTypes - ( module GHC.Iface.Ext.Types ) where -import GHC.Iface.Ext.Types diff --git a/hie-compat/src-reexport-ghc9/Compat/HieUtils.hs b/hie-compat/src-reexport-ghc9/Compat/HieUtils.hs deleted file mode 100644 index 204a312039..0000000000 --- a/hie-compat/src-reexport-ghc9/Compat/HieUtils.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieUtils - ( module GHC.Iface.Ext.Utils ) where -import GHC.Iface.Ext.Utils diff --git a/hie-compat/src-reexport-ghc92/Compat/HieAst.hs b/hie-compat/src-reexport-ghc92/Compat/HieAst.hs deleted file mode 100644 index 240dc4da49..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieAst.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieAst - ( module GHC.Iface.Ext.Ast ) where -import GHC.Iface.Ext.Ast diff --git a/hie-compat/src-reexport-ghc92/Compat/HieBin.hs b/hie-compat/src-reexport-ghc92/Compat/HieBin.hs deleted file mode 100644 index 254e1db6d3..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieBin.hs +++ /dev/null @@ -1,8 +0,0 @@ -{- -Binary serialization for .hie files. 
--} - -module Compat.HieBin ( module GHC.Iface.Ext.Binary) -where - -import GHC.Iface.Ext.Binary diff --git a/hie-compat/src-reexport-ghc92/Compat/HieDebug.hs b/hie-compat/src-reexport-ghc92/Compat/HieDebug.hs deleted file mode 100644 index 872da67c2b..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieDebug.hs +++ /dev/null @@ -1,10 +0,0 @@ -module Compat.HieDebug - ( module GHC.Iface.Ext.Debug - , ppHie ) where -import GHC.Iface.Ext.Debug - -import GHC.Iface.Ext.Types (HieAST) -import GHC.Utils.Outputable (Outputable(ppr), SDoc) - -ppHie :: Outputable a => HieAST a -> SDoc -ppHie = ppr diff --git a/hie-compat/src-reexport-ghc92/Compat/HieTypes.hs b/hie-compat/src-reexport-ghc92/Compat/HieTypes.hs deleted file mode 100644 index 36bb86abeb..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieTypes.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieTypes - ( module GHC.Iface.Ext.Types ) where -import GHC.Iface.Ext.Types diff --git a/hie-compat/src-reexport-ghc92/Compat/HieUtils.hs b/hie-compat/src-reexport-ghc92/Compat/HieUtils.hs deleted file mode 100644 index 204a312039..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieUtils.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieUtils - ( module GHC.Iface.Ext.Utils ) where -import GHC.Iface.Ext.Utils diff --git a/hie-compat/src-reexport/Compat/HieDebug.hs b/hie-compat/src-reexport/Compat/HieDebug.hs deleted file mode 100644 index 32da665b6d..0000000000 --- a/hie-compat/src-reexport/Compat/HieDebug.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieDebug - ( module HieDebug ) where -import HieDebug diff --git a/hie-compat/src-reexport/Compat/HieTypes.hs b/hie-compat/src-reexport/Compat/HieTypes.hs deleted file mode 100644 index 7185fb10bd..0000000000 --- a/hie-compat/src-reexport/Compat/HieTypes.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieTypes - ( module HieTypes ) where -import HieTypes diff --git a/hie-compat/src-reexport/Compat/HieUtils.hs b/hie-compat/src-reexport/Compat/HieUtils.hs deleted file mode 100644 index c4c401e269..0000000000 --- a/hie-compat/src-reexport/Compat/HieUtils.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieUtils - ( module HieUtils ) where -import HieUtils diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Key.hs b/hls-graph/src/Development/IDE/Graph/Internal/Key.hs index ba303cdb99..85cebeb110 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Key.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Key.hs @@ -34,6 +34,7 @@ module Development.IDE.Graph.Internal.Key ) where --import Control.Monad.IO.Class () +import Control.Exception (evaluate) import Data.Coerce import Data.Dynamic import qualified Data.HashMap.Strict as Map @@ -85,8 +86,15 @@ newKey k = unsafePerformIO $ do lookupKeyValue :: Key -> KeyValue lookupKeyValue (UnsafeMkKey x) = unsafePerformIO $ do + -- NOTE: + -- The reason for this evaluate is that the x, if not forced yet, is a thunk + -- that forces the atomicModifyIORef' in the creation of the new key. If it + -- isn't forced *before* reading the keyMap, the keyMap will only obtain the new + -- key (x) *after* the IntMap is already copied out of the keyMap reference, + -- i.e. when it is forced for the lookup in the IntMap. + k <- evaluate x GlobalKeyValueMap _ im _ <- readIORef keyMap - pure $! im IM.! x + pure $! im IM.! 
k {-# NOINLINE lookupKeyValue #-} diff --git a/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs b/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs index e64ab34876..4fa81a2d57 100644 --- a/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs +++ b/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs @@ -69,7 +69,7 @@ requireDiagnostic actuals expected@(severity, cursor, expectedMsg, mbExpectedCod | ghcVersion >= GHC96 = case (mbExpectedCode, _code d) of (Nothing, _) -> True - (Just expectedCode, Nothing) -> False + (Just _, Nothing) -> False (Just expectedCode, Just actualCode) -> InR expectedCode == actualCode | otherwise = True diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs index 9a56467f3f..7a2c53ee25 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs @@ -2,65 +2,53 @@ {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE TypeFamilies #-} module Ide.Plugin.Cabal (descriptor, haskellInteractionDescriptor, Log (..)) where -import Control.Concurrent.Strict -import Control.DeepSeq import Control.Lens ((^.)) import Control.Monad.Extra import Control.Monad.IO.Class import Control.Monad.Trans.Class (lift) import Control.Monad.Trans.Maybe (runMaybeT) -import qualified Data.ByteString as BS -import Data.Hashable import Data.HashMap.Strict (HashMap) -import qualified Data.HashMap.Strict as HashMap import qualified Data.List as List -import qualified Data.List.NonEmpty as NE import qualified Data.Maybe as Maybe -import Data.Proxy import qualified Data.Text () import qualified Data.Text as T -import qualified Data.Text.Encoding as Encoding -import Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE as D import Development.IDE.Core.FileStore (getVersionedTextDoc) import Development.IDE.Core.PluginUtils import Development.IDE.Core.Shake (restartShakeSession) -import qualified Development.IDE.Core.Shake as Shake -import Development.IDE.Graph (Key, - alwaysRerun) +import Development.IDE.Graph (Key) import Development.IDE.LSP.HoverDefinition (foundHover) import qualified Development.IDE.Plugin.Completions.Logic as Ghcide import Development.IDE.Types.Shake (toKey) -import qualified Distribution.CabalSpecVersion as Cabal import qualified Distribution.Fields as Syntax import Distribution.Package (Dependency) import Distribution.PackageDescription (allBuildDepends, depPkgName, unPackageName) import Distribution.PackageDescription.Configuration (flattenPackageDescription) -import Distribution.Parsec.Error import qualified Distribution.Parsec.Position as Syntax -import GHC.Generics -import qualified Ide.Plugin.Cabal.CabalAdd as CabalAdd +import qualified Ide.Plugin.Cabal.CabalAdd.CodeAction as CabalAdd +import qualified Ide.Plugin.Cabal.CabalAdd.Command as CabalAdd import Ide.Plugin.Cabal.Completion.CabalFields as CabalFields import qualified Ide.Plugin.Cabal.Completion.Completer.Types as CompleterTypes import qualified Ide.Plugin.Cabal.Completion.Completions as Completions -import qualified Ide.Plugin.Cabal.Completion.Data as Data import Ide.Plugin.Cabal.Completion.Types (ParseCabalCommonSections (ParseCabalCommonSections), ParseCabalFields (..), ParseCabalFile (..)) import qualified Ide.Plugin.Cabal.Completion.Types as Types import Ide.Plugin.Cabal.Definition (gotoDefinition) -import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics import qualified 
Ide.Plugin.Cabal.FieldSuggest as FieldSuggest +import qualified Ide.Plugin.Cabal.Files as CabalAdd import qualified Ide.Plugin.Cabal.LicenseSuggest as LicenseSuggest +import qualified Ide.Plugin.Cabal.OfInterest as OfInterest import Ide.Plugin.Cabal.Orphans () import Ide.Plugin.Cabal.Outline -import qualified Ide.Plugin.Cabal.Parse as Parse +import qualified Ide.Plugin.Cabal.Rules as Rules import Ide.Plugin.Error import Ide.Types import qualified Language.LSP.Protocol.Lens as JL @@ -71,7 +59,8 @@ import Text.Regex.TDFA data Log = LogModificationTime NormalizedFilePath FileVersion - | LogShake Shake.Log + | LogRule Rules.Log + | LogOfInterest OfInterest.Log | LogDocOpened Uri | LogDocModified Uri | LogDocSaved Uri @@ -84,7 +73,8 @@ data Log instance Pretty Log where pretty = \case - LogShake log' -> pretty log' + LogRule log' -> pretty log' + LogOfInterest log' -> pretty log' LogModificationTime nfp modTime -> "Modified:" <+> pretty (fromNormalizedFilePath nfp) <+> pretty (show modTime) LogDocOpened uri -> @@ -105,28 +95,30 @@ instance Pretty Log where LogCompletions logs -> pretty logs LogCabalAdd logs -> pretty logs --- | Some actions with cabal files originate from haskell files. --- This descriptor allows to hook into the diagnostics of haskell source files, and --- allows us to provide code actions and commands that interact with `.cabal` files. +{- | Some actions in cabal files can be triggered from haskell files. +This descriptor allows us to hook into the diagnostics of haskell source files and +allows us to provide code actions and commands that interact with `.cabal` files. +-} haskellInteractionDescriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState haskellInteractionDescriptor recorder plId = (defaultPluginDescriptor plId "Provides the cabal-add code action in haskell files") { pluginHandlers = mconcat - [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction cabalAddCodeAction + [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ cabalAddDependencyCodeAction recorder + , mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ cabalAddModuleCodeAction recorder ] - , pluginCommands = [PluginCommand CabalAdd.cabalAddCommand "add a dependency to a cabal file" (CabalAdd.command cabalAddRecorder)] - , pluginRules = pure () - , pluginNotificationHandlers = mempty + , pluginCommands = + [ PluginCommand CabalAdd.cabalAddDependencyCommandId "add a dependency to a cabal file" (CabalAdd.addDependencyCommand cabalAddRecorder) + , PluginCommand CabalAdd.cabalAddModuleCommandId "add a module to a cabal file" (CabalAdd.addModuleCommand cabalAddRecorder) + ] } - where - cabalAddRecorder = cmapWithPrio LogCabalAdd recorder - + where + cabalAddRecorder = cmapWithPrio LogCabalAdd recorder descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState descriptor recorder plId = (defaultCabalPluginDescriptor plId "Provides a variety of IDE features in cabal files") - { pluginRules = cabalRules recorder plId + { pluginRules = Rules.cabalRules ruleRecorder plId , pluginHandlers = mconcat [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction licenseSuggestCodeAction @@ -143,32 +135,35 @@ descriptor recorder plId = whenUriFile _uri $ \file -> do log' Debug $ LogDocOpened _uri restartCabalShakeSession (shakeExtras ide) vfs file "(opened)" $ - addFileOfInterest recorder ide file Modified{firstOpen = True} + OfInterest.addFileOfInterest ofInterestRecorder ide file Modified{firstOpen = True} , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidChange $ \ide vfs 
_ (DidChangeTextDocumentParams VersionedTextDocumentIdentifier{_uri} _) -> liftIO $ do whenUriFile _uri $ \file -> do log' Debug $ LogDocModified _uri restartCabalShakeSession (shakeExtras ide) vfs file "(changed)" $ - addFileOfInterest recorder ide file Modified{firstOpen = False} + OfInterest.addFileOfInterest ofInterestRecorder ide file Modified{firstOpen = False} , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidSave $ \ide vfs _ (DidSaveTextDocumentParams TextDocumentIdentifier{_uri} _) -> liftIO $ do whenUriFile _uri $ \file -> do log' Debug $ LogDocSaved _uri - restartCabalShakeSession (shakeExtras ide) vfs file "(saved)" $ - addFileOfInterest recorder ide file OnDisk + restartCabalShakeSessionPhysical (shakeExtras ide) vfs file "(saved)" $ + OfInterest.addFileOfInterest ofInterestRecorder ide file OnDisk , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidClose $ \ide vfs _ (DidCloseTextDocumentParams TextDocumentIdentifier{_uri}) -> liftIO $ do whenUriFile _uri $ \file -> do log' Debug $ LogDocClosed _uri restartCabalShakeSession (shakeExtras ide) vfs file "(closed)" $ - deleteFileOfInterest recorder ide file + OfInterest.deleteFileOfInterest ofInterestRecorder ide file ] - , pluginConfigDescriptor = defaultConfigDescriptor - { configHasDiagnostics = True - } + , pluginConfigDescriptor = + defaultConfigDescriptor + { configHasDiagnostics = True + } } where log' = logWith recorder + ruleRecorder = cmapWithPrio LogRule recorder + ofInterestRecorder = cmapWithPrio LogOfInterest recorder whenUriFile :: Uri -> (NormalizedFilePath -> IO ()) -> IO () whenUriFile uri act = whenJust (uriToFilePath uri) $ act . toNormalizedFilePath' @@ -188,144 +183,36 @@ restartCabalShakeSession shakeExtras vfs file actionMsg actionBetweenSession = d keys <- actionBetweenSession return (toKey GetModificationTime file:keys) --- ---------------------------------------------------------------- --- Plugin Rules --- ---------------------------------------------------------------- - -cabalRules :: Recorder (WithPriority Log) -> PluginId -> Rules () -cabalRules recorder plId = do - -- Make sure we initialise the cabal files-of-interest. - ofInterestRules recorder - -- Rule to produce diagnostics for cabal files. - define (cmapWithPrio LogShake recorder) $ \ParseCabalFields file -> do - config <- getPluginConfigAction plId - if not (plcGlobalOn config && plcDiagnosticsOn config) - then pure ([], Nothing) - else do - -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), - -- we rerun this rule because this rule *depends* on GetModificationTime. 
- (t, mCabalSource) <- use_ GetFileContents file - log' Debug $ LogModificationTime file t - contents <- case mCabalSource of - Just sources -> - pure $ Encoding.encodeUtf8 $ Rope.toText sources - Nothing -> do - liftIO $ BS.readFile $ fromNormalizedFilePath file - - case Parse.readCabalFields file contents of - Left _ -> - pure ([], Nothing) - Right fields -> - pure ([], Just fields) - - define (cmapWithPrio LogShake recorder) $ \ParseCabalCommonSections file -> do - fields <- use_ ParseCabalFields file - let commonSections = Maybe.mapMaybe (\case - commonSection@(Syntax.Section (Syntax.Name _ "common") _ _) -> Just commonSection - _ -> Nothing) - fields - pure ([], Just commonSections) - - define (cmapWithPrio LogShake recorder) $ \ParseCabalFile file -> do - config <- getPluginConfigAction plId - if not (plcGlobalOn config && plcDiagnosticsOn config) - then pure ([], Nothing) - else do - -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), - -- we rerun this rule because this rule *depends* on GetModificationTime. - (t, mCabalSource) <- use_ GetFileContents file - log' Debug $ LogModificationTime file t - contents <- case mCabalSource of - Just sources -> - pure $ Encoding.encodeUtf8 $ Rope.toText sources - Nothing -> do - liftIO $ BS.readFile $ fromNormalizedFilePath file - - -- Instead of fully reparsing the sources to get a 'GenericPackageDescription', - -- we would much rather re-use the already parsed results of 'ParseCabalFields'. - -- Unfortunately, Cabal-syntax doesn't expose the function 'parseGenericPackageDescription'' - -- which allows us to resume the parsing pipeline with '[Field Position]'. - (pWarnings, pm) <- liftIO $ Parse.parseCabalFileContents contents - let warningDiags = fmap (Diagnostics.warningDiagnostic file) pWarnings - case pm of - Left (_cabalVersion, pErrorNE) -> do - let regexUnknownCabalBefore310 :: T.Text - -- We don't support the cabal version, this should not be an error, as the - -- user did not do anything wrong. Instead we cast it to a warning - regexUnknownCabalBefore310 = "Unsupported cabal-version [0-9]+.[0-9]*" - regexUnknownCabalVersion :: T.Text - regexUnknownCabalVersion = "Unsupported cabal format version in cabal-version field: [0-9]+.[0-9]+" - unsupportedCabalHelpText = unlines - [ "The used `cabal-version` is not fully supported by this `HLS` binary." - , "Either the `cabal-version` is unknown, or too new for this executable." - , "This means that some functionality might not work as expected." - , "If you face any issues, try downgrading to a supported `cabal-version` or upgrading `HLS` if possible." - , "" - , "Supported versions are: " <> - List.intercalate ", " - (fmap Cabal.showCabalSpecVersion Data.supportedCabalVersions) - ] - errorDiags = - NE.toList $ - NE.map - ( \pe@(PError pos text) -> - if any (text =~) - [ regexUnknownCabalBefore310 - , regexUnknownCabalVersion - ] - then Diagnostics.warningDiagnostic file (Syntax.PWarning Syntax.PWTOther pos $ - unlines - [ text - , unsupportedCabalHelpText - ]) - else Diagnostics.errorDiagnostic file pe - ) - pErrorNE - allDiags = errorDiags <> warningDiags - pure (allDiags, Nothing) - Right gpd -> do - pure (warningDiags, Just gpd) - - action $ do - -- Run the cabal kick. This code always runs when 'shakeRestart' is run. - -- Must be careful to not impede the performance too much. Crucial to - -- a snappy IDE experience. - kick - where - log' = logWith recorder - -{- | This is the kick function for the cabal plugin. 
-We run this action, whenever we shake session us run/restarted, which triggers -actions to produce diagnostics for cabal files. - -It is paramount that this kick-function can be run quickly, since it is a blocking -function invocation. --} -kick :: Action () -kick = do - files <- HashMap.keys <$> getCabalFilesOfInterestUntracked - Shake.runWithSignal (Proxy @"kick/start/cabal") (Proxy @"kick/done/cabal") files Types.ParseCabalFile +-- | Just like 'restartCabalShakeSession', but records that the 'file' has been changed on disk. +-- So, any action that can only work with on-disk modifications may depend on the 'GetPhysicalModificationTime' +-- rule to get re-run if the file changes on disk. +restartCabalShakeSessionPhysical :: ShakeExtras -> VFS.VFS -> NormalizedFilePath -> String -> IO [Key] -> IO () +restartCabalShakeSessionPhysical shakeExtras vfs file actionMsg actionBetweenSession = do + restartShakeSession shakeExtras (VFSModified vfs) (fromNormalizedFilePath file ++ " " ++ actionMsg) [] $ do + keys <- actionBetweenSession + return (toKey GetModificationTime file:toKey GetPhysicalModificationTime file:keys) -- ---------------------------------------------------------------- -- Code Actions -- ---------------------------------------------------------------- licenseSuggestCodeAction :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction -licenseSuggestCodeAction ideState _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics=diags}) = do +licenseSuggestCodeAction ideState _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics = diags}) = do maxCompls <- fmap maxCompletions . liftIO $ runAction "cabal-plugin.suggestLicense" ideState getClientConfigAction pure $ InL $ diags >>= (fmap InR . LicenseSuggest.licenseErrorAction maxCompls uri) --- | CodeActions for correcting field names with typos in them. --- --- Provides CodeActions that fix typos in both stanzas and top-level field names. --- The suggestions are computed based on the completion context, where we "move" a fake cursor --- to the end of the field name and trigger cabal file completions. The completions are then --- suggested to the user. --- --- TODO: Relying on completions here often does not produce the desired results, we should --- use some sort of fuzzy matching in the future, see issue #4357. +{- | CodeActions for correcting field names with typos in them. + +Provides CodeActions that fix typos in both stanzas and top-level field names. +The suggestions are computed based on the completion context, where we "move" a fake cursor +to the end of the field name and trigger cabal file completions. The completions are then +suggested to the user. + +TODO: Relying on completions here often does not produce the desired results, we should +use some sort of fuzzy matching in the future, see issue #4357. 
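+
+For example (illustrative only), a cabal file containing the misspelled top-level field
+
+> licens: MIT
+
+may be offered a code action replacing @licens@ with the completion @license@.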
+-} fieldSuggestCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction -fieldSuggestCodeAction recorder ide _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics=diags}) = do +fieldSuggestCodeAction recorder ide _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics = diags}) = do mContents <- liftIO $ runAction "cabal-plugin.getUriContents" ide $ getUriContents $ toNormalizedUri uri case (,) <$> mContents <*> uriToFilePath' uri of Nothing -> pure $ InL [] @@ -340,47 +227,80 @@ fieldSuggestCodeAction recorder ide _ (CodeActionParams _ _ (TextDocumentIdentif let fields = Maybe.mapMaybe FieldSuggest.fieldErrorName diags results <- forM fields (getSuggestion fileContents path cabalFields) pure $ InL $ map InR $ concat results - where - getSuggestion fileContents fp cabalFields (fieldName,Diagnostic{ _range=_range@(Range (Position lineNr col) _) }) = do - let -- Compute where we would anticipate the cursor to be. - fakeLspCursorPosition = Position lineNr (col + fromIntegral (T.length fieldName)) - lspPrefixInfo = Ghcide.getCompletionPrefixFromRope fakeLspCursorPosition fileContents - cabalPrefixInfo = Completions.getCabalPrefixInfo fp lspPrefixInfo - completions <- liftIO $ computeCompletionsAt recorder ide cabalPrefixInfo fp cabalFields - let completionTexts = fmap (^. JL.label) completions - pure $ FieldSuggest.fieldErrorAction uri fieldName completionTexts _range - -cabalAddCodeAction :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction -cabalAddCodeAction state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics=diags}) = do - maxCompls <- fmap maxCompletions . liftIO $ runAction "cabal.cabal-add" state getClientConfigAction - let suggestions = take maxCompls $ concatMap CabalAdd.hiddenPackageSuggestion diags + where + getSuggestion fileContents fp cabalFields (fieldName, Diagnostic{_range = _range@(Range (Position lineNr col) _)}) = do + let + -- Compute where we would anticipate the cursor to be. + fakeLspCursorPosition = Position lineNr (col + fromIntegral (T.length fieldName)) + lspPrefixInfo = Ghcide.getCompletionPrefixFromRope fakeLspCursorPosition fileContents + cabalPrefixInfo = Completions.getCabalPrefixInfo fp lspPrefixInfo + completions <- liftIO $ computeCompletionsAt recorder ide cabalPrefixInfo fp cabalFields + let completionTexts = fmap (^. 
JL.label) completions + pure $ FieldSuggest.fieldErrorAction uri fieldName completionTexts _range + +cabalAddDependencyCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +cabalAddDependencyCodeAction _ state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics = diags}) = do + let suggestions = concatMap CabalAdd.hiddenPackageSuggestion diags case suggestions of [] -> pure $ InL [] - _ -> - case uriToFilePath uri of + _ -> do + haskellFilePath <- uriToFilePathE uri + mbCabalFile <- liftIO $ CabalAdd.findResponsibleCabalFile haskellFilePath + case mbCabalFile of Nothing -> pure $ InL [] - Just haskellFilePath -> do - mbCabalFile <- liftIO $ CabalAdd.findResponsibleCabalFile haskellFilePath - case mbCabalFile of + Just cabalFilePath -> do + verTxtDocId <- + runActionE "cabalAdd.getVersionedTextDoc" state $ + lift $ + getVersionedTextDoc $ + TextDocumentIdentifier (filePathToUri cabalFilePath) + mbGPD <- liftIO $ runAction "cabal.cabal-add" state $ useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath + case mbGPD of Nothing -> pure $ InL [] - Just cabalFilePath -> do - verTxtDocId <- runActionE "cabalAdd.getVersionedTextDoc" state $ - lift $ getVersionedTextDoc $ TextDocumentIdentifier (filePathToUri cabalFilePath) - mbGPD <- liftIO $ runAction "cabal.cabal-add" state $ useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath - case mbGPD of - Nothing -> pure $ InL [] - Just (gpd, _) -> do - actions <- liftIO $ CabalAdd.addDependencySuggestCodeAction plId verTxtDocId - suggestions - haskellFilePath cabalFilePath - gpd - pure $ InL $ fmap InR actions + Just (gpd, _) -> do + actions <- + liftIO $ + CabalAdd.addDependencySuggestCodeAction + plId + verTxtDocId + suggestions + haskellFilePath + cabalFilePath + gpd + pure $ InL $ fmap InR actions + +cabalAddModuleCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +cabalAddModuleCodeAction recorder state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics = diags}) = + case List.find CabalAdd.isUnknownModuleDiagnostic diags of + Just diag -> + do + haskellFilePath <- uriToFilePathE uri + mbCabalFile <- liftIO $ CabalAdd.findResponsibleCabalFile haskellFilePath + case mbCabalFile of + Nothing -> pure $ InL [] + Just cabalFilePath -> do + verTextDocId <- + runActionE "cabalAdd.getVersionedTextDoc" state $ + lift $ + getVersionedTextDoc $ + TextDocumentIdentifier (filePathToUri cabalFilePath) + (gpd, _) <- runActionE "cabal.cabal-add" state $ useWithStaleE ParseCabalFile $ toNormalizedFilePath cabalFilePath + actions <- + CabalAdd.collectModuleInsertionOptions + (cmapWithPrio LogCabalAdd recorder) + plId + verTextDocId + diag + cabalFilePath + gpd + uri + pure $ InL $ fmap InR actions + Nothing -> pure $ InL [] + +{- | Handler for hover messages. --- | Handler for hover messages. --- --- Provides a Handler for displaying message on hover. --- If found that the filtered hover message is a dependency, --- adds a Documentation link. +If the cursor is hovering on a dependency, add a documentation link to that dependency. 
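+
+For example, hovering over @text@ in a @build-depends@ field produces a hover roughly of the form
+
+> text
+> [Documentation](https://hackage.haskell.org/package/text)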
+-} hover :: PluginMethodHandler IdeState LSP.Method_TextDocumentHover hover ide _ msgParam = do nfp <- getNormalizedFilePathE uri @@ -395,111 +315,35 @@ hover ide _ msgParam = do Nothing -> pure $ InR Null Just txt -> if txt `elem` depsNames - then pure $ foundHover (Nothing, [txt <> "\n", documentationText txt]) - else pure $ InR Null - where - cursor = Types.lspPositionToCabalPosition (msgParam ^. JL.position) - uri = msgParam ^. JL.textDocument . JL.uri - - dependencyName :: Dependency -> T.Text - dependencyName dep = T.pack $ unPackageName $ depPkgName dep - - -- | Removes version requirements like - -- `==1.0.0.0`, `>= 2.1.1` that could be included in - -- hover message. Assumes that the dependency consists - -- of alphanums with dashes in between. Ends with an alphanum. - -- - -- Examples: - -- >>> filterVersion "imp-deps>=2.1.1" - -- "imp-deps" - filterVersion :: T.Text -> Maybe T.Text - filterVersion msg = getMatch (msg =~ regex) - where - regex :: T.Text - regex = "([a-zA-Z0-9-]*[a-zA-Z0-9])" - - getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> Maybe T.Text - getMatch (_, _, _, [dependency]) = Just dependency - getMatch (_, _, _, _) = Nothing -- impossible case - - documentationText :: T.Text -> T.Text - documentationText package = "[Documentation](https://hackage.haskell.org/package/" <> package <> ")" - - --- ---------------------------------------------------------------- --- Cabal file of Interest rules and global variable --- ---------------------------------------------------------------- - -{- | Cabal files that are currently open in the lsp-client. -Specific actions happen when these files are saved, closed or modified, -such as generating diagnostics, re-parsing, etc... - -We need to store the open files to parse them again if we restart the shake session. -Restarting of the shake session happens whenever these files are modified. --} -newtype OfInterestCabalVar = OfInterestCabalVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) - -instance Shake.IsIdeGlobal OfInterestCabalVar - -data IsCabalFileOfInterest = IsCabalFileOfInterest - deriving (Eq, Show, Generic) -instance Hashable IsCabalFileOfInterest -instance NFData IsCabalFileOfInterest - -type instance RuleResult IsCabalFileOfInterest = CabalFileOfInterestResult - -data CabalFileOfInterestResult = NotCabalFOI | IsCabalFOI FileOfInterestStatus - deriving (Eq, Show, Generic) -instance Hashable CabalFileOfInterestResult -instance NFData CabalFileOfInterestResult - -{- | The rule that initialises the files of interest state. - -Needs to be run on start-up. --} -ofInterestRules :: Recorder (WithPriority Log) -> Rules () -ofInterestRules recorder = do - Shake.addIdeGlobal . 
OfInterestCabalVar =<< liftIO (newVar HashMap.empty) - Shake.defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \IsCabalFileOfInterest f -> do - alwaysRerun - filesOfInterest <- getCabalFilesOfInterestUntracked - let foi = maybe NotCabalFOI IsCabalFOI $ f `HashMap.lookup` filesOfInterest - fp = summarize foi - res = (Just fp, Just foi) - return res + then pure $ foundHover (Nothing, [txt <> "\n", documentationText txt]) + else pure $ InR Null where - summarize NotCabalFOI = BS.singleton 0 - summarize (IsCabalFOI OnDisk) = BS.singleton 1 - summarize (IsCabalFOI (Modified False)) = BS.singleton 2 - summarize (IsCabalFOI (Modified True)) = BS.singleton 3 - -getCabalFilesOfInterestUntracked :: Action (HashMap NormalizedFilePath FileOfInterestStatus) -getCabalFilesOfInterestUntracked = do - OfInterestCabalVar var <- Shake.getIdeGlobalAction - liftIO $ readVar var - -addFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> FileOfInterestStatus -> IO [Key] -addFileOfInterest recorder state f v = do - OfInterestCabalVar var <- Shake.getIdeGlobalState state - (prev, files) <- modifyVar var $ \dict -> do - let (prev, new) = HashMap.alterF (,Just v) f dict - pure (new, (prev, new)) - if prev /= Just v - then do - log' Debug $ LogFOI files - return [toKey IsCabalFileOfInterest f] - else return [] - where - log' = logWith recorder - -deleteFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> IO [Key] -deleteFileOfInterest recorder state f = do - OfInterestCabalVar var <- Shake.getIdeGlobalState state - files <- modifyVar' var $ HashMap.delete f - log' Debug $ LogFOI files - return [toKey IsFileOfInterest f] - where - log' = logWith recorder + cursor = Types.lspPositionToCabalPosition (msgParam ^. JL.position) + uri = msgParam ^. JL.textDocument . JL.uri + + dependencyName :: Dependency -> T.Text + dependencyName dep = T.pack $ unPackageName $ depPkgName dep + + -- \| Removes version requirements like + -- `==1.0.0.0`, `>= 2.1.1` that could be included in + -- hover message. Assumes that the dependency consists + -- of alphanums with dashes in between. Ends with an alphanum. + -- + -- Examples: + -- >>> filterVersion "imp-deps>=2.1.1" + -- "imp-deps" + filterVersion :: T.Text -> Maybe T.Text + filterVersion msg = getMatch (msg =~ regex) + where + regex :: T.Text + regex = "([a-zA-Z0-9-]*[a-zA-Z0-9])" + + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> Maybe T.Text + getMatch (_, _, _, [dependency]) = Just dependency + getMatch (_, _, _, _) = Nothing -- impossible case + + documentationText :: T.Text -> T.Text + documentationText package = "[Documentation](https://hackage.haskell.org/package/" <> package <> ")" -- ---------------------------------------------------------------- -- Completion @@ -532,23 +376,24 @@ computeCompletionsAt recorder ide prefInfo fp fields = do Just ctx -> do logWith recorder Debug $ LogCompletionContext ctx pos let completer = Completions.contextToCompleter ctx - let completerData = CompleterTypes.CompleterData - { getLatestGPD = do - -- We decide on useWithStaleFast here, since we mostly care about the file's meta information, - -- thus, a quick response gives us the desired result most of the time. - -- The `withStale` option is very important here, since we often call this rule with invalid cabal files. 
- mGPD <- runAction "cabal-plugin.modulesCompleter.gpd" ide $ useWithStale ParseCabalFile $ toNormalizedFilePath fp - pure $ fmap fst mGPD - , getCabalCommonSections = runAction "cabal-plugin.commonSections" ide $ use ParseCabalCommonSections $ toNormalizedFilePath fp - , cabalPrefixInfo = prefInfo - , stanzaName = - case fst ctx of - Types.Stanza _ name -> name - _ -> Nothing - } + let completerData = + CompleterTypes.CompleterData + { getLatestGPD = do + -- We decide on useWithStaleFast here, since we mostly care about the file's meta information, + -- thus, a quick response gives us the desired result most of the time. + -- The `withStale` option is very important here, since we often call this rule with invalid cabal files. + mGPD <- runAction "cabal-plugin.modulesCompleter.gpd" ide $ useWithStale ParseCabalFile $ toNormalizedFilePath fp + pure $ fmap fst mGPD + , getCabalCommonSections = runAction "cabal-plugin.commonSections" ide $ use ParseCabalCommonSections $ toNormalizedFilePath fp + , cabalPrefixInfo = prefInfo + , stanzaName = + case fst ctx of + Types.Stanza _ name -> name + _ -> Nothing + } completions <- completer completerRecorder completerData pure completions - where - pos = Types.completionCursorPosition prefInfo - context fields = Completions.getContext completerRecorder prefInfo fields - completerRecorder = cmapWithPrio LogCompletions recorder + where + pos = Types.completionCursorPosition prefInfo + context fields = Completions.getContext completerRecorder prefInfo fields + completerRecorder = cmapWithPrio LogCompletions recorder diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs deleted file mode 100644 index 3b46eec128..0000000000 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs +++ /dev/null @@ -1,326 +0,0 @@ -{-# LANGUAGE AllowAmbiguousTypes #-} -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE ExplicitNamespaces #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PartialTypeSignatures #-} -{-# LANGUAGE RecordWildCards #-} - -module Ide.Plugin.Cabal.CabalAdd -( findResponsibleCabalFile - , addDependencySuggestCodeAction - , hiddenPackageSuggestion - , cabalAddCommand - , command - , Log -) -where - -import Control.Monad (filterM, void) -import Control.Monad.IO.Class (MonadIO, liftIO) -import Control.Monad.Trans.Class (lift) -import Control.Monad.Trans.Except -import Data.Aeson.Types (FromJSON, - ToJSON, toJSON) -import Data.ByteString (ByteString) -import qualified Data.ByteString.Char8 as B -import Data.List.NonEmpty (NonEmpty (..), - fromList) -import Data.String (IsString) -import qualified Data.Text as T -import Data.Text.Encoding (encodeUtf8) -import qualified Data.Text.Encoding as T -import Data.Text.Utf16.Rope.Mixed as Rope -import Development.IDE (IdeState, - getFileContents, - useWithStale) -import Development.IDE.Core.Rules (runAction) -import Distribution.Client.Add as Add -import Distribution.Compat.Prelude (Generic) -import Distribution.PackageDescription (GenericPackageDescription, - packageDescription, - specVersion) -import Distribution.PackageDescription.Configuration (flattenPackageDescription) -import Distribution.PackageDescription.Quirks (patchQuirks) -import qualified Distribution.Pretty as Pretty -import Distribution.Simple.BuildTarget (BuildTarget, - buildTargetComponentName, - readBuildTargets) -import Distribution.Simple.Utils (safeHead) -import Distribution.Verbosity (silent, - 
verboseNoStderr) -import Ide.Logger -import Ide.Plugin.Cabal.Completion.Types (ParseCabalFields (..), - ParseCabalFile (..)) -import Ide.Plugin.Cabal.Orphans () -import Ide.Plugin.Error -import Ide.PluginUtils (WithDeletions (SkipDeletions), - diffText, - mkLspCommand) -import Ide.Types (CommandFunction, - CommandId (CommandId), - PluginId, - pluginGetClientCapabilities, - pluginSendRequest) -import Language.LSP.Protocol.Message (SMethod (SMethod_WorkspaceApplyEdit)) -import Language.LSP.Protocol.Types (ApplyWorkspaceEditParams (ApplyWorkspaceEditParams), - ClientCapabilities, - CodeAction (CodeAction), - CodeActionKind (CodeActionKind_QuickFix), - Diagnostic (..), - Null (Null), - VersionedTextDocumentIdentifier, - WorkspaceEdit, - toNormalizedFilePath, - type (|?) (InR)) -import System.Directory (doesFileExist, - listDirectory) -import System.FilePath (dropFileName, - makeRelative, - splitPath, - takeExtension, - ()) -import Text.PrettyPrint (render) -import Text.Regex.TDFA - -data Log - = LogFoundResponsibleCabalFile FilePath - | LogCalledCabalAddCommand CabalAddCommandParams - | LogCreatedEdit WorkspaceEdit - | LogExecutedCommand - deriving (Show) - -instance Pretty Log where - pretty = \case - LogFoundResponsibleCabalFile fp -> "Located the responsible cabal file at " <+> pretty fp - LogCalledCabalAddCommand params -> "Called CabalAdd command with:\n" <+> pretty params - LogCreatedEdit edit -> "Created inplace edit:\n" <+> pretty edit - LogExecutedCommand -> "Executed CabalAdd command" - -cabalAddCommand :: IsString p => p -cabalAddCommand = "cabalAdd" - -data CabalAddCommandParams = - CabalAddCommandParams { cabalPath :: FilePath - , verTxtDocId :: VersionedTextDocumentIdentifier - , buildTarget :: Maybe String - , dependency :: T.Text - , version :: Maybe T.Text - } - deriving (Generic, Show) - deriving anyclass (FromJSON, ToJSON) - -instance Pretty CabalAddCommandParams where - pretty CabalAddCommandParams{..} = - "CabalAdd parameters:" <+> vcat - [ "cabal path:" <+> pretty cabalPath - , "target:" <+> pretty buildTarget - , "dependendency:" <+> pretty dependency - , "version:" <+> pretty version - ] - --- | Creates a code action that calls the `cabalAddCommand`, --- using dependency-version suggestion pairs as input. --- --- Returns disabled action if no cabal files given. --- --- Takes haskell file and cabal file paths to create a relative path --- to the haskell file, which is used to get a `BuildTarget`. --- --- In current implementation the dependency is being added to the main found --- build target, but if there will be a way to get all build targets from a file --- it will be possible to support addition to a build target of choice. 
-addDependencySuggestCodeAction - :: PluginId - -> VersionedTextDocumentIdentifier -- ^ Cabal's versioned text identifier - -> [(T.Text, T.Text)] -- ^ A dependency-version suggestion pairs - -> FilePath -- ^ Path to the haskell file (source of diagnostics) - -> FilePath -- ^ Path to the cabal file (that will be edited) - -> GenericPackageDescription - -> IO [CodeAction] -addDependencySuggestCodeAction plId verTxtDocId suggestions haskellFilePath cabalFilePath gpd = do - buildTargets <- liftIO $ getBuildTargets gpd cabalFilePath haskellFilePath - case buildTargets of - -- If there are no build targets found, run `cabal-add` command with default behaviour - [] -> pure $ mkCodeAction cabalFilePath Nothing <$> suggestions - -- Otherwise provide actions for all found targets - targets -> pure $ concat [mkCodeAction cabalFilePath (Just $ buildTargetToStringRepr target) <$> - suggestions | target <- targets] - where - -- | Note the use of `pretty` function. - -- It converts the `BuildTarget` to an acceptable string representation. - -- It will be used in as the input for `cabal-add`'s `executeConfig`. - buildTargetToStringRepr target = render $ Pretty.pretty $ buildTargetComponentName target - - -- | Gives the build targets that are used in the `CabalAdd`. - -- Note the unorthodox usage of `readBuildTargets`: - -- If the relative path to the haskell file is provided, - -- the `readBuildTargets` will return build targets, where this - -- module is mentioned (in exposed-modules or other-modules). - getBuildTargets :: GenericPackageDescription -> FilePath -> FilePath -> IO [BuildTarget] - getBuildTargets gpd cabalFilePath haskellFilePath = do - let haskellFileRelativePath = makeRelative (dropFileName cabalFilePath) haskellFilePath - readBuildTargets (verboseNoStderr silent) (flattenPackageDescription gpd) [haskellFileRelativePath] - - mkCodeAction :: FilePath -> Maybe String -> (T.Text, T.Text) -> CodeAction - mkCodeAction cabalFilePath target (suggestedDep, suggestedVersion) = - let - versionTitle = if T.null suggestedVersion then T.empty else "-" <> suggestedVersion - targetTitle = case target of - Nothing -> T.empty - Just t -> " at " <> T.pack t - title = "Add dependency " <> suggestedDep <> versionTitle <> targetTitle - version = if T.null suggestedVersion then Nothing else Just suggestedVersion - - params = CabalAddCommandParams {cabalPath = cabalFilePath - , verTxtDocId = verTxtDocId - , buildTarget = target - , dependency = suggestedDep - , version=version} - command = mkLspCommand plId (CommandId cabalAddCommand) "Add missing dependency" (Just [toJSON params]) - in CodeAction title (Just CodeActionKind_QuickFix) (Just []) Nothing Nothing Nothing (Just command) Nothing - --- | Gives a mentioned number of @(dependency, version)@ pairs --- found in the "hidden package" diagnostic message. --- --- For example, if a ghc error looks like this: --- --- > "Could not load module ‘Data.List.Split’ --- > It is a member of the hidden package ‘split-0.2.5’. --- > Perhaps you need to add ‘split’ to the build-depends in your .cabal file." --- --- or this if PackageImports extension is used: --- --- > "Could not find module ‘Data.List.Split’ --- > Perhaps you meant --- > Data.List.Split (needs flag -package-id split-0.2.5)" --- --- It extracts mentioned package names and version numbers. --- In this example, it will be @[("split", "0.2.5")]@ --- --- Also supports messages without a version. --- --- > "Perhaps you need to add ‘split’ to the build-depends in your .cabal file." 
--- --- Will turn into @[("split", "")]@ -hiddenPackageSuggestion :: Diagnostic -> [(T.Text, T.Text)] -hiddenPackageSuggestion diag = getMatch (msg =~ regex) - where - msg :: T.Text - msg = _message diag - regex :: T.Text -- TODO: Support multiple packages suggestion - regex = - let regex' = "([a-zA-Z0-9-]*[a-zA-Z0-9])(-([0-9\\.]*))?" - in "It is a member of the hidden package [\8216']" <> regex' <> "[\8217']" - <> "|" - <> "needs flag -package-id " <> regex' - -- Have to do this matching because `Regex.TDFA` doesn't(?) support - -- not-capturing groups like (?:message) - getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [(T.Text, T.Text)] - getMatch (_, _, _, []) = [] - getMatch (_, _, _, [dependency, _, cleanVersion, "", "", ""]) = [(dependency, cleanVersion)] - getMatch (_, _, _, ["", "", "", dependency, _, cleanVersion]) = [(dependency, cleanVersion)] - getMatch (_, _, _, _) = [] - -command :: Recorder (WithPriority Log) -> CommandFunction IdeState CabalAddCommandParams -command recorder state _ params@(CabalAddCommandParams {cabalPath = path, verTxtDocId = verTxtDocId, buildTarget = target, dependency = dep, version = mbVer}) = do - logWith recorder Debug $ LogCalledCabalAddCommand params - let specifiedDep = case mbVer of - Nothing -> dep - Just ver -> dep <> " ^>=" <> ver - caps <- lift pluginGetClientCapabilities - let env = (state, caps, verTxtDocId) - edit <- getDependencyEdit recorder env path target (fromList [T.unpack specifiedDep]) - void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) - logWith recorder Debug LogExecutedCommand - pure $ InR Null - --- | Constructs prerequisites for the @executeConfig@ --- and runs it, given path to the cabal file and a dependency message. --- Given the new contents of the cabal file constructs and returns the @edit@. --- Inspired by @main@ in cabal-add, --- Distribution.Client.Main -getDependencyEdit :: MonadIO m => Recorder (WithPriority Log) -> (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> - FilePath -> Maybe String -> NonEmpty String -> ExceptT PluginError m WorkspaceEdit -getDependencyEdit recorder env cabalFilePath buildTarget dependency = do - let (state, caps, verTxtDocId) = env - (mbCnfOrigContents, mbFields, mbPackDescr) <- liftIO $ runAction "cabal.cabal-add" state $ do - contents <- getFileContents $ toNormalizedFilePath cabalFilePath - inFields <- useWithStale ParseCabalFields $ toNormalizedFilePath cabalFilePath - inPackDescr <- useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath - let mbCnfOrigContents = case contents of - (Just txt) -> Just $ encodeUtf8 $ Rope.toText txt - _ -> Nothing - let mbFields = fst <$> inFields - let mbPackDescr = fst <$> inPackDescr - pure (mbCnfOrigContents, mbFields, mbPackDescr) - - -- Check if required info was received, - -- otherwise fall back on other options. 
- (cnfOrigContents, fields, packDescr) <- do - cnfOrigContents <- case mbCnfOrigContents of - (Just cnfOrigContents) -> pure cnfOrigContents - Nothing -> readCabalFile cabalFilePath - (fields, packDescr) <- case (mbFields, mbPackDescr) of - (Just fields, Just packDescr) -> pure (fields, packDescr) - (_, _) -> case parseCabalFile cabalFilePath cnfOrigContents of - Left err -> throwE $ PluginInternalError $ T.pack err - Right (f ,gpd) -> pure (f, gpd) - pure (cnfOrigContents, fields, packDescr) - - let inputs = do - let rcnfComponent = buildTarget - let specVer = specVersion $ packageDescription packDescr - cmp <- resolveComponent cabalFilePath (fields, packDescr) rcnfComponent - deps <- traverse (validateDependency specVer) dependency - pure (fields, packDescr, cmp, deps) - - (cnfFields, origPackDescr, cnfComponent, cnfDependencies) <- case inputs of - Left err -> throwE $ PluginInternalError $ T.pack err - Right pair -> pure pair - - case executeConfig (validateChanges origPackDescr) (Config {..}) of - Nothing -> throwE $ PluginInternalError $ T.pack $ "Cannot extend build-depends in " ++ cabalFilePath - Just newContents -> do - let edit = diffText caps (verTxtDocId, T.decodeUtf8 cnfOrigContents) (T.decodeUtf8 newContents) SkipDeletions - logWith recorder Debug $ LogCreatedEdit edit - pure edit - --- | Given a path to a haskell file, returns the closest cabal file. --- If a package.yaml is present in same directory as the .cabal file, returns nothing, because adding a dependency to a generated cabal file --- will break propagation of changes from package.yaml to cabal files in stack projects. --- If cabal file wasn't found, gives Nothing. -findResponsibleCabalFile :: FilePath -> IO (Maybe FilePath) -findResponsibleCabalFile haskellFilePath = do - let dirPath = dropFileName haskellFilePath - allDirPaths = reverse $ scanl1 () (splitPath dirPath) -- sorted from most to least specific - go allDirPaths - where - go [] = pure Nothing - go (path:ps) = do - objects <- listDirectory path - let objectsWithPaths = map (\obj -> path <> obj) objects - objectsCabalExtension = filter (\c -> takeExtension c == ".cabal") objectsWithPaths - cabalFiles <- filterM (\c -> doesFileExist c) objectsCabalExtension - case safeHead cabalFiles of - Nothing -> go ps - Just cabalFile -> guardAgainstHpack path cabalFile - where - guardAgainstHpack :: FilePath -> FilePath -> IO (Maybe FilePath) - guardAgainstHpack path cabalFile = do - exists <- doesFileExist $ path "package.yaml" - if exists then pure Nothing else pure $ Just cabalFile - --- | Gives cabal file's contents or throws error. --- Inspired by @readCabalFile@ in cabal-add, --- Distribution.Client.Main --- --- This is a fallback option! --- Use only if the `GetFileContents` fails. -readCabalFile :: MonadIO m => FilePath -> ExceptT PluginError m ByteString -readCabalFile fileName = do - cabalFileExists <- liftIO $ doesFileExist fileName - if cabalFileExists - then snd . 
patchQuirks <$> liftIO (B.readFile fileName) - else throwE $ PluginInternalError $ T.pack ("Failed to read cabal file at " <> fileName) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs new file mode 100644 index 0000000000..d72ad290fd --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs @@ -0,0 +1,343 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd.CodeAction where + +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Except +import Data.Aeson.Types (toJSON) +import Data.Foldable (asum) +import Data.Maybe (mapMaybe) +import qualified Data.Text as T +import Development.IDE.Core.PluginUtils (uriToFilePathE) +import Development.IDE.Types.Location (Uri) +import Distribution.PackageDescription +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import qualified Distribution.Pretty as CabalPretty +import Distribution.Simple.BuildTarget (BuildTarget, + buildTargetComponentName, + readBuildTargets) +import Distribution.Utils.Path (getSymbolicPath) +import Distribution.Verbosity (silent, + verboseNoStderr) +import Ide.Logger +import Ide.Plugin.Cabal.CabalAdd.Types +import Ide.Plugin.Cabal.Completion.Completer.Module (fpToExposedModulePath) +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import Ide.PluginUtils (mkLspCommand) +import Ide.Types (CommandId (CommandId), + PluginId) + +import Control.Lens ((^.)) +import qualified Language.LSP.Protocol.Lens as JL +import Language.LSP.Protocol.Types (CodeActionKind (..), + VersionedTextDocumentIdentifier) +import qualified Language.LSP.Protocol.Types as J +import System.FilePath +import Text.PrettyPrint (render) +import Text.Regex.TDFA + +-------------------------------------------- +-- Add module to cabal file +-------------------------------------------- + +{- | Takes a path to a cabal file, a module path in exposed module syntax + and the contents of the cabal file and generates all possible + code actions for inserting the module into the cabal file + with the given contents. +-} +collectModuleInsertionOptions :: + (MonadIO m) => + Recorder (WithPriority Log) -> + PluginId -> + VersionedTextDocumentIdentifier -> + J.Diagnostic -> + -- | The file path of the cabal file to insert the new module into + FilePath -> + -- | The generic package description of the cabal file to insert the new module into. + GenericPackageDescription -> + -- | The URI of the unknown haskell file/new module to insert into the cabal file. 
+ Uri -> + ExceptT PluginError m [J.CodeAction] +collectModuleInsertionOptions _ plId txtDocIdentifier diag cabalFilePath gpd haskellFilePathURI = do + haskellFilePath <- uriToFilePathE haskellFilePathURI + let configs = concatMap (mkModuleInsertionConfig txtDocIdentifier cabalFilePath haskellFilePath) (makeStanzaItems gpd) + pure $ map (mkCodeActionForModulePath plId diag) configs + where + makeStanzaItems :: GenericPackageDescription -> [StanzaItem] + makeStanzaItems gpd = + mainLibItem pd + ++ libItems pd + ++ executableItems pd + ++ testSuiteItems pd + ++ benchmarkItems pd + where + pd = flattenPackageDescription gpd + +{- | Takes a buildInfo of a cabal file component as defined in the generic package description, + and translates it to filepaths of the component's hsSourceDirs, + to be processed for adding modules to exposed-, or other-modules fields in a cabal file. +-} +buildInfoToHsSourceDirs :: BuildInfo -> [FilePath] +buildInfoToHsSourceDirs buildInfo = map getSymbolicPath hsSourceDirs' + where + hsSourceDirs' = hsSourceDirs buildInfo + +{- | Takes the path to the cabal file to insert the module into, + the module path to be inserted, and a stanza representation. + + Returns a list of module insertion configs, where each config + represents a possible place to insert the module. +-} +mkModuleInsertionConfig :: VersionedTextDocumentIdentifier -> FilePath -> FilePath -> StanzaItem -> [ModuleInsertionConfig] +mkModuleInsertionConfig txtDocIdentifier cabalFilePath haskellFilePath (StanzaItem{..}) = do + case mkRelativeModulePathM siHsSourceDirs cabalFilePath haskellFilePath of + Just processedModPath -> + [modInsertItem processedModPath "other-modules"] + ++ [modInsertItem processedModPath "exposed-modules" | CLibName _ <- [siComponent]] + _ -> [] + where + modInsertItem :: T.Text -> T.Text -> ModuleInsertionConfig + modInsertItem modPath label = + ModuleInsertionConfig + { targetFile = cabalFilePath + , moduleToInsert = modPath + , modVerTxtDocId = txtDocIdentifier + , insertionStanza = siComponent + , insertionLabel = label + } + +mkCodeActionForModulePath :: PluginId -> J.Diagnostic -> ModuleInsertionConfig -> J.CodeAction +mkCodeActionForModulePath plId diag insertionConfig = + J.CodeAction + { _title = "Add to " <> label <> " as " <> fieldName + , _kind = Just CodeActionKind_Refactor + , _diagnostics = Just [diag] + , _isPreferred = Nothing + , _disabled = Nothing + , _edit = Nothing + , _command = Just command + , _data_ = Nothing + } + where + fieldName = insertionLabel insertionConfig + command = mkLspCommand plId (CommandId cabalAddModuleCommandId) "Add missing module" (Just [toJSON insertionConfig]) + label = T.pack $ CabalPretty.prettyShow $ insertionStanza insertionConfig + +{- | Takes a list of source subdirectories, a cabal source path and a haskell filepath + and returns a path to the module in exposed module syntax. + The path will be relative to one of the subdirectories, in case the module is contained within one of them. 
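+
+  For example (hypothetical paths): with @hsSourceDirs = ["src"]@, a cabal file at
+  @/proj/pkg.cabal@ and a haskell file at @/proj/src/Foo/Bar.hs@, the result is roughly
+  @Just "Foo.Bar"@; for a file outside every listed source directory the result is @Nothing@.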
+-} +mkRelativeModulePathM :: [FilePath] -> FilePath -> FilePath -> Maybe T.Text +mkRelativeModulePathM hsSourceDirs cabalSrcPath' haskellFilePath = + asum $ + map + ( \srcDir -> do + let relMP = makeRelative (normalise (cabalSrcPath srcDir)) haskellFilePath + if relMP == haskellFilePath then Nothing else Just $ fpToExposedModulePath cabalSrcPath relMP + ) + hsSourceDirs + where + cabalSrcPath = takeDirectory cabalSrcPath' + +isUnknownModuleDiagnostic :: J.Diagnostic -> Bool +isUnknownModuleDiagnostic diag = (msg =~ regex) + where + msg :: T.Text + msg = diag ^. JL.message + regex :: T.Text + regex = "Loading the module [\8216'][^\8217']*[\8217'] failed." + +-------------------------- +-- Below are several utility functions which create a StanzaItem for each of the possible Stanzas, +-- these all have specific constructors we need to match, so we can't generalise this process well. +-------------------------- + +benchmarkItems :: PackageDescription -> [StanzaItem] +benchmarkItems pd = + map + ( \benchmark -> + StanzaItem + { siComponent = CBenchName $ benchmarkName benchmark + , siHsSourceDirs = buildInfoToHsSourceDirs $ benchmarkBuildInfo benchmark + } + ) + (benchmarks pd) + +testSuiteItems :: PackageDescription -> [StanzaItem] +testSuiteItems pd = + map + ( \testSuite -> + StanzaItem + { siComponent = CTestName $ testName testSuite + , siHsSourceDirs = buildInfoToHsSourceDirs $ testBuildInfo testSuite + } + ) + (testSuites pd) + +executableItems :: PackageDescription -> [StanzaItem] +executableItems pd = + map + ( \executable -> + StanzaItem + { siComponent = CExeName $ exeName executable + , siHsSourceDirs = buildInfoToHsSourceDirs $ buildInfo executable + } + ) + (executables pd) + +libItems :: PackageDescription -> [StanzaItem] +libItems pd = + mapMaybe + ( \subLib -> + case libName subLib of + LSubLibName compName -> + Just + StanzaItem + { siComponent = CLibName $ LSubLibName compName + , siHsSourceDirs = buildInfoToHsSourceDirs $ libBuildInfo subLib + } + _ -> Nothing + ) + (subLibraries pd) + +mainLibItem :: PackageDescription -> [StanzaItem] +mainLibItem pd = + case library pd of + Just lib -> + [ StanzaItem + { siComponent = CLibName LMainLibName + , siHsSourceDirs = buildInfoToHsSourceDirs $ libBuildInfo lib + } + ] + Nothing -> [] + +-------------------------------------------- +-- Add dependency to a cabal file +-------------------------------------------- + +{- | Creates a code action that calls the `cabalAddCommand`, + using dependency-version suggestion pairs as input. + + Returns disabled action if no cabal files given. + + Takes haskell and cabal file paths to create a relative path + to the haskell file, which is used to get a `BuildTarget`. 
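+
+  For example (illustrative), the suggestion pair @("split", "0.2.5")@ yields, per discovered build
+  target, a code action titled roughly "Add dependency split-0.2.5 at <target>", where the target
+  text comes from 'buildTargetComponentName'.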
+-} +addDependencySuggestCodeAction :: + PluginId -> + -- | Cabal's versioned text identifier + VersionedTextDocumentIdentifier -> + -- | A dependency-version suggestion pairs + [(T.Text, T.Text)] -> + -- | Path to the haskell file (source of diagnostics) + FilePath -> + -- | Path to the cabal file (that will be edited) + FilePath -> + GenericPackageDescription -> + IO [J.CodeAction] +addDependencySuggestCodeAction plId verTxtDocId suggestions haskellFilePath cabalFilePath gpd = do + buildTargets <- liftIO $ getBuildTargets gpd cabalFilePath haskellFilePath + case buildTargets of + -- If there are no build targets found, run the `cabal-add` command with default behaviour + [] -> pure $ mkCodeActionForDependency cabalFilePath Nothing <$> suggestions + -- Otherwise provide actions for all found targets + targets -> + pure $ + concat + [ mkCodeActionForDependency cabalFilePath (Just $ buildTargetToStringRepr target) + <$> suggestions + | target <- targets + ] + where + {- | Note the use of the `pretty` function. + It converts the `BuildTarget` to an acceptable string representation. + It will be used as the input for `cabal-add`'s `executeConfig`. + -} + buildTargetToStringRepr target = render $ CabalPretty.pretty $ buildTargetComponentName target + + {- | Finds the build targets that are used in `cabal-add`. + Note the unorthodox usage of `readBuildTargets`: + If the relative path to the haskell file is provided, + `readBuildTargets` will return the build targets, this + module is mentioned in (either exposed-modules or other-modules). + -} + getBuildTargets :: GenericPackageDescription -> FilePath -> FilePath -> IO [BuildTarget] + getBuildTargets gpd cabalFilePath haskellFilePath = do + let haskellFileRelativePath = makeRelative (dropFileName cabalFilePath) haskellFilePath + readBuildTargets (verboseNoStderr silent) (flattenPackageDescription gpd) [haskellFileRelativePath] + + mkCodeActionForDependency :: FilePath -> Maybe String -> (T.Text, T.Text) -> J.CodeAction + mkCodeActionForDependency cabalFilePath target (suggestedDep, suggestedVersion) = + let + versionTitle = if T.null suggestedVersion then T.empty else "-" <> suggestedVersion + targetTitle = case target of + Nothing -> T.empty + Just t -> " at " <> T.pack t + title = "Add dependency " <> suggestedDep <> versionTitle <> targetTitle + version = if T.null suggestedVersion then Nothing else Just suggestedVersion + + params = + CabalAddDependencyCommandParams + { depCabalPath = cabalFilePath + , depVerTxtDocId = verTxtDocId + , depBuildTarget = target + , depDependency = suggestedDep + , depVersion = version + } + command = mkLspCommand plId (CommandId cabalAddDependencyCommandId) "Add dependency" (Just [toJSON params]) + in + J.CodeAction title (Just CodeActionKind_QuickFix) (Just []) Nothing Nothing Nothing (Just command) Nothing + +{- | Gives a mentioned number of @(dependency, version)@ pairs +found in the "hidden package" diagnostic message. + +For example, if a ghc error looks like this: + +> "Could not load module ‘Data.List.Split’ +> It is a member of the hidden package ‘split-0.2.5’. +> Perhaps you need to add ‘split’ to the build-depends in your .cabal file." + +or this if PackageImports extension is used: + +> "Could not find module ‘Data.List.Split’ +> Perhaps you meant +> Data.List.Split (needs flag -package-id split-0.2.5)" + +It extracts mentioned package names and version numbers. +In this example, it will be @[("split", "0.2.5")]@ + +Also supports messages without a version. 
+ +> "Perhaps you need to add ‘split’ to the build-depends in your .cabal file." + +Will turn into @[("split", "")]@ +-} +hiddenPackageSuggestion :: J.Diagnostic -> [(T.Text, T.Text)] +hiddenPackageSuggestion diag = getMatch (msg =~ regex) + where + msg :: T.Text + msg = diag ^. JL.message + regex :: T.Text + regex = + let regex' = "([a-zA-Z0-9-]*[a-zA-Z0-9])(-([0-9\\.]*))?" + in "It is a member of the hidden package [\8216']" + <> regex' + <> "[\8217']" + <> "|" + <> "needs flag -package-id " + <> regex' + -- Have to do this matching because `Regex.TDFA` doesn't(?) support + -- not-capturing groups like (?:message) + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [(T.Text, T.Text)] + getMatch (_, _, _, []) = [] + getMatch (_, _, _, [dependency, _, cleanVersion, "", "", ""]) = [(dependency, cleanVersion)] + getMatch (_, _, _, ["", "", "", dependency, _, cleanVersion]) = [(dependency, cleanVersion)] + getMatch (_, _, _, _) = [] diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs new file mode 100644 index 0000000000..83554c6a82 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs @@ -0,0 +1,232 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd.Command ( + cabalAddDependencyCommandId, + cabalAddModuleCommandId, + addDependencyCommand, + addModuleCommand, + Log, +) +where + +import Control.Monad (void) +import Control.Monad.Except (modifyError) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except +import Data.ByteString (ByteString) +import qualified Data.ByteString.Char8 as B +import Data.List.NonEmpty (singleton) +import qualified Data.Text as T +import Data.Text.Encoding (encodeUtf8) +import qualified Data.Text.Encoding as T +import Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.FileStore (getFileContents) +import Development.IDE.Core.Rules (IdeState) +import Development.IDE.Core.Service (runAction) +import Development.IDE.Core.Shake (useWithStale) +import Distribution.Client.Add as Add +import Distribution.Fields (Field) +import Distribution.PackageDescription +import Distribution.Parsec.Position (Position) +import qualified Distribution.Pretty as CabalPretty +import Ide.Logger +import Ide.Plugin.Cabal.CabalAdd.Types +import Ide.Plugin.Cabal.Completion.Types (ParseCabalFields (..), + ParseCabalFile (..)) +import Ide.Plugin.Cabal.Files +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import Ide.PluginUtils (WithDeletions (SkipDeletions), + diffText) +import Ide.Types (CommandFunction, + pluginGetClientCapabilities, + pluginSendRequest) +import Language.LSP.Protocol.Message (SMethod (SMethod_WorkspaceApplyEdit)) +import Language.LSP.Protocol.Types (ApplyWorkspaceEditParams (ApplyWorkspaceEditParams), + ClientCapabilities, + Null (Null), + VersionedTextDocumentIdentifier, + WorkspaceEdit, + toNormalizedFilePath, + type (|?) 
(InR)) + +-------------------------------------------- +-- Add module to cabal file +-------------------------------------------- + +addModuleCommand :: Recorder (WithPriority Log) -> CommandFunction IdeState ModuleInsertionConfig +addModuleCommand recorder state _ params@(ModuleInsertionConfig{..}) = do + logWith recorder Debug $ LogCalledCabalAddModuleCommand params + caps <- lift pluginGetClientCapabilities + let env = (state, caps, modVerTxtDocId) + edit <- getModuleEdit recorder env targetFile insertionStanza (T.unpack insertionLabel) (T.unpack moduleToInsert) + void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + logWith recorder Debug LogExecutedCommand + pure $ InR Null + +{- | Constructs prerequisites for the @executeConfig@ + and runs it, given path to the cabal file and a dependency message. + Given the new contents of the cabal file constructs and returns the @edit@. + + Inspired by @main@ in cabal-add, Distribution.Client.Main +-} +getModuleEdit :: + forall m. + (MonadIO m) => + Recorder (WithPriority Log) -> + (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + -- | The cabal file to edit. + FilePath -> + -- | The component to add the module to. + ComponentName -> + -- | The specific field in the component to add the module to. + String -> + -- | The module to add. + String -> + ExceptT PluginError m WorkspaceEdit +getModuleEdit recorder env cabalFilePath stanza targetFieldStr modulePath = + mkCabalAddConfig + recorder + env + cabalFilePath + mkConfig + where + mkConfig :: (ByteString -> [Field Position] -> GenericPackageDescription -> ExceptT PluginError m AddConfig) + mkConfig cnfOrigContents fields packDescr = do + compName <- + case Add.resolveComponent cabalFilePath (fields, packDescr) $ Just $ CabalPretty.prettyShow stanza of + Right x -> pure x + Left err -> do + logWith recorder Info $ LogFailedToResolveComponent err + throwE $ PluginInternalError $ T.pack err + pure $ + AddConfig + { cnfOrigContents = cnfOrigContents + , cnfFields = fields + , cnfComponent = compName + , cnfTargetField = if targetFieldStr == "exposed-modules" then ExposedModules else OtherModules + , cnfAdditions = singleton $ B.pack modulePath + } + +-------------------------------------------- +-- Add build dependency to cabal file +-------------------------------------------- + +addDependencyCommand :: Recorder (WithPriority Log) -> CommandFunction IdeState CabalAddDependencyCommandParams +addDependencyCommand recorder state _ params@(CabalAddDependencyCommandParams{..}) = do + logWith recorder Debug $ LogCalledCabalAddDependencyCommand params + let specifiedDep = case depVersion of + Nothing -> depDependency + Just ver -> depDependency <> " ^>=" <> ver + caps <- lift pluginGetClientCapabilities + let env = (state, caps, depVerTxtDocId) + edit <- getDependencyEdit recorder env depCabalPath depBuildTarget (T.unpack specifiedDep) + void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + logWith recorder Debug LogExecutedCommand + pure $ InR Null + +{- | Constructs prerequisites for the @executeConfig@ + and runs it, given path to the cabal file and a dependency message. + Given the new contents of the cabal file constructs and returns the @edit@. + Inspired by @main@ in cabal-add, + Distribution.Client.Main +-} +getDependencyEdit :: + forall m. 
+ (MonadIO m) => + Recorder (WithPriority Log) -> + (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + FilePath -> + Maybe String -> + String -> + ExceptT PluginError m WorkspaceEdit +getDependencyEdit recorder env cabalFilePath buildTarget dependency = + mkCabalAddConfig recorder env cabalFilePath mkConfig + where + mkConfig :: (ByteString -> [Field Position] -> GenericPackageDescription -> ExceptT PluginError m AddConfig) + mkConfig cnfOrigContents fields packDescr = do + let specVer = specVersion $ packageDescription packDescr + (deps, compName) <- + modifyError (\t -> PluginInternalError $ T.pack t) $ do + deps <- validateDependency specVer dependency + compName <- resolveComponent cabalFilePath (fields, packDescr) buildTarget + pure (deps, compName) + pure $ + AddConfig + { cnfOrigContents = cnfOrigContents + , cnfFields = fields + , cnfComponent = compName + , cnfTargetField = BuildDepends + , cnfAdditions = singleton deps + } + +-------------------------------------------- +-- Shared Functions +-------------------------------------------- + +mkCabalAddConfig :: + (MonadIO m) => + Recorder (WithPriority Log) -> + (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + -- | The cabal file to edit + FilePath -> + -- | Callback to allow configuration of 'AddConfig' to be used by `cabal-add` + ( ByteString -> + [Field Position] -> + GenericPackageDescription -> + ExceptT PluginError m AddConfig + ) -> + ExceptT PluginError m WorkspaceEdit +mkCabalAddConfig recorder env cabalFilePath mkConfig = do + let (state, caps, verTxtDocId) = env + (mbCnfOrigContents, mbFields, mbPackDescr) <- liftIO $ runAction "cabal.cabal-add" state $ do + contents <- getFileContents $ toNormalizedFilePath cabalFilePath + inFields <- useWithStale ParseCabalFields $ toNormalizedFilePath cabalFilePath + inPackDescr <- useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath + let mbCnfOrigContents = case contents of + (Just txt) -> Just $ encodeUtf8 $ Rope.toText txt + _ -> Nothing + let mbFields = fst <$> inFields + let mbPackDescr = fst <$> inPackDescr + pure (mbCnfOrigContents, mbFields, mbPackDescr) + + -- Check if required info was received, + -- otherwise fall back on other options. 
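+  -- (Roughly: prefer the in-memory contents and the fields/package description already
+  -- parsed by the shake session; only if those are unavailable, read the cabal file from
+  -- disk and parse it here as a fallback.)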
+ (cnfOrigContents, fields, packDescr) <- do + cnfOrigContents <- case mbCnfOrigContents of + (Just cnfOrigContents) -> pure cnfOrigContents + Nothing -> readCabalFile cabalFilePath + (fields, packDescr) <- case (mbFields, mbPackDescr) of + (Just fields, Just packDescr) -> pure (fields, packDescr) + (_, _) -> case parseCabalFile cabalFilePath cnfOrigContents of + Left err -> throwE $ PluginInternalError $ T.pack err + Right (f, gpd) -> pure (f, gpd) + pure (cnfOrigContents, fields, packDescr) + + cabalAddConfig <- mkConfig cnfOrigContents fields packDescr + + case executeAddConfig (validateChanges packDescr) cabalAddConfig of + Nothing -> + throwE $ + PluginInternalError $ + T.pack $ + "Cannot extend " + ++ show (cnfTargetField cabalAddConfig) + ++ " of " + ++ case (cnfComponent cabalAddConfig) of + Right compName -> showComponentName compName + Left commonStanza -> show commonStanza + ++ " in " + ++ cabalFilePath + Just newContents -> do + let edit = diffText caps (verTxtDocId, T.decodeUtf8 cnfOrigContents) (T.decodeUtf8 newContents) SkipDeletions + logWith recorder Debug $ LogCreatedEdit edit + pure edit diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs new file mode 100644 index 0000000000..62d6b7a7d3 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs @@ -0,0 +1,104 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd.Types where + +import Data.Aeson.Types (FromJSON, ToJSON) +import Data.String (IsString) +import qualified Data.Text as T +import Distribution.Compat.Prelude (Generic) +import Distribution.PackageDescription +import Ide.Logger +import Ide.Plugin.Cabal.Orphans () +import Language.LSP.Protocol.Types + +data Log + = LogFoundResponsibleCabalFile FilePath + | LogCalledCabalAddDependencyCommand CabalAddDependencyCommandParams + | LogCalledCabalAddModuleCommand ModuleInsertionConfig + | LogCreatedEdit WorkspaceEdit + | LogExecutedCommand + | LogFailedToResolveComponent String + deriving (Show) + +instance Pretty Log where + pretty = \case + LogFoundResponsibleCabalFile fp -> "Located the responsible cabal file at " <+> pretty fp + LogCalledCabalAddDependencyCommand params -> "Called CabalAddDependency command with:\n" <+> pretty params + LogCalledCabalAddModuleCommand params -> "Called CabalAddModule command with:\n" <+> pretty params + LogCreatedEdit edit -> "Created inplace edit:\n" <+> pretty edit + LogExecutedCommand -> "Executed CabalAdd command" + LogFailedToResolveComponent cS -> "Failed to resolve component in CabalAdd with error:" <+> viaShow cS + +cabalAddDependencyCommandId :: (IsString p) => p +cabalAddDependencyCommandId = "cabalAddDependency" + +cabalAddModuleCommandId :: (IsString p) => p +cabalAddModuleCommandId = "cabalAddModule" + +-- | Relevant data needed to add a module to a cabal file. +-- +-- This will be sent as json to the client with a code action we offer to add this dependency to a cabal file. +-- If the user decides to execute the corresponding code action, the client sends us this data again, and we then +-- use it to execute the `CabalAddDependencyCommand`. 
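+--
+-- A hypothetical example value: inserting module @Foo.Bar@ into the main library's
+-- @other-modules@ field of @pkg.cabal@ corresponds roughly to
+-- @ModuleInsertionConfig "pkg.cabal" "Foo.Bar" verTxtDocId (CLibName LMainLibName) "other-modules"@.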
+data ModuleInsertionConfig = ModuleInsertionConfig + { targetFile :: FilePath + -- ^ The file we want to insert information about the new module into. + , moduleToInsert :: T.Text + -- ^ The module name of the module to be inserted into the targetFile at the insertionPosition. + , modVerTxtDocId :: VersionedTextDocumentIdentifier + , insertionStanza :: ComponentName + -- ^ Which stanza the module will be inserted into. + , insertionLabel :: T.Text + -- ^ A label which describes which field the module will be inserted into. + } + deriving (Show, Eq, Ord, Generic) + deriving anyclass (FromJSON, ToJSON) + +instance Pretty ModuleInsertionConfig where + pretty ModuleInsertionConfig{..} = + "CabalAddModule parameters:" + <+> vcat + [ "cabal path:" <+> pretty targetFile + , "target:" <+> pretty moduleToInsert + , "stanza:" <+> viaShow insertionStanza + , "label:" <+> pretty insertionLabel + ] + +-- | The component name of a stanza, together with all of the stanza's source directories. +data StanzaItem = StanzaItem + { siComponent :: ComponentName + , siHsSourceDirs :: [FilePath] + } + deriving (Show) + +-- | Relevant data needed to add a dependency to a cabal file. +-- +-- This will be sent as json to the client with a code action we offer to add this dependency to a cabal file. +-- If the user decides to execute the corresponding code action, the client sends us this data again, and we then +-- use it to execute the `CabalAddDependencyCommand`. +data CabalAddDependencyCommandParams = CabalAddDependencyCommandParams + { depCabalPath :: FilePath + , depVerTxtDocId :: VersionedTextDocumentIdentifier + , depBuildTarget :: Maybe String + , depDependency :: T.Text + , depVersion :: Maybe T.Text + } + deriving (Generic, Show) + deriving anyclass (FromJSON, ToJSON) + +instance Pretty CabalAddDependencyCommandParams where + pretty CabalAddDependencyCommandParams{..} = + "CabalAddDependency parameters:" + <+> vcat + [ "cabal path:" <+> pretty depCabalPath + , "target:" <+> pretty depBuildTarget + , "dependency:" <+> pretty depDependency + , "version:" <+> pretty depVersion + ] diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs new file mode 100644 index 0000000000..28cf1e39a8 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs @@ -0,0 +1,56 @@ +module Ide.Plugin.Cabal.Files where + +import Control.Monad (filterM) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Except +import Data.ByteString (ByteString) +import qualified Data.ByteString.Char8 as B +import qualified Data.Text as T +import Distribution.PackageDescription.Quirks (patchQuirks) +import Distribution.Simple.Utils (safeHead) +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import System.Directory (doesFileExist, + listDirectory) +import System.FilePath + +{- | Given a path to a haskell file, returns the closest cabal file. + If a package.yaml is present in the same directory as the .cabal file, returns nothing, + because adding a dependency to a generated cabal file will break propagation of changes + from package.yaml to cabal files in stack projects. + If no cabal file was found, returns Nothing.
+-} +findResponsibleCabalFile :: FilePath -> IO (Maybe FilePath) +findResponsibleCabalFile haskellFilePath = do + let dirPath = dropFileName haskellFilePath + allDirPaths = reverse $ scanl1 (</>) (splitPath dirPath) -- sorted from most to least specific + go allDirPaths + where + go [] = pure Nothing + go (path : ps) = do + objects <- listDirectory path + let objectsWithPaths = map (\obj -> path <> obj) objects + objectsCabalExtension = filter (\c -> takeExtension c == ".cabal") objectsWithPaths + cabalFiles <- filterM (\c -> doesFileExist c) objectsCabalExtension + case safeHead cabalFiles of + Nothing -> go ps + Just cabalFile -> guardAgainstHpack path cabalFile + where + guardAgainstHpack :: FilePath -> FilePath -> IO (Maybe FilePath) + guardAgainstHpack path cabalFile = do + exists <- doesFileExist $ path </> "package.yaml" + if exists then pure Nothing else pure $ Just cabalFile + +{- | Gives a cabal file's contents or throws an error. + + Inspired by @readCabalFile@ in cabal-add, Distribution.Client.Main + + This is a fallback option! + Use only if the `GetFileContents` fails. +-} +readCabalFile :: (MonadIO m) => FilePath -> ExceptT PluginError m ByteString +readCabalFile fileName = do + cabalFileExists <- liftIO $ doesFileExist fileName + if cabalFileExists + then snd . patchQuirks <$> liftIO (B.readFile fileName) + else throwE $ PluginInternalError $ T.pack ("Failed to read cabal file at " <> fileName) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs new file mode 100644 index 0000000000..67cf97ccee --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs @@ -0,0 +1,122 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.OfInterest (ofInterestRules, getCabalFilesOfInterestUntracked, addFileOfInterest, deleteFileOfInterest, kick, Log) where + +import Control.Concurrent.Strict +import Control.DeepSeq +import Control.Monad.IO.Class +import qualified Data.ByteString as BS +import Data.Hashable +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import Data.Proxy +import qualified Data.Text () +import Development.IDE as D +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Graph (Key, alwaysRerun) +import Development.IDE.Types.Shake (toKey) +import GHC.Generics +import qualified Ide.Plugin.Cabal.Completion.Types as Types +import Ide.Plugin.Cabal.Orphans () + +data Log + = LogShake Shake.Log + | LogFOI (HashMap NormalizedFilePath FileOfInterestStatus) + deriving (Show) + +instance Pretty Log where + pretty = \case + LogShake log' -> pretty log' + LogFOI files -> + "Set files of interest to:" <+> viaShow files + +-- ---------------------------------------------------------------- +-- Cabal file of interest rules and global variable +-- ---------------------------------------------------------------- + +{- | Cabal files that are currently open in the lsp-client. +Specific actions happen when these files are saved, closed or modified, +such as generating diagnostics, re-parsing, etc... + +We need to store the open files to parse them again if we restart the shake session. +Restarting of the shake session happens whenever these files are modified.
+-} +newtype OfInterestCabalVar = OfInterestCabalVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) + +instance Shake.IsIdeGlobal OfInterestCabalVar + +data IsCabalFileOfInterest = IsCabalFileOfInterest + deriving (Eq, Show, Generic) +instance Hashable IsCabalFileOfInterest +instance NFData IsCabalFileOfInterest + +type instance RuleResult IsCabalFileOfInterest = CabalFileOfInterestResult + +data CabalFileOfInterestResult = NotCabalFOI | IsCabalFOI FileOfInterestStatus + deriving (Eq, Show, Generic) +instance Hashable CabalFileOfInterestResult +instance NFData CabalFileOfInterestResult + +{- | The rule that initialises the files of interest state. + +Needs to be run on start-up. +-} +ofInterestRules :: Recorder (WithPriority Log) -> Rules () +ofInterestRules recorder = do + Shake.addIdeGlobal . OfInterestCabalVar =<< liftIO (newVar HashMap.empty) + Shake.defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \IsCabalFileOfInterest f -> do + alwaysRerun + filesOfInterest <- getCabalFilesOfInterestUntracked + let foi = maybe NotCabalFOI IsCabalFOI $ f `HashMap.lookup` filesOfInterest + fp = summarize foi + res = (Just fp, Just foi) + return res + where + summarize NotCabalFOI = BS.singleton 0 + summarize (IsCabalFOI OnDisk) = BS.singleton 1 + summarize (IsCabalFOI (Modified False)) = BS.singleton 2 + summarize (IsCabalFOI (Modified True)) = BS.singleton 3 + +getCabalFilesOfInterestUntracked :: Action (HashMap NormalizedFilePath FileOfInterestStatus) +getCabalFilesOfInterestUntracked = do + OfInterestCabalVar var <- Shake.getIdeGlobalAction + liftIO $ readVar var + +addFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> FileOfInterestStatus -> IO [Key] +addFileOfInterest recorder state f v = do + OfInterestCabalVar var <- Shake.getIdeGlobalState state + (prev, files) <- modifyVar var $ \dict -> do + let (prev, new) = HashMap.alterF (,Just v) f dict + pure (new, (prev, new)) + if prev /= Just v + then do + log' Debug $ LogFOI files + return [toKey IsCabalFileOfInterest f] + else return [] + where + log' = logWith recorder + +deleteFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> IO [Key] +deleteFileOfInterest recorder state f = do + OfInterestCabalVar var <- Shake.getIdeGlobalState state + files <- modifyVar' var $ HashMap.delete f + log' Debug $ LogFOI files + return [toKey IsFileOfInterest f] + where + log' = logWith recorder + +{- | This is the kick function for the cabal plugin. +We run this action whenever the shake session is run/restarted, which triggers +actions to produce diagnostics for cabal files. + +It is paramount that this kick-function can be run quickly, since it is a blocking +function invocation.
+-} +kick :: Action () +kick = do + files <- HashMap.keys <$> getCabalFilesOfInterestUntracked + Shake.runWithSignal (Proxy @"kick/start/cabal") (Proxy @"kick/done/cabal") files Types.ParseCabalFile diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs index 2264d5390f..8ecb361025 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs @@ -1,8 +1,14 @@ {-# OPTIONS_GHC -Wno-orphans #-} module Ide.Plugin.Cabal.Orphans where import Control.DeepSeq +import Data.Aeson +import qualified Data.Aeson as Aeson +import qualified Data.Aeson.Types as Aeson +import qualified Data.Text as T import Distribution.Fields.Field -import Distribution.Parsec.Position +import Distribution.PackageDescription (ComponentName) +import Distribution.Parsec +import Distribution.Pretty (prettyShow) -- ---------------------------------------------------------------- -- Cabal-syntax orphan instances we need sometimes @@ -22,3 +28,12 @@ instance NFData (SectionArg Position) where rnf (SecArgName ann bs) = rnf ann `seq` rnf bs rnf (SecArgStr ann bs) = rnf ann `seq` rnf bs rnf (SecArgOther ann bs) = rnf ann `seq` rnf bs + +instance ToJSON ComponentName where + toJSON = Aeson.String . T.pack . prettyShow + +instance FromJSON ComponentName where + parseJSON = Aeson.withText "ComponentName" $ \t -> + case eitherParsec (T.unpack t) of + Left err -> Aeson.parseFail err + Right r -> pure r diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs index e949af1b1d..f2b3d74639 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs @@ -22,9 +22,9 @@ import qualified Distribution.Parsec.Position as Syntax parseCabalFileContents :: BS.ByteString -- ^ UTF-8 encoded bytestring - -> IO ([PWarning], Either (Maybe Version, NonEmpty PError) GenericPackageDescription) + -> ([PWarning], Either (Maybe Version, NonEmpty PError) GenericPackageDescription) parseCabalFileContents bs = - pure $ runParseResult (parseGenericPackageDescription bs) + runParseResult (parseGenericPackageDescription bs) readCabalFields :: NormalizedFilePath -> diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs new file mode 100644 index 0000000000..de7bb9a5fd --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs @@ -0,0 +1,160 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.Rules (cabalRules, Log) where + +import Control.Monad.IO.Class +import qualified Data.ByteString as BS +import qualified Data.List as List +import qualified Data.List.NonEmpty as NE +import qualified Data.Maybe as Maybe +import qualified Data.Text () +import qualified Data.Text as T +import qualified Data.Text.Encoding as Encoding +import Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE as D +import qualified Development.IDE.Core.Shake as Shake +import qualified Distribution.CabalSpecVersion as Cabal +import qualified Distribution.Fields as Syntax +import Distribution.Parsec.Error +import qualified Ide.Plugin.Cabal.Completion.Data as Data +import Ide.Plugin.Cabal.Completion.Types (ParseCabalCommonSections (ParseCabalCommonSections), + ParseCabalFields (..), + 
ParseCabalFile (..)) +import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics +import qualified Ide.Plugin.Cabal.OfInterest as OfInterest +import Ide.Plugin.Cabal.Orphans () +import qualified Ide.Plugin.Cabal.Parse as Parse +import Ide.Types +import Text.Regex.TDFA + +data Log + = LogModificationTime NormalizedFilePath FileVersion + | LogShake Shake.Log + | LogOfInterest OfInterest.Log + | LogDocSaved Uri + deriving (Show) + +instance Pretty Log where + pretty = \case + LogShake log' -> pretty log' + LogOfInterest log' -> pretty log' + LogModificationTime nfp modTime -> + "Modified:" <+> pretty (fromNormalizedFilePath nfp) <+> pretty (show modTime) + LogDocSaved uri -> + "Saved text document:" <+> pretty (getUri uri) + +cabalRules :: Recorder (WithPriority Log) -> PluginId -> Rules () +cabalRules recorder plId = do + -- Make sure we initialise the cabal files-of-interest. + OfInterest.ofInterestRules (cmapWithPrio LogOfInterest recorder) + -- Rule to produce diagnostics for cabal files. + define (cmapWithPrio LogShake recorder) $ \ParseCabalFields file -> do + config <- getPluginConfigAction plId + if not (plcGlobalOn config && plcDiagnosticsOn config) + then pure ([], Nothing) + else do + -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), + -- we rerun this rule because this rule *depends* on GetModificationTime. + (t, mCabalSource) <- use_ GetFileContents file + log' Debug $ LogModificationTime file t + contents <- case mCabalSource of + Just sources -> + pure $ Encoding.encodeUtf8 $ Rope.toText sources + Nothing -> do + liftIO $ BS.readFile $ fromNormalizedFilePath file + + case Parse.readCabalFields file contents of + Left _ -> + pure ([], Nothing) + Right fields -> + pure ([], Just fields) + + define (cmapWithPrio LogShake recorder) $ \ParseCabalCommonSections file -> do + fields <- use_ ParseCabalFields file + let commonSections = + Maybe.mapMaybe + ( \case + commonSection@(Syntax.Section (Syntax.Name _ "common") _ _) -> Just commonSection + _ -> Nothing + ) + fields + pure ([], Just commonSections) + + define (cmapWithPrio LogShake recorder) $ \ParseCabalFile file -> do + config <- getPluginConfigAction plId + if not (plcGlobalOn config && plcDiagnosticsOn config) + then pure ([], Nothing) + else do + -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), + -- we rerun this rule because this rule *depends* on GetModificationTime. + (t, mCabalSource) <- use_ GetFileContents file + log' Debug $ LogModificationTime file t + contents <- case mCabalSource of + Just sources -> + pure $ Encoding.encodeUtf8 $ Rope.toText sources + Nothing -> do + liftIO $ BS.readFile $ fromNormalizedFilePath file + + -- Instead of fully reparsing the sources to get a 'GenericPackageDescription', + -- we would much rather re-use the already parsed results of 'ParseCabalFields'. + -- Unfortunately, Cabal-syntax doesn't expose the function 'parseGenericPackageDescription'' + -- which allows us to resume the parsing pipeline with '[Field Position]'. + let (pWarnings, pm) = Parse.parseCabalFileContents contents + let warningDiags = fmap (Diagnostics.warningDiagnostic file) pWarnings + case pm of + Left (_cabalVersion, pErrorNE) -> do + let regexUnknownCabalBefore310 :: T.Text + -- We don't support the cabal version, this should not be an error, as the + -- user did not do anything wrong. 
Instead we cast it to a warning + regexUnknownCabalBefore310 = "Unsupported cabal-version [0-9]+.[0-9]*" + regexUnknownCabalVersion :: T.Text + regexUnknownCabalVersion = "Unsupported cabal format version in cabal-version field: [0-9]+.[0-9]+" + unsupportedCabalHelpText = + unlines + [ "The used `cabal-version` is not fully supported by this `HLS` binary." + , "Either the `cabal-version` is unknown, or too new for this executable." + , "This means that some functionality might not work as expected." + , "If you face any issues, try downgrading to a supported `cabal-version` or upgrading `HLS` if possible." + , "" + , "Supported versions are: " + <> List.intercalate + ", " + (fmap Cabal.showCabalSpecVersion Data.supportedCabalVersions) + ] + errorDiags = + NE.toList $ + NE.map + ( \pe@(PError pos text) -> + if any + (text =~) + [ regexUnknownCabalBefore310 + , regexUnknownCabalVersion + ] + then + Diagnostics.warningDiagnostic + file + ( Syntax.PWarning Syntax.PWTOther pos $ + unlines + [ text + , unsupportedCabalHelpText + ] + ) + else Diagnostics.errorDiagnostic file pe + ) + pErrorNE + allDiags = errorDiags <> warningDiags + pure (allDiags, Nothing) + Right gpd -> do + pure (warningDiags, Just gpd) + + action $ do + -- Run the cabal kick. This code always runs when 'shakeRestart' is run. + -- Must be careful to not impede the performance too much. Crucial to + -- a snappy IDE experience. + OfInterest.kick + where + log' = logWith recorder diff --git a/plugins/hls-cabal-plugin/test/CabalAdd.hs b/plugins/hls-cabal-plugin/test/CabalAdd.hs index 6517c811fe..8cbac90e43 100644 --- a/plugins/hls-cabal-plugin/test/CabalAdd.hs +++ b/plugins/hls-cabal-plugin/test/CabalAdd.hs @@ -1,56 +1,112 @@ -{-# LANGUAGE OverloadedStrings #-} - +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} module CabalAdd ( - cabalAddTests, + cabalAddDependencyTests, + cabalAddModuleTests, ) where -import Control.Lens ((^.)) -import Control.Lens.Fold ((^?)) -import qualified Data.Maybe as Maybe -import qualified Data.Text as T -import qualified Data.Text.Internal.Search as T -import Distribution.Utils.Generic (safeHead) -import Ide.Plugin.Cabal.CabalAdd (hiddenPackageSuggestion) -import qualified Language.LSP.Protocol.Lens as L -import Language.LSP.Protocol.Types (Diagnostic (..), mkRange) +import Control.Lens ((^.)) +import Control.Lens.Fold ((^?)) +import qualified Data.Maybe as Maybe +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Internal.Search as T +import Distribution.ModuleName (fromString) +import Distribution.PackageDescription +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import qualified Distribution.Pretty as Pretty +import Distribution.Types.Component +import Distribution.Utils.Generic (safeHead) +import Ide.Plugin.Cabal.CabalAdd.CodeAction (hiddenPackageSuggestion) +import Ide.Plugin.Cabal.Parse (parseCabalFileContents) +import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Types as J import System.FilePath -import Test.Hls (Session, TestTree, _R, anyMessage, - assertEqual, documentContents, - executeCodeAction, - getAllCodeActions, - getDocumentEdit, liftIO, openDoc, - skipManyTill, testCase, testGroup, - waitForDiagnosticsFrom, (@?=)) +import Test.Hls import Utils -cabalAddTests :: TestTree -cabalAddTests = +cabalAddModuleTests :: TestTree +cabalAddModuleTests = + testGroup + "Add Module" + [ runHaskellTestCaseSession "Add to benchmark" ("cabal-add-module" </>
"library") $ do + let compName = CBenchName "test1" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to executable" ("cabal-add-module" </> "library") $ do + let compName = CExeName "test" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to test-suite" ("cabal-add-module" </> "library") $ do + let compName = CTestName "test2" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to library" ("cabal-add-module" </> "library") $ do + let compName = CLibName $ LSubLibName "test3" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to main library" ("cabal-add-module" </> "library") $ do + let compName = CLibName LMainLibName + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + ] + where + generateAddDependencyTestSession :: FilePath -> FilePath -> ComponentName -> Session PackageDescription + generateAddDependencyTestSession cabalFile haskellFile compName = do + haskellDoc <- openDoc haskellFile "haskell" + cabalDoc <- openDoc cabalFile "cabal" + _ <- waitForDiagnosticsFrom haskellDoc + cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions haskellDoc + let selectedCas = filter (\ca -> (T.pack $ "Add to " <> Pretty.prettyShow compName <> " ") `T.isPrefixOf` (ca ^. L.title)) cas + mapM_ executeCodeAction $ selectedCas + _ <- skipManyTill anyMessage $ getDocumentEdit cabalDoc -- Wait for the changes in cabal file + contents <- documentContents cabalDoc + case parseCabalFileContents $ T.encodeUtf8 contents of + (_, Right gpd) -> pure $ flattenPackageDescription gpd + _ -> liftIO $ assertFailure "could not parse cabal file to gpd" + + -- | Verify that the given module was added to the desired component. + -- Note that we do not care whether it was added to exposed-modules or other-modules of that component.
+ checkModuleAddedTo :: PackageDescription -> String -> ComponentName -> Session () + checkModuleAddedTo pd modName compName = do + let comp = getComponent pd compName + compModules = case comp of + CLib lib -> explicitLibModules lib + CFLib fLib -> foreignLibModules fLib + CExe exe -> exeModules exe + CTest test -> testModules test + CBench bench -> benchmarkModules bench + testDescription = modName <> " was added to " <> showComponentName compName + liftIO $ assertBool testDescription $ fromString modName `elem` compModules + +cabalAddDependencyTests :: TestTree +cabalAddDependencyTests = testGroup - "CabalAdd Tests" - [ runHaskellTestCaseSession "Code Actions - Can add hidden package to an executable" ("cabal-add-testdata" </> "cabal-add-exe") + "Add dependency" + [ runHaskellTestCaseSession "Add to executable" ("cabal-add-testdata" </> "cabal-add-exe") (generateAddDependencyTestSession "cabal-add-exe.cabal" ("src" </> "Main.hs") "split" [253]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a library" ("cabal-add-testdata" </> "cabal-add-lib") + , runHaskellTestCaseSession "Add to library" ("cabal-add-testdata" </> "cabal-add-lib") (generateAddDependencyTestSession "cabal-add-lib.cabal" ("src" </> "MyLib.hs") "split" [348]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a test" ("cabal-add-testdata" </> "cabal-add-tests") + , runHaskellTestCaseSession "Add to testsuite" ("cabal-add-testdata" </> "cabal-add-tests") (generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" </> "Main.hs") "split" [478]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a test with PackageImports" ("cabal-add-testdata" </> "cabal-add-tests") + , runHaskellTestCaseSession "Add to testsuite with PackageImports" ("cabal-add-testdata" </> "cabal-add-tests") (generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" </> "MainPackageImports.hs") "split" [731]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a benchmark" ("cabal-add-testdata" </> "cabal-add-bench") + , runHaskellTestCaseSession "Add to benchmark" ("cabal-add-testdata" </> "cabal-add-bench") (generateAddDependencyTestSession "cabal-add-bench.cabal" ("bench" </> "Main.hs") "split" [403]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to an executable, multiple targets" ("cabal-add-testdata" </> "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to executable, multiple targets" ("cabal-add-testdata" </> "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("src" </> "Main.hs") "split" [269]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a library, multiple targets" ("cabal-add-testdata" </> "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to library, multiple targets" ("cabal-add-testdata" </> "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("lib" </> "MyLib.hs") "split" [413]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to an internal library, multiple targets" ("cabal-add-testdata" </> "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to internal library, multiple targets" ("cabal-add-testdata" </> "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("lib" </> "InternalLib.hs") "split" [413]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a test, multiple targets" ("cabal-add-testdata" </> "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to testsuite, multiple targets"
("cabal-add-testdata" </> "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("test" </> "Main.hs") "split" [655]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a benchmark, multiple targets" ("cabal-add-testdata" </> "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to benchmark, multiple targets" ("cabal-add-testdata" </> "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("bench" </> "Main.hs") "split" [776]) - , runHaskellTestCaseSession "Code Actions - Guard against HPack" ("cabal-add-testdata" </> "cabal-add-packageYaml") + , runHaskellTestCaseSession "Guard against HPack" ("cabal-add-testdata" </> "cabal-add-packageYaml") (generatePackageYAMLTestSession ("src" </> "Main.hs")) , testHiddenPackageSuggestions "Check CabalAdd's parser, no version" @@ -156,7 +212,7 @@ cabalAddTests = liftIO $ assertEqual (T.unpack dependency <> " isn't found in the cabal file") indicesRes (T.indices dependency contents) testHiddenPackageSuggestions :: String -> [T.Text] -> [(T.Text, T.Text)] -> TestTree testHiddenPackageSuggestions testTitle messages suggestions = - let diags = map (\msg -> messageToDiagnostic msg ) messages + let diags = map (\msg -> messageToDiagnostic msg) messages suggestions' = map (safeHead . hiddenPackageSuggestion) diags assertions = zipWith (@?=) suggestions' (map Just suggestions) testNames = map (\(f, s) -> "Check if " ++ T.unpack f ++ (if s == "" then "" else "-") ++ T.unpack s ++ " was parsed correctly") suggestions @@ -164,20 +220,19 @@ cabalAddTests = in test messageToDiagnostic :: T.Text -> Diagnostic messageToDiagnostic msg = Diagnostic { - _range = mkRange 0 0 0 0 - , _severity = Nothing - , _code = Nothing - , _source = Nothing - , _message = msg - , _relatedInformation = Nothing - , _tags = Nothing - , _codeDescription = Nothing - , _data_ = Nothing + J._range = mkRange 0 0 0 0 + , J._severity = Nothing + , J._code = Nothing + , J._source = Nothing + , J._message = msg + , J._relatedInformation = Nothing + , J._tags = Nothing + , J._codeDescription = Nothing + , J._data_ = Nothing } - generatePackageYAMLTestSession :: FilePath -> Session () - generatePackageYAMLTestSession haskellFile = do + generatePackageYAMLTestSession haskellFile = do hsdoc <- openDoc haskellFile "haskell" _ <- waitForDiagnosticsFrom hsdoc cas <- Maybe.mapMaybe (^?
_R) <$> getAllCodeActions hsdoc diff --git a/plugins/hls-cabal-plugin/test/Main.hs b/plugins/hls-cabal-plugin/test/Main.hs index fcb85a081e..43794e753d 100644 --- a/plugins/hls-cabal-plugin/test/Main.hs +++ b/plugins/hls-cabal-plugin/test/Main.hs @@ -1,12 +1,15 @@ {-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} {-# LANGUAGE DisambiguateRecordFields #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} module Main ( main, ) where -import CabalAdd (cabalAddTests) +import CabalAdd (cabalAddDependencyTests, + cabalAddModuleTests) import Completer (completerTests) import Context (contextTests) import Control.Lens ((^.)) @@ -16,14 +19,19 @@ import qualified Data.ByteString as BS import Data.Either (isRight) import Data.List.Extra (nubOrdOn) import qualified Data.Maybe as Maybe +import Data.Text (Text) import qualified Data.Text as T +import qualified Data.Text.IO as Text import Definition (gotoDefinitionTests) +import Development.IDE.Test import Ide.Plugin.Cabal.LicenseSuggest (licenseErrorSuggestion) import qualified Ide.Plugin.Cabal.Parse as Lib import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Message as L import Outline (outlineTests) import System.FilePath import Test.Hls +import Test.Hls.FileSystem import Utils main :: IO () @@ -39,6 +47,7 @@ main = do , codeActionTests , gotoDefinitionTests , hoverTests + , reloadOnCabalChangeTests ] -- ------------------------------------------------------------------------ @@ -58,7 +67,8 @@ cabalParserUnitTests = testGroup "Parsing Cabal" [ testCase "Simple Parsing works" $ do - (warnings, pm) <- Lib.parseCabalFileContents =<< BS.readFile (testDataDir "simple.cabal") + fileContents <- BS.readFile (testDataDir "simple.cabal") + let (warnings, pm) = Lib.parseCabalFileContents fileContents liftIO $ do null warnings @? "Found unexpected warnings" isRight pm @? "Failed to parse GenericPackageDescription" @@ -89,7 +99,7 @@ codeActionUnitTests = maxCompletions = 100 --- ------------------------ ------------------------------------------------ +-- ------------------------------------------------------------------------ -- Integration Tests -- ------------------------------------------------------------------------ @@ -126,11 +136,6 @@ pluginTests = _ <- applyEdit doc $ TextEdit (Range (Position 3 20) (Position 4 0)) "BSD-3-Clause\n" newDiags <- cabalCaptureKick liftIO $ newDiags @?= [] - , runCabalTestCaseSession "No Diagnostics in .hs files from valid .cabal file" "simple-cabal" $ do - hsDoc <- openDoc "A.hs" "haskell" - expectNoMoreDiagnostics 1 hsDoc "typechecking" - cabalDoc <- openDoc "simple-cabal.cabal" "cabal" - expectNoMoreDiagnostics 1 cabalDoc "parsing" ] ] -- ---------------------------------------------------------------------------- @@ -208,7 +213,8 @@ codeActionTests = testGroup "Code Actions" ]) cas mapM_ executeCodeAction selectedCas pure () - , cabalAddTests + , cabalAddDependencyTests + , cabalAddModuleTests ] where getLicenseAction :: T.Text -> [Command |? 
CodeAction] -> [CodeAction] @@ -259,3 +265,63 @@ hoverOnDependencyTests = testGroup "Hover Dependency" h <- getHover doc pos liftIO $ assertBool ("Found hover `" <> show h <> "`") $ Maybe.isNothing h closeDoc doc + +-- ---------------------------------------------------------------------------- +-- Reloading of Haskell files on .cabal changes +-- ---------------------------------------------------------------------------- + +simpleCabalVft :: [FileTree] +simpleCabalVft = + [ copy "hie.yaml" + , copy "simple-reload.cabal" + , copy "Main.hs" + ] + +simpleCabalFs :: VirtualFileTree +simpleCabalFs = mkVirtualFileTree + (testDataDir "simple-reload") + simpleCabalVft + +-- Slow tests +reloadOnCabalChangeTests :: TestTree +reloadOnCabalChangeTests = testGroup "Reload on .cabal changes" + [ runCabalTestCaseSessionVft "Change warnings when .cabal file changes" simpleCabalFs $ do + _ <- openDoc "Main.hs" "haskell" + expectDiagnostics [("Main.hs", [(DiagnosticSeverity_Warning, (8, 0), "Top-level binding with no type signature", Just "GHC-38417")])] + waitForAllProgressDone + cabalDoc <- openDoc "simple-reload.cabal" "cabal" + skipManyTill anyMessage cabalKickDone + saveDoc cabalDoc + [trimming| + cabal-version: 3.4 + name: simple-reload + version: 0.1.0.0 + -- copyright: + build-type: Simple + + common warnings + ghc-options: -Wall -Wno-missing-signatures + + executable simple-reload + import: warnings + main-is: Main.hs + build-depends: base + default-language: Haskell2010 + |] + + expectDiagnostics [("Main.hs", [(DiagnosticSeverity_Warning, (2, 0), "The import of \8216Data.List\8217 is redundant", Nothing)])] + ] + +-- | Persists the given contents to the 'TextDocumentIdentifier' on disk +-- and sends the @textDocument/didSave@ notification. +saveDoc :: TextDocumentIdentifier -> Text -> Session () +saveDoc docId t = do + -- I couldn't figure out how to get the virtual file contents, so we write it + -- to disk and send the 'SMethod_TextDocumentDidSave' notification + case uriToFilePath (docId ^. L.uri) of + Nothing -> pure () + Just fp -> do + liftIO $ Text.writeFile fp t + + let params = DidSaveTextDocumentParams docId Nothing + sendNotification L.SMethod_TextDocumentDidSave params diff --git a/plugins/hls-cabal-plugin/test/Utils.hs b/plugins/hls-cabal-plugin/test/Utils.hs index 2733f94fd0..0264fec2c6 100644 --- a/plugins/hls-cabal-plugin/test/Utils.hs +++ b/plugins/hls-cabal-plugin/test/Utils.hs @@ -14,6 +14,7 @@ import qualified Ide.Plugin.Cabal import Ide.Plugin.Cabal.Completion.Types import System.FilePath import Test.Hls +import Test.Hls.FileSystem (VirtualFileTree) cabalPlugin :: PluginTestDescriptor Ide.Plugin.Cabal.Log @@ -57,6 +58,13 @@ runCabalSession :: FilePath -> Session a -> IO a runCabalSession subdir = failIfSessionTimeout . runSessionWithServer def cabalPlugin (testDataDir subdir) +runCabalTestCaseSessionVft :: TestName -> VirtualFileTree -> Session () -> TestTree +runCabalTestCaseSessionVft title vft = testCase title . runCabalSessionVft vft + +runCabalSessionVft :: VirtualFileTree -> Session a -> IO a +runCabalSessionVft vft = + failIfSessionTimeout . runSessionWithServerInTmpDir def cabalPlugin vft + runHaskellAndCabalSession :: FilePath -> Session a -> IO a runHaskellAndCabalSession subdir = failIfSessionTimeout . 
runSessionWithServer def (cabalPlugin <> cabalHaskellPlugin) (testDataDir subdir) @@ -82,3 +90,4 @@ cabalCaptureKick = captureKickDiagnostics cabalKickStart cabalKickDone -- | list comparison where the order in the list is irrelevant (@?==) :: (HasCallStack, Ord a, Show a) => [a] -> [a] -> Assertion (@?==) l1 l2 = sort l1 @?= sort l2 + diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs new file mode 100644 index 0000000000..c2e4af9606 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = undefined diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project new file mode 100644 index 0000000000..e6fdbadb43 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project @@ -0,0 +1 @@ +packages: . diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml new file mode 100644 index 0000000000..04cd24395e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal new file mode 100644 index 0000000000..bb6dc95f2f --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal @@ -0,0 +1,26 @@ +cabal-version: 3.0 +name: test +version: 0.1.0.0 +build-type: Simple + +library + hs-source-dirs: . + exposed-modules: + build-depends: base + default-language: Haskell2010 + +executable test + main-is: bla + build-depends: base + +benchmark test1 + type: exitcode-stdio-1.0 + main-is: bla + build-depends: base + +test-suite test2 + type: exitcode-stdio-1.0 + main-is: bla + build-depends: base + +library test3 diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs b/plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs new file mode 100644 index 0000000000..5f0cdfad80 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs @@ -0,0 +1,9 @@ +module Main where + +import Data.List -- Intentionally unused import, used in the testcase + +main :: IO () +main = foo + +-- Missing signature +foo = putStrLn "Hello, World" diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project b/plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project new file mode 100644 index 0000000000..e6fdbadb43 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project @@ -0,0 +1 @@ +packages: . 
diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml new file mode 100644 index 0000000000..04cd24395e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal b/plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal new file mode 100644 index 0000000000..359940aebc --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal @@ -0,0 +1,14 @@ +cabal-version: 3.4 +name: simple-reload +version: 0.1.0.0 +-- copyright: +build-type: Simple + +common warnings + ghc-options: -Wall -Wno-unused-imports + +executable simple-reload + import: warnings + main-is: Main.hs + build-depends: base + default-language: Haskell2010 diff --git a/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs index 06e9d99679..b897fa5abb 100644 --- a/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs +++ b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs @@ -24,6 +24,12 @@ import Development.IDE import Development.IDE.Core.Shake import Development.IDE.GHC.Compat as Compat import Development.IDE.Spans.AtPoint +import GHC.Iface.Ext.Types (ContextInfo (..), + DeclType (..), HieAST (..), + HieASTs (..), Identifier, + IdentifierDetails (..), + RecFieldContext (..), Span) +import GHC.Iface.Ext.Utils (getNameBinding) import HieDb (Symbol (Symbol)) import qualified Ide.Plugin.CallHierarchy.Query as Q import Ide.Plugin.CallHierarchy.Types diff --git a/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs b/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs index df776e6d15..8b8b7e7d3a 100644 --- a/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs +++ b/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs @@ -1,47 +1,93 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE ViewPatterns #-} -- | An HLS plugin to provide code actions to change type signatures module Ide.Plugin.ChangeTypeSignature (descriptor -- * For Unit Tests + , Log(..) 
, errorMessageRegexes ) where -import Control.Monad (guard) -import Control.Monad.IO.Class (MonadIO) -import Control.Monad.Trans.Except (ExceptT) -import Data.Foldable (asum) -import qualified Data.Map as Map -import Data.Maybe (mapMaybe) -import Data.Text (Text) -import qualified Data.Text as T -import Development.IDE (realSrcSpanToRange) +import Control.Lens +import Control.Monad (guard) +import Control.Monad.IO.Class (MonadIO) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Control.Monad.Trans.Except (ExceptT (..)) +import Control.Monad.Trans.Maybe (MaybeT (..), hoistMaybe) +import Data.Foldable (asum) +import qualified Data.Map as Map +import Data.Maybe (catMaybes) +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE (FileDiagnostic, + IdeState (..), Pretty (..), + Priority (..), Recorder, + WithPriority, + fdLspDiagnosticL, + fdStructuredMessageL, + logWith, realSrcSpanToRange) import Development.IDE.Core.PluginUtils -import Development.IDE.Core.RuleTypes (GetParsedModule (GetParsedModule)) -import Development.IDE.Core.Service (IdeState) -import Development.IDE.GHC.Compat -import Development.IDE.GHC.Util (printOutputable) -import Generics.SYB (extQ, something) -import Ide.Plugin.Error (PluginError, - getNormalizedFilePathE) -import Ide.Types (PluginDescriptor (..), - PluginId (PluginId), - PluginMethodHandler, - defaultPluginDescriptor, - mkPluginHandler) +import Development.IDE.Core.RuleTypes (GetParsedModule (GetParsedModule)) +import Development.IDE.GHC.Compat hiding (vcat) +import Development.IDE.GHC.Compat.Error (_MismatchMessage, + _TcRnMessageWithCtx, + _TcRnMessageWithInfo, + _TcRnSolverReport, + _TypeEqMismatchActual, + _TypeEqMismatchExpected, + msgEnvelopeErrorL, + reportContentL) +import Development.IDE.GHC.Util (printOutputable) +import Development.IDE.Types.Diagnostics (_SomeStructuredMessage) +import Generics.SYB (extQ, something) +import GHC.Tc.Errors.Types (ErrInfo (..), + TcRnMessageDetailed (..)) +import qualified Ide.Logger as Logger +import Ide.Plugin.Error (PluginError, + getNormalizedFilePathE) +import Ide.Types (Config, HandlerM, + PluginDescriptor (..), + PluginId (PluginId), + PluginMethodHandler, + defaultPluginDescriptor, + mkPluginHandler) import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types -import Text.Regex.TDFA ((=~)) - -descriptor :: PluginId -> PluginDescriptor IdeState -descriptor plId = (defaultPluginDescriptor plId "Provides a code action to change the type signature of a binding if it is wrong") - { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction (codeActionHandler plId) } - -codeActionHandler :: PluginId -> PluginMethodHandler IdeState 'Method_TextDocumentCodeAction -codeActionHandler plId ideState _ CodeActionParams {_textDocument = TextDocumentIdentifier uri, _context = CodeActionContext diags _ _} = do - nfp <- getNormalizedFilePathE uri - decls <- getDecls plId ideState nfp - let actions = mapMaybe (generateAction plId uri decls) diags - pure $ InL actions +import Text.Regex.TDFA ((=~)) + +data Log + = LogErrInfoCtxt ErrInfo + | LogFindSigLocFailure DeclName + +instance Pretty Log where + pretty = \case + LogErrInfoCtxt (ErrInfo ctxt suppl) -> + Logger.vcat [fromSDoc ctxt, fromSDoc suppl] + LogFindSigLocFailure name -> + pretty ("Lookup signature location failure: " <> name) + where + fromSDoc = pretty . 
printOutputable + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId "Provides a code action to change the type signature of a binding if it is wrong") + { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction (codeActionHandler recorder plId) + } + +codeActionHandler + :: Recorder (WithPriority Log) + -> PluginId + -> PluginMethodHandler IdeState 'Method_TextDocumentCodeAction +codeActionHandler recorder plId ideState _ CodeActionParams{_textDocument, _range} = do + let TextDocumentIdentifier uri = _textDocument + nfp <- getNormalizedFilePathE uri + decls <- getDecls plId ideState nfp + + activeDiagnosticsInRange (shakeExtras ideState) nfp _range >>= \case + Nothing -> pure (InL []) + Just fileDiags -> do + actions <- lift $ mapM (generateAction recorder plId uri decls) fileDiags + pure (InL (catMaybes actions)) getDecls :: MonadIO m => PluginId -> IdeState -> NormalizedFilePath -> ExceptT PluginError m [LHsDecl GhcPs] getDecls (PluginId changeTypeSignatureId) state = @@ -67,39 +113,74 @@ data ChangeSignature = ChangeSignature { -- | the location of the declaration signature , declSrcSpan :: RealSrcSpan -- | the diagnostic to solve - , diagnostic :: Diagnostic + , diagnostic :: FileDiagnostic } -- | Create a CodeAction from a Diagnostic -generateAction :: PluginId -> Uri -> [LHsDecl GhcPs] -> Diagnostic -> Maybe (Command |? CodeAction) -generateAction plId uri decls diag = changeSigToCodeAction plId uri <$> diagnosticToChangeSig decls diag +generateAction + :: Recorder (WithPriority Log) + -> PluginId + -> Uri + -> [LHsDecl GhcPs] + -> FileDiagnostic + -> HandlerM Config (Maybe (Command |? CodeAction)) +generateAction recorder plId uri decls fileDiag = do + changeSig <- diagnosticToChangeSig recorder decls fileDiag + pure $ + changeSigToCodeAction plId uri <$> changeSig -- | Convert a diagnostic into a ChangeSignature and add the proper SrcSpan -diagnosticToChangeSig :: [LHsDecl GhcPs] -> Diagnostic -> Maybe ChangeSignature -diagnosticToChangeSig decls diagnostic = do - -- regex match on the GHC Error Message - (expectedType, actualType, declName) <- matchingDiagnostic diagnostic - -- Find the definition and it's location - declSrcSpan <- findSigLocOfStringDecl decls expectedType (T.unpack declName) - pure $ ChangeSignature{..} - +diagnosticToChangeSig + :: Recorder (WithPriority Log) + -> [LHsDecl GhcPs] + -> FileDiagnostic + -> HandlerM Config (Maybe ChangeSignature) +diagnosticToChangeSig recorder decls diagnostic = runMaybeT $ do + -- Extract expected, actual, and extra error info + (expectedType, actualType, errInfo) <- hoistMaybe $ do + msg <- diagnostic ^. fdStructuredMessageL ^? _SomeStructuredMessage + tcRnMsg <- msg ^. msgEnvelopeErrorL ^? _TcRnMessageWithCtx + (_, TcRnMessageDetailed errInfo tcRnMsg') <- tcRnMsg ^? _TcRnMessageWithInfo + solverReport <- tcRnMsg' ^? _TcRnSolverReport . _1 . reportContentL + mismatch <- solverReport ^? _MismatchMessage + expectedType <- mismatch ^? _TypeEqMismatchExpected + actualType <- mismatch ^? _TypeEqMismatchActual + + pure (showType expectedType, showType actualType, errInfo) + + logWith recorder Debug (LogErrInfoCtxt errInfo) + + -- Extract the declName from the extra error text + declName <- hoistMaybe (matchingDiagnostic errInfo) + + -- Look up location of declName. 
If it fails, log it + declSrcSpan <- + case findSigLocOfStringDecl decls expectedType (T.unpack declName) of + Just x -> pure x + Nothing -> do + logWith recorder Debug (LogFindSigLocFailure declName) + hoistMaybe Nothing + + pure ChangeSignature{..} + where + showType :: Type -> Text + showType = T.pack . showSDocUnsafe . pprTidiedType -- | If a diagnostic has the proper message create a ChangeSignature from it -matchingDiagnostic :: Diagnostic -> Maybe (ExpectedSig, ActualSig, DeclName) -matchingDiagnostic Diagnostic{_message} = asum $ map (unwrapMatch . (=~) _message) errorMessageRegexes +matchingDiagnostic :: ErrInfo -> Maybe DeclName +matchingDiagnostic ErrInfo{errInfoContext} = + asum $ map (unwrapMatch . (=~) errInfoTxt) errorMessageRegexes where - unwrapMatch :: (Text, Text, Text, [Text]) -> Maybe (ExpectedSig, ActualSig, DeclName) - -- due to using (.|\n) in regex we have to drop the erroneous, but necessary ("." doesn't match newlines), match - unwrapMatch (_, _, _, [expect, actual, _, name]) = Just (expect, actual, name) - unwrapMatch _ = Nothing + unwrapMatch :: (Text, Text, Text, [Text]) -> Maybe DeclName + unwrapMatch (_, _, _, [name]) = Just name + unwrapMatch _ = Nothing + + errInfoTxt = printOutputable errInfoContext -- | List of regexes that match various Error Messages errorMessageRegexes :: [Text] errorMessageRegexes = [ -- be sure to add new Error Messages Regexes at the bottom to not fail any existing tests - "Expected type: (.+)\n +Actual type: (.+)\n(.|\n)+In an equation for ‘(.+)’" - , "Couldn't match expected type ‘(.+)’ with actual type ‘(.+)’\n(.|\n)+In an equation for ‘(.+)’" - -- GHC >9.2 version of the first error regex - , "Expected: (.+)\n +Actual: (.+)\n(.|\n)+In an equation for ‘(.+)’" + "In an equation for ‘(.+)’:" ] -- | Given a String with the name of a declaration, GHC's "Expected Type", find the declaration that matches @@ -147,7 +228,7 @@ changeSigToCodeAction :: PluginId -> Uri -> ChangeSignature -> Command |? CodeAc changeSigToCodeAction (PluginId changeTypeSignatureId) uri ChangeSignature{..} = InR CodeAction { _title = mkChangeSigTitle declName actualType , _kind = Just (CodeActionKind_Custom ("quickfix." <> changeTypeSignatureId)) - , _diagnostics = Just [diagnostic] + , _diagnostics = Just [diagnostic ^. 
fdLspDiagnosticL ] , _isPreferred = Nothing , _disabled = Nothing , _edit = Just $ mkChangeSigEdit uri declSrcSpan (mkNewSignature declName actualType) diff --git a/plugins/hls-change-type-signature-plugin/test/Main.hs b/plugins/hls-change-type-signature-plugin/test/Main.hs index cd1b152c0b..72a2ab780e 100644 --- a/plugins/hls-change-type-signature-plugin/test/Main.hs +++ b/plugins/hls-change-type-signature-plugin/test/Main.hs @@ -5,7 +5,7 @@ import Data.Either (rights) import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.IO as TIO -import Ide.Plugin.ChangeTypeSignature (errorMessageRegexes) +import Ide.Plugin.ChangeTypeSignature (Log (..), errorMessageRegexes) import qualified Ide.Plugin.ChangeTypeSignature as ChangeTypeSignature import System.FilePath ((<.>), (</>)) import Test.Hls (CodeAction (..), Command, @@ -21,8 +21,7 @@ import Test.Hls (CodeAction (..), Command, getCodeActions, goldenWithHaskellDoc, knownBrokenForGhcVersions, - liftIO, - mkPluginTestDescriptor', + liftIO, mkPluginTestDescriptor, openDoc, runSessionWithServer, testCase, testGroup, toEither, type (|?), waitForBuildQueue, @@ -32,16 +31,19 @@ import Text.Regex.TDFA ((=~)) main :: IO () main = defaultTestRunner test -changeTypeSignaturePlugin :: PluginTestDescriptor () -changeTypeSignaturePlugin = mkPluginTestDescriptor' ChangeTypeSignature.descriptor "changeTypeSignature" +changeTypeSignaturePlugin :: PluginTestDescriptor Log +changeTypeSignaturePlugin = + mkPluginTestDescriptor + ChangeTypeSignature.descriptor + "changeTypeSignature" test :: TestTree test = testGroup "changeTypeSignature" [ testRegexes , codeActionTest "TExpectedActual" 4 11 - , knownBrokenForGhcVersions [GHC96 .. GHC912] "Error Message in 9.2+ does not provide enough info" $ + , knownBrokenForGhcVersions [GHC96 .. GHC912] "Error Message in 9.6+ does not provide enough info" $ codeActionTest "TRigidType" 4 14 - , codeActionTest "TRigidType2" 4 6 + , codeActionTest "TRigidType2" 4 8 , codeActionTest "TLocalBinding" 7 22 , codeActionTest "TLocalBindingShadow1" 11 8 , codeActionTest "TLocalBindingShadow2" 7 22 @@ -50,43 +52,17 @@ test = testGroup "changeTypeSignature" [ testRegexes :: TestTree testRegexes = testGroup "Regex Testing" [ - testRegexOne - , testRegexTwo - , testRegex921One - ] - -testRegexOne :: TestTree -testRegexOne = testGroup "Regex One" [ - regexTest "error1.txt" regex True - , regexTest "error2.txt" regex True - , regexTest "error3.txt" regex False - , regexTest "error4.txt" regex True - , regexTest "error5.txt" regex True + regexTest "TExpectedActual.txt" regex True + , regexTest "TLocalBinding.txt" regex True + , regexTest "TLocalBindingShadow1.txt" regex True + , regexTest "TLocalBindingShadow2.txt" regex True + -- Error message from GHC currently does not provide enough info + , regexTest "TRigidType.txt" regex False + , regexTest "TRigidType2.txt" regex True ] where regex = errorMessageRegexes !! 0 -testRegexTwo :: TestTree -testRegexTwo = testGroup "Regex Two" [ - regexTest "error1.txt" regex False - , regexTest "error2.txt" regex False - , regexTest "error3.txt" regex True - , regexTest "error4.txt" regex False - , regexTest "error5.txt" regex False - ] - where - regex = errorMessageRegexes !! 1 - --- test ghc-9.2 error message regex -testRegex921One :: TestTree -testRegex921One = testGroup "Regex One" [ - regexTest "ghc921-error1.txt" regex True - , regexTest "ghc921-error2.txt" regex True - , regexTest "ghc921-error3.txt" regex True - ] - where - regex = errorMessageRegexes !!
2 - testDataDir :: FilePath testDataDir = "plugins" </> "hls-change-type-signature-plugin" </> "test" </> "testdata" @@ -123,8 +99,8 @@ regexTest :: FilePath -> Text -> Bool -> TestTree regexTest fp regex shouldPass = testCase fp $ do msg <- TIO.readFile (testDataDir </> fp) case (msg =~ regex :: (Text, Text, Text, [Text]), shouldPass) of - ((_, _, _, [_, _, _, _]), True) -> pure () - ((_, _, _, [_, _, _, _]), False) -> assertFailure $ "Unexpected match: " <> fp <> " with " <> T.unpack regex + ((_, _, _, [_]), True) -> pure () + ((_, _, _, [_]), False) -> assertFailure $ "Unexpected match: " <> fp <> " with " <> T.unpack regex (_, True) -> assertFailure $ "Failed to match: " <> fp <> " with " <> T.unpack regex (_, False) -> pure () diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt new file mode 100644 index 0000000000..6a8246a921 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt @@ -0,0 +1,8 @@ +In the expression: go +In an equation for ‘fullSig’: +fullSig + = go + where + go = head . reverse + + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt new file mode 100644 index 0000000000..3f31dc48b9 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt @@ -0,0 +1,8 @@ +Probable cause: ‘forM’ is applied to too few arguments +In the expression: forM +In an equation for ‘test’: test = forM +In the expression: + let + test :: Int -> Int + test = forM + in x + 1 diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt new file mode 100644 index 0000000000..ef782e8aec --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt @@ -0,0 +1,4 @@ +Probable cause: ‘forM’ is applied to too few arguments +In the expression: forM +In an equation for ‘test’: test = forM + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt new file mode 100644 index 0000000000..bea2526eb9 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt @@ -0,0 +1,9 @@ +Probable cause: ‘forM’ is applied to too few arguments +In the expression: forM +In an equation for ‘test’: test = forM +In the expression: + let + test :: Int -> Int + test = forM + in test x [GHC-83865] + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt new file mode 100644 index 0000000000..f9e78c97ae --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt @@ -0,0 +1,5 @@ +In the expression: go . head .
reverse +Relevant bindings include + test :: a -> Int + (bound at /home/sgillespie/dev/haskell/haskell-language-server/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.hs:4:1) [GHC-25897] + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt new file mode 100644 index 0000000000..343129a942 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt @@ -0,0 +1,6 @@ +In the expression: head +In an equation for ‘test’: test = head +Relevant bindings include + test :: a -> Int + (bound at /home/sgillespie/dev/haskell/haskell-language-server/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.hs:4:1) [GHC-25897] + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error1.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error1.txt deleted file mode 100644 index 37f0aa4a81..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error1.txt +++ /dev/null @@ -1,6 +0,0 @@ - • Couldn't match type ‘Int’ - with ‘Data.HashSet.Internal.HashSet Int’ - Expected type: Int -> Int - Actual type: Data.HashSet.Internal.HashSet Int -> Int - • In the expression: head . toList - In an equation for ‘test’: test = head . toList diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error2.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error2.txt deleted file mode 100644 index 497f8350a5..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error2.txt +++ /dev/null @@ -1,6 +0,0 @@ - • Couldn't match type ‘b0 -> t0 a0 -> b0’ with ‘Int’ - Expected type: Int -> Int - Actual type: (b0 -> a0 -> b0) -> b0 -> t0 a0 -> b0 - • Probable cause: ‘foldl’ is applied to too few arguments - In the expression: foldl - In an equation for ‘test’: test = foldl diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error3.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error3.txt deleted file mode 100644 index 0cbddad7c4..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error3.txt +++ /dev/null @@ -1,10 +0,0 @@ - • Couldn't match expected type ‘Int’ with actual type ‘[Int]’ - • In the expression: map (+ x) [1, 2, 3] - In an equation for ‘test’: - test x - = map (+ x) [1, 2, 3] - where - go = head . reverse - | -152 | test x = map (+ x) [1,2,3] - | ^^^^^^^^^^^^^^^^^ diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error4.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error4.txt deleted file mode 100644 index 323cf7d4db..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error4.txt +++ /dev/null @@ -1,19 +0,0 @@ - • Couldn't match type ‘a’ with ‘[[Int]]’ - ‘a’ is a rigid type variable bound by - the type signature for: - test :: forall a. Ord a => a -> Int - at src/Ide/Plugin/ChangeTypeSignature.hs:154:1-25 - Expected type: a -> Int - Actual type: [[Int]] -> Int - • In the expression: go . head . reverse - In an equation for ‘test’: - test - = go . head . reverse - where - go = head . reverse - • Relevant bindings include - test :: a -> Int - (bound at src/Ide/Plugin/ChangeTypeSignature.hs:155:1) - | -155 | test = go . head . 
reverse - | ^^^^^^^^^^^^^^^^^^^ diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error5.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error5.txt deleted file mode 100644 index a7a5d9a20b..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error5.txt +++ /dev/null @@ -1,15 +0,0 @@ - • Couldn't match type ‘(a0 -> m0 b0) -> m0 (t0 b0)’ with ‘Int’ - Expected type: Int -> Int - Actual type: t0 a0 -> (a0 -> m0 b0) -> m0 (t0 b0) - • Probable cause: ‘forM’ is applied to too few arguments - In the expression: forM - In an equation for ‘test’: test = forM - In an equation for ‘implicit’: - implicit - = return OpTEmpty - where - test :: Int -> Int - test = forM - | -82 | test = forM - | ^^^^ diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs index ecbd495246..3f902ef80c 100644 --- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs @@ -36,6 +36,9 @@ import Development.IDE.GHC.Compat.Error (TcRnMessage (..), stripTcRnMessageContext) import Development.IDE.GHC.Compat.Util import Development.IDE.Spans.AtPoint (pointCommand) +import GHC.Iface.Ext.Types (ContextInfo (..), + HieAST (..), Identifier, + IdentifierDetails (..)) import Ide.Plugin.Class.ExactPrint import Ide.Plugin.Class.Types import Ide.Plugin.Class.Utils diff --git a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs index 6fa799b8d5..915a98d607 100644 --- a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs +++ b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs @@ -19,7 +19,11 @@ import Data.Maybe (fromMaybe, mapMaybe) import Data.Semigroup (First (First, getFirst)) import Data.Semigroup.Foldable (foldlM1) import qualified Data.Set as Set -import Development.IDE.GHC.Compat hiding (nodeInfo) +import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (ContextInfo (..), HieAST (..), + Identifier, IdentifierDetails (..), + NodeInfo (nodeIdentifiers), Span) +import GHC.Iface.Ext.Utils (RefMap, flattenAst) import Prelude hiding (span) {-| diff --git a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs index 86d5923011..2391a35e1a 100644 --- a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs +++ b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs @@ -39,18 +39,17 @@ import qualified Data.Vector as V import Development.IDE import Development.IDE.Core.Rules (toIdeResult) import qualified Development.IDE.Core.Shake as Shake -import Development.IDE.GHC.Compat (HieAST (..), - HieASTs (getAsts), RefMap) import Development.IDE.GHC.Compat.Util import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (HieAST (..), HieASTs (..)) +import GHC.Iface.Ext.Utils (RefMap) import Ide.Plugin.CodeRange.ASTPreProcess (CustomNodeType (..), PreProcessEnv (..), isCustomNode, preProcessAST) -import Language.LSP.Protocol.Types (FoldingRangeKind (FoldingRangeKind_Comment, FoldingRangeKind_Imports, FoldingRangeKind_Region)) - import Language.LSP.Protocol.Lens (HasEnd (end), HasStart (start)) +import Language.LSP.Protocol.Types (FoldingRangeKind (FoldingRangeKind_Comment, FoldingRangeKind_Imports, FoldingRangeKind_Region)) import Prelude hiding (log) data Log = LogShake Shake.Log diff --git 
a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs index e3208e37f5..089779ea2b 100644 --- a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs +++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs @@ -10,4 +10,12 @@ module TProperty where -- errorEmptyList, called at libraries/ghc-internal/src/GHC/Internal/List.hs:96:11 in ghc-internal:GHC.Internal.List -- badHead, called at libraries/ghc-internal/src/GHC/Internal/List.hs:90:28 in ghc-internal:GHC.Internal.List -- head, called at :1:27 in interactive:Ghci2 +-- HasCallStack backtrace: +-- collectBacktraces, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:169:13 in ghc-internal:GHC.Internal.Exception +-- toExceptionWithBacktrace, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:204:5 in ghc-internal:GHC.Internal.Exception +-- error, called at libraries/ghc-internal/src/GHC/Internal/List.hs:2030:3 in ghc-internal:GHC.Internal.List +-- errorEmptyList, called at libraries/ghc-internal/src/GHC/Internal/List.hs:96:11 in ghc-internal:GHC.Internal.List +-- badHead, called at libraries/ghc-internal/src/GHC/Internal/List.hs:90:28 in ghc-internal:GHC.Internal.List +-- head, called at :1:27 in interactive:Ghci2 +-- -- [] diff --git a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs index a761f648af..a111e9062b 100644 --- a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs +++ b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs @@ -58,8 +58,7 @@ import Development.IDE.GHC.Compat (FieldLabel (flSelector), HsExpr (HsApp, HsVar, XExpr), HsFieldBind (hfbLHS), HsRecFields (..), - HsWrap (HsWrap), - Identifier, LPat, + HsWrap (HsWrap), LPat, Located, NamedThing (getName), Outputable, @@ -90,6 +89,7 @@ import Development.IDE.Spans.Pragmas (NextPragmaInfo (..), getFirstPragma, insertNewPragma) import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (Identifier) import Ide.Logger (Priority (..), cmapWithPrio, logWith, (<+>)) diff --git a/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs index 5a72455eb5..210e9f3910 100644 --- a/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs +++ b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs @@ -64,11 +64,9 @@ import System.Environment (setEnv, #endif import Development.IDE.GHC.Compat (DynFlags, - WarningFlag (Opt_WarnUnrecognisedPragmas), extensionFlags, ms_hspp_opts, - topDir, - wopt) + topDir) import qualified Development.IDE.GHC.Compat.Util as EnumSet #if MIN_GHC_API_VERSION(9,4,0) @@ -466,19 +464,10 @@ mkSuppressHintTextEdits dynFlags fileContents hint = NextPragmaInfo{ nextPragmaLine, lineSplitTextEdits } = getNextPragmaInfo dynFlags (Just fileContents) nextPragmaLinePosition = Position (fromIntegral nextPragmaLine) 0 nextPragmaRange = Range nextPragmaLinePosition nextPragmaLinePosition - wnoUnrecognisedPragmasText = - if wopt Opt_WarnUnrecognisedPragmas dynFlags - then Just "{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-}\n" - else Nothing - hlintIgnoreText = Just ("{-# HLINT ignore \"" <> hint <> "\" #-}\n") - -- we combine the texts into a single text because lsp-test currently - -- applies text edits backwards and I want the options pragma to - -- appear above the hlint pragma in the tests - combinedText = mconcat $ catMaybes 
[wnoUnrecognisedPragmasText, hlintIgnoreText] - combinedTextEdit = LSP.TextEdit nextPragmaRange combinedText + textEdit = LSP.TextEdit nextPragmaRange $ "{- HLINT ignore \"" <> hint <> "\" -}\n" lineSplitTextEditList = maybe [] (\LineSplitTextEdits{..} -> [lineSplitInsertTextEdit, lineSplitDeleteTextEdit]) lineSplitTextEdits in - combinedTextEdit : lineSplitTextEditList + textEdit : lineSplitTextEditList -- --------------------------------------------------------------------- ignoreHint :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> VersionedTextDocumentIdentifier -> HintTitle -> IO (Either PluginError WorkspaceEdit) diff --git a/plugins/hls-hlint-plugin/test/Main.hs b/plugins/hls-hlint-plugin/test/Main.hs index 4eea2a803a..360a9c0c01 100644 --- a/plugins/hls-hlint-plugin/test/Main.hs +++ b/plugins/hls-hlint-plugin/test/Main.hs @@ -48,9 +48,9 @@ resolveTests :: TestTree resolveTests = knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testGroup "hlint resolve tests" [ ignoreHintGoldenResolveTest - "Resolve version of: Ignore hint in this module inserts -Wno-unrecognised-pragmas and hlint ignore pragma if warn unrecognized pragmas is off" - "UnrecognizedPragmasOff" - (Point 3 8) + "Resolve version of: Ignore hint in this module inserts hlint ignore pragma" + "IgnoreHintAction" + (Point 2 8) "Eta reduce" , applyHintGoldenResolveTest "Resolve version of: [#2612] Apply hint works when operator fixities go right-to-left" @@ -64,14 +64,9 @@ ignoreHintTests :: TestTree ignoreHintTests = testGroup "hlint ignore hint tests" [ ignoreHintGoldenTest - "Ignore hint in this module inserts -Wno-unrecognised-pragmas and hlint ignore pragma if warn unrecognized pragmas is off" - "UnrecognizedPragmasOff" - (Point 3 8) - "Eta reduce" - , ignoreHintGoldenTest - "Ignore hint in this module inserts only hlint ignore pragma if warn unrecognized pragmas is on" - "UnrecognizedPragmasOn" - (Point 3 9) + "Ignore hint in this module inserts hlint ignore pragma" + "IgnoreHintAction" + (Point 2 8) "Eta reduce" ] diff --git a/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs new file mode 100644 index 0000000000..b3ae28995e --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs @@ -0,0 +1,3 @@ +{- HLINT ignore "Eta reduce" -} +module IgnoreHintAction where +foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs new file mode 100644 index 0000000000..7fb147a40f --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs @@ -0,0 +1,2 @@ +module IgnoreHintAction where +foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.expected.hs b/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.expected.hs deleted file mode 100644 index 31d9aed946..0000000000 --- a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.expected.hs +++ /dev/null @@ -1,4 +0,0 @@ -{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} -{-# HLINT ignore "Eta reduce" #-} -module UnrecognizedPragmasOff where -foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.hs b/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.hs deleted file mode 100644 index 2611c9a7f7..0000000000 --- a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.hs +++ /dev/null @@ -1,3 +0,0 @@ -{-# OPTIONS_GHC 
-Wno-unrecognised-pragmas #-} -module UnrecognizedPragmasOff where -foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.expected.hs b/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.expected.hs deleted file mode 100644 index 564503ca40..0000000000 --- a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.expected.hs +++ /dev/null @@ -1,5 +0,0 @@ -{-# OPTIONS_GHC -Wunrecognised-pragmas #-} -{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} -{-# HLINT ignore "Eta reduce" #-} -module UnrecognizedPragmasOn where -foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.hs b/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.hs deleted file mode 100644 index bac66497ba..0000000000 --- a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.hs +++ /dev/null @@ -1,3 +0,0 @@ -{-# OPTIONS_GHC -Wunrecognised-pragmas #-} -module UnrecognizedPragmasOn where -foo x = id x diff --git a/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs b/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs index 011910b880..6917d0a7a9 100644 --- a/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs +++ b/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs @@ -32,17 +32,14 @@ import Development.IDE.Core.RuleTypes (GetFileContents (GetFileConte TcModuleResult (TcModuleResult, tmrParsed, tmrTypechecked), TypeCheck (TypeCheck)) import Development.IDE.Core.Shake (IdeState) -import Development.IDE.GHC.Compat (ContextInfo (Use), - GenLocated (..), GhcPs, +import Development.IDE.GHC.Compat (GenLocated (..), GhcPs, GlobalRdrElt, GlobalRdrEnv, HsModule (hsmodImports), - Identifier, - IdentifierDetails (IdentifierDetails, identInfo), ImpDeclSpec (ImpDeclSpec, is_as, is_dloc, is_qual), ImportSpec (ImpSpec), LImportDecl, ModuleName, Name, NameEnv, ParsedModule, - RefMap, Span, SrcSpan, + SrcSpan, TcGblEnv (tcg_rdr_env), emptyUFM, globalRdrEnvElts, gre_imp, gre_name, locA, @@ -58,6 +55,9 @@ import Development.IDE.GHC.Compat (ContextInfo (Use), srcSpanStartLine, unitUFM) import Development.IDE.Types.Location (Position (Position), Range (Range), Uri) +import GHC.Iface.Ext.Types (ContextInfo (..), Identifier, + IdentifierDetails (..), Span) +import GHC.Iface.Ext.Utils (RefMap) import Ide.Plugin.Error (PluginError (PluginRuleFailed), getNormalizedFilePathE, handleMaybe) diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs index e471d1781a..1fba6b67e5 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs @@ -75,6 +75,8 @@ import Development.IDE.Types.Options import GHC (DeltaPos (..), EpAnn (..), LEpaComment) +import GHC.Iface.Ext.Types (ContextInfo (..), + IdentifierDetails (..)) import qualified GHC.LanguageExtensions as Lang import Ide.Logger hiding (group) diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs index 0f48a3a649..bffd2a611c 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs @@ -63,6 +63,7 @@ import GHC (addAnns, ann) #if MIN_VERSION_ghc(9,9,0) import GHC (NoAnn 
(..)) +import GHC (EpAnnComments (..)) #endif ------------------------------------------------------------------------------ @@ -170,7 +171,7 @@ appendConstraint constraintT = go . traceAst "appendConstraint" constraint <- liftParseAST df constraintT constraint <- pure $ setEntryDP constraint (SameLine 1) #if MIN_VERSION_ghc(9,9,0) - let l'' = fmap (addParensToCtxt close_dp) l' + let l'' = moveCommentsToTheEnd $ fmap (addParensToCtxt close_dp) l' #else let l'' = (fmap.fmap) (addParensToCtxt close_dp) l' #endif @@ -205,6 +206,26 @@ appendConstraint constraintT = go . traceAst "appendConstraint" return $ reLocA $ L lTop $ HsQualTy noExtField context ast +#if MIN_VERSION_ghc(9,9,0) +-- | This moves comment annotation toward the end of the block +-- This is useful when extending a block, so the comment correctly appears +-- after. +-- +-- See https://github.com/haskell/haskell-language-server/issues/4648 for +-- discussion. +-- +-- For example, the following element, @(Foo) => -- hello@, when introducing an +-- additionnal constraint, `Bar`, instead of getting `@(Foo, Bar) => -- hello@, +-- we get @(Foo, -- hello Bar) =>@ +-- +-- This is a bit painful that the pretty printer is not able to realize that it +-- introduces the token `=>` inside the comment and instead does something with +-- meaning, but that's another story. +moveCommentsToTheEnd :: EpAnn ann -> EpAnn ann +moveCommentsToTheEnd (EpAnn entry anns (EpaComments priors)) = EpAnn entry anns (EpaCommentsBalanced { priorComments = [], followingComments = priors}) +moveCommentsToTheEnd (EpAnn entry anns (EpaCommentsBalanced priors following)) = EpAnn entry anns (EpaCommentsBalanced { priorComments = [], followingComments = priors <> following}) +#endif + liftParseAST :: forall ast l. (ASTElement l ast, ExactPrint (LocatedAn l ast)) => DynFlags -> String -> TransformT (Either String) (LocatedAn l ast) @@ -500,7 +521,7 @@ extendHiding symbol (L l idecls) mlies df = do Nothing -> do #if MIN_VERSION_ghc(9,11,0) let ann :: EpAnn (AnnList (EpToken "hiding", [EpToken ","])) - ann = noAnnSrcSpanDP0 + ann = noAnnSrcSpanDP0 #elif MIN_VERSION_ghc(9,9,0) let ann = noAnnSrcSpanDP0 #else diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs index 40f3c76127..2a7719fdbe 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs @@ -10,6 +10,7 @@ import Development.IDE.GHC.Compat.ExactPrint as GHC import Development.IDE.GHC.Dump (showAstDataHtml) import GHC.Stack import GHC.Utils.Outputable +import System.Directory.Extra (createDirectoryIfMissing) import System.Environment.Blank (getEnvDefault) import System.IO.Unsafe import Text.Printf @@ -37,6 +38,7 @@ traceAst lbl x doTrace = unsafePerformIO $ do u <- U.newUnique let htmlDumpFileName = printf "/tmp/hls/%s-%s-%d.html" (show timestamp) lbl (U.hashUnique u) + createDirectoryIfMissing True "/tmp/hls" writeFile htmlDumpFileName $ renderDump htmlDump return $ unlines [prettyCallStack callStack ++ ":" diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs index 17db1f0298..0f06fff2f7 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs @@ 
-2,78 +2,106 @@ module Development.IDE.Plugin.Plugins.FillTypeWildcard ( suggestFillTypeWildcard ) where -import Data.Char -import qualified Data.Text as T -import Language.LSP.Protocol.Types (Diagnostic (..), - TextEdit (TextEdit)) +import Control.Lens +import Data.Maybe (isJust) +import qualified Data.Text as T +import Development.IDE (FileDiagnostic (..), + fdStructuredMessageL, + printOutputable) +import Development.IDE.GHC.Compat hiding (vcat) +import Development.IDE.GHC.Compat.Error +import Development.IDE.Types.Diagnostics (_SomeStructuredMessage) +import GHC.Tc.Errors.Types (ErrInfo (..)) +import Language.LSP.Protocol.Types (Diagnostic (..), + TextEdit (TextEdit)) -suggestFillTypeWildcard :: Diagnostic -> [(T.Text, TextEdit)] -suggestFillTypeWildcard Diagnostic{_range=_range,..} +suggestFillTypeWildcard :: FileDiagnostic -> [(T.Text, TextEdit)] +suggestFillTypeWildcard diag@FileDiagnostic{fdLspDiagnostic = Diagnostic {..}} -- Foo.hs:3:8: error: -- * Found type wildcard `_' standing for `p -> p1 -> p' - | "Found type wildcard" `T.isInfixOf` _message - , " standing for " `T.isInfixOf` _message - , typeSignature <- extractWildCardTypeSignature _message - = [("Use type signature: ‘" <> typeSignature <> "’", TextEdit _range typeSignature)] + | isWildcardDiagnostic diag + , typeSignature <- extractWildCardTypeSignature diag = + [("Use type signature: ‘" <> typeSignature <> "’", TextEdit _range typeSignature)] | otherwise = [] +isWildcardDiagnostic :: FileDiagnostic -> Bool +isWildcardDiagnostic = + maybe False (isJust . (^? _TypeHole) . hole_sort) . diagReportHoleError + +-- | Extract the 'Hole' out of a 'FileDiagnostic' +diagReportHoleError :: FileDiagnostic -> Maybe Hole +diagReportHoleError diag = do + solverReport <- + diag + ^? fdStructuredMessageL + . _SomeStructuredMessage + . msgEnvelopeErrorL + . _TcRnMessage + . _TcRnSolverReport + . _1 + (hole, _) <- solverReport ^? reportContentL . _ReportHoleError + + Just hole + -- | Extract the type and surround it in parentheses except in obviously safe cases. -- -- Inferring when parentheses are actually needed around the type signature would -- require understanding both the precedence of the context of the hole and of -- the signature itself. Inserting them (almost) unconditionally is ugly but safe. -extractWildCardTypeSignature :: T.Text -> T.Text -extractWildCardTypeSignature msg - | enclosed || not isApp || isToplevelSig = sig - | otherwise = "(" <> sig <> ")" - where - msgSigPart = snd $ T.breakOnEnd "standing for " msg - (sig, rest) = T.span (/='’') . T.dropWhile (=='‘') . T.dropWhile (/='‘') $ msgSigPart - -- If we're completing something like ‘foo :: _’ parens can be safely omitted. - isToplevelSig = errorMessageRefersToToplevelHole rest - -- Parenthesize type applications, e.g. (Maybe Char). - isApp = T.any isSpace sig - -- Do not add extra parentheses to lists, tuples and already parenthesized types. 
- enclosed = - case T.uncons sig of +extractWildCardTypeSignature :: FileDiagnostic -> T.Text +extractWildCardTypeSignature diag = + case hole_ty <$> diagReportHoleError diag of + Just ty + | isTopLevel || not (isApp ty) || enclosed ty -> printOutputable ty + | otherwise -> "(" <> printOutputable ty <> ")" Nothing -> error "GHC provided invalid type" - Just (firstChr, _) -> not (T.null sig) && (firstChr, T.last sig) `elem` [('(', ')'), ('[', ']')] + where + isTopLevel :: Bool + isTopLevel = + maybe False errorMessageRefersToToplevelHole (diagErrInfoContext diag) + + isApp :: Type -> Bool + isApp (AppTy _ _) = True + isApp (TyConApp _ (_ : _)) = True + isApp (FunTy{}) = True + isApp _ = False + + enclosed :: Type -> Bool + enclosed (TyConApp con _) + | con == listTyCon || isTupleTyCon con = True + enclosed _ = False + +-- | Extract the 'ErrInfo' context out of a 'FileDiagnostic' and render it to +-- 'Text' +diagErrInfoContext :: FileDiagnostic -> Maybe T.Text +diagErrInfoContext diag = do + (_, detailedMsg) <- + diag + ^? fdStructuredMessageL + . _SomeStructuredMessage + . msgEnvelopeErrorL + . _TcRnMessageWithCtx + . _TcRnMessageWithInfo + let TcRnMessageDetailed err _ = detailedMsg + ErrInfo errInfoCtx _ = err + + Just (printOutputable errInfoCtx) --- | Detect whether user wrote something like @foo :: _@ or @foo :: (_, Int)@. +-- | Detect whether user wrote something like @foo :: _@ or @foo :: Maybe _@. -- The former is considered toplevel case for which the function returns 'True', -- the latter is not toplevel and the returned value is 'False'. -- --- When type hole is at toplevel then there’s a line starting with --- "• In the type signature" which ends with " :: _" like in the +-- When type hole is at toplevel then the ErrInfo context starts with +-- "In the type signature" which ends with " :: _" like in the -- following snippet: -- --- source/library/Language/Haskell/Brittany/Internal.hs:131:13: error: --- • Found type wildcard ‘_’ standing for ‘HsDecl GhcPs’ --- To use the inferred type, enable PartialTypeSignatures --- • In the type signature: decl :: _ --- In an equation for ‘splitAnnots’: --- splitAnnots m@HsModule {hsmodAnn, hsmodDecls} --- = undefined --- where --- ann :: SrcSpanAnnA --- decl :: _ --- L ann decl = head hsmodDecls --- • Relevant bindings include --- [REDACTED] +-- Just "In the type signature: decl :: _" -- -- When type hole is not at toplevel there’s a stack of where -- the hole was located ending with "In the type signature": -- --- source/library/Language/Haskell/Brittany/Internal.hs:130:20: error: --- • Found type wildcard ‘_’ standing for ‘GhcPs’ --- To use the inferred type, enable PartialTypeSignatures --- • In the first argument of ‘HsDecl’, namely ‘_’ --- In the type ‘HsDecl _’ --- In the type signature: decl :: HsDecl _ --- • Relevant bindings include --- [REDACTED] +-- Just "In the first argument of ‘HsDecl’\nIn the type signature: decl :: HsDecl _" errorMessageRefersToToplevelHole :: T.Text -> Bool errorMessageRefersToToplevelHole msg = - not (T.null prefix) && " :: _" `T.isSuffixOf` T.takeWhile (/= '\n') rest - where - (prefix, rest) = T.breakOn "• In the type signature:" msg + "In the type signature:" `T.isPrefixOf` msg + && " :: _" `T.isSuffixOf` T.takeWhile (/= '\n') msg diff --git a/plugins/hls-refactor-plugin/test/Main.hs b/plugins/hls-refactor-plugin/test/Main.hs index da45083a08..0fb8b61f83 100644 --- a/plugins/hls-refactor-plugin/test/Main.hs +++ b/plugins/hls-refactor-plugin/test/Main.hs @@ -701,6 +701,10 @@ typeWildCardActionTests = 
testGroup "type wildcard actions" "func::Integer -> Integer -> Integer" , "func x y = x + y" ] + , testNoUseTypeSignature "ignores typed holes" + [ "func :: a -> a" + , "func x = _" + ] , testGroup "add parens if hole is part of bigger type" [ testUseTypeSignature "subtype 1" [ "func :: _ -> Integer -> Integer" @@ -736,19 +740,33 @@ typeWildCardActionTests = testGroup "type wildcard actions" -- | Test session of given name, checking action "Use type signature..." -- on a test file with given content and comparing to expected result. testUseTypeSignature name textIn textOut = testSession name $ do - let fileStart = "module Testing where" + let expectedContentAfterAction = T.unlines $ fileStart : textOut content = T.unlines $ fileStart : textIn - expectedContentAfterAction = T.unlines $ fileStart : textOut doc <- createDoc "Testing.hs" "haskell" content - _ <- waitForDiagnostics - actionsOrCommands <- getAllCodeActions doc - [addSignature] <- pure [action | InR action@CodeAction { _title = actionTitle } <- actionsOrCommands - , "Use type signature" `T.isPrefixOf` actionTitle - ] + + (Just addSignature) <- getUseTypeSigAction doc executeCodeAction addSignature contentAfterAction <- documentContents doc liftIO $ expectedContentAfterAction @=? contentAfterAction + testNoUseTypeSignature name textIn = testSession name $ do + let content = T.unlines $ fileStart : textIn + doc <- createDoc "Testing.hs" "haskell" content + codeAction <- getUseTypeSigAction doc + liftIO $ Nothing @=? codeAction + + fileStart = "module Testing where" + + getUseTypeSigAction docIn = do + _ <- waitForDiagnostics + actionsOrCommands <- getAllCodeActions docIn + + let addSignatures = + [ action | InR action@CodeAction { _title = actionTitle } <- actionsOrCommands + , "Use type signature" `T.isPrefixOf` actionTitle + ] + pure $ listToMaybe addSignatures + removeImportTests :: TestTree removeImportTests = testGroup "remove import actions" @@ -3036,6 +3054,21 @@ addFunctionConstraintTests = let , "eq (Pair x y) (Pair x' y') = x == x' && y == y'" ] + -- See https://github.com/haskell/haskell-language-server/issues/4648 + -- When haddock comment appears after the =>, code action was introducing the + -- new constraint in the comment + incompleteConstraintSourceCodeWithCommentInTypeSignature :: T.Text -> T.Text + incompleteConstraintSourceCodeWithCommentInTypeSignature constraint = + T.unlines + + [ "module Testing where" + , "foo " + , " :: ("<> constraint <> ") =>" + , " -- This is a comment" + , " m ()" + , "foo = pure ()" + ] + missingMonadConstraint constraint = T.unlines [ "module Testing where" , "f :: " <> constraint <> "m ()" @@ -3079,6 +3112,11 @@ addFunctionConstraintTests = let "Add `Eq b` to the context of the type signature for `eq`" (incompleteConstraintSourceCodeWithNewlinesInTypeSignature "Eq a") (incompleteConstraintSourceCodeWithNewlinesInTypeSignature "Eq a, Eq b") + , checkCodeAction + "preexisting constraint, with haddock comment in type signature" + "Add `Applicative m` to the context of the type signature for `foo`" + (incompleteConstraintSourceCodeWithCommentInTypeSignature "") + (incompleteConstraintSourceCodeWithCommentInTypeSignature " Applicative m") , checkCodeAction "missing Monad constraint" "Add `Monad m` to the context of the type signature for `f`" @@ -3341,7 +3379,7 @@ addSigActionTests = let executeCodeAction chosenAction modifiedCode <- documentContents doc liftIO $ expectedCode @=? 
modifiedCode - issue806 = if ghcVersion >= GHC912 then + issue806 = if ghcVersion >= GHC910 then "hello = print" >:: "hello :: GHC.Types.ZonkAny 0 -> IO ()" -- GHC now returns ZonkAny 0 instead of Any. https://gitlab.haskell.org/ghc/ghc/-/issues/25895 else "hello = print" >:: "hello :: GHC.Types.Any -> IO ()" -- Documents current behavior outlined in #806 diff --git a/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs index 7cc1122982..0ba6bc7975 100644 --- a/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs +++ b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs @@ -8,7 +8,6 @@ module Ide.Plugin.Rename (descriptor, E.Log) where -import Compat.HieTypes import Control.Lens ((^.)) import Control.Monad import Control.Monad.Except (ExceptT, throwError) @@ -25,7 +24,6 @@ import Data.List.NonEmpty (NonEmpty ((:|)), import qualified Data.Map as M import Data.Maybe import Data.Mod.Word -import qualified Data.Set as S import qualified Data.Text as T import Development.IDE (Recorder, WithPriority, usePropertyAction) @@ -42,7 +40,14 @@ import qualified Development.IDE.GHC.ExactPrint as E import Development.IDE.Plugin.CodeAction import Development.IDE.Spans.AtPoint import Development.IDE.Types.Location +import GHC.Iface.Ext.Types (HieAST (..), + HieASTs (..), + NodeOrigin (..), + SourcedNodeInfo (..)) +import GHC.Iface.Ext.Utils (generateReferencesMap) +import HieDb ((:.) (..)) import HieDb.Query +import HieDb.Types (RefRow (refIsGenerated)) import Ide.Plugin.Error import Ide.Plugin.Properties import Ide.PluginUtils @@ -196,6 +201,8 @@ refsAtName state nfp name = do dbRefs <- case nameModule_maybe name of Nothing -> pure [] Just mod -> liftIO $ mapMaybe rowToLoc <$> withHieDb (\hieDb -> + -- See Note [Generated references] + filter (\(refRow HieDb.:. _) -> refIsGenerated refRow) <$> findReferences hieDb True @@ -230,15 +237,29 @@ handleGetHieAst state nfp = -- which is bad (see https://github.com/haskell/haskell-language-server/issues/3799) fmap removeGenerated $ runActionE "Rename.GetHieAst" state $ useE GetHieAst nfp --- | We don't want to rename in code generated by GHC as this gives false positives. --- So we restrict the HIE file to remove all the generated code. +{- Note [Generated references] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +GHC inserts `Use`s of record constructor everywhere where its record selectors are used, +which leads to record fields being renamed whenever corresponding constructor is renamed. +see https://github.com/haskell/haskell-language-server/issues/2915 +To work around this, we filter out compiler-generated references. 
+-} removeGenerated :: HieAstResult -> HieAstResult -removeGenerated HAR{..} = HAR{hieAst = go hieAst,..} +removeGenerated HAR{..} = + HAR{hieAst = sourceOnlyAsts, refMap = sourceOnlyRefMap, ..} where - go :: HieASTs a -> HieASTs a - go hf = - HieASTs (fmap goAst (getAsts hf)) - goAst (Node nsi sp xs) = Node (SourcedNodeInfo $ M.restrictKeys (getSourcedNodeInfo nsi) (S.singleton SourceInfo)) sp (map goAst xs) + goAsts :: HieASTs a -> HieASTs a + goAsts (HieASTs asts) = HieASTs (fmap goAst asts) + + goAst :: HieAST a -> HieAST a + goAst (Node (SourcedNodeInfo sniMap) sp children) = + let sourceOnlyNodeInfos = SourcedNodeInfo $ M.delete GeneratedInfo sniMap + in Node sourceOnlyNodeInfos sp $ map goAst children + + sourceOnlyAsts = goAsts hieAst + -- Also need to regenerate the RefMap, because the one in HAR + -- is generated from HieASTs containing GeneratedInfo + sourceOnlyRefMap = generateReferencesMap $ getAsts sourceOnlyAsts collectWith :: (Hashable a, Eq b) => (a -> b) -> HashSet a -> [(b, HashSet a)] collectWith f = map (\(a :| as) -> (f a, HS.fromList (a:as))) . groupWith f . HS.toList diff --git a/plugins/hls-rename-plugin/test/Main.hs b/plugins/hls-rename-plugin/test/Main.hs index 5f7fb818ff..b935e6563f 100644 --- a/plugins/hls-rename-plugin/test/Main.hs +++ b/plugins/hls-rename-plugin/test/Main.hs @@ -24,6 +24,11 @@ tests :: TestTree tests = testGroup "Rename" [ goldenWithRename "Data constructor" "DataConstructor" $ \doc -> rename doc (Position 0 15) "Op" + , goldenWithRename "Data constructor with fields" "DataConstructorWithFields" $ \doc -> + rename doc (Position 1 13) "FooRenamed" + , knownBrokenForGhcVersions [GHC96, GHC98] "renaming Constructor{..} with RecordWildcard removes .." $ + goldenWithRename "Data constructor with fields" "DataConstructorWithFieldsRecordWildcards" $ \doc -> + rename doc (Position 1 13) "FooRenamed" , goldenWithRename "Exported function" "ExportedFunction" $ \doc -> rename doc (Position 2 1) "quux" , goldenWithRename "Field Puns" "FieldPuns" $ \doc -> @@ -113,7 +118,7 @@ goldenWithRename title path act = goldenWithHaskellDoc (def { plugins = M.fromList [("rename", def { plcConfig = "crossModule" .= True })] }) renamePlugin title testDataDir path "expected" "hs" act -renameExpectError :: (TResponseError Method_TextDocumentRename) -> TextDocumentIdentifier -> Position -> Text -> Session () +renameExpectError :: TResponseError Method_TextDocumentRename -> TextDocumentIdentifier -> Position -> Text -> Session () renameExpectError expectedError doc pos newName = do let params = RenameParams Nothing doc pos newName rsp <- request SMethod_TextDocumentRename params diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs new file mode 100644 index 0000000000..5fc38c7f01 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE NamedFieldPuns #-} +data Foo = FooRenamed { a :: Int, b :: Bool } + +foo1 :: Foo +foo1 = FooRenamed { a = 1, b = True } + +foo2 :: Foo +foo2 = FooRenamed 1 True + +fun1 :: Foo -> Int +fun1 FooRenamed {a} = a + +fun2 :: Foo -> Int +fun2 FooRenamed {a = i} = i diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs new file mode 100644 index 0000000000..abd8031096 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs @@ -0,0 
+1,14 @@ +{-# LANGUAGE NamedFieldPuns #-} +data Foo = Foo { a :: Int, b :: Bool } + +foo1 :: Foo +foo1 = Foo { a = 1, b = True } + +foo2 :: Foo +foo2 = Foo 1 True + +fun1 :: Foo -> Int +fun1 Foo {a} = a + +fun2 :: Foo -> Int +fun2 Foo {a = i} = i diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs new file mode 100644 index 0000000000..b5dd83cecb --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE RecordWildCards #-} +data Foo = FooRenamed { a :: Int, b :: Bool } + +fun :: Foo -> Int +fun FooRenamed {..} = a diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs new file mode 100644 index 0000000000..8e624b0816 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE RecordWildCards #-} +data Foo = Foo { a :: Int, b :: Bool } + +fun :: Foo -> Int +fun Foo {..} = a diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index b8b07e667f..1bbba24df2 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -3,6 +3,7 @@ {-# LANGUAGE OverloadedLabels #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} @@ -43,6 +44,8 @@ import Development.IDE.Core.Shake (ShakeExtras (..), getVirtualFile) import Development.IDE.GHC.Compat hiding (Warning) import Development.IDE.GHC.Compat.Util (mkFastString) +import GHC.Iface.Ext.Types (HieASTs (getAsts), + pattern HiePath) import Ide.Logger (logWith) import Ide.Plugin.Error (PluginError (PluginInternalError), getNormalizedFilePathE, diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index d9bfc4449d..e93cefb711 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -20,6 +20,10 @@ import qualified Data.Set as Set import Data.Text (Text, unpack) import Development.IDE (HieKind (HieFresh, HieFromDisk)) import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (BindType (..), + ContextInfo (..), + DeclType (..), HieType (..), + HieTypeFlat, TypeIndex) import Ide.Plugin.SemanticTokens.Types import Ide.Plugin.SemanticTokens.Utils (mkRange) import Language.LSP.Protocol.Types (LspEnum (knownValues), diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index fb7fdd9e71..5875ebfa8d 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -11,6 +11,9 @@ import Data.Text (Text) import Development.IDE.Core.PositionMapping (PositionMapping, toCurrentRange) import Development.IDE.GHC.Compat +import 
GHC.Iface.Ext.Types (ContextInfo, Identifier, + IdentifierDetails (..)) +import GHC.Iface.Ext.Utils (RefMap) import Ide.Plugin.SemanticTokens.Mappings import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, HsSemanticTokenType (TModule), diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index e9e8034ce3..be793cfe7a 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -16,8 +16,8 @@ import Development.IDE (Action, usePropertyAction) import GHC.TypeLits (KnownSymbol) import Ide.Plugin.Properties (KeyNameProxy, NotElem, Properties, - PropertyKey (type PropertyKey), - PropertyType (type TEnum), + PropertyKey (PropertyKey), + PropertyType (TEnum), defineEnumProperty, emptyProperties) import Ide.Plugin.SemanticTokens.Types diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 2ed11be333..b6142fb39c 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -22,6 +22,10 @@ import Data.Text.Utf16.Rope.Mixed (Rope) import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE.GHC.Compat import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import GHC.Iface.Ext.Types (HieAST (..), Identifier, + NodeInfo (..), + NodeOrigin (..), + SourcedNodeInfo (..)) import Ide.Plugin.SemanticTokens.Types (HsSemanticTokenType (TModule), RangeHsSemanticTokenTypes (..)) import Language.LSP.Protocol.Types (Position (Position), diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index 7f445bf7ac..da59c28d29 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -10,16 +10,16 @@ module Ide.Plugin.SemanticTokens.Types where import Control.DeepSeq (NFData (rnf), rwhnf) import qualified Data.Array as A import Data.Default (Default (def)) +import Data.Text (Text) import Development.IDE (Pretty (pretty), RuleResult) import qualified Development.IDE.Core.Shake as Shake import Development.IDE.GHC.Compat hiding (loc) import Development.IDE.Graph.Classes (Hashable) import GHC.Generics (Generic) -import Language.LSP.Protocol.Types --- import template haskell -import Data.Text (Text) +import GHC.Iface.Ext.Types (TypeIndex) import Ide.Plugin.Error (PluginError) import Language.Haskell.TH.Syntax (Lift) +import Language.LSP.Protocol.Types -- !!!! 
order of declarations matters deriving enum and ord diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 52cd56a21f..c545d8941a 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -10,6 +10,11 @@ import Data.ByteString.Char8 (unpack) import qualified Data.Map.Strict as Map import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (BindType (..), ContextInfo (..), + DeclType (..), Identifier, + IdentifierDetails (..), + RecFieldContext (..), Span) +import GHC.Iface.Ext.Utils (RefMap) import Prelude hiding (length, span) deriving instance Show DeclType diff --git a/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs b/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs index a1efb7f150..77c9817dba 100644 --- a/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs +++ b/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs @@ -2,7 +2,6 @@ {-# LANGUAGE PatternSynonyms #-} module Ide.Plugin.Stan (descriptor, Log) where -import Compat.HieTypes (HieFile (..)) import Control.DeepSeq (NFData) import Control.Monad (void) import Control.Monad.IO.Class (liftIO) @@ -14,6 +13,7 @@ import qualified Data.Text as T import Development.IDE import Development.IDE.Core.Rules (getHieFile) import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat (HieFile (..)) import GHC.Generics (Generic) import Ide.Plugin.Config (PluginConfig (..)) import Ide.Types (PluginDescriptor (..), PluginId, diff --git a/src/HlsPlugins.hs b/src/HlsPlugins.hs index 87a1af7392..4c135fc48b 100644 --- a/src/HlsPlugins.hs +++ b/src/HlsPlugins.hs @@ -224,7 +224,7 @@ idePlugins recorder = pluginDescToIdePlugins allPlugins let pId = "codeRange" in CodeRange.descriptor (pluginRecorder pId) pId: #endif #if hls_changeTypeSignature - ChangeTypeSignature.descriptor "changeTypeSignature" : + let pId = "changeTypeSignature" in ChangeTypeSignature.descriptor (pluginRecorder pId) pId : #endif #if hls_gadt GADT.descriptor "gadt" : diff --git a/stack-lts22.yaml b/stack-lts22.yaml index 7306295a8a..429125333a 100644 --- a/stack-lts22.yaml +++ b/stack-lts22.yaml @@ -2,12 +2,10 @@ resolver: lts-22.43 # ghc-9.6.6 packages: - . - - ./hie-compat - ./hls-graph - ./ghcide/ - ./hls-plugin-api - ./hls-test-utils - # - ./shake-bench ghc-options: "$everything": -haddock @@ -21,8 +19,9 @@ allow-newer-deps: extra-deps: - Diff-0.5 - floskell-0.11.1 - - hiedb-0.6.0.2 - - hie-bios-0.15.0 + - hiedb-0.7.0.0 + - hie-bios-0.17.0 + - hie-compat-0.3.1.2 - implicit-hie-0.1.4.0 - lsp-2.7.0.0 - lsp-test-0.17.1.0 @@ -39,7 +38,7 @@ extra-deps: - trial-optparse-applicative-0.0.0.0 - trial-tomland-0.0.0.0 - validation-selective-0.2.0.0 - - cabal-add-0.1 + - cabal-add-0.2 - cabal-install-parsers-0.6.1.1 - directory-ospath-streaming-0.2.2 @@ -57,8 +56,6 @@ flags: ghc-lib: true retrie: BuildExecutable: false - cabal-add: - cabal-syntax: true # stan dependencies directory-ospath-streaming: os-string: false diff --git a/stack.yaml b/stack.yaml index ba89370091..43cb239b34 100644 --- a/stack.yaml +++ b/stack.yaml @@ -2,12 +2,10 @@ resolver: lts-23.18 # ghc-9.8.4 packages: - . 
- - ./hie-compat - ./hls-graph - ./ghcide/ - ./hls-plugin-api - ./hls-test-utils - # - ./shake-bench ghc-options: "$everything": -haddock @@ -22,9 +20,10 @@ allow-newer-deps: extra-deps: - floskell-0.11.1 - - hiedb-0.6.0.2 + - hiedb-0.7.0.0 + - hie-compat-0.3.1.2 - implicit-hie-0.1.4.0 - - hie-bios-0.15.0 + - hie-bios-0.17.0 - hw-fingertree-0.1.2.1 - monad-dijkstra-0.1.1.5 - retrie-1.2.3 @@ -37,7 +36,7 @@ extra-deps: - trial-optparse-applicative-0.0.0.0 - trial-tomland-0.0.0.0 - directory-ospath-streaming-0.2.2 - + - cabal-add-0.2 configure-options: ghcide: - --disable-library-for-ghci @@ -51,8 +50,6 @@ flags: ghc-lib: true retrie: BuildExecutable: false - cabal-add: - cabal-syntax: true # stan dependencies directory-ospath-streaming: os-string: false
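
Editor's note: the plugin-side hunks above share one pattern. With the vendored hie-compat package dropped from the stack configurations (it is now taken from Hackage as hie-compat-0.3.1.2), HIE types and helpers are imported directly from the GHC API, i.e. GHC.Iface.Ext.Types and GHC.Iface.Ext.Utils, rather than through Development.IDE.GHC.Compat re-exports. The sketch below is illustrative only (the module and function names are made up and are not part of the patch); it applies that import style to the GeneratedInfo filtering described in Note [Generated references] in the rename-plugin hunk above.

module StripGenerated (stripGenerated) where  -- hypothetical module, for illustration

import qualified Data.Map            as M
-- With hie-compat gone, these come straight from the ghc library:
import           GHC.Iface.Ext.Types (HieAST (..), HieASTs (..),
                                      NodeOrigin (..), SourcedNodeInfo (..))
import           GHC.Iface.Ext.Utils (RefMap, generateReferencesMap)

-- | Drop compiler-generated node info from every AST node and rebuild the
-- reference map from the filtered trees, mirroring 'removeGenerated' in
-- Ide.Plugin.Rename above.
stripGenerated :: HieASTs a -> (HieASTs a, RefMap a)
stripGenerated (HieASTs asts) = (filtered, generateReferencesMap (getAsts filtered))
  where
    filtered = HieASTs (fmap go asts)
    go (Node (SourcedNodeInfo sniMap) sp children) =
      Node (SourcedNodeInfo (M.delete GeneratedInfo sniMap)) sp (map go children)

Regenerating the RefMap after filtering matters because, as the patch comment notes, the map stored in HieAstResult was built from the unfiltered trees and would otherwise still contain the generated references.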
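
Editor's note: the hlint-plugin hunk above likewise simplifies hint suppression to a single plain comment, so the DynFlags check for Opt_WarnUnrecognisedPragmas and the extra -Wno-unrecognised-pragmas pragma are no longer needed. A minimal sketch of the edit it now produces, using only lsp-types (module and helper names are illustrative, not from the patch):

{-# LANGUAGE OverloadedStrings #-}
module IgnoreHintSketch where  -- hypothetical module, for illustration

import           Data.Text                   (Text)
import           Language.LSP.Protocol.Types (Position (..), Range (..),
                                              TextEdit (..), UInt)

-- | Build the single ignore-comment edit inserted at the next-pragma line,
-- mirroring the simplified mkSuppressHintTextEdits in the hunk above.
mkIgnoreHintEdit :: UInt -> Text -> TextEdit
mkIgnoreHintEdit nextPragmaLine hint =
  let pos = Position nextPragmaLine 0
  in  TextEdit (Range pos pos) ("{- HLINT ignore \"" <> hint <> "\" -}\n")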