diff --git a/.circleci/config.yml b/.circleci/config.yml
index 062adcb5ec..e2be3f6528 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -60,14 +60,14 @@ version: 2
jobs:
stackage-lts22:
docker:
- - image: haskell:9.6.5-slim-buster
+ - image: haskell:9.6.6-slim-bullseye
environment:
- STACK_FILE: "stack-lts22.yaml"
<<: *defaults
- stackage-nightly:
+ stackage-lts23:
docker:
- - image: haskell:9.8.2-slim-buster
+ - image: haskell:9.8.4-slim-bullseye
environment:
- STACK_FILE: "stack.yaml"
<<: *defaults
@@ -77,4 +77,4 @@ workflows:
multiple-ghcs:
jobs:
- stackage-lts22
- - stackage-nightly
+ - stackage-lts23
diff --git a/.github/actions/bindist-actions/action-deb10/action.yaml b/.github/actions/bindist-actions/action-deb10/action.yaml
new file mode 100644
index 0000000000..da96b04669
--- /dev/null
+++ b/.github/actions/bindist-actions/action-deb10/action.yaml
@@ -0,0 +1,21 @@
+description: Container for deb10
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-deb10
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: apt-get update && apt-get install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: debian:10
+ using: docker
diff --git a/.github/actions/bindist-actions/action-deb11/action.yaml b/.github/actions/bindist-actions/action-deb11/action.yaml
new file mode 100644
index 0000000000..8ffe78e1db
--- /dev/null
+++ b/.github/actions/bindist-actions/action-deb11/action.yaml
@@ -0,0 +1,21 @@
+description: Container for deb11
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-deb11
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: apt-get update && apt-get install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: debian:11
+ using: docker
diff --git a/.github/actions/bindist-actions/action-deb12/action.yaml b/.github/actions/bindist-actions/action-deb12/action.yaml
new file mode 100644
index 0000000000..20bcc6a157
--- /dev/null
+++ b/.github/actions/bindist-actions/action-deb12/action.yaml
@@ -0,0 +1,21 @@
+description: Container for deb12
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-deb12
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: apt-get update && apt-get install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: debian:12
+ using: docker
diff --git a/.github/actions/bindist-actions/action-deb9/action.yaml b/.github/actions/bindist-actions/action-deb9/action.yaml
new file mode 100644
index 0000000000..693e3845a5
--- /dev/null
+++ b/.github/actions/bindist-actions/action-deb9/action.yaml
@@ -0,0 +1,24 @@
+description: Container for deb9
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-deb9
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list &&
+ sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list &&
+ sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install
+ -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: debian:9
+ using: docker
diff --git a/.github/actions/bindist-actions/action-fedora33/action.yaml b/.github/actions/bindist-actions/action-fedora33/action.yaml
new file mode 100644
index 0000000000..d20c8feccd
--- /dev/null
+++ b/.github/actions/bindist-actions/action-fedora33/action.yaml
@@ -0,0 +1,21 @@
+description: Container for fedora33
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-fedora33
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: dnf install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs
+ findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs
+ ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which
+ xz zlib-devel patchelf
+ image: fedora:33
+ using: docker
diff --git a/.github/actions/bindist-actions/action-fedora40/action.yaml b/.github/actions/bindist-actions/action-fedora40/action.yaml
new file mode 100644
index 0000000000..83f23b23c8
--- /dev/null
+++ b/.github/actions/bindist-actions/action-fedora40/action.yaml
@@ -0,0 +1,21 @@
+description: Container for fedora40
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-fedora40
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: dnf install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs
+ findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs
+ ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which
+ xz zlib-devel patchelf
+ image: fedora:40
+ using: docker
diff --git a/.github/actions/bindist-actions/action-mint193/action.yaml b/.github/actions/bindist-actions/action-mint193/action.yaml
new file mode 100644
index 0000000000..e1269e0e56
--- /dev/null
+++ b/.github/actions/bindist-actions/action-mint193/action.yaml
@@ -0,0 +1,21 @@
+description: Container for mint193
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-mint193
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: apt-get update && apt-get install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: linuxmintd/mint19.3-amd64
+ using: docker
diff --git a/.github/actions/bindist-actions/action-mint202/action.yaml b/.github/actions/bindist-actions/action-mint202/action.yaml
new file mode 100644
index 0000000000..adea7272f1
--- /dev/null
+++ b/.github/actions/bindist-actions/action-mint202/action.yaml
@@ -0,0 +1,21 @@
+description: Container for mint202
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-mint202
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: apt-get update && apt-get install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: linuxmintd/mint20.2-amd64
+ using: docker
diff --git a/.github/actions/bindist-actions/action-mint213/action.yaml b/.github/actions/bindist-actions/action-mint213/action.yaml
new file mode 100644
index 0000000000..bd09dc0e97
--- /dev/null
+++ b/.github/actions/bindist-actions/action-mint213/action.yaml
@@ -0,0 +1,21 @@
+description: Container for mint213
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-mint213
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: apt-get update && apt-get install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: linuxmintd/mint21.3-amd64
+ using: docker
diff --git a/.github/actions/bindist-actions/action-ubuntu1804/action.yaml b/.github/actions/bindist-actions/action-ubuntu1804/action.yaml
new file mode 100644
index 0000000000..6a6f4662a0
--- /dev/null
+++ b/.github/actions/bindist-actions/action-ubuntu1804/action.yaml
@@ -0,0 +1,21 @@
+description: Container for ubuntu1804
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-ubuntu1804
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: apt-get update && apt-get install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: ubuntu:18.04
+ using: docker
diff --git a/.github/actions/bindist-actions/action-ubuntu2004/action.yaml b/.github/actions/bindist-actions/action-ubuntu2004/action.yaml
new file mode 100644
index 0000000000..3a5b57a370
--- /dev/null
+++ b/.github/actions/bindist-actions/action-ubuntu2004/action.yaml
@@ -0,0 +1,21 @@
+description: Container for ubuntu2004
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-ubuntu2004
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: apt-get update && apt-get install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: ubuntu:20.04
+ using: docker
diff --git a/.github/actions/bindist-actions/action-ubuntu2204/action.yaml b/.github/actions/bindist-actions/action-ubuntu2204/action.yaml
new file mode 100644
index 0000000000..857776507d
--- /dev/null
+++ b/.github/actions/bindist-actions/action-ubuntu2204/action.yaml
@@ -0,0 +1,21 @@
+description: Container for ubuntu2204
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-ubuntu2204
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: apt-get update && apt-get install -y
+ STAGE: ${{ inputs.stage }}
+ TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev
+ git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc
+ autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5
+ libtinfo5 patchelf
+ image: ubuntu:22.04
+ using: docker
diff --git a/.github/actions/bindist-actions/action-unknown/action.yaml b/.github/actions/bindist-actions/action-unknown/action.yaml
new file mode 100644
index 0000000000..96cf0593e9
--- /dev/null
+++ b/.github/actions/bindist-actions/action-unknown/action.yaml
@@ -0,0 +1,21 @@
+description: Container for unknown
+inputs:
+ stage:
+ description: which stage to build
+ required: true
+ version:
+ description: which GHC version to build/test
+ required: false
+name: action-unknown
+runs:
+ entrypoint: .github/scripts/entrypoint.sh
+ env:
+ GHC_VERSION: ${{ inputs.version }}
+ INSTALL: yum -y install epel-release && yum install -y --allowerasing
+ STAGE: ${{ inputs.stage }}
+ TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs
+ findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs
+ ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which
+ xz zlib-devel patchelf
+ image: rockylinux:8
+ using: docker
diff --git a/.github/actions/setup-build/action.yml b/.github/actions/setup-build/action.yml
index 975fa90617..da1ece3140 100644
--- a/.github/actions/setup-build/action.yml
+++ b/.github/actions/setup-build/action.yml
@@ -7,7 +7,7 @@ inputs:
cabal:
description: "Cabal version"
required: false
- default: "3.10.2.0"
+ default: "3.14.2.0"
os:
description: "Operating system: Linux, Windows or macOS"
required: true
@@ -31,7 +31,7 @@ runs:
sudo chown -R $USER /usr/local/.ghcup
shell: bash
- - uses: haskell-actions/setup@v2.7.9
+ - uses: haskell-actions/setup@v2.7.10
id: HaskEnvSetup
with:
ghc-version : ${{ inputs.ghc }}
diff --git a/.github/generate-ci/LICENSE b/.github/generate-ci/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/.github/generate-ci/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/.github/generate-ci/README.mkd b/.github/generate-ci/README.mkd
new file mode 100644
index 0000000000..fef645ea12
--- /dev/null
+++ b/.github/generate-ci/README.mkd
@@ -0,0 +1,5 @@
+# generate-ci
+
+This is the generator for the release bindist CI.
+
+Edit `./gen_ci.hs` to change the configuration and run `./generate-jobs` to regenerate the workflow.
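+
+For reference, regenerating boils down to the following (assuming a working
+`cabal` is on `PATH`):
+
+    cd .github/generate-ci
+    ./generate-jobs   # runs: cabal run -v0 generate-ci "$root" > ../workflows/release.yaml
+
+Besides `.github/workflows/release.yaml`, the generator also rewrites the
+per-distro container actions under `.github/actions/bindist-actions/`.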
diff --git a/.github/generate-ci/cabal.project b/.github/generate-ci/cabal.project
new file mode 100644
index 0000000000..e6fdbadb43
--- /dev/null
+++ b/.github/generate-ci/cabal.project
@@ -0,0 +1 @@
+packages: .
diff --git a/.github/generate-ci/gen_ci.hs b/.github/generate-ci/gen_ci.hs
new file mode 100644
index 0000000000..28a81d8576
--- /dev/null
+++ b/.github/generate-ci/gen_ci.hs
@@ -0,0 +1,618 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+
+import Control.Monad
+import Data.Maybe
+
+import Data.Aeson hiding (encode)
+import qualified Data.Aeson.Key as K
+import Data.Aeson.Types (Pair)
+import Data.Yaml
+
+import qualified Data.ByteString as BS
+
+import qualified Data.List as L
+
+import System.Directory
+import System.Environment
+import System.FilePath
+
+-------------------------------------------------------------------------------
+-- Configuration parameters
+-------------------------------------------------------------------------------
+
+data Opsys
+ = Linux Distro
+ | Darwin
+ | Windows deriving (Eq)
+
+osName :: Opsys -> String
+osName Darwin = "mac"
+osName Windows = "windows"
+osName (Linux d) = "linux-" ++ distroName d
+
+data Distro
+ = Debian9
+ | Debian10
+ | Debian11
+ | Debian12
+ | Ubuntu1804
+ | Ubuntu2004
+ | Ubuntu2204
+ | Mint193
+ | Mint202
+ | Mint213
+ | Fedora33
+ | Fedora40
+ | Rocky8
+ deriving (Eq, Enum, Bounded)
+
+allDistros :: [Distro]
+allDistros = [minBound .. maxBound]
+
+data Arch = Amd64 | AArch64
+archName :: Arch -> String
+archName Amd64 = "x86_64"
+archName AArch64 = "aarch64"
+
+artifactName :: Arch -> Opsys -> String
+artifactName arch opsys = archName arch ++ "-" ++ case opsys of
+ Linux distro -> "linux-" ++ distroName distro
+ Darwin -> "apple-darwin"
+ Windows -> "mingw64"
+
+data GHC
+ = GHC967
+ | GHC984
+ | GHC9102
+ | GHC9122
+ deriving (Eq, Enum, Bounded)
+
+ghcVersion :: GHC -> String
+ghcVersion GHC967 = "9.6.7"
+ghcVersion GHC984 = "9.8.4"
+ghcVersion GHC9102 = "9.10.2"
+ghcVersion GHC9122 = "9.12.2"
+
+ghcVersionIdent :: GHC -> String
+ghcVersionIdent = filter (/= '.') . ghcVersion
+
+allGHCs :: [GHC]
+allGHCs = [minBound .. maxBound]
+
+data Stage = Build GHC | Bindist | Test
+
+-------------------------------------------------------------------------------
+-- Distro Configuration
+-------------------------------------------------------------------------------
+
+distroImage :: Distro -> String
+distroImage Debian9 = "debian:9"
+distroImage Debian10 = "debian:10"
+distroImage Debian11 = "debian:11"
+distroImage Debian12 = "debian:12"
+distroImage Ubuntu1804 = "ubuntu:18.04"
+distroImage Ubuntu2004 = "ubuntu:20.04"
+distroImage Ubuntu2204 = "ubuntu:22.04"
+distroImage Mint193 = "linuxmintd/mint19.3-amd64"
+distroImage Mint202 = "linuxmintd/mint20.2-amd64"
+distroImage Mint213 = "linuxmintd/mint21.3-amd64"
+distroImage Fedora33 = "fedora:33"
+distroImage Fedora40 = "fedora:40"
+distroImage Rocky8 = "rockylinux:8"
+
+distroName :: Distro -> String
+distroName Debian9 = "deb9"
+distroName Debian10 = "deb10"
+distroName Debian11 = "deb11"
+distroName Debian12 = "deb12"
+distroName Ubuntu1804 = "ubuntu1804"
+distroName Ubuntu2004 = "ubuntu2004"
+distroName Ubuntu2204 = "ubuntu2204"
+distroName Mint193 = "mint193"
+distroName Mint202 = "mint202"
+distroName Mint213 = "mint213"
+distroName Fedora33 = "fedora33"
+distroName Fedora40 = "fedora40"
+distroName Rocky8 = "unknown"
+
+distroInstall :: Distro -> String
+distroInstall Debian9 = "sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install -y"
+distroInstall Debian10 = "apt-get update && apt-get install -y"
+distroInstall Debian11 = "apt-get update && apt-get install -y"
+distroInstall Debian12 = "apt-get update && apt-get install -y"
+distroInstall Ubuntu1804 = "apt-get update && apt-get install -y"
+distroInstall Ubuntu2004 = "apt-get update && apt-get install -y"
+distroInstall Ubuntu2204 = "apt-get update && apt-get install -y"
+distroInstall Mint193 = "apt-get update && apt-get install -y"
+distroInstall Mint202 = "apt-get update && apt-get install -y"
+distroInstall Mint213 = "apt-get update && apt-get install -y"
+distroInstall Fedora33 = "dnf install -y"
+distroInstall Fedora40 = "dnf install -y"
+distroInstall Rocky8 = "yum -y install epel-release && yum install -y --allowerasing"
+
+distroTools :: Distro -> String
+distroTools Debian9 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Debian10 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Debian11 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Debian12 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Ubuntu1804 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Ubuntu2004 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Ubuntu2204 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Mint193 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Mint202 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Mint213 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
+distroTools Fedora33 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
+distroTools Fedora40 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
+distroTools Rocky8 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
+
+-------------------------------------------------------------------------------
+-- OS/runner Config
+-------------------------------------------------------------------------------
+
+baseEnv :: [(Key,Value)]
+baseEnv = [ "AWS_SECRET_ACCESS_KEY" .= str "${{ secrets.AWS_SECRET_ACCESS_KEY }}"
+ , "AWS_ACCESS_KEY_ID" .= str "${{ secrets.AWS_ACCESS_KEY_ID }}"
+ , "S3_HOST" .= str "${{ secrets.S3_HOST }}"
+ , "TZ" .= str "Asia/Singapore"
+ ]
+
+-- | Environment configuration
+envVars :: Arch -> Opsys -> Value
+envVars arch os = object $
+ baseEnv
+ ++ [ "TARBALL_EXT" .= str (case os of
+ Windows -> "zip"
+ _ -> "tar.xz")
+ , "ARCH" .= str (case arch of
+ Amd64 -> "64"
+ AArch64 -> "ARM64")
+ , "ADD_CABAL_ARGS" .= str (case (os,arch) of
+ (Linux _, Amd64) -> "--enable-split-sections"
+ _ -> "")
+ , "ARTIFACT" .= artifactName arch os
+ ]
+ ++ [ "DEBIAN_FRONTEND" .= str "noninteractive"
+ | Linux _ <- [os]
+ ]
+ ++ [ "MACOSX_DEPLOYMENT_TARGET" .= str "10.13"
+ | Darwin <- [os]
+ ]
+ ++ [ "HOMEBREW_CHANGE_ARCH_TO_ARM" .= str "1"
+ | Darwin <- [os], AArch64 <- [arch]
+ ]
+
+-- | Runner selection
+runner :: Arch -> Opsys -> [Value]
+runner Amd64 (Linux _) = ["ubuntu-latest"]
+runner AArch64 (Linux _) = ["self-hosted", "Linux", "ARM64", "maerwald"]
+runner Amd64 Darwin = ["macOS-13"]
+runner AArch64 Darwin = ["self-hosted", "macOS", "ARM64"]
+runner Amd64 Windows = ["windows-latest"]
+runner AArch64 Windows = error "aarch64 windows not supported"
+
+-- | Runner selection for bindist jobs
+bindistRunner :: Arch -> Opsys -> [Value]
+bindistRunner Amd64 (Linux _) = ["self-hosted", "linux-space", "maerwald"]
+bindistRunner AArch64 (Linux _) = ["self-hosted", "Linux", "ARM64", "maerwald"]
+bindistRunner Amd64 Darwin = ["macOS-13"]
+bindistRunner AArch64 Darwin = ["self-hosted", "macOS", "ARM64"]
+bindistRunner Amd64 Windows = ["windows-latest"]
+bindistRunner AArch64 Windows = error "aarch64 windows not supported"
+
+-------------------------------------------------------------------------------
+-- Action generation
+-------------------------------------------------------------------------------
+-- Each x86-linux job has its own action, living in a separate file
+-- The contents of the file are derived from the 'Action' datatype
+--
+-- We do this so that we can run the build in the right kind of OS container,
+-- but not be forced to run the checkout and artifact-upload steps in the same container.
+--
+-- This is because we want to use container images that are not supported by the
+-- GitHub-provided actions; see for instance https://github.com/actions/upload-artifact/issues/489
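+--
+-- For illustration, a Debian 10 build step produced this way ends up in the
+-- generated workflow looking roughly like:
+--
+--   - name: Build 9.6.7
+--     uses: ./.github/actions/bindist-actions/action-deb10
+--     with:
+--       stage: BUILD
+--       version: 9.6.7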
+-------------------------------------------------------------------------------
+
+-- | Container actions for x86-linux runners.
+-- Each of these corresponds to a separate action file,
+-- called 'actionName', located at 'actionPath'
+data Action
+ = Action
+ { actionName :: String
+ , actionDistro :: Distro
+ }
+
+actionDir :: FilePath
+actionDir = "./.github/actions/bindist-actions/"
+
+actionPath :: Distro -> FilePath
+actionPath d = actionDir ++ distroActionName d
+
+instance ToJSON Action where
+ toJSON Action{..} = object
+ [ "name" .= actionName
+ , "description" .= str ("Container for " ++ distroName actionDistro)
+ , "inputs" .= object
+ [ "stage" .= object
+ [ "description" .= str "which stage to build"
+ , "required" .= True
+ ]
+ , "version" .= object
+ [ "description" .= str "which GHC version to build/test"
+ , "required" .= False
+ ]
+ ]
+ , "runs" .= object
+ [ "using" .= str "docker"
+ , "image" .= distroImage actionDistro
+ , "entrypoint" .= str ".github/scripts/entrypoint.sh"
+ , "env" .= object
+ [ "STAGE" .= str "${{ inputs.stage }}"
+ , "INSTALL" .= distroInstall actionDistro
+ , "TOOLS" .= distroTools actionDistro
+ , "GHC_VERSION" .= str "${{ inputs.version }}"
+ ]
+ ]
+ ]
+
+configAction :: Config -> Maybe Action
+configAction (MkConfig Amd64 (Linux d) _) = Just $ Action (distroActionName d) d
+configAction _ = Nothing
+
+distroActionName :: Distro -> String
+distroActionName d = "action-" ++ distroName d
+
+customAction :: Distro -> Stage -> Value
+customAction d st = flip (ghAction stepName (actionPath d)) [] $ case st of
+ Build v ->
+ [ "stage" .= str "BUILD"
+ , "version" .= ghcVersion v
+ ]
+ Test ->
+ [ "stage" .= str "TEST"
+ ]
+ Bindist ->
+ [ "stage" .= str "BINDIST"
+ ]
+ where
+ stepName = case st of
+ Build v -> "Build " ++ ghcVersion v
+ Test -> "Test"
+ Bindist -> "Bindist"
+
+-------------------------------------------------------------------------------
+-- CI generation
+-------------------------------------------------------------------------------
+-- This is the code that generates the bindist workflow
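+--
+-- The emitted workflow has roughly this shape (job names shown for a single
+-- x86_64 Debian 10 configuration and one GHC; the real file repeats this per
+-- configuration and GHC version, plus a final release job):
+--
+--   name: Build and release
+--   on:
+--     push:
+--       tags: ['*']
+--     schedule:
+--       - cron: '0 2 * * 1'
+--   jobs:
+--     build-x86_64-linux-deb10-967: ...
+--     bindist-x86_64-linux-deb10: ...
+--     test-x86_64-linux-deb10: ...
+--     release: ...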
+
+-- | Global CI config type
+data CI = CI [Config]
+
+data Config = MkConfig Arch Opsys [GHC]
+
+instance ToJSON CI where
+ toJSON (CI cs) = object
+ [ "name" .= str "Build and release"
+ , "on" .= object [ "push" .= object ["tags" .= [str "*"]]
+ , "schedule" .= [object ["cron" .= str "0 2 * * 1"]]
+ ]
+ , "env" .= object
+ [ "CABAL_CACHE_DISABLE" .= str "${{ vars.CABAL_CACHE_DISABLE }}"
+ , "CABAL_CACHE_NONFATAL" .= str "${{ vars.CABAL_CACHE_NONFATAL }}"
+ ]
+ , "jobs" .= object (concatMap (getConfigJobs . makeJobs) cs ++ [releaseJob cs])
+ ]
+
+type Job = Pair
+
+data ConfigJobs = ConfigJobs { buildJobs :: [Job], bindistJob :: Job, testJob :: Job}
+
+getConfigJobs :: ConfigJobs -> [Job]
+getConfigJobs ConfigJobs{..} = buildJobs ++ [bindistJob, testJob]
+
+makeJobs :: Config -> ConfigJobs
+makeJobs (MkConfig arch os vs) =
+ ConfigJobs
+ { buildJobs = [ buildJob arch os ver | ver <- vs ]
+ , bindistJob = mkBindistJob arch os vs
+ , testJob = mkTestJob arch os
+ }
+
+buildJobName :: Arch -> Opsys -> GHC -> String
+buildJobName arch os version = L.intercalate "-" ["build",archName arch, osName os, ghcVersionIdent version]
+
+testJobName :: Arch -> Opsys -> String
+testJobName arch os = L.intercalate "-" ["test",archName arch, osName os]
+
+bindistJobName :: Arch -> Opsys -> String
+bindistJobName arch os = L.intercalate "-" ["bindist",archName arch, osName os]
+
+bindistName :: Arch -> Opsys -> String
+bindistName arch os = "bindist-" ++ artifactName arch os
+
+setupAction :: Arch -> Opsys -> [Value]
+-- Extra setup steps that some runners need before checkout
+-- (currently only the self-hosted aarch64-linux runner).
+setupAction AArch64 (Linux Ubuntu2004) =
+ [ ghRun "clean and git config for aarch64-linux" "bash" [] $ unlines
+ [ "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +"
+ , "git config --global --get-all safe.directory | grep '^\\*$' || git config --global --add safe.directory \"*\""
+ ]
+ ]
+setupAction _ _ = []
+
+releaseJob :: [Config] -> Job
+releaseJob cs =
+ "release" .= object
+ [ "name" .= str "release"
+ , "runs-on" .= str "ubuntu-latest"
+ , "needs" .= [testJobName arch os | MkConfig arch os _ <- cs]
+ , "if" .= str "startsWith(github.ref, 'refs/tags/')"
+ , "steps" .= ( [ checkoutAction ]
+ ++ [ downloadArtifacts (bindistName arch os) "./out" | MkConfig arch os _ <- cs]
+ ++ [ ghRun "Prepare release" "bash" [] $ unlines
+ [ "sudo apt-get update && sudo apt-get install -y tar xz-utils"
+ , "cd out/plan.json"
+ , "tar cf plan_json.tar *"
+ , "mv plan_json.tar ../"
+ , "cd ../.."
+ , "export RELEASE=$GITHUB_REF_NAME"
+ , "git archive --format=tar.gz -o \"out/haskell-language-server-${RELEASE}-src.tar.gz\" --prefix=\"haskell-language-server-${RELEASE}/\" HEAD"
+ ]
+ , ghAction "Release" "softprops/action-gh-release@v2"
+ [ "draft" .= True
+ , "files" .= unlines
+ [ "./out/*.zip"
+ , "./out/*.tar.xz"
+ , "./out/*.tar.gz"
+ , "./out/*.tar"
+ ]
+ ] []
+ ])
+ ]
+
+
+
+buildJob :: Arch -> Opsys -> GHC -> Job
+buildJob arch os v =
+ K.fromString (buildJobName arch os v) .= object
+ [ "runs-on" .= runner arch os
+ , "name" .= str (buildJobName arch os v ++ " (Build binaries)")
+ , "environment" .= str "CI"
+ , "env" .= thisEnv
+ , "steps" .=
+ ( setupAction arch os
+ ++ [ checkoutAction ]
+ ++ buildStep arch os
+ ++ [uploadArtifacts ("artifacts-"++buildJobName arch os v) outputname])
+ ]
+
+ where thisEnv = envVars arch os
+ art = artifactName arch os
+ outputname
+ | Windows <- os = "./out/*"
+ | otherwise = ("out-"++art++"-"++ghcVersion v++".tar")
+ buildStep Amd64 (Linux d) = [customAction d (Build v)]
+ buildStep AArch64 (Linux Ubuntu2004) =
+ [ ghAction "Build aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal"
+ [ "args" .= str "bash .github/scripts/build.sh" ]
+ [ "GHC_VERSION" .= ghcVersion v ]
+ , ghAction "Tar aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal"
+ [ "args" .= str "bash .github/scripts/tar.sh" ]
+ [ "GHC_VERSION" .= ghcVersion v ]
+ ]
+ buildStep AArch64 (Linux _) = error "aarch64-linux non-ubuntu not supported"
+
+ buildStep Amd64 Darwin = [ghRun "Run build" "sh" ["GHC_VERSION" .= ghcVersion v] $ unlines $
+ [ "brew install coreutils tree"
+ , "bash .github/scripts/build.sh"
+ , "tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/"
+ ]
+ ]
+ buildStep AArch64 Darwin = [ghRun "Run build" "sh" ["GHC_VERSION" .= ghcVersion v] $ unlines $
+ [ "bash .github/scripts/brew.sh git coreutils autoconf automake tree"
+ , "export PATH=\"$HOME/.brew/bin:$HOME/.brew/sbin:$PATH\""
+ , "export LD=ld"
+ , "bash .github/scripts/build.sh"
+ , "tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/"
+ ]
+ ]
+
+ buildStep Amd64 Windows = [ghRun "Run build" "pwsh" ["GHC_VERSION" .= ghcVersion v] $ unlines $
+ [ "$env:CHERE_INVOKING = 1"
+ , "$env:MSYS2_PATH_TYPE = \"inherit\""
+ , "$ErrorActionPreference = \"Stop\""
+ , "C:\\msys64\\usr\\bin\\bash -lc \"bash .github/scripts/build.sh\""
+ ]
+ ]
+ buildStep AArch64 Windows = error "aarch64 windows not supported"
+
+mkBindistJob :: Arch -> Opsys -> [GHC] -> Job
+mkBindistJob arch os vs =
+ K.fromString (bindistJobName arch os) .= object
+ [ "runs-on" .= bindistRunner arch os
+ , "name" .= (bindistJobName arch os ++ " (Prepare bindist)")
+ , "needs" .= [buildJobName arch os ver | ver <- vs]
+ , "env" .= thisEnv
+ , "steps" .=
+ ( setupAction arch os
+ ++ [ checkoutAction ]
+ ++ [downloadArtifacts ("artifacts-"++buildJobName arch os v) outputPath | v <- vs]
+ ++ bindistStep arch os
+ ++ [ uploadArtifacts (bindistName arch os) "./out/*.tar.xz\n./out/plan.json/*\n./out/*.zip" ])
+ ]
+ where thisEnv = envVars arch os
+
+ outputPath
+ | Windows <- os = "./out"
+ | otherwise = "./"
+
+ bindistStep Amd64 (Linux d) = [customAction d Bindist]
+ bindistStep AArch64 (Linux Ubuntu2004) =
+ [ ghAction "Unpack aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal"
+ [ "args" .= str "bash .github/scripts/untar.sh" ]
+ [ ]
+ , ghAction "Tar aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal"
+ [ "args" .= str "bash .github/scripts/bindist.sh" ]
+ [ ]
+ ]
+ bindistStep AArch64 (Linux _) = error "aarch64-linux non-ubuntu not supported"
+
+ bindistStep Amd64 Darwin = [ghRun "Create bindist" "sh" [] $ unlines $
+ [ "brew install coreutils tree"
+ , "for bindist in out-*.tar ; do"
+ , " tar xf \"${bindist}\""
+ , "done"
+ , "unset bindist"
+ , "bash .github/scripts/bindist.sh"
+ ]
+ ]
+ bindistStep AArch64 Darwin = [ghRun "Run build" "sh" [] $ unlines $
+ [ "bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree"
+ , "export PATH=\"$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH\""
+ , "export CC=\"$HOME/.brew/opt/llvm@13/bin/clang\""
+ , "export CXX=\"$HOME/.brew/opt/llvm@13/bin/clang++\""
+ , "export LD=ld"
+ , "export AR=\"$HOME/.brew/opt/llvm@13/bin/llvm-ar\""
+ , "export RANLIB=\"$HOME/.brew/opt/llvm@13/bin/llvm-ranlib\""
+ , "for bindist in out-*.tar ; do"
+ , " tar xf \"${bindist}\""
+ , "done"
+ , "unset bindist"
+ , "bash .github/scripts/bindist.sh"
+ ]
+ ]
+
+ bindistStep Amd64 Windows = [ghRun "Run build" "pwsh" [] $ unlines $
+ [ "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\""
+ , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\""
+ , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -S unzip zip git\""
+ , "taskkill /F /FI \"MODULES eq msys-2.0.dll\""
+ , "$env:CHERE_INVOKING = 1"
+ , "$env:MSYS2_PATH_TYPE = \"inherit\""
+ , "C:\\msys64\\usr\\bin\\bash -lc \"bash .github/scripts/bindist.sh\""
+ ]
+ ]
+ bindistStep AArch64 Windows = error "aarch64 windows not supported"
+
+mkTestJob :: Arch -> Opsys -> Job
+mkTestJob arch os =
+ K.fromString (testJobName arch os) .= object
+ [ "runs-on" .= runner arch os
+ , "name" .= str (testJobName arch os ++ " (Test binaries)")
+ , "needs" .= [bindistJobName arch os]
+ , "environment" .= str "CI"
+ , "env" .= thisEnv
+ , "steps" .=
+ ( setupAction arch os
+ ++ [ checkoutAction , downloadArtifacts (bindistName arch os) "./out" ]
+ ++ testStep arch os)
+ ]
+ where thisEnv = envVars arch os
+
+ testStep Amd64 (Linux d) = [customAction d Test]
+ testStep AArch64 (Linux Ubuntu2004) =
+ [ ghAction "Run test" "docker://hasufell/arm64v8-ubuntu-haskell:focal"
+ [ "args" .= str "bash .github/scripts/test.sh" ]
+ [ ]
+ ]
+ testStep AArch64 (Linux _) = error "aarch64-linux non-ubuntu not supported"
+
+ testStep Amd64 Darwin = [ghRun "Run test" "sh" [] $ unlines $
+ [ "brew install coreutils tree"
+ , "bash .github/scripts/test.sh"
+ ]
+ ]
+ testStep AArch64 Darwin = [ghRun "Run test" "sh" [] $ unlines $
+ [ "bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree"
+ , "export PATH=\"$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH\""
+ , "export CC=\"$HOME/.brew/opt/llvm@13/bin/clang\""
+ , "export CXX=\"$HOME/.brew/opt/llvm@13/bin/clang++\""
+ , "export LD=ld"
+ , "export AR=\"$HOME/.brew/opt/llvm@13/bin/llvm-ar\""
+ , "export RANLIB=\"$HOME/.brew/opt/llvm@13/bin/llvm-ranlib\""
+ , "bash .github/scripts/test.sh"
+ ]
+ ]
+
+ testStep Amd64 Windows =
+ [ ghRun "install windows deps" "pwsh" [] $ unlines $
+ [ "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\""
+ , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\""
+ , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python p7zip patch unzip zip git\""
+ , "taskkill /F /FI \"MODULES eq msys-2.0.dll\""
+ ]
+ , ghRun "Run test" "pwsh" [] $ unlines $
+ [ "$env:CHERE_INVOKING = 1"
+ , "$env:MSYS2_PATH_TYPE = \"inherit\""
+ , "C:\\msys64\\usr\\bin\\bash -lc \"bash .github/scripts/test.sh\""
+ ]
+ ]
+ testStep AArch64 Windows = error "aarch64 windows not supported"
+
+
+ciConfigs :: [Config]
+ciConfigs =
+ [ MkConfig Amd64 Darwin allGHCs
+ , MkConfig AArch64 Darwin allGHCs
+ , MkConfig Amd64 Windows allGHCs
+ , MkConfig AArch64 (Linux Ubuntu2004) allGHCs]
+ ++ [ MkConfig Amd64 (Linux distro) allGHCs | distro <- allDistros ]
+
+main :: IO ()
+main = do
+ [root] <- getArgs
+ setCurrentDirectory root
+ removeDirectoryRecursive actionDir
+ createDirectoryIfMissing True actionDir
+ forM_ (mapMaybe configAction ciConfigs) $ \a -> do
+ let path = actionPath (actionDistro a)
+ createDirectoryIfMissing True path
+    BS.writeFile (path </> "action.yaml") $ encode a
+ BS.putStr "### DO NOT EDIT - GENERATED FILE\n"
+ BS.putStr "### This file was generated by ./.github/generate-ci/gen_ci.hs\n"
+ BS.putStr "### Edit that file and run ./.github/generate-ci/generate-jobs to regenerate\n"
+ BS.putStr $ encode $ CI ciConfigs
+
+
+-------------------------------------------------------------------------------
+-- Utils
+-------------------------------------------------------------------------------
+
+str :: String -> String
+str = id
+
+ghAction :: String -> String -> [(Key,Value)] -> [(Key,Value)] -> Value
+ghAction name uses args env = object $
+ [ "name" .= name
+ , "uses" .= uses
+ ]
+ ++ case args of
+ [] -> []
+ xs -> [ "with" .= object xs ]
+ ++ case env of
+ [] -> []
+ xs -> [ "env" .= object xs ]
+
+ghRun :: String -> String -> [(Key,Value)] -> String -> Value
+ghRun name shell env script = object $
+ [ "name" .= name
+ , "shell" .= shell
+ , "run" .= script
+ ]
+ ++ case env of
+ [] -> []
+ xs -> [ "env" .= object xs ]
+
+checkoutAction :: Value
+checkoutAction = ghAction "Checkout" "actions/checkout@v4" [] []
+
+uploadArtifacts :: String -> String -> Value
+uploadArtifacts name path = ghAction "Upload artifact" "actions/upload-artifact@v4"
+ [ "if-no-files-found" .= str "error"
+ , "retention-days" .= (2 :: Int)
+ , "name" .= name
+ , "path" .= path
+ ] []
+
+downloadArtifacts :: String -> String -> Value
+downloadArtifacts name path = ghAction "Download artifacts" "actions/download-artifact@v4" [ "name" .= name, "path" .= path ] []
diff --git a/.github/generate-ci/generate-ci.cabal b/.github/generate-ci/generate-ci.cabal
new file mode 100644
index 0000000000..ae9e9d3f52
--- /dev/null
+++ b/.github/generate-ci/generate-ci.cabal
@@ -0,0 +1,18 @@
+cabal-version: 3.0
+name: generate-ci
+version: 0.1.0.0
+license: Apache-2.0
+license-file: LICENSE
+build-type: Simple
+
+executable generate-ci
+ main-is: gen_ci.hs
+ ghc-options: -Wall
+ build-depends: base,
+ bytestring,
+ containers,
+ directory,
+ filepath,
+ aeson,
+ yaml >= 0.11.11.2
+ default-language: Haskell2010
diff --git a/.github/generate-ci/generate-jobs b/.github/generate-ci/generate-jobs
new file mode 100755
index 0000000000..4cffc82d2a
--- /dev/null
+++ b/.github/generate-ci/generate-jobs
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+
+set -e
+
+root="$(git rev-parse --show-toplevel)/"
+cd "$root/.github/generate-ci/"
+
+cabal run -v0 generate-ci "$root" > ../workflows/release.yaml
+
diff --git a/.github/scripts/bindist.sh b/.github/scripts/bindist.sh
index 72e8fe4676..b50aeb2aca 100644
--- a/.github/scripts/bindist.sh
+++ b/.github/scripts/bindist.sh
@@ -5,10 +5,7 @@ set -eux
. .github/scripts/env.sh
. .github/scripts/common.sh
-# ensure ghcup
-if ! command -v ghcup ; then
- install_ghcup
-fi
+install_ghcup
# create tarball/zip
case "${TARBALL_EXT}" in
@@ -24,8 +21,8 @@ case "${TARBALL_EXT}" in
# from the oldest version in the list
: "${GHCS:="$(cd "$CI_PROJECT_DIR/out/${ARTIFACT}" && rm -f ./*.json && for ghc in * ; do printf "%s\n" "$ghc" ; done | sort -r | tr '\n' ' ')"}"
emake --version
- emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" GHCS="${GHCS}" bindist
- emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" bindist-tar
+ emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" GHCS="${GHCS}" bindist || fail_with_ghcup_logs "make bindist failed"
+ emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" bindist-tar || fail_with_ghcup_logs "make bindist failed"
;;
*)
fail "Unknown TARBALL_EXT: ${TARBALL_EXT}"
diff --git a/.github/scripts/brew.sh b/.github/scripts/brew.sh
index 0f889c6299..4066dfb885 100644
--- a/.github/scripts/brew.sh
+++ b/.github/scripts/brew.sh
@@ -19,9 +19,7 @@ mkdir -p $CI_PROJECT_DIR/.brew_cache
export HOMEBREW_CACHE=$CI_PROJECT_DIR/.brew_cache
mkdir -p $CI_PROJECT_DIR/.brew_logs
export HOMEBREW_LOGS=$CI_PROJECT_DIR/.brew_logs
-mkdir -p /private/tmp/.brew_tmp
-export HOMEBREW_TEMP=/private/tmp/.brew_tmp
+export HOMEBREW_TEMP=$(mktemp -d)
#brew update
brew install ${1+"$@"}
-
diff --git a/.github/scripts/build.sh b/.github/scripts/build.sh
index d27a940e14..1c0eae6252 100644
--- a/.github/scripts/build.sh
+++ b/.github/scripts/build.sh
@@ -11,7 +11,9 @@ uname
pwd
env
-# ensure ghcup
+# Ensure ghcup is present and properly configured.
+# Sets up the vanilla channel, as HLS CI provides binaries
+# for GHCup's vanilla channel.
install_ghcup
# ensure cabal-cache
@@ -19,7 +21,7 @@ download_cabal_cache "$HOME/.local/bin/cabal-cache"
# build
-ghcup install ghc "${GHC_VERSION}"
+ghcup install ghc "${GHC_VERSION}" || fail_with_ghcup_logs "install ghc"
ghcup set ghc "${GHC_VERSION}"
sed -i.bak -e '/DELETE MARKER FOR CI/,/END DELETE/d' cabal.project # see comment in cabal.project
ecabal update
diff --git a/.github/scripts/common.sh b/.github/scripts/common.sh
index dde41675cf..a10d84045e 100644
--- a/.github/scripts/common.sh
+++ b/.github/scripts/common.sh
@@ -139,7 +139,7 @@ install_ghcup() {
source "$(dirname "${GHCUP_BIN}")/env"
# make sure we use the vanilla channel for installing binaries
# see https://github.com/haskell/ghcup-metadata/pull/166#issuecomment-1893075575
- ghcup config set url-source https://raw.githubusercontent.com/haskell/ghcup-metadata/master/ghcup-vanilla-0.0.8.yaml
+ ghcup config set url-source https://raw.githubusercontent.com/haskell/ghcup-metadata/refs/heads/master/ghcup-vanilla-0.0.9.yaml
ghcup install cabal --set "${BOOTSTRAP_HASKELL_CABAL_VERSION}"
fi
}
@@ -182,6 +182,10 @@ error() { echo_color "${RED}" "$1"; }
warn() { echo_color "${LT_BROWN}" "$1"; }
info() { echo_color "${LT_BLUE}" "$1"; }
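+# Dump the collected ghcup logs to help debug CI failures,
+# then fail with the given message.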
+fail_with_ghcup_logs() {
+    cat /github/workspace/.ghcup/logs/*
+    fail "$1"
+}
fail() { error "error: $1"; exit 1; }
run() {
diff --git a/.github/scripts/entrypoint.sh b/.github/scripts/entrypoint.sh
new file mode 100755
index 0000000000..f02e4ec17a
--- /dev/null
+++ b/.github/scripts/entrypoint.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
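+
+# Entrypoint for the generated bindist container actions
+# (.github/actions/bindist-actions/*): install curl/bash/git/tree plus the
+# distro packages in $TOOLS via the $INSTALL command, then dispatch on $STAGE
+# (BUILD / BINDIST / TEST).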
+
+set -x
+
+bash -c "$INSTALL curl bash git tree $TOOLS"
+
+unset INSTALL
+unset TOOLS
+
+if [ "${ARTIFACT}" = "x86_64-linux-unknown" ]; then
+ echo "NAME=Linux" > /etc/os-release
+ echo "ID=linux" >> /etc/os-release
+ echo "PRETTY_NAME=Linux" >> /etc/os-release
+fi
+
+case "$STAGE" in
+ "BUILD")
+ bash .github/scripts/build.sh
+ tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
+ ;;
+ "BINDIST")
+ set -eux
+ for bindist in out-*.tar ; do
+ tar -xf "${bindist}"
+ done
+ unset bindist
+ bash .github/scripts/bindist.sh
+ ;;
+ "TEST")
+ bash .github/scripts/test.sh
+esac
+
diff --git a/.github/scripts/env.sh b/.github/scripts/env.sh
index 90e7219661..2f6eaa3c48 100644
--- a/.github/scripts/env.sh
+++ b/.github/scripts/env.sh
@@ -35,3 +35,5 @@ fi
export DEBIAN_FRONTEND=noninteractive
export TZ=Asia/Singapore
+export LANG=en_US.UTF-8
+export LC_ALL=C.UTF-8
diff --git a/.github/scripts/test.sh b/.github/scripts/test.sh
index dfcfc4b4ef..00638dca62 100644
--- a/.github/scripts/test.sh
+++ b/.github/scripts/test.sh
@@ -8,7 +8,7 @@ set -eux
. .github/scripts/env.sh
. .github/scripts/common.sh
-test_package="text-2.1.1"
+test_package="text-2.1.2"
test_module="src/Data/Text.hs"
create_cradle() {
@@ -60,7 +60,7 @@ test_all_hls() {
fi
done
# install the recommended GHC version so the wrapper can launch HLS
- ghcup install ghc --set recommended
+ ghcup install ghc --set 9.10.2
"$bindir/haskell-language-server-wrapper${ext}" typecheck "${test_module}" || fail "failed to typecheck with HLS wrapper"
}
diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml
index 659352e4e6..82a50589e4 100644
--- a/.github/workflows/bench.yml
+++ b/.github/workflows/bench.yml
@@ -17,7 +17,6 @@ on:
jobs:
pre_job:
runs-on: ubuntu-latest
- if: contains(github.event.pull_request.labels.*.name, 'performance')
outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip }}
steps:
@@ -53,8 +52,8 @@ jobs:
# see discussion https://github.com/haskell/haskell-language-server/pull/4118
# also possible to add more GHCs if we performs better in the future.
ghc:
- - '9.6'
- '9.8'
+ - '9.10'
os:
- ubuntu-latest
@@ -62,7 +61,7 @@ jobs:
# change of the strategy may require changing the bootstrapping/run code
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
# By default, the `pull_request` event has a `GITHUB_SHA` env variable
@@ -101,46 +100,47 @@ jobs:
tar -czf cabal.tar.gz *
- name: Upload workspace
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: workspace-${{ matrix.ghc }}-${{ matrix.os }}
retention-days: 1
path: workspace.tar.gz
- name: Upload .cabal
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }}
retention-days: 1
path: ~/.cabal/cabal.tar.gz
bench_example:
+ if: contains(github.event.pull_request.labels.*.name, 'performance')
needs: [bench_init, pre_job]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
- ghc: ['9.6', '9.8']
+ ghc: ['9.8', '9.10']
os: [ubuntu-latest]
- cabal: ['3.10']
+ cabal: ['3.14']
example: ['cabal', 'lsp-types']
steps:
- - uses: haskell-actions/setup@v2.7.9
+ - uses: haskell-actions/setup@v2.8.0
with:
ghc-version : ${{ matrix.ghc }}
cabal-version: ${{ matrix.cabal }}
enable-stack: false
- name: Download cabal home
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }}
path: .
- name: Download workspace
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: workspace-${{ matrix.ghc }}-${{ matrix.os }}
path: .
@@ -165,7 +165,7 @@ jobs:
run: find bench-results -name "*.csv" -or -name "*.svg" -or -name "*.html" | xargs tar -czf benchmark-artifacts.tar.gz
- name: Archive benchmarking artifacts
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: bench-results-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }}
path: benchmark-artifacts.tar.gz
@@ -175,7 +175,7 @@ jobs:
run: find bench-results -name "*.log" -or -name "*.hp" | xargs tar -czf benchmark-logs.tar.gz
- name: Archive benchmark logs
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: bench-logs-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }}
path: benchmark-logs.tar.gz
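
With this change the skip-check job runs unconditionally, and it is bench_example that is gated on the performance label, so the example benchmarks only run when the pull request carries that label. One way to add it, using the GitHub CLI (the PR number is a placeholder):

    # Label a PR so that bench_example runs (1234 is a placeholder).
    gh pr edit 1234 --add-label performance
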
diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml
index 5bddbd349e..bdd770acd0 100644
--- a/.github/workflows/nix.yml
+++ b/.github/workflows/nix.yml
@@ -44,17 +44,19 @@ jobs:
strategy:
fail-fast: false
matrix:
- os: [ubuntu-latest, macOS-latest]
+ # TODO: Fix compilation problems on macOS.
+ # os: [ubuntu-latest, macOS-latest]
+ os: [ubuntu-latest]
steps:
- uses: actions/checkout@v3
- - uses: cachix/install-nix-action@v30
+ - uses: cachix/install-nix-action@v31
with:
extra_nix_config: |
experimental-features = nix-command flakes
nix_path: nixpkgs=channel:nixos-unstable
- - uses: cachix/cachix-action@v15
+ - uses: cachix/cachix-action@v16
with:
name: haskell-language-server
authToken: ${{ secrets.HLS_CACHIX_AUTH_TOKEN }}
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index fc3f98bcca..30c55d375a 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -1,1023 +1,3833 @@
-name: Build and release
-
-on:
- push:
- tags:
- - '*'
- schedule:
- - cron: '0 2 * * 1'
+### DO NOT EDIT - GENERATED FILE
+### This file was generated by ./.github/generate-ci/gen_ci.hs
+### Edit that file and run ./.github/generate-ci/generate-jobs to regenerate
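
Since release.yaml is now a generated file, manual edits will be overwritten on the next regeneration; the intended loop is to change the generator and re-run it. A sketch of that loop, using only the paths named in the header above (the editor and commit message are illustrative):

    # Edit the Haskell generator, regenerate, and commit whatever changed.
    $EDITOR .github/generate-ci/gen_ci.hs
    ./.github/generate-ci/generate-jobs
    git add -A .github
    git commit -m "ci: regenerate GitHub CI"
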
env:
CABAL_CACHE_DISABLE: ${{ vars.CABAL_CACHE_DISABLE }}
CABAL_CACHE_NONFATAL: ${{ vars.CABAL_CACHE_NONFATAL }}
-
jobs:
- build-linux:
- name: Build linux binaries
- ## We need the environment here, to have access to the `vars` context.
- ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`.
- ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments
- ## assuming you have the proper permissions.
- environment: CI
- runs-on: ubuntu-latest
+ bindist-aarch64-linux-ubuntu2004:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: ARM64
+ ARTIFACT: aarch64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-aarch64-linux-ubuntu2004 (Prepare bindist)
+ needs:
+ - build-aarch64-linux-ubuntu2004-967
+ - build-aarch64-linux-ubuntu2004-984
+ - build-aarch64-linux-ubuntu2004-9102
+ - build-aarch64-linux-ubuntu2004-9122
+ runs-on:
+ - self-hosted
+ - Linux
+ - ARM64
+ - maerwald
+ steps:
+ - name: clean and git config for aarch64-linux
+ run: |
+ find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +
+ git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*"
+ shell: bash
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-aarch64-linux-ubuntu2004-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-aarch64-linux-ubuntu2004-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-aarch64-linux-ubuntu2004-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-aarch64-linux-ubuntu2004-9122
+ path: ./
+ - name: Unpack aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/untar.sh
+ - name: Tar aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/bindist.sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-aarch64-linux-ubuntu2004
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-aarch64-mac:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: ARM64
+ ARTIFACT: aarch64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ HOMEBREW_CHANGE_ARCH_TO_ARM: '1'
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-aarch64-mac (Prepare bindist)
+ needs:
+ - build-aarch64-mac-967
+ - build-aarch64-mac-984
+ - build-aarch64-mac-9102
+ - build-aarch64-mac-9122
+ runs-on:
+ - self-hosted
+ - macOS
+ - ARM64
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-aarch64-mac-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-aarch64-mac-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-aarch64-mac-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-aarch64-mac-9122
+ path: ./
+ - name: Run build
+ run: |
+ bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree
+ export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH"
+ export CC="$HOME/.brew/opt/llvm@13/bin/clang"
+ export CXX="$HOME/.brew/opt/llvm@13/bin/clang++"
+ export LD=ld
+ export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar"
+ export RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib"
+ for bindist in out-*.tar ; do
+ tar xf "${bindist}"
+ done
+ unset bindist
+ bash .github/scripts/bindist.sh
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-aarch64-apple-darwin
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-deb10:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb10
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-deb10 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-deb10-967
+ - build-x86_64-linux-deb10-984
+ - build-x86_64-linux-deb10-9102
+ - build-x86_64-linux-deb10-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb10-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb10-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb10-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb10-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-deb10
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-deb10
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-deb11:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb11
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-deb11 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-deb11-967
+ - build-x86_64-linux-deb11-984
+ - build-x86_64-linux-deb11-9102
+ - build-x86_64-linux-deb11-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb11-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb11-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb11-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb11-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-deb11
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-deb11
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-deb12:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb12
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-deb12 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-deb12-967
+ - build-x86_64-linux-deb12-984
+ - build-x86_64-linux-deb12-9102
+ - build-x86_64-linux-deb12-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb12-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb12-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb12-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb12-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-deb12
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-deb12
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-deb9:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb9
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-deb9 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-deb9-967
+ - build-x86_64-linux-deb9-984
+ - build-x86_64-linux-deb9-9102
+ - build-x86_64-linux-deb9-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb9-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb9-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb9-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-deb9-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-deb9
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-deb9
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-fedora33:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora33
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-fedora33 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-fedora33-967
+ - build-x86_64-linux-fedora33-984
+ - build-x86_64-linux-fedora33-9102
+ - build-x86_64-linux-fedora33-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-fedora33-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-fedora33-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-fedora33-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-fedora33-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-fedora33
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-fedora33
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-fedora40:
env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora40
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-fedora40 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-fedora40-967
+ - build-x86_64-linux-fedora40-984
+ - build-x86_64-linux-fedora40-9102
+ - build-x86_64-linux-fedora40-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-fedora40-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-fedora40-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-fedora40-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-fedora40-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-fedora40
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-fedora40
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-mint193:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint193
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-mint193 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-mint193-967
+ - build-x86_64-linux-mint193-984
+ - build-x86_64-linux-mint193-9102
+ - build-x86_64-linux-mint193-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint193-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint193-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint193-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint193-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-mint193
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-mint193
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-mint202:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint202
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-mint202 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-mint202-967
+ - build-x86_64-linux-mint202-984
+ - build-x86_64-linux-mint202-9102
+ - build-x86_64-linux-mint202-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint202-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint202-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint202-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint202-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-mint202
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-mint202
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-mint213:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint213
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-mint213 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-mint213-967
+ - build-x86_64-linux-mint213-984
+ - build-x86_64-linux-mint213-9102
+ - build-x86_64-linux-mint213-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint213-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint213-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint213-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-mint213-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-mint213
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-mint213
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-ubuntu1804:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu1804
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-ubuntu1804 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-ubuntu1804-967
+ - build-x86_64-linux-ubuntu1804-984
+ - build-x86_64-linux-ubuntu1804-9102
+ - build-x86_64-linux-ubuntu1804-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu1804-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu1804-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu1804-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu1804-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-ubuntu1804
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-ubuntu1804
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-ubuntu2004:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-ubuntu2004 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-ubuntu2004-967
+ - build-x86_64-linux-ubuntu2004-984
+ - build-x86_64-linux-ubuntu2004-9102
+ - build-x86_64-linux-ubuntu2004-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu2004-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu2004-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu2004-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu2004-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-ubuntu2004
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-ubuntu2004
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-ubuntu2204:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2204
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-ubuntu2204 (Prepare bindist)
+ needs:
+ - build-x86_64-linux-ubuntu2204-967
+ - build-x86_64-linux-ubuntu2204-984
+ - build-x86_64-linux-ubuntu2204-9102
+ - build-x86_64-linux-ubuntu2204-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu2204-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu2204-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu2204-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-ubuntu2204-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-ubuntu2204
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-ubuntu2204
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-linux-unknown:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-unknown
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ name: bindist-x86_64-linux-unknown (Prepare bindist)
+ needs:
+ - build-x86_64-linux-unknown-967
+ - build-x86_64-linux-unknown-984
+ - build-x86_64-linux-unknown-9102
+ - build-x86_64-linux-unknown-9122
+ runs-on:
+ - self-hosted
+ - linux-space
+ - maerwald
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-unknown-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-unknown-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-unknown-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-linux-unknown-9122
+ path: ./
+ - name: Bindist
+ uses: ./.github/actions/bindist-actions/action-unknown
+ with:
+ stage: BINDIST
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-linux-unknown
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-mac:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-apple-darwin
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
S3_HOST: ${{ secrets.S3_HOST }}
TARBALL_EXT: tar.xz
- ARCH: 64
- DEBIAN_FRONTEND: noninteractive
- TZ: Asia/Singapore
- strategy:
- fail-fast: false
- matrix:
- ghc: ["9.10.1", "9.8.2", "9.6.6", "9.4.8"]
- platform: [ { image: "debian:9"
- , installCmd: "sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install -y"
- , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
- , DISTRO: "Debian"
- , ARTIFACT: "x86_64-linux-deb9"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "debian:10"
- , installCmd: "apt-get update && apt-get install -y"
- , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
- , DISTRO: "Debian"
- , ARTIFACT: "x86_64-linux-deb10"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "debian:11"
- , installCmd: "apt-get update && apt-get install -y"
- , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
- , DISTRO: "Debian"
- , ARTIFACT: "x86_64-linux-deb11"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "ubuntu:18.04"
- , installCmd: "apt-get update && apt-get install -y"
- , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
- , DISTRO: "Ubuntu"
- , ARTIFACT: "x86_64-linux-ubuntu18.04"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "ubuntu:20.04"
- , installCmd: "apt-get update && apt-get install -y"
- , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
- , DISTRO: "Ubuntu"
- , ARTIFACT: "x86_64-linux-ubuntu20.04"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "ubuntu:22.04"
- , installCmd: "apt-get update && apt-get install -y"
- , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
- , DISTRO: "Ubuntu"
- , ARTIFACT: "x86_64-linux-ubuntu22.04"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "linuxmintd/mint19.3-amd64"
- , installCmd: "apt-get update && apt-get install -y"
- , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
- , DISTRO: "Mint"
- , ARTIFACT: "x86_64-linux-mint19.3"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "linuxmintd/mint20.2-amd64"
- , installCmd: "apt-get update && apt-get install -y"
- , toolRequirements: "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf"
- , DISTRO: "Mint"
- , ARTIFACT: "x86_64-linux-mint20.2"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "fedora:27"
- , installCmd: "dnf install -y"
- , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
- , DISTRO: "Fedora"
- , ARTIFACT: "x86_64-linux-fedora27"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "fedora:33"
- , installCmd: "dnf install -y"
- , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
- , DISTRO: "Fedora"
- , ARTIFACT: "x86_64-linux-fedora33"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- },
- { image: "centos:7"
- , installCmd: "sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* && sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* && yum -y install epel-release && yum install -y"
- , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
- , DISTRO: "CentOS"
- , ARTIFACT: "x86_64-linux-centos7"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- }
- ]
- # TODO: rm
- # Instead of manually adding the Unknown Linux Bindist jobs here,
- # it should be part of the matrix above.
- # However, due to GHC 9.4 shenanigans, we need some special logic.
- # https://gitlab.haskell.org/ghc/ghc/-/issues/22268
- #
- # Perhaps we can migrate *all* unknown linux builds to a uniform
- # image.
- include:
- - ghc: 9.4.8
- platform:
- { image: "fedora:27"
- , installCmd: "dnf install -y"
- , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
- , DISTRO: "Unknown"
- , ARTIFACT: "x86_64-linux-unknown"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- }
- - ghc: 9.6.6
- platform:
- { image: "rockylinux:8"
- , installCmd: "yum -y install epel-release && yum install -y --allowerasing"
- , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
- , DISTRO: "Unknown"
- , ARTIFACT: "x86_64-linux-unknown"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- }
- - ghc: 9.8.2
- platform:
- { image: "rockylinux:8"
- , installCmd: "yum -y install epel-release && yum install -y --allowerasing"
- , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
- , DISTRO: "Unknown"
- , ARTIFACT: "x86_64-linux-unknown"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- }
- - ghc: 9.10.1
- platform:
- { image: "rockylinux:8"
- , installCmd: "yum -y install epel-release && yum install -y --allowerasing"
- , toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf"
- , DISTRO: "Unknown"
- , ARTIFACT: "x86_64-linux-unknown"
- , ADD_CABAL_ARGS: "--enable-split-sections"
- }
- container:
- image: ${{ matrix.platform.image }}
- steps:
- - name: Install requirements
- shell: sh
- run: |
- ${{ matrix.platform.installCmd }} curl bash git ${{ matrix.platform.toolRequirements }}
-
- - if: matrix.platform.DISTRO == 'Unknown'
- run: |
- echo "NAME=Linux" > /etc/os-release
- echo "ID=linux" >> /etc/os-release
- echo "PRETTY_NAME=Linux" >> /etc/os-release
-
- - uses: actions/checkout@v3
-
- - name: Run build
- run: |
- bash .github/scripts/build.sh
- tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
-
- env:
- ARTIFACT: ${{ matrix.platform.ARTIFACT }}
- DISTRO: ${{ matrix.platform.DISTRO }}
- ADD_CABAL_ARGS: ${{ matrix.platform.ADD_CABAL_ARGS }}
- GHC_VERSION: ${{ matrix.ghc }}
-
- - if: always()
- name: Upload artifact
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- retention-days: 2
- name: artifacts-${{ matrix.platform.ARTIFACT }}
- path: |
- ./out-${{ matrix.platform.ARTIFACT }}-${{ matrix.ghc }}.tar
-
- build-arm:
- name: Build ARM binary
- ## We need the environment here, to have access to the `vars` context.
- ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`.
- ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments
- ## assuming you have the proper permissions.
- environment: CI
- runs-on: [self-hosted, Linux, ARM64, maerwald]
- env:
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- S3_HOST: ${{ secrets.S3_HOST }}
- TARBALL_EXT: tar.xz
- ADD_CABAL_ARGS: ""
- DEBIAN_FRONTEND: noninteractive
- TZ: Asia/Singapore
- ARTIFACT: "aarch64-linux-ubuntu20"
+ TZ: Asia/Singapore
+ name: bindist-x86_64-mac (Prepare bindist)
+ needs:
+ - build-x86_64-mac-967
+ - build-x86_64-mac-984
+ - build-x86_64-mac-9102
+ - build-x86_64-mac-9122
+ runs-on:
+ - macOS-13
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-mac-967
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-mac-984
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-mac-9102
+ path: ./
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-mac-9122
+ path: ./
+ - name: Create bindist
+ run: |
+ brew install coreutils tree
+ for bindist in out-*.tar ; do
+ tar xf "${bindist}"
+ done
+ unset bindist
+ bash .github/scripts/bindist.sh
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-apple-darwin
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ bindist-x86_64-windows:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-mingw64
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: zip
+ TZ: Asia/Singapore
+ name: bindist-x86_64-windows (Prepare bindist)
+ needs:
+ - build-x86_64-windows-967
+ - build-x86_64-windows-984
+ - build-x86_64-windows-9102
+ - build-x86_64-windows-9122
+ runs-on:
+ - windows-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-windows-967
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-windows-984
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-windows-9102
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: artifacts-build-x86_64-windows-9122
+ path: ./out
+ - name: Run build
+ run: |
+ C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
+ C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
+ C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S unzip zip git"
+ taskkill /F /FI "MODULES eq msys-2.0.dll"
+ $env:CHERE_INVOKING = 1
+ $env:MSYS2_PATH_TYPE = "inherit"
+ C:\msys64\usr\bin\bash -lc "bash .github/scripts/bindist.sh"
+ shell: pwsh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: bindist-x86_64-mingw64
+ path: |-
+ ./out/*.tar.xz
+ ./out/plan.json/*
+ ./out/*.zip
+ retention-days: 2
+ build-aarch64-linux-ubuntu2004-9102:
+ env:
+ ADD_CABAL_ARGS: ''
ARCH: ARM64
- DISTRO: Ubuntu
- strategy:
- fail-fast: true
- matrix:
- ghc: ["9.10.1", "9.8.2", "9.6.6", "9.4.8"]
- steps:
- - uses: docker://arm64v8/ubuntu:focal
- name: Cleanup (aarch64 linux)
- with:
- args: "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +"
-
- - name: git config
- run: |
- git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*"
- shell: bash
-
- - name: Checkout code
- uses: actions/checkout@v3
-
- - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
- name: Run build (aarch64 linux)
- with:
- args: bash .github/scripts/build.sh
- env:
- GHC_VERSION: ${{ matrix.ghc }}
-
- - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
- name: Run build (aarch64 linux)
- with:
- args: bash .github/scripts/tar.sh
- env:
- GHC_VERSION: ${{ matrix.ghc }}
-
- - if: always()
- name: Upload artifact
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- retention-days: 2
- name: artifacts-arm
- path: |
- ./out-${{ env.ARTIFACT }}-${{ matrix.ghc }}.tar
-
- build-mac-x86_64:
- name: Build binary (Mac x86_64)
- ## We need the environment here, to have access to the `vars` context.
- ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`.
- ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments
- ## assuming you have the proper permissions.
- environment: CI
- runs-on: macOS-12
- env:
- MACOSX_DEPLOYMENT_TARGET: 10.13
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- S3_HOST: ${{ secrets.S3_HOST }}
- ADD_CABAL_ARGS: ""
- ARTIFACT: "x86_64-apple-darwin"
- ARCH: 64
- TARBALL_EXT: tar.xz
- DISTRO: na
- strategy:
- fail-fast: false
- matrix:
- ghc: ["9.10.1", "9.8.2", "9.6.6", "9.4.8"]
- steps:
- - name: Checkout code
- uses: actions/checkout@v3
-
- - name: Run build
- run: |
- brew install coreutils tree
- bash .github/scripts/build.sh
- tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
- env:
- GHC_VERSION: ${{ matrix.ghc }}
-
- - if: always()
- name: Upload artifact
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- retention-days: 2
- name: artifacts-mac-x86_64
- path: |
- ./out-${{ env.ARTIFACT }}-${{ matrix.ghc }}.tar
-
- build-mac-aarch64:
- name: Build binary (Mac aarch64)
- ## We need the environment here, to have access to the `vars` context.
- ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`.
- ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments
- ## assuming you have the proper permissions.
- environment: CI
- runs-on: [self-hosted, macOS, ARM64]
- env:
- MACOSX_DEPLOYMENT_TARGET: 10.13
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- S3_HOST: ${{ secrets.S3_HOST }}
- ADD_CABAL_ARGS: ""
- ARTIFACT: "aarch64-apple-darwin"
+ ARTIFACT: aarch64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-aarch64-linux-ubuntu2004-9102 (Build binaries)
+ runs-on:
+ - self-hosted
+ - Linux
+ - ARM64
+ - maerwald
+ steps:
+ - name: clean and git config for aarch64-linux
+ run: |
+ find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +
+ git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*"
+ shell: bash
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.10.2
+ name: Build aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/build.sh
+ - env:
+ GHC_VERSION: 9.10.2
+ name: Tar aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/tar.sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-aarch64-linux-ubuntu2004-9102
+ path: out-aarch64-linux-ubuntu2004-9.10.2.tar
+ retention-days: 2
+ build-aarch64-linux-ubuntu2004-9122:
+ env:
+ ADD_CABAL_ARGS: ''
ARCH: ARM64
+ ARTIFACT: aarch64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
TARBALL_EXT: tar.xz
- DISTRO: na
- HOMEBREW_CHANGE_ARCH_TO_ARM: 1
- strategy:
- fail-fast: false
- matrix:
- ghc: ["9.10.1", "9.8.2", "9.6.6", "9.4.8"]
- steps:
- - name: Checkout code
- uses: actions/checkout@v3
-
- - name: Run build
- run: |
- bash .github/scripts/brew.sh git coreutils autoconf automake tree
- export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH"
- export LD=ld
- bash .github/scripts/build.sh
- tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
- env:
- GHC_VERSION: ${{ matrix.ghc }}
-
- - if: always()
- name: Upload artifact
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- retention-days: 2
- name: artifacts-mac-aarch64
- path: |
- ./out-${{ env.ARTIFACT }}-${{ matrix.ghc }}.tar
-
- build-win:
- name: Build binary (Win)
- ## We need the environment here, to have access to the `vars` context.
- ## Allows us to specify: `CABAL_CACHE_DISABLE=yes`.
- ## The environments can be seen in https://github.com/haskell/haskell-language-server/settings/environments
- ## assuming you have the proper permissions.
- environment: CI
- runs-on: windows-latest
- env:
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- S3_HOST: ${{ secrets.S3_HOST }}
- ADD_CABAL_ARGS: ""
- ARTIFACT: "x86_64-mingw64"
- ARCH: 64
- TARBALL_EXT: "zip"
- DISTRO: na
- strategy:
- fail-fast: false
- matrix:
- ghc: ["9.10.1", "9.8.2", "9.6.6", "9.4.8"]
- steps:
- - name: install windows deps
- shell: pwsh
- run: |
- C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
- C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
- C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python p7zip patch unzip zip git"
- taskkill /F /FI "MODULES eq msys-2.0.dll"
-
- - name: Checkout code
- uses: actions/checkout@v3
-
- - name: Run build (windows)
- run: |
- $env:CHERE_INVOKING = 1
- $env:MSYS2_PATH_TYPE = "inherit"
- $ErrorActionPreference = "Stop"
- C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh"
- shell: pwsh
- env:
- GHC_VERSION: ${{ matrix.ghc }}
-
- - if: always()
- name: Upload artifact
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- retention-days: 2
- name: artifacts-win
- path: |
- ./out/*
-
- bindist-linux:
- name: Tar linux bindists (linux)
- runs-on: [self-hosted, linux-space, maerwald]
- needs: ["build-linux"]
- env:
- TARBALL_EXT: tar.xz
- ARCH: 64
- DEBIAN_FRONTEND: noninteractive
- TZ: Asia/Singapore
- strategy:
- fail-fast: false
- matrix:
- include:
- - image: debian:9
- installCmd: sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Debian
- ARTIFACT: "x86_64-linux-deb9"
- - image: debian:10
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Debian
- ARTIFACT: "x86_64-linux-deb10"
- - image: debian:11
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Debian
- ARTIFACT: "x86_64-linux-deb11"
- - image: ubuntu:18.04
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Ubuntu
- ARTIFACT: "x86_64-linux-ubuntu18.04"
- - image: ubuntu:20.04
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Ubuntu
- ARTIFACT: "x86_64-linux-ubuntu20.04"
- - image: ubuntu:22.04
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Ubuntu
- ARTIFACT: "x86_64-linux-ubuntu22.04"
- - image: fedora:27
- installCmd: dnf install -y
- toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree
- DISTRO: Fedora
- ARTIFACT: "x86_64-linux-fedora27"
- - image: fedora:33
- installCmd: dnf install -y
- toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree
- DISTRO: Fedora
- ARTIFACT: "x86_64-linux-fedora33"
- - image: centos:7
- installCmd: sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* && sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* && yum -y install epel-release && yum install -y
- toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree
- DISTRO: CentOS
- ARTIFACT: "x86_64-linux-centos7"
- - image: linuxmintd/mint19.3-amd64
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Mint
- ARTIFACT: "x86_64-linux-mint19.3"
- - image: "fedora:33"
- installCmd: "dnf install -y"
- toolRequirements: "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree"
- DISTRO: "Unknown"
- ARTIFACT: "x86_64-linux-unknown"
- - image: linuxmintd/mint20.2-amd64
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Mint
- ARTIFACT: "x86_64-linux-mint20.2"
- container:
- image: ${{ matrix.image }}
- steps:
- - name: Install requirements
- shell: sh
- run: |
- ${{ matrix.installCmd }} curl bash git ${{ matrix.toolRequirements }}
-
- - if: matrix.DISTRO == 'Unknown'
- run: |
- echo "NAME=Linux" > /etc/os-release
- echo "ID=linux" >> /etc/os-release
- echo "PRETTY_NAME=Linux" >> /etc/os-release
-
- - uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: artifacts-${{ matrix.ARTIFACT }}
- path: ./
-
- - name: Create bindist
- run: |
- set -eux
- for bindist in out-*.tar ; do
- tar -xf "${bindist}"
- done
- unset bindist
- bash .github/scripts/bindist.sh
- env:
- ARTIFACT: ${{ matrix.ARTIFACT }}
-
- - name: Upload bindist
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- name: bindists-${{ matrix.ARTIFACT }}
- path: |
- ./out/*.tar.xz
- ./out/plan.json/*
-
- - uses: geekyeggo/delete-artifact@v2
- with:
- name: artifacts-${{ matrix.ARTIFACT }}
-
- bindist-arm:
- name: Tar linux bindists (arm)
- runs-on: [self-hosted, Linux, ARM64, maerwald]
- needs: ["build-arm"]
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-aarch64-linux-ubuntu2004-9122 (Build binaries)
+ runs-on:
+ - self-hosted
+ - Linux
+ - ARM64
+ - maerwald
+ steps:
+ - name: clean and git config for aarch64-linux
+ run: |
+ find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +
+ git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*"
+ shell: bash
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.12.2
+ name: Build aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/build.sh
+ - env:
+ GHC_VERSION: 9.12.2
+ name: Tar aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/tar.sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-aarch64-linux-ubuntu2004-9122
+ path: out-aarch64-linux-ubuntu2004-9.12.2.tar
+ retention-days: 2
+ build-aarch64-linux-ubuntu2004-967:
env:
+ ADD_CABAL_ARGS: ''
+ ARCH: ARM64
+ ARTIFACT: aarch64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-aarch64-linux-ubuntu2004-967 (Build binaries)
+ runs-on:
+ - self-hosted
+ - Linux
+ - ARM64
+ - maerwald
+ steps:
+ - name: clean and git config for aarch64-linux
+ run: |
+ find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +
+ git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*"
+ shell: bash
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.6.7
+ name: Build aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/build.sh
+ - env:
+ GHC_VERSION: 9.6.7
+ name: Tar aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/tar.sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-aarch64-linux-ubuntu2004-967
+ path: out-aarch64-linux-ubuntu2004-9.6.7.tar
+ retention-days: 2
+ build-aarch64-linux-ubuntu2004-984:
+ env:
+ ADD_CABAL_ARGS: ''
ARCH: ARM64
+ ARTIFACT: aarch64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
DEBIAN_FRONTEND: noninteractive
- ARTIFACT: "aarch64-linux-ubuntu20"
- TZ: Asia/Singapore
- steps:
- - uses: docker://arm64v8/ubuntu:focal
- name: Cleanup (aarch64 linux)
- with:
- args: "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +"
-
- - name: git config
- run: |
- git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*"
- shell: bash
-
- - uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: artifacts-arm
- path: ./
-
- - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
- name: Unpack
- with:
- args: bash .github/scripts/untar.sh
-
- - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
- name: Create bindist (aarch64 linux)
- with:
- args: bash .github/scripts/bindist.sh
-
- - name: Upload bindist
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- name: bindists-arm
- path: |
- ./out/*.tar.xz
- ./out/plan.json/*
-
- - uses: geekyeggo/delete-artifact@v2
- with:
- name: artifacts-arm
-
- bindist-mac-x86_64:
- name: Tar bindists (Mac x86_64)
- runs-on: macOS-12
- needs: ["build-mac-x86_64"]
- env:
- TARBALL_EXT: tar.xz
- ARCH: 64
- ARTIFACT: "x86_64-apple-darwin"
- steps:
- - uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: artifacts-mac-x86_64
- path: ./
-
- - name: Create bindist
- run: |
- brew install coreutils tree
- for bindist in out-*.tar ; do
- tar xf "${bindist}"
- done
- unset bindist
- bash .github/scripts/bindist.sh
-
- - name: Upload bindist
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- name: bindists-mac-x86_64
- path: |
- ./out/*.tar.xz
- ./out/plan.json/*
-
- - uses: geekyeggo/delete-artifact@v2
- with:
- name: artifacts-mac-x86_64
-
- bindist-mac-aarch64:
- name: Tar bindists (Mac aarch64)
- runs-on: [self-hosted, macOS, ARM64]
- needs: ["build-mac-aarch64"]
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-aarch64-linux-ubuntu2004-984 (Build binaries)
+ runs-on:
+ - self-hosted
+ - Linux
+ - ARM64
+ - maerwald
+ steps:
+ - name: clean and git config for aarch64-linux
+ run: |
+ find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +
+ git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*"
+ shell: bash
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.8.4
+ name: Build aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/build.sh
+ - env:
+ GHC_VERSION: 9.8.4
+ name: Tar aarch64-linux binaries
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/tar.sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-aarch64-linux-ubuntu2004-984
+ path: out-aarch64-linux-ubuntu2004-9.8.4.tar
+ retention-days: 2
+ build-aarch64-mac-9102:
env:
+ ADD_CABAL_ARGS: ''
+ ARCH: ARM64
+ ARTIFACT: aarch64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ HOMEBREW_CHANGE_ARCH_TO_ARM: '1'
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-aarch64-mac-9102 (Build binaries)
+ runs-on:
+ - self-hosted
+ - macOS
+ - ARM64
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.10.2
+ name: Run build
+ run: |
+ bash .github/scripts/brew.sh git coreutils autoconf automake tree
+ export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH"
+ export LD=ld
+ bash .github/scripts/build.sh
+ tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-aarch64-mac-9102
+ path: out-aarch64-apple-darwin-9.10.2.tar
+ retention-days: 2
+ build-aarch64-mac-9122:
+ env:
+ ADD_CABAL_ARGS: ''
ARCH: ARM64
- ARTIFACT: "aarch64-apple-darwin"
- steps:
- - uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: artifacts-mac-aarch64
- path: ./
-
- - name: Create bindist
- run: |
- bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree
- export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH"
- export CC="$HOME/.brew/opt/llvm@13/bin/clang"
- export CXX="$HOME/.brew/opt/llvm@13/bin/clang++"
- export LD=ld
- export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar"
- export RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib"
- for bindist in out-*.tar ; do
- tar xf "${bindist}"
- done
- unset bindist
- bash .github/scripts/bindist.sh
-
- - name: Upload bindist
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- name: bindists-mac-aarch64
- path: |
- ./out/*.tar.xz
- ./out/plan.json/*
-
- - uses: geekyeggo/delete-artifact@v2
- with:
- name: artifacts-mac-aarch64
-
- bindist-win:
- name: Tar bindists (Windows)
- runs-on: windows-latest
- needs: ["build-win"]
+ ARTIFACT: aarch64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ HOMEBREW_CHANGE_ARCH_TO_ARM: '1'
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-aarch64-mac-9122 (Build binaries)
+ runs-on:
+ - self-hosted
+ - macOS
+ - ARM64
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.12.2
+ name: Run build
+ run: |
+ bash .github/scripts/brew.sh git coreutils autoconf automake tree
+ export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH"
+ export LD=ld
+ bash .github/scripts/build.sh
+ tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-aarch64-mac-9122
+ path: out-aarch64-apple-darwin-9.12.2.tar
+ retention-days: 2
+ build-aarch64-mac-967:
env:
- TARBALL_EXT: zip
- ARTIFACT: "x86_64-mingw64"
- ARCH: 64
- steps:
- - name: install windows deps
- shell: pwsh
- run: |
- C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
- C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
- C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S unzip zip git"
- taskkill /F /FI "MODULES eq msys-2.0.dll"
-
- - uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: artifacts-win
- path: ./out
-
- - name: Create bindist
- run: |
- $env:CHERE_INVOKING = 1
- $env:MSYS2_PATH_TYPE = "inherit"
- C:\msys64\usr\bin\bash -lc "bash .github/scripts/bindist.sh"
- shell: pwsh
-
- - name: Upload bindist
- uses: actions/upload-artifact@v3
- with:
- if-no-files-found: error
- name: bindists-win
- path: |
- ./out/*.zip
- ./out/plan.json/*
-
- - uses: geekyeggo/delete-artifact@v2
- with:
- name: artifacts-win
-
- test-linux:
- name: Test linux binaries
- runs-on: ubuntu-latest
- needs: ["bindist-linux"]
- env:
- TARBALL_EXT: tar.xz
- ARCH: 64
- DEBIAN_FRONTEND: noninteractive
- TZ: Asia/Singapore
- strategy:
- fail-fast: false
- matrix:
- include:
- - image: debian:9
- installCmd: sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Debian
- ARTIFACT: "x86_64-linux-deb9"
- - image: debian:10
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Debian
- ARTIFACT: "x86_64-linux-deb10"
- - image: debian:11
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Debian
- ARTIFACT: "x86_64-linux-deb11"
- - image: ubuntu:18.04
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Ubuntu
- ARTIFACT: "x86_64-linux-ubuntu18.04"
- - image: ubuntu:20.04
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Ubuntu
- ARTIFACT: "x86_64-linux-ubuntu20.04"
- - image: ubuntu:22.04
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Ubuntu
- ARTIFACT: "x86_64-linux-ubuntu22.04"
- - image: fedora:27
- installCmd: dnf install -y
- toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree
- DISTRO: Fedora
- ARTIFACT: "x86_64-linux-fedora27"
- - image: fedora:33
- installCmd: dnf install -y
- toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree
- DISTRO: Fedora
- ARTIFACT: "x86_64-linux-fedora33"
- - image: centos:7
- installCmd: sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* && sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* && yum -y install epel-release && yum install -y
- toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree
- DISTRO: CentOS
- ARTIFACT: "x86_64-linux-centos7"
- - image: linuxmintd/mint19.3-amd64
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Mint
- ARTIFACT: "x86_64-linux-mint19.3"
- - image: "fedora:33"
- installCmd: dnf install -y
- toolRequirements: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf tree
- DISTRO: "Unknown"
- ARTIFACT: "x86_64-linux-unknown"
- - image: linuxmintd/mint20.2-amd64
- installCmd: apt-get update && apt-get install -y
- toolRequirements: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf tree
- DISTRO: Mint
- ARTIFACT: "x86_64-linux-mint20.2"
- container:
- image: ${{ matrix.image }}
- steps:
- - name: Install requirements
- shell: sh
- run: |
- ${{ matrix.installCmd }} curl bash git ${{ matrix.toolRequirements }}
-
- - if: matrix.DISTRO == 'Unknown'
- run: |
- echo "NAME=Linux" > /etc/os-release
- echo "ID=linux" >> /etc/os-release
- echo "PRETTY_NAME=Linux" >> /etc/os-release
-
- - uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: bindists-${{ matrix.ARTIFACT }}
- path: ./out
-
- - name: Run test
- run: bash .github/scripts/test.sh
- env:
- ARTIFACT: ${{ matrix.ARTIFACT }}
- DISTRO: ${{ matrix.DISTRO }}
-
- test-arm:
- name: Test ARM binary
- runs-on: [self-hosted, Linux, ARM64, maerwald]
- needs: ["bindist-arm"]
- env:
- TARBALL_EXT: tar.xz
- DEBIAN_FRONTEND: noninteractive
- TZ: Asia/Singapore
- ARTIFACT: "aarch64-linux-ubuntu20"
+ ADD_CABAL_ARGS: ''
ARCH: ARM64
- DISTRO: Ubuntu
- steps:
- - uses: docker://arm64v8/ubuntu:focal
- name: Cleanup (aarch64 linux)
- with:
- args: "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +"
-
- - name: git config
- run: |
- git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*"
- shell: bash
-
- - name: Checkout code
- uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: bindists-arm
- path: ./out
-
- - uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
- name: Run test (aarch64 linux)
- with:
- args: bash .github/scripts/test.sh
-
- test-mac-x86_64:
- name: Test binary (Mac x86_64)
- runs-on: macOS-12
- needs: ["bindist-mac-x86_64"]
- env:
- MACOSX_DEPLOYMENT_TARGET: 10.13
- ARTIFACT: "x86_64-apple-darwin"
- ARCH: 64
- TARBALL_EXT: tar.xz
- DISTRO: na
- steps:
- - name: Checkout code
- uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: bindists-mac-x86_64
- path: ./out
-
- - name: Run test (mac)
- run: |
- brew install coreutils tree
- bash .github/scripts/test.sh
-
- test-mac-aarch64:
- name: Test binary (Mac aarch64)
- runs-on: [self-hosted, macOS, ARM64]
- needs: ["bindist-mac-aarch64"]
- env:
- MACOSX_DEPLOYMENT_TARGET: 10.13
- ARTIFACT: "aarch64-apple-darwin"
+ ARTIFACT: aarch64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ HOMEBREW_CHANGE_ARCH_TO_ARM: '1'
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-aarch64-mac-967 (Build binaries)
+ runs-on:
+ - self-hosted
+ - macOS
+ - ARM64
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.6.7
+ name: Run build
+ run: |
+ bash .github/scripts/brew.sh git coreutils autoconf automake tree
+ export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH"
+ export LD=ld
+ bash .github/scripts/build.sh
+ tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-aarch64-mac-967
+ path: out-aarch64-apple-darwin-9.6.7.tar
+ retention-days: 2
+ build-aarch64-mac-984:
+ env:
+ ADD_CABAL_ARGS: ''
ARCH: ARM64
+ ARTIFACT: aarch64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ HOMEBREW_CHANGE_ARCH_TO_ARM: '1'
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
TARBALL_EXT: tar.xz
- DISTRO: n
- HOMEBREW_CHANGE_ARCH_TO_ARM: 1
- steps:
- - name: Checkout code
- uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: bindists-mac-aarch64
- path: ./out
-
- - name: Run test (mac)
- run: |
- bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree
- export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH"
- export CC="$HOME/.brew/opt/llvm@13/bin/clang"
- export CXX="$HOME/.brew/opt/llvm@13/bin/clang++"
- export LD=ld
- export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar"
- export RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib"
- bash .github/scripts/test.sh
-
- test-win:
- name: Test binary (Win)
- runs-on: windows-latest
- needs: ["bindist-win"]
- env:
- ARTIFACT: "x86_64-mingw64"
- ARCH: 64
- TARBALL_EXT: zip
- DISTRO: na
- strategy:
- fail-fast: false
- steps:
- - name: install windows deps
- shell: pwsh
- run: |
- C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
- C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
- C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python p7zip patch unzip zip git"
- taskkill /F /FI "MODULES eq msys-2.0.dll"
-
- - name: Checkout code
- uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- name: bindists-win
- path: ./out
-
- - name: Run test (windows)
- run: |
- $env:CHERE_INVOKING = 1
- $env:MSYS2_PATH_TYPE = "inherit"
- C:\msys64\usr\bin\bash -lc "bash .github/scripts/test.sh"
- shell: pwsh
-
- release:
- name: release
- needs: ["test-linux", "test-mac-x86_64", "test-mac-aarch64", "test-win", "test-arm"]
- runs-on: ubuntu-latest
- if: startsWith(github.ref, 'refs/tags/')
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-aarch64-mac-984 (Build binaries)
+ runs-on:
+ - self-hosted
+ - macOS
+ - ARM64
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.8.4
+ name: Run build
+ run: |
+ bash .github/scripts/brew.sh git coreutils autoconf automake tree
+ export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH"
+ export LD=ld
+ bash .github/scripts/build.sh
+ tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-aarch64-mac-984
+ path: out-aarch64-apple-darwin-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-deb10-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb10
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb10-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-deb10
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb10-9102
+ path: out-x86_64-linux-deb10-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-deb10-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb10
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb10-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-deb10
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb10-9122
+ path: out-x86_64-linux-deb10-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-deb10-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb10
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb10-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-deb10
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb10-967
+ path: out-x86_64-linux-deb10-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-deb10-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb10
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb10-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-deb10
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb10-984
+ path: out-x86_64-linux-deb10-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-deb11-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb11
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb11-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
steps:
- - name: Checkout code
- uses: actions/checkout@v3
-
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-deb9
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-deb10
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-deb11
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-ubuntu18.04
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-ubuntu20.04
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-ubuntu22.04
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-fedora27
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-fedora33
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-centos7
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-unknown
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-mint19.3
- - uses: actions/download-artifact@v3
- with:
- path: ./out
- name: bindists-x86_64-linux-mint20.2
-
- - uses: actions/download-artifact@v3
- with:
- name: bindists-arm
- path: ./out
-
- - uses: actions/download-artifact@v3
- with:
- name: bindists-mac-x86_64
- path: ./out
-
- - uses: actions/download-artifact@v3
- with:
- name: bindists-mac-aarch64
- path: ./out
-
- - uses: actions/download-artifact@v3
- with:
- name: bindists-win
- path: ./out
-
- - name: Install requirements
- run: |
- sudo apt-get update && sudo apt-get install -y tar xz-utils
- shell: bash
-
- - name: tar plan.json
- run: |
- cd out/plan.json
- tar cf plan_json.tar *
- mv plan_json.tar ../
- shell: bash
-
- - name: build source tarball
- run: |
- export RELEASE=$GITHUB_REF_NAME
- git archive --format=tar.gz -o "out/haskell-language-server-${RELEASE}-src.tar.gz" --prefix="haskell-language-server-${RELEASE}/" HEAD
- shell: bash
-
- - name: Release
- uses: softprops/action-gh-release@v2
- with:
- draft: true
- files: |
- ./out/*.zip
- ./out/*.tar.xz
- ./out/*.tar.gz
- ./out/*.tar
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-deb11
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb11-9102
+ path: out-x86_64-linux-deb11-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-deb11-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb11
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb11-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-deb11
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb11-9122
+ path: out-x86_64-linux-deb11-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-deb11-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb11
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb11-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-deb11
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb11-967
+ path: out-x86_64-linux-deb11-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-deb11-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb11
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb11-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-deb11
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb11-984
+ path: out-x86_64-linux-deb11-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-deb12-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb12
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb12-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-deb12
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb12-9102
+ path: out-x86_64-linux-deb12-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-deb12-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb12
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb12-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-deb12
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb12-9122
+ path: out-x86_64-linux-deb12-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-deb12-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb12
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb12-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-deb12
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb12-967
+ path: out-x86_64-linux-deb12-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-deb12-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb12
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb12-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-deb12
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb12-984
+ path: out-x86_64-linux-deb12-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-deb9-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb9
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb9-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-deb9
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb9-9102
+ path: out-x86_64-linux-deb9-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-deb9-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb9
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb9-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-deb9
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb9-9122
+ path: out-x86_64-linux-deb9-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-deb9-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb9
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb9-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-deb9
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb9-967
+ path: out-x86_64-linux-deb9-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-deb9-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb9
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-deb9-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-deb9
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-deb9-984
+ path: out-x86_64-linux-deb9-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-fedora33-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora33
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-fedora33-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-fedora33
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-fedora33-9102
+ path: out-x86_64-linux-fedora33-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-fedora33-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora33
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-fedora33-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-fedora33
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-fedora33-9122
+ path: out-x86_64-linux-fedora33-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-fedora33-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora33
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-fedora33-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-fedora33
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-fedora33-967
+ path: out-x86_64-linux-fedora33-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-fedora33-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora33
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-fedora33-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-fedora33
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-fedora33-984
+ path: out-x86_64-linux-fedora33-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-fedora40-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora40
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-fedora40-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-fedora40
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-fedora40-9102
+ path: out-x86_64-linux-fedora40-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-fedora40-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora40
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-fedora40-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-fedora40
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-fedora40-9122
+ path: out-x86_64-linux-fedora40-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-fedora40-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora40
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-fedora40-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-fedora40
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-fedora40-967
+ path: out-x86_64-linux-fedora40-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-fedora40-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora40
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-fedora40-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-fedora40
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-fedora40-984
+ path: out-x86_64-linux-fedora40-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-mint193-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint193
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint193-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-mint193
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint193-9102
+ path: out-x86_64-linux-mint193-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-mint193-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint193
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint193-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-mint193
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint193-9122
+ path: out-x86_64-linux-mint193-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-mint193-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint193
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint193-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-mint193
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint193-967
+ path: out-x86_64-linux-mint193-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-mint193-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint193
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint193-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-mint193
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint193-984
+ path: out-x86_64-linux-mint193-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-mint202-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint202
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint202-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-mint202
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint202-9102
+ path: out-x86_64-linux-mint202-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-mint202-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint202
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint202-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-mint202
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint202-9122
+ path: out-x86_64-linux-mint202-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-mint202-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint202
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint202-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-mint202
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint202-967
+ path: out-x86_64-linux-mint202-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-mint202-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint202
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint202-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-mint202
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint202-984
+ path: out-x86_64-linux-mint202-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-mint213-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint213
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint213-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-mint213
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint213-9102
+ path: out-x86_64-linux-mint213-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-mint213-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint213
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint213-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-mint213
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint213-9122
+ path: out-x86_64-linux-mint213-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-mint213-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint213
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint213-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-mint213
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint213-967
+ path: out-x86_64-linux-mint213-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-mint213-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint213
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-mint213-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-mint213
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-mint213-984
+ path: out-x86_64-linux-mint213-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu1804-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu1804
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu1804-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-ubuntu1804
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu1804-9102
+ path: out-x86_64-linux-ubuntu1804-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu1804-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu1804
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu1804-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-ubuntu1804
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu1804-9122
+ path: out-x86_64-linux-ubuntu1804-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu1804-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu1804
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu1804-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-ubuntu1804
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu1804-967
+ path: out-x86_64-linux-ubuntu1804-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu1804-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu1804
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu1804-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-ubuntu1804
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu1804-984
+ path: out-x86_64-linux-ubuntu1804-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu2004-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu2004-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-ubuntu2004
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu2004-9102
+ path: out-x86_64-linux-ubuntu2004-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu2004-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu2004-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-ubuntu2004
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu2004-9122
+ path: out-x86_64-linux-ubuntu2004-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu2004-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu2004-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-ubuntu2004
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu2004-967
+ path: out-x86_64-linux-ubuntu2004-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu2004-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu2004-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-ubuntu2004
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu2004-984
+ path: out-x86_64-linux-ubuntu2004-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu2204-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2204
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu2204-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-ubuntu2204
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu2204-9102
+ path: out-x86_64-linux-ubuntu2204-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu2204-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2204
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu2204-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-ubuntu2204
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu2204-9122
+ path: out-x86_64-linux-ubuntu2204-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu2204-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2204
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu2204-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-ubuntu2204
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu2204-967
+ path: out-x86_64-linux-ubuntu2204-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-ubuntu2204-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2204
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-ubuntu2204-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-ubuntu2204
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-ubuntu2204-984
+ path: out-x86_64-linux-ubuntu2204-9.8.4.tar
+ retention-days: 2
+ build-x86_64-linux-unknown-9102:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-unknown
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-unknown-9102 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.10.2
+ uses: ./.github/actions/bindist-actions/action-unknown
+ with:
+ stage: BUILD
+ version: 9.10.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-unknown-9102
+ path: out-x86_64-linux-unknown-9.10.2.tar
+ retention-days: 2
+ build-x86_64-linux-unknown-9122:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-unknown
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-unknown-9122 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.12.2
+ uses: ./.github/actions/bindist-actions/action-unknown
+ with:
+ stage: BUILD
+ version: 9.12.2
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-unknown-9122
+ path: out-x86_64-linux-unknown-9.12.2.tar
+ retention-days: 2
+ build-x86_64-linux-unknown-967:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-unknown
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-unknown-967 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.6.7
+ uses: ./.github/actions/bindist-actions/action-unknown
+ with:
+ stage: BUILD
+ version: 9.6.7
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-unknown-967
+ path: out-x86_64-linux-unknown-9.6.7.tar
+ retention-days: 2
+ build-x86_64-linux-unknown-984:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-unknown
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-linux-unknown-984 (Build binaries)
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Build 9.8.4
+ uses: ./.github/actions/bindist-actions/action-unknown
+ with:
+ stage: BUILD
+ version: 9.8.4
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-linux-unknown-984
+ path: out-x86_64-linux-unknown-9.8.4.tar
+ retention-days: 2
+ build-x86_64-mac-9102:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-mac-9102 (Build binaries)
+ runs-on:
+ - macOS-13
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.10.2
+ name: Run build
+ run: |
+ brew install coreutils tree
+ bash .github/scripts/build.sh
+ tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-mac-9102
+ path: out-x86_64-apple-darwin-9.10.2.tar
+ retention-days: 2
+ build-x86_64-mac-9122:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-mac-9122 (Build binaries)
+ runs-on:
+ - macOS-13
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.12.2
+ name: Run build
+ run: |
+ brew install coreutils tree
+ bash .github/scripts/build.sh
+ tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-mac-9122
+ path: out-x86_64-apple-darwin-9.12.2.tar
+ retention-days: 2
+ build-x86_64-mac-967:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-mac-967 (Build binaries)
+ runs-on:
+ - macOS-13
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.6.7
+ name: Run build
+ run: |
+ brew install coreutils tree
+ bash .github/scripts/build.sh
+ tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-mac-967
+ path: out-x86_64-apple-darwin-9.6.7.tar
+ retention-days: 2
+ build-x86_64-mac-984:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-mac-984 (Build binaries)
+ runs-on:
+ - macOS-13
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.8.4
+ name: Run build
+ run: |
+ brew install coreutils tree
+ bash .github/scripts/build.sh
+ tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/
+ shell: sh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-mac-984
+ path: out-x86_64-apple-darwin-9.8.4.tar
+ retention-days: 2
+ build-x86_64-windows-9102:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-mingw64
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: zip
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-windows-9102 (Build binaries)
+ runs-on:
+ - windows-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.10.2
+ name: Run build
+ run: |
+ $env:CHERE_INVOKING = 1
+ $env:MSYS2_PATH_TYPE = "inherit"
+ $ErrorActionPreference = "Stop"
+ C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh"
+ shell: pwsh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-windows-9102
+ path: ./out/*
+ retention-days: 2
+ build-x86_64-windows-9122:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-mingw64
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: zip
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-windows-9122 (Build binaries)
+ runs-on:
+ - windows-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.12.2
+ name: Run build
+ run: |
+ $env:CHERE_INVOKING = 1
+ $env:MSYS2_PATH_TYPE = "inherit"
+ $ErrorActionPreference = "Stop"
+ C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh"
+ shell: pwsh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-windows-9122
+ path: ./out/*
+ retention-days: 2
+ build-x86_64-windows-967:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-mingw64
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: zip
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-windows-967 (Build binaries)
+ runs-on:
+ - windows-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.6.7
+ name: Run build
+ run: |
+ $env:CHERE_INVOKING = 1
+ $env:MSYS2_PATH_TYPE = "inherit"
+ $ErrorActionPreference = "Stop"
+ C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh"
+ shell: pwsh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-windows-967
+ path: ./out/*
+ retention-days: 2
+ build-x86_64-windows-984:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-mingw64
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: zip
+ TZ: Asia/Singapore
+ environment: CI
+ name: build-x86_64-windows-984 (Build binaries)
+ runs-on:
+ - windows-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - env:
+ GHC_VERSION: 9.8.4
+ name: Run build
+ run: |
+ $env:CHERE_INVOKING = 1
+ $env:MSYS2_PATH_TYPE = "inherit"
+ $ErrorActionPreference = "Stop"
+ C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh"
+ shell: pwsh
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ if-no-files-found: error
+ name: artifacts-build-x86_64-windows-984
+ path: ./out/*
+ retention-days: 2
+ release:
+ if: startsWith(github.ref, 'refs/tags/')
+ name: release
+ needs:
+ - test-x86_64-mac
+ - test-aarch64-mac
+ - test-x86_64-windows
+ - test-aarch64-linux-ubuntu2004
+ - test-x86_64-linux-deb9
+ - test-x86_64-linux-deb10
+ - test-x86_64-linux-deb11
+ - test-x86_64-linux-deb12
+ - test-x86_64-linux-ubuntu1804
+ - test-x86_64-linux-ubuntu2004
+ - test-x86_64-linux-ubuntu2204
+ - test-x86_64-linux-mint193
+ - test-x86_64-linux-mint202
+ - test-x86_64-linux-mint213
+ - test-x86_64-linux-fedora33
+ - test-x86_64-linux-fedora40
+ - test-x86_64-linux-unknown
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-apple-darwin
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-aarch64-apple-darwin
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-mingw64
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-aarch64-linux-ubuntu2004
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-deb9
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-deb10
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-deb11
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-deb12
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-ubuntu1804
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-ubuntu2004
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-ubuntu2204
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-mint193
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-mint202
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-mint213
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-fedora33
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-fedora40
+ path: ./out
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-unknown
+ path: ./out
+ - name: Prepare release
+ run: |
+ sudo apt-get update && sudo apt-get install -y tar xz-utils
+ cd out/plan.json
+ tar cf plan_json.tar *
+ mv plan_json.tar ../
+ cd ../..
+ export RELEASE=$GITHUB_REF_NAME
+ git archive --format=tar.gz -o "out/haskell-language-server-${RELEASE}-src.tar.gz" --prefix="haskell-language-server-${RELEASE}/" HEAD
+ shell: bash
+ - name: Release
+ uses: softprops/action-gh-release@v2
+ with:
+ draft: true
+ files: |
+ ./out/*.zip
+ ./out/*.tar.xz
+ ./out/*.tar.gz
+ ./out/*.tar
+ test-aarch64-linux-ubuntu2004:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: ARM64
+ ARTIFACT: aarch64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-aarch64-linux-ubuntu2004 (Test binaries)
+ needs:
+ - bindist-aarch64-linux-ubuntu2004
+ runs-on:
+ - self-hosted
+ - Linux
+ - ARM64
+ - maerwald
+ steps:
+ - name: clean and git config for aarch64-linux
+ run: |
+ find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +
+ git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*"
+ shell: bash
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-aarch64-linux-ubuntu2004
+ path: ./out
+ - name: Run test
+ uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
+ with:
+ args: bash .github/scripts/test.sh
+ test-aarch64-mac:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: ARM64
+ ARTIFACT: aarch64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ HOMEBREW_CHANGE_ARCH_TO_ARM: '1'
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-aarch64-mac (Test binaries)
+ needs:
+ - bindist-aarch64-mac
+ runs-on:
+ - self-hosted
+ - macOS
+ - ARM64
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-aarch64-apple-darwin
+ path: ./out
+ - name: Run test
+ run: |
+ bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree
+ export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH"
+ export CC="$HOME/.brew/opt/llvm@13/bin/clang"
+ export CXX="$HOME/.brew/opt/llvm@13/bin/clang++"
+ export LD=ld
+ export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar"
+ export RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib"
+ bash .github/scripts/test.sh
+ shell: sh
+ test-x86_64-linux-deb10:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb10
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-deb10 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-deb10
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-deb10
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-deb10
+ with:
+ stage: TEST
+ test-x86_64-linux-deb11:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb11
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-deb11 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-deb11
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-deb11
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-deb11
+ with:
+ stage: TEST
+ test-x86_64-linux-deb12:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb12
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-deb12 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-deb12
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-deb12
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-deb12
+ with:
+ stage: TEST
+ test-x86_64-linux-deb9:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-deb9
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-deb9 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-deb9
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-deb9
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-deb9
+ with:
+ stage: TEST
+ test-x86_64-linux-fedora33:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora33
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-fedora33 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-fedora33
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-fedora33
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-fedora33
+ with:
+ stage: TEST
+ test-x86_64-linux-fedora40:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-fedora40
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-fedora40 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-fedora40
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-fedora40
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-fedora40
+ with:
+ stage: TEST
+ test-x86_64-linux-mint193:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint193
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-mint193 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-mint193
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-mint193
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-mint193
+ with:
+ stage: TEST
+ test-x86_64-linux-mint202:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint202
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-mint202 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-mint202
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-mint202
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-mint202
+ with:
+ stage: TEST
+ test-x86_64-linux-mint213:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-mint213
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-mint213 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-mint213
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-mint213
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-mint213
+ with:
+ stage: TEST
+ test-x86_64-linux-ubuntu1804:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu1804
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-ubuntu1804 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-ubuntu1804
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-ubuntu1804
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-ubuntu1804
+ with:
+ stage: TEST
+ test-x86_64-linux-ubuntu2004:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2004
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-ubuntu2004 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-ubuntu2004
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-ubuntu2004
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-ubuntu2004
+ with:
+ stage: TEST
+ test-x86_64-linux-ubuntu2204:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-ubuntu2204
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-ubuntu2204 (Test binaries)
+ needs:
+ - bindist-x86_64-linux-ubuntu2204
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-ubuntu2204
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-ubuntu2204
+ with:
+ stage: TEST
+ test-x86_64-linux-unknown:
+ env:
+ ADD_CABAL_ARGS: --enable-split-sections
+ ARCH: '64'
+ ARTIFACT: x86_64-linux-unknown
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DEBIAN_FRONTEND: noninteractive
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-linux-unknown (Test binaries)
+ needs:
+ - bindist-x86_64-linux-unknown
+ runs-on:
+ - ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-linux-unknown
+ path: ./out
+ - name: Test
+ uses: ./.github/actions/bindist-actions/action-unknown
+ with:
+ stage: TEST
+ test-x86_64-mac:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-apple-darwin
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ MACOSX_DEPLOYMENT_TARGET: '10.13'
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: tar.xz
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-mac (Test binaries)
+ needs:
+ - bindist-x86_64-mac
+ runs-on:
+ - macOS-13
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-apple-darwin
+ path: ./out
+ - name: Run test
+ run: |
+ brew install coreutils tree
+ bash .github/scripts/test.sh
+ shell: sh
+ test-x86_64-windows:
+ env:
+ ADD_CABAL_ARGS: ''
+ ARCH: '64'
+ ARTIFACT: x86_64-mingw64
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ S3_HOST: ${{ secrets.S3_HOST }}
+ TARBALL_EXT: zip
+ TZ: Asia/Singapore
+ environment: CI
+ name: test-x86_64-windows (Test binaries)
+ needs:
+ - bindist-x86_64-windows
+ runs-on:
+ - windows-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: bindist-x86_64-mingw64
+ path: ./out
+ - name: install windows deps
+ run: |
+ C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
+ C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu"
+ C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python p7zip patch unzip zip git"
+ taskkill /F /FI "MODULES eq msys-2.0.dll"
+ shell: pwsh
+ - name: Run test
+ run: |
+ $env:CHERE_INVOKING = 1
+ $env:MSYS2_PATH_TYPE = "inherit"
+ C:\msys64\usr\bin\bash -lc "bash .github/scripts/test.sh"
+ shell: pwsh
+name: Build and release
+'on':
+ push:
+ tags:
+ - '*'
+ schedule:
+ - cron: 0 2 * * 1
diff --git a/.github/workflows/supported-ghc-versions.json b/.github/workflows/supported-ghc-versions.json
index b530e284e0..35a3bd4ac4 100644
--- a/.github/workflows/supported-ghc-versions.json
+++ b/.github/workflows/supported-ghc-versions.json
@@ -1 +1 @@
-["9.10", "9.8", "9.6", "9.4"]
+["9.12", "9.10", "9.8", "9.6"]
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 544a9c6e78..984758a310 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -139,7 +139,7 @@ jobs:
run: cabal test hls-refactor-plugin-tests || cabal test hls-refactor-plugin-tests
# TODO enable when it supports 9.10
- - if: matrix.test && matrix.ghc != '9.10'
+ - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12'
name: Test hls-floskell-plugin
run: cabal test hls-floskell-plugin-tests || cabal test hls-floskell-plugin-tests
@@ -156,16 +156,15 @@ jobs:
run: cabal test hls-eval-plugin-tests || cabal test hls-eval-plugin-tests
# TODO enable when it supports 9.10
- - if: matrix.test && matrix.ghc != '9.10'
+ - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12'
name: Test hls-splice-plugin
run: cabal test hls-splice-plugin-tests || cabal test hls-splice-plugin-tests
- - if: matrix.test
+ - if: matrix.test && matrix.ghc != '9.12'
name: Test hls-stan-plugin
run: cabal test hls-stan-plugin-tests || cabal test hls-stan-plugin-tests
- # TODO enable when it supports 9.10
- - if: matrix.test && matrix.ghc != '9.10'
+ - if: matrix.test
name: Test hls-stylish-haskell-plugin
run: cabal test hls-stylish-haskell-plugin-tests || cabal test hls-stylish-haskell-plugin-tests
@@ -227,11 +226,11 @@ jobs:
run: cabal test hls-explicit-record-fields-plugin-tests || cabal test hls-explicit-record-fields-plugin-tests
# versions need to be limited since the tests depend on cabal-fmt which only builds with ghc <9.10
- - if: matrix.test && matrix.ghc != '9.10'
+ - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12'
name: Test hls-cabal-fmt-plugin test suite
run: cabal test hls-cabal-fmt-plugin-tests --flag=isolateCabalfmtTests || cabal test hls-cabal-fmt-plugin-tests --flag=isolateCabalfmtTests
- - if: matrix.test
+ - if: matrix.test && matrix.ghc != '9.12'
name: Test hls-cabal-gild-plugin test suite
run: cabal test hls-cabal-gild-plugin-tests --flag=isolateCabalGildTests || cabal test hls-cabal-gild-plugin-tests --flag=isolateCabalGildTests
@@ -240,7 +239,7 @@ jobs:
run: cabal test hls-cabal-plugin-tests || cabal test hls-cabal-plugin-tests
# TODO enable when it supports 9.10
- - if: matrix.test && matrix.ghc != '9.10'
+ - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12'
name: Test hls-retrie-plugin test suite
run: cabal test hls-retrie-plugin-tests || cabal test hls-retrie-plugin-tests
@@ -256,6 +255,12 @@ jobs:
name: Test hls-notes-plugin test suite
run: cabal test hls-notes-plugin-tests || cabal test hls-notes-plugin-tests
+ # The plugin tutorial is only compatible with 9.6 and 9.8.
+ # No particular reason, just to avoid excessive CPP.
+ - if: matrix.test && matrix.ghc != '9.4' && matrix.ghc != '9.10' && matrix.ghc != '9.12'
+ name: Compile the plugin-tutorial
+ run: cabal build plugin-tutorial
+
test_post_job:
if: always()
runs-on: ubuntu-latest
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 87de7c4790..03edd673b7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,32 +1,23 @@
-{
- "repos": [
- {
- "hooks": [
- {
- "entry": "stylish-haskell --inplace",
- "exclude": "(^Setup.hs$|test/testdata/.*$|test/data/.*$|test/manual/lhs/.*$|^hie-compat/.*$|^plugins/hls-tactics-plugin/.*$|^ghcide/src/Development/IDE/GHC/Compat.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs$|^ghcide/src/Development/IDE/GHC/Compat/Core.hs$|^ghcide/src/Development/IDE/Spans/Pragmas.hs$|^ghcide/src/Development/IDE/LSP/Outline.hs$|^plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs$|^ghcide/src/Development/IDE/Core/Rules.hs$|^ghcide/src/Development/IDE/Core/Compile.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs$)",
- "files": "\\.l?hs$",
- "id": "stylish-haskell",
- "language": "system",
- "name": "stylish-haskell",
- "pass_filenames": true,
- "types": [
- "file"
- ]
- }
- ],
- "repo": "local"
- },
- {
- "repo": "https://github.com/pre-commit/pre-commit-hooks",
- "rev": "v4.1.0",
- "hooks": [
- {
- "id": "mixed-line-ending",
- "args": ["--fix", "lf"],
- "exclude": "test/testdata/.*CRLF.*?\\.hs$"
- }
- ]
- }
- ]
-}
+# https://pre-commit.com/
+# https://github.com/pre-commit/pre-commit
+repos:
+ - hooks:
+ - entry: stylish-haskell --inplace
+ exclude: >-
+ (^Setup.hs$|test/testdata/.*$|test/data/.*$|test/manual/lhs/.*$|^hie-compat/.*$|^plugins/hls-tactics-plugin/.*$|^ghcide/src/Development/IDE/GHC/Compat.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs$|^ghcide/src/Development/IDE/GHC/Compat/Core.hs$|^ghcide/src/Development/IDE/Spans/Pragmas.hs$|^ghcide/src/Development/IDE/LSP/Outline.hs$|^plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs$|^ghcide/src/Development/IDE/Core/Rules.hs$|^ghcide/src/Development/IDE/Core/Compile.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs$|^plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs$)
+ files: \.l?hs$
+ id: stylish-haskell
+ language: system
+ name: stylish-haskell
+ pass_filenames: true
+ types:
+ - file
+ repo: local
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.1.0
+ hooks:
+ - id: mixed-line-ending
+ args:
+ - '--fix'
+ - lf
+ exclude: test/testdata/.*CRLF.*?\.hs$
diff --git a/ChangeLog.md b/ChangeLog.md
index 24090d5e86..65000395e2 100644
--- a/ChangeLog.md
+++ b/ChangeLog.md
@@ -1,5 +1,228 @@
# Changelog for haskell-language-server
+
+## 2.11.0.0
+
+- Bindists for GHC 9.12.2
+  - Full plugin support, including the refactor plugin
+- Bindists for GHC 9.10.2
+- Bindists for GHC 9.8.4
+- Bindists for GHC 9.6.7
+- Bindists for GHC 9.4.8
+- Dropped support for CentOS 7, as this platform is no longer supported by GHC
+- Improved import suggestions for constructors and OverloadedRecordDot fields
+
+### Pull Requests
+
+- Add doc for project-wide renaming
+ ([#4584](https://github.com/haskell/haskell-language-server/pull/4584)) by @jian-lin
+- Use hie-bios 0.15.0
+ ([#4582](https://github.com/haskell/haskell-language-server/pull/4582)) by @fendor
+- Allow building HLS with GHC 9.10.2
+ ([#4581](https://github.com/haskell/haskell-language-server/pull/4581)) by @fendor
+- Fix Plugin support table for 9.12.2
+ ([#4580](https://github.com/haskell/haskell-language-server/pull/4580)) by @fendor
+- Fix misplaced inlay hints by applying PositionMapping
+ ([#4571](https://github.com/haskell/haskell-language-server/pull/4571)) by @jetjinser
+- Enable hls-plugin-gadt for ghc-9.12
+ ([#4568](https://github.com/haskell/haskell-language-server/pull/4568)) by @GuillaumedeVolpiano
+- Remove no longer needed allow-newer
+ ([#4566](https://github.com/haskell/haskell-language-server/pull/4566)) by @jhrcek
+- Add missing golden files for GHC 9.10 config tests
+ ([#4563](https://github.com/haskell/haskell-language-server/pull/4563)) by @jian-lin
+- updating the plugins support table for refactor
+ ([#4560](https://github.com/haskell/haskell-language-server/pull/4560)) by @GuillaumedeVolpiano
+- Enable stylish-haskell for ghc-9.10 and ghc-9.12
+ ([#4559](https://github.com/haskell/haskell-language-server/pull/4559)) by @GuillaumedeVolpiano
+- Bump haskell-actions/setup from 2.7.10 to 2.7.11
+ ([#4557](https://github.com/haskell/haskell-language-server/pull/4557)) by @dependabot[bot]
+- Provide code action in hls-eval-plugin
+ ([#4556](https://github.com/haskell/haskell-language-server/pull/4556)) by @jian-lin
+- enable hlint for ghc-9.12
+ ([#4555](https://github.com/haskell/haskell-language-server/pull/4555)) by @GuillaumedeVolpiano
+- Enable fourmolu and ormolu for GHC 9.12
+ ([#4554](https://github.com/haskell/haskell-language-server/pull/4554)) by @fendor
+- Enable hls-cabal-gild-plugin for GHC 9.12.2
+ ([#4553](https://github.com/haskell/haskell-language-server/pull/4553)) by @fendor
+- Update plugin support table for GHC 9.12.2
+ ([#4552](https://github.com/haskell/haskell-language-server/pull/4552)) by @fendor
+- Remove allow-newer for hiedb
+ ([#4551](https://github.com/haskell/haskell-language-server/pull/4551)) by @jhrcek
+- Fix typo of rename plugin config
+ ([#4546](https://github.com/haskell/haskell-language-server/pull/4546)) by @jian-lin
+- Update the ghcup-metadata generation script
+ ([#4545](https://github.com/haskell/haskell-language-server/pull/4545)) by @fendor
+- porting hls-refactor to ghc-9.12
+ ([#4543](https://github.com/haskell/haskell-language-server/pull/4543)) by @GuillaumedeVolpiano
+- add ghcide-bench flag to .cabal file
+ ([#4542](https://github.com/haskell/haskell-language-server/pull/4542)) by @juhp
+- Revert "link executables dynamically to speed up linking (#4423)"
+ ([#4541](https://github.com/haskell/haskell-language-server/pull/4541)) by @fendor
+- Support PackageImports in hiddenPackageSuggestion
+ ([#4537](https://github.com/haskell/haskell-language-server/pull/4537)) by @jian-lin
+- Improve FreeBSD installation docs
+ ([#4536](https://github.com/haskell/haskell-language-server/pull/4536)) by @arrowd
+- reinstating ignore-plugins-ghc-bounds
+ ([#4532](https://github.com/haskell/haskell-language-server/pull/4532)) by @GuillaumedeVolpiano
+- Simplify FuzzySearch test (avoid dependency on /usr/share/dict/words)
+ ([#4531](https://github.com/haskell/haskell-language-server/pull/4531)) by @jhrcek
+- Import suggestion for missing newtype constructor, all types constructor and indirect overloadedrecorddot fields
+ ([#4516](https://github.com/haskell/haskell-language-server/pull/4516)) by @guibou
+
+## 2.10.0.0
+
+- Bindists for GHC 9.12.2
+  - This is only basic support; many plugins are not yet compatible.
+- Bindists for GHC 9.8.4
+- Bindists for GHC 9.6.7
+- `hls-cabal-plugin` features
+ - Support for `cabal-add`
+ - Goto Definition for common sections
+ - Outline of .cabal files
+- Fix handling of LSP resolve requests
+- Display Inlay Hints
+ - Records
+ - Imports
+
+### Pull Requests
+
+- Fix cabal check for Hackage release
+ ([#4528](https://github.com/haskell/haskell-language-server/pull/4528)) by @fendor
+- GHC 9.12 support
+ ([#4527](https://github.com/haskell/haskell-language-server/pull/4527)) by @wz1000
+- Bump cachix/install-nix-action from 30 to 31
+ ([#4525](https://github.com/haskell/haskell-language-server/pull/4525)) by @dependabot[bot]
+- Bump cachix/cachix-action from 15 to 16
+ ([#4523](https://github.com/haskell/haskell-language-server/pull/4523)) by @dependabot[bot]
+- Bump haskell-actions/setup from 2.7.9 to 2.7.10
+ ([#4522](https://github.com/haskell/haskell-language-server/pull/4522)) by @dependabot[bot]
+- Bump haskell-actions/setup from 2.7.9 to 2.7.10 in /.github/actions/setup-build
+ ([#4521](https://github.com/haskell/haskell-language-server/pull/4521)) by @dependabot[bot]
+- Move ghcide-test to stand alone dir
+ ([#4520](https://github.com/haskell/haskell-language-server/pull/4520)) by @soulomoon
+- refactor: remove unnecessary instance and use of unsafeCoerce
+ ([#4518](https://github.com/haskell/haskell-language-server/pull/4518)) by @MangoIV
+- convert `pre-commit-config.yaml` from JSON to YAML
+ ([#4513](https://github.com/haskell/haskell-language-server/pull/4513)) by @peterbecich
+- Enable bench for 9.10
+ ([#4512](https://github.com/haskell/haskell-language-server/pull/4512)) by @soulomoon
+- Bugfix: Explicit record fields inlay hints for polymorphic records
+ ([#4510](https://github.com/haskell/haskell-language-server/pull/4510)) by @wczyz
+- Capitalization of "Replace"
+ ([#4509](https://github.com/haskell/haskell-language-server/pull/4509)) by @dschrempf
+- document eval plugin not supporting multiline expressions
+ ([#4495](https://github.com/haskell/haskell-language-server/pull/4495)) by @noughtmare
+- Documentation: Imrpove "Contributing" (and amend Sphinx builders)
+ ([#4494](https://github.com/haskell/haskell-language-server/pull/4494)) by @dschrempf
+- Documentation: HLS plugin tutorial improvements
+ ([#4491](https://github.com/haskell/haskell-language-server/pull/4491)) by @dschrempf
+- Nix tooling (minor changes)
+ ([#4490](https://github.com/haskell/haskell-language-server/pull/4490)) by @dschrempf
+- Bump haskell-actions/setup from 2.7.8 to 2.7.9
+ ([#4483](https://github.com/haskell/haskell-language-server/pull/4483)) by @dependabot[bot]
+- Bump haskell-actions/setup from 2.7.8 to 2.7.9 in /.github/actions/setup-build
+ ([#4482](https://github.com/haskell/haskell-language-server/pull/4482)) by @dependabot[bot]
+- Rework bindist CI
+ ([#4481](https://github.com/haskell/haskell-language-server/pull/4481)) by @wz1000
+- Remove Unsafe Dynflags deadcode, they don't exist any more!
+ ([#4480](https://github.com/haskell/haskell-language-server/pull/4480)) by @fendor
+- Implement fallback handler for `*/resolve` requests
+ ([#4478](https://github.com/haskell/haskell-language-server/pull/4478)) by @fendor
+- Bump haskell-actions/setup from 2.7.7 to 2.7.8
+ ([#4477](https://github.com/haskell/haskell-language-server/pull/4477)) by @dependabot[bot]
+- Bump haskell-actions/setup from 2.7.7 to 2.7.8 in /.github/actions/setup-build
+ ([#4476](https://github.com/haskell/haskell-language-server/pull/4476)) by @dependabot[bot]
+- Bump haskell-actions/setup from 2.7.6 to 2.7.7
+ ([#4471](https://github.com/haskell/haskell-language-server/pull/4471)) by @dependabot[bot]
+- Bump haskell-actions/setup from 2.7.6 to 2.7.7 in /.github/actions/setup-build
+ ([#4470](https://github.com/haskell/haskell-language-server/pull/4470)) by @dependabot[bot]
+- Allow building with GHC 9.8.4
+ ([#4459](https://github.com/haskell/haskell-language-server/pull/4459)) by @fendor
+- Update python read-the-docs dependencies to latest
+ ([#4457](https://github.com/haskell/haskell-language-server/pull/4457)) by @fendor
+- More tests and better docs for cabal-add
+ ([#4455](https://github.com/haskell/haskell-language-server/pull/4455)) by @VenInf
+- ci(mergify): upgrade configuration to current format
+ ([#4454](https://github.com/haskell/haskell-language-server/pull/4454)) by @mergify[bot]
+- Support record positional construction inlay hints
+ ([#4447](https://github.com/haskell/haskell-language-server/pull/4447)) by @jetjinser
+- Build HLS with GHC 9.8.3
+ ([#4444](https://github.com/haskell/haskell-language-server/pull/4444)) by @fendor
+- Don't suggest -Wno-deferred-out-of-scope-variables
+ ([#4441](https://github.com/haskell/haskell-language-server/pull/4441)) by @jeukshi
+- Enable hls-stan-plugin for GHC 9.10.1
+ ([#4437](https://github.com/haskell/haskell-language-server/pull/4437)) by @fendor
+- Enhance formatting of the `cabal-version` error message
+ ([#4436](https://github.com/haskell/haskell-language-server/pull/4436)) by @fendor
+- Support structured diagnostics 2
+ ([#4433](https://github.com/haskell/haskell-language-server/pull/4433)) by @noughtmare
+- Cabal ignore if for completions (#4289)
+ ([#4427](https://github.com/haskell/haskell-language-server/pull/4427)) by @SamuelLess
+- Fix cabal-add testdata for hls-cabal-plugin-tests
+ ([#4426](https://github.com/haskell/haskell-language-server/pull/4426)) by @fendor
+- gracefully handle errors for unsupported cabal version
+ ([#4425](https://github.com/haskell/haskell-language-server/pull/4425)) by @fridewald
+- Fix pre-commit in CI
+ ([#4424](https://github.com/haskell/haskell-language-server/pull/4424)) by @fendor
+- link executables dynamically to speed up linking
+ ([#4423](https://github.com/haskell/haskell-language-server/pull/4423)) by @develop7
+- Cabal plugin: implement check for package.yaml in a stack project
+ ([#4422](https://github.com/haskell/haskell-language-server/pull/4422)) by @JMoss-dev
+- Fix exporting operator pattern synonym
+ ([#4420](https://github.com/haskell/haskell-language-server/pull/4420)) by @pbrinkmeier
+- Add docs about running tests for new contributors
+ ([#4418](https://github.com/haskell/haskell-language-server/pull/4418)) by @pbrinkmeier
+- Bump cachix/install-nix-action from 29 to 30
+ ([#4413](https://github.com/haskell/haskell-language-server/pull/4413)) by @dependabot[bot]
+- Bump cachix/install-nix-action from V27 to 29
+ ([#4411](https://github.com/haskell/haskell-language-server/pull/4411)) by @dependabot[bot]
+- Avoid expectFail in the test suite
+ ([#4402](https://github.com/haskell/haskell-language-server/pull/4402)) by @sgillespie
+- Fix typos in hls-cabal-fmt-plugin
+ ([#4399](https://github.com/haskell/haskell-language-server/pull/4399)) by @fendor
+- Jump to instance definition and explain typeclass evidence
+ ([#4392](https://github.com/haskell/haskell-language-server/pull/4392)) by @fendor
+- Update cabal-add dependency
+ ([#4389](https://github.com/haskell/haskell-language-server/pull/4389)) by @VenInf
+- Improve error message for `--probe-tools`
+ ([#4387](https://github.com/haskell/haskell-language-server/pull/4387)) by @sgillespie
+- Documentation for build-depends on hover
+ ([#4385](https://github.com/haskell/haskell-language-server/pull/4385)) by @VenInf
+- Bump haskell-actions/setup from 2.7.3 to 2.7.6
+ ([#4384](https://github.com/haskell/haskell-language-server/pull/4384)) by @dependabot[bot]
+- Bump haskell-actions/setup from 2.7.5 to 2.7.6 in /.github/actions/setup-build
+ ([#4383](https://github.com/haskell/haskell-language-server/pull/4383)) by @dependabot[bot]
+- Clear GHCup caches in CI to not run out of space in CI
+ ([#4382](https://github.com/haskell/haskell-language-server/pull/4382)) by @fendor
+- Cabal go to module's definition
+ ([#4380](https://github.com/haskell/haskell-language-server/pull/4380)) by @VenInf
+- Add Goto Definition for cabal common sections
+ ([#4375](https://github.com/haskell/haskell-language-server/pull/4375)) by @ChristophHochrainer
+- cabal-add integration as a CodeAction
+ ([#4360](https://github.com/haskell/haskell-language-server/pull/4360)) by @VenInf
+- Bump haskell-actions/setup from 2.7.3 to 2.7.5 in /.github/actions/setup-build
+ ([#4354](https://github.com/haskell/haskell-language-server/pull/4354)) by @dependabot[bot]
+- Support Inlay hints for record wildcards
+ ([#4351](https://github.com/haskell/haskell-language-server/pull/4351)) by @jetjinser
+- Remove componentInternalUnits
+ ([#4350](https://github.com/haskell/haskell-language-server/pull/4350)) by @soulomoon
+- Fix core file location in `GetLinkable`
+ ([#4347](https://github.com/haskell/haskell-language-server/pull/4347)) by @soulomoon
+- Release 2.9.0.1
+ ([#4346](https://github.com/haskell/haskell-language-server/pull/4346)) by @wz1000
+- Using captureKicksDiagnostics to speed up multiple plugin tests
+ ([#4339](https://github.com/haskell/haskell-language-server/pull/4339)) by @komikat
+- Get files from Shake VFS from within plugin handlers
+ ([#4328](https://github.com/haskell/haskell-language-server/pull/4328)) by @awjchen
+- Cabal plugin outline view
+ ([#4323](https://github.com/haskell/haskell-language-server/pull/4323)) by @VenInf
+- Add missing documentation for cabal formatters
+ ([#4322](https://github.com/haskell/haskell-language-server/pull/4322)) by @fendor
+- Provide explicit import in inlay hints
+ ([#4235](https://github.com/haskell/haskell-language-server/pull/4235)) by @jetjinser
+- Add codeactions for cabal field names
+ ([#3273](https://github.com/haskell/haskell-language-server/pull/3273)) by @dyniec
+
## 2.9.0.1
- Bindists for GHC 9.6.6
diff --git a/RELEASING.md b/RELEASING.md
index 42ba158ac2..a48b32cb93 100644
--- a/RELEASING.md
+++ b/RELEASING.md
@@ -3,10 +3,9 @@
## Release checklist
- [ ] check ghcup supports new GHC releases if any
-- [ ] set the supported GHCs in workflow file `.github/workflows/release.yaml`
- - There is currently a list of GHC versions for each major platform. Search for `ghc: [` to find all lists.
- - Look for `TODO:` to find locations that require extra care for GHC versions.
- [ ] check all plugins still work if release includes code changes
+- [ ] set the supported GHCs in workflow file `.github/generate-ci/gen_ci.hs`
+- [ ] regenerate the CI via `./.github/generate-ci/generate-jobs`
- [ ] bump package versions in all `*.cabal` files (same version as hls)
- HLS uses lockstep versioning. The core packages and all plugins use the same version number, and only support exactly this version.
- Exceptions:
diff --git a/cabal.project b/cabal.project
index 7e488eae8c..3d43dff2f4 100644
--- a/cabal.project
+++ b/cabal.project
@@ -8,7 +8,7 @@ packages:
./hls-test-utils
-index-state: 2024-12-02T00:00:00Z
+index-state: 2025-06-07T14:57:40Z
tests: True
test-show-details: direct
@@ -17,12 +17,6 @@ benchmarks: True
write-ghc-environment-files: never
--- Link executables dynamically so the linker doesn't produce test
--- executables of ~150MB each and works lightning fast at that too
--- Disabled on Windows
-if(!os(windows))
- executable-dynamic: True
-
-- Many of our tests only work single-threaded, and the only way to
-- ensure tasty runs everything purely single-threaded is to pass
-- this at the top-level
@@ -49,16 +43,17 @@ constraints:
-- in the future, thus: TODO: remove this flag.
bitvec -simd,
-if impl(ghc >= 9.9)
- -- https://github.com/haskell/haskell-language-server/issues/4324
- benchmarks: False
-if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5)
- -- By depending on ghc-lib-parser and ghc, we are encountering
- -- a constraint conflict, ghc-9.8.4 comes with `filepath-1.4.301.0`,
- -- and `ghc-lib-parser-9.8.4.20241130` specifies `filepath >=1.5 && < 1.6.
- -- See https://github.com/digital-asset/ghc-lib/issues/572 for details.
- allow-older:
- ghc-lib-parser:filepath
- constraints:
- ghc-lib-parser==9.8.4.20241130
+-- Some of the formatters need the latest Cabal-syntax version,
+-- but 'cabal-install-parsers-0.6.2' only has Cabal-syntax (>=3.12.0.0 && <3.13).
+-- So, we relax the upper bounds here.
+-- fourmolu-0.18.0 and ormolu-0.8 depend on Cabal-syntax == 3.14.*, while
+-- cabal-add depends on cabal-install-parsers.
+allow-newer:
+ cabal-install-parsers:Cabal-syntax,
+
+if impl(ghc >= 9.11)
+ benchmarks: False
+ allow-newer:
+ cabal-install-parsers:base,
+ cabal-install-parsers:time,
diff --git a/docs/configuration.md b/docs/configuration.md
index 4edc2c7936..9da816c09e 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -41,9 +41,11 @@ This option obviously would not make sense for language servers for other langua
Here is a list of the additional settings currently supported by `haskell-language-server`, along with their setting key (you may not need to know this) and default:
- Formatting provider (`haskell.formattingProvider`, default `ormolu`): what formatter to use; one of `floskell`, `ormolu`, `fourmolu`, or `stylish-haskell`.
+- Cabal formatting provider (`haskell.cabalFormattingProvider`, default `cabal-gild`): what formatter to use for cabal files; one of `cabal-gild` or `cabal-fmt`.
- Max completions (`haskell.maxCompletions`, default 40): maximum number of completions sent to the LSP client.
- Check project (`haskell.checkProject`, default true): whether to typecheck the entire project on initial load. As it is activated by default could drive to bad performance in large projects.
- Check parents (`haskell.checkParents`, default `CheckOnSave`): when to typecheck reverse dependencies of a file; one of `NeverCheck`, `CheckOnSave` (means dependent/parent modules will only be checked when you save), or `AlwaysCheck` (means re-typechecking them on every change).
+- Session loading preference (`haskell.sessionLoading`, default `singleComponent`): how to load sessions; one of `singleComponent` (always load only a single component when a new component is discovered) or `multipleComponents` (prefer loading multiple components in the cradle at once). `multipleComponents` might not always be possible, for example if the tool does not support loading multiple components. The cradle can decide how to handle these situations, and whether to honour the preference at all.
#### Generic plugin configuration
@@ -61,7 +63,7 @@ Plugins have a generic config to control their behaviour. The schema of such con
- `haskell.plugin.eval.config.diff`, default true: When reloading haddock test results in changes, mark it with WAS/NOW.
- `haskell.plugin.eval.config.exception`, default false: When the command results in an exception, mark it with `*** Exception:`.
- `rename`:
- - `haskell.plugin.rename.config.diff`, default false: Enables renaming across modules (experimental)
+ - `haskell.plugin.rename.config.crossModule`, default false: Enables renaming across modules (experimental)
- `ghcide-completions`:
- `haskell.plugin.ghcide-completions.config.snippetsOn`, default true: Inserts snippets when using code completions.
- `haskell.plugin.ghcide-completions.config.autoExtendOn`, default true: Extends the import list automatically when completing a out-of-scope identifier.
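
For orientation, the options documented above combine in an LSP client's configuration. The sketch below assumes a VS Code-style JSONC `settings.json`; the keys and values are the ones listed in `docs/configuration.md`, but the surrounding file layout is an assumption and will differ per editor.

```jsonc
{
  // Formatters for Haskell sources and for .cabal files (defaults shown)
  "haskell.formattingProvider": "ormolu",
  "haskell.cabalFormattingProvider": "cabal-gild",

  // Prefer loading multiple components at once, where the cradle supports it
  "haskell.sessionLoading": "multipleComponents",

  // Opt in to the experimental cross-module rename
  "haskell.plugin.rename.config.crossModule": true
}
```
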
diff --git a/docs/contributing/plugin-tutorial.lhs b/docs/contributing/plugin-tutorial.lhs
new file mode 120000
index 0000000000..e1837100c2
--- /dev/null
+++ b/docs/contributing/plugin-tutorial.lhs
@@ -0,0 +1 @@
+plugin-tutorial.md
\ No newline at end of file
diff --git a/docs/contributing/plugin-tutorial.md b/docs/contributing/plugin-tutorial.md
index c952ef9eb2..d9ca59c0ad 100644
--- a/docs/contributing/plugin-tutorial.md
+++ b/docs/contributing/plugin-tutorial.md
@@ -1,26 +1,113 @@
# Let’s write a Haskell Language Server plugin
-Originally written by Pepe Iborra, maintained by the Haskell community.
-Haskell Language Server (HLS) is an LSP server for the Haskell programming language. It builds on several previous efforts
-to create a Haskell IDE. You can find many more details on the history and architecture in the [IDE 2020](https://mpickering.github.io/ide/index.html) community page.
+Originally written by Pepe Iborra, maintained by the Haskell community.
+Haskell Language Server (HLS) is a Language Server Protocol (LSP) server for the Haskell programming language. It builds on several previous efforts to create a Haskell IDE.
+You can find many more details on the history and architecture on the [IDE 2020](https://mpickering.github.io/ide/index.html) community page.
In this article we are going to cover the creation of an HLS plugin from scratch: a code lens to display explicit import lists.
-Along the way we will learn about HLS, its plugin model, and the relationship with `ghcide` and LSP.
+Along the way we will learn about HLS, its plugin model, and the relationship with [ghcide](https://github.com/haskell/haskell-language-server/tree/master/ghcide) and LSP.
## Introduction
Writing plugins for HLS is a joy. Personally, I enjoy the ability to tap into the gigantic bag of goodies that is GHC, as well as the IDE integration thanks to LSP.
-In the last couple of months I have written various HLS (and `ghcide`) plugins for things like:
+In the last couple of months, I have written various HLS plugins, including:
1. Suggest imports for variables not in scope,
2. Remove redundant imports,
-2. Evaluate code in comments (Ã la [doctest](https://docs.python.org/3/library/doctest.html)),
-3. Integrate the [retrie](https://github.com/facebookincubator/retrie) refactoring library.
+3. Evaluate code in comments (à la [doctest](https://docs.python.org/3/library/doctest.html)),
+4. Integrate the [retrie](https://github.com/facebookincubator/retrie) refactoring library.
+
+These plugins are small but meaningful steps towards a more polished IDE experience.
+While writing them, I didn't have to worry about performance, UI, or distribution; another tool (usually GHC) always did the heavy lifting.
+
+The plugins also make these tools much more accessible to all users of HLS.
+
+## Preamble
+
+This tutorial is a literate Haskell file that can be compiled.
+As such, we list the imports, extensions, etc. necessary for compilation.
+
+Please just skip over this `import` section if you are only interested in the tutorial!
+
+```haskell
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NamedFieldPuns #-}
+{-# LANGUAGE ViewPatterns #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE DeriveAnyClass #-}
+
+import Ide.Types
+import Ide.Logger
+import Ide.Plugin.Error
+
+import Development.IDE.Core.RuleTypes
+import Development.IDE.Core.Service hiding (Log)
+import Development.IDE.Core.Shake hiding (Log)
+import Development.IDE.GHC.Compat
+import Development.IDE.GHC.Compat.Core
+import Development.IDE.GHC.Error
+import Development.IDE.Types.HscEnvEq
+import Development.IDE.Core.PluginUtils
+
+import qualified Language.LSP.Server as LSP
+import Language.LSP.Protocol.Types as JL
+import Language.LSP.Protocol.Message
+
+import Data.Aeson as Aeson
+import Data.Map (Map)
+import Data.IORef
+import Data.Maybe (fromMaybe, catMaybes)
+import qualified Data.Map as Map
+import qualified Data.HashMap.Strict as HashMap
+import qualified Data.Text as T
+import Control.Monad (forM)
+import Control.Monad.IO.Class (liftIO)
+import Control.Monad.Trans.Class
+import GHC.Generics (Generic)
+```
-These plugins are small but meaningful steps towards a more polished IDE experience, and in writing them I didn't have to worry about performance, UI, distribution, or even think for the most part, since it's always another tool (usually GHC) doing all the heavy lifting. The plugins also make these tools much more accessible to all users of HLS.
+## Plugins in the HLS codebase
-## The task
+The HLS codebase includes several plugins (found in `./plugins`). For example:
+
+- The `ormolu`, `fourmolu`, `floskell` and `stylish-haskell` plugins used to format code
+- The `eval` plugin, a code lens provider to evaluate code in comments
+- The `retrie` plugin, a code action provider to execute retrie commands
+
+I recommend looking at the existing plugins for inspiration and reference. A few conventions shared by all plugins are:
+
+- Plugins are in the `./plugins` folder
+- Plugins implement their code under the `Ide.Plugin.*` namespace
+- Folders containing the plugin follow the `hls-pluginname-plugin` naming convention
+- Plugins are "linked" in `src/HlsPlugins.hs#idePlugins`. New plugin descriptors
+ must be added there.
+
+ ```haskell ignore
+ -- Defined in src/HlsPlugins.hs
+
+ idePlugins = pluginDescToIdePlugins allPlugins
+ where
+ allPlugins =
+ [ GhcIde.descriptor "ghcide"
+ , Pragmas.descriptor "pragmas"
+ , Floskell.descriptor "floskell"
+ , Fourmolu.descriptor "fourmolu"
+ , Ormolu.descriptor "ormolu"
+ , StylishHaskell.descriptor "stylish-haskell"
+ , Retrie.descriptor "retrie"
+ , Eval.descriptor "eval"
+ , NewPlugin.descriptor "new-plugin" -- Add new plugins here.
+ ]
+ ```
+
+To add a new plugin, extend the list of `allPlugins` and rebuild.
+
+## The goal of the plugin we will write
Here is a visual statement of what we want to accomplish:
@@ -29,301 +116,226 @@ Here is a visual statement of what we want to accomplish:
And here is the gist of the algorithm:
1. Request the type checking artifacts from the `ghcide` subsystem
-2. Extract the actual import lists from the type-checked AST,
-3. Ask GHC to produce the minimal import lists for this AST,
-4. For every import statement without an explicit import list, find out the minimal import list, and produce a code lens to display it together with a command to graft it on.
+2. Extract the actual import lists from the type-checked AST
+3. Ask GHC to produce the minimal import lists for this AST
+4. For every import statement without an explicit import list:
+ - Determine the minimal import list
+ - Produce a code lens to display it and a command to apply it
## Setup
-To get started, let’s fetch the HLS repository and build it. You need at least GHC 9.0 for this:
+To get started, fetch the HLS repository and build it by following the [installation instructions](https://haskell-language-server.readthedocs.io/en/latest/contributing/contributing.html#building).
-```
-git clone --recursive http://github.com/haskell/haskell-language-server hls
-cd hls
-cabal update
-cabal build
-```
+If you run into any issues trying to build the binaries, you can get in touch with the HLS team using one of the [contact channels](https://haskell-language-server.readthedocs.io/en/latest/contributing/contributing.html#how-to-contact-the-haskell-ide-team) or [open an issue](https://github.com/haskell/haskell-language-server/issues) in the HLS repository.
-If you run into any issues trying to build the binaries, the `#haskell-language-server` IRC chat room in
-[Libera Chat](https://libera.chat/) is always a good place to ask for help.
+Once the build is done, you can find the location of the HLS binary with `cabal list-bin exe:haskell-language-server` and point your LSP client to it.
+This way you can simply test your changes by reloading your editor after rebuilding the binary.
-Once cabal is done take a note of the location of the `haskell-language-server` binary and point your LSP client to it. In VSCode this is done by editing the "Haskell Server Executable Path" setting. This way you can simply test your changes by reloading your editor after rebuilding the binary.
+> **Note:** In VSCode, edit the "Haskell Server Executable Path" setting.
+>
+> **Note:** In Emacs, edit the `lsp-haskell-server-path` variable.

-## Anatomy of a plugin
-
-HLS plugins are values of the `Plugin` datatype, which is defined in `Ide.Plugin` as:
-```haskell
-data PluginDescriptor =
- PluginDescriptor { pluginId :: !PluginId
- , pluginRules :: !(Rules ())
- , pluginCommands :: ![PluginCommand]
- , pluginCodeActionProvider :: !(Maybe CodeActionProvider)
- , pluginCodeLensProvider :: !(Maybe CodeLensProvider)
- , pluginHoverProvider :: !(Maybe HoverProvider)
- , pluginSymbolsProvider :: !(Maybe SymbolsProvider)
- , pluginFormattingProvider :: !(Maybe (FormattingProvider IO))
- , pluginCompletionProvider :: !(Maybe CompletionProvider)
- , pluginRenameProvider :: !(Maybe RenameProvider)
- }
-```
-A plugin has a unique ID, a set of rules, a set of command handlers, and a set of "providers":
+[Manually test your hacked HLS](https://haskell-language-server.readthedocs.io/en/latest/contributing/contributing.html#manually-testing-your-hacked-hls) to ensure you use the HLS package you just built.
-* Rules add new targets to the Shake build graph defined in `ghcide`. 99% of plugins need not define any new rules.
-* Commands are an LSP abstraction for actions initiated by the user which are handled in the server. These actions can be long running and involve multiple modules. Many plugins define command handlers.
-* Providers are a query-like abstraction where the LSP client asks the server for information. These queries must be fulfilled as quickly as possible.
+## Digression about the Language Server Protocol
-The HLS codebase includes several plugins under the namespace `Ide.Plugin.*`, the most relevant are:
+There are two main types of communication in the Language Server Protocol:
-- The `ghcide` plugin, which embeds `ghcide` as a plugin (`ghcide` is also the engine under HLS),
-- The `ormolu`, `fourmolu`, `floskell` and `stylish-haskell` plugins, a testament to the code formatting wars of our community,
-- The `eval` plugin, a code lens provider to evaluate code in comments,
-- The `retrie` plugin, a code actions provider to execute retrie commands.
+- A **request-response interaction**, where one party sends a message that requires a response from the other party.
+- A **notification**, a one-way interaction where one party sends a message without expecting any response.
-I would recommend looking at the existing plugins for inspiration and reference.
+> **Note**: The LSP client and server can both send requests or notifications to the other party.
-Plugins are "linked" in the `HlsPlugins` module, so we will need to add our plugin there once we have defined it:
-
-```haskell
-idePlugins = pluginDescToIdePlugins allPlugins
- where
- allPlugins =
- [ GhcIde.descriptor "ghcide"
- , Pragmas.descriptor "pragmas"
- , Floskell.descriptor "floskell"
- , Fourmolu.descriptor "fourmolu"
- , Ormolu.descriptor "ormolu"
- , StylishHaskell.descriptor "stylish-haskell"
- , Retrie.descriptor "retrie"
- , Eval.descriptor "eval"
- ]
-```
-To add a new plugin, simply extend the list of `allPlugins` and rebuild.
+## Anatomy of a plugin
-## Providers
+HLS plugins are values of the `PluginDescriptor` datatype, which is defined in `hls-plugin-api/src/Ide/Types.hs` as:
-99% of plugins will want to define at least one type of provider. But what is a provider? Let's take a look at some types:
-```haskell
-type CodeActionProvider = LSP.LspFuncs Config
- -> IdeState
- -> PluginId
- -> TextDocumentIdentifier
- -> Range
- -> CodeActionContext
- -> IO (Either ResponseError (List CAResult))
-
-type CompletionProvider = LSP.LspFuncs Config
- -> IdeState
- -> CompletionParams
- -> IO (Either ResponseError CompletionResponseResult)
-
-type CodeLensProvider = LSP.LspFuncs Config
- -> IdeState
- -> PluginId
- -> CodeLensParams
- -> IO (Either ResponseError (List CodeLens))
-
-type RenameProvider = LSP.LspFuncs Config
- -> IdeState
- -> RenameParams
- -> IO (Either ResponseError WorkspaceEdit)
+```haskell ignore
+data PluginDescriptor (ideState :: Type) =
+ PluginDescriptor { pluginId :: !PluginId
+ , pluginCommands :: ![PluginCommand ideState]
+ , pluginHandlers :: PluginHandlers ideState
+ , pluginNotificationHandlers :: PluginNotificationHandlers ideState
+-- , [...] -- Other fields omitted for brevity.
+ }
```
-Providers are functions that receive some inputs and produce an IO computation that returns either an error or some result.
+### Request-response interaction
-All providers receive an `LSP.LspFuncs` value, which is a record of functions to perform LSP actions. Most providers can safely ignore this argument, since the LSP interaction is automatically managed by HLS.
-Some of its capabilities are:
-- Querying the LSP client capabilities,
-- Manual progress reporting and cancellation, for plugins that provide long running commands (like the `retrie` plugin),
-- Custom user interactions via [message dialogs](https://microsoft.github.io/language-server-protocol/specification#window_showMessage). For instance, the `retrie` plugin uses this to report skipped modules.
+The `pluginHandlers` handle LSP client requests and provide responses to the client. They must fulfill these requests as quickly as possible.
-The second argument, which plugins receive, is `IdeState`. `IdeState` encapsulates all the `ghcide` state including the build graph. This allows to request `ghcide` rule results, which leverages Shake to parallelize and reuse previous results as appropriate. Rule types are instances of the `RuleResult` type family, and
-most of them are defined in `Development.IDE.Core.RuleTypes`. Some relevant rule types are:
-```haskell
--- | The parse tree for the file using GetFileContents
-type instance RuleResult GetParsedModule = ParsedModule
+- Example: When you want to format a file, the client sends the [`textDocument/formatting`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_formatting) request to the server. The server formats the file and responds with the formatted content.
--- | The type checked version of this file
-type instance RuleResult TypeCheck = TcModuleResult
+### Notification
--- | A GHC session that we reuse.
-type instance RuleResult GhcSession = HscEnvEq
+The `pluginNotificationHandlers` handle notifications sent by the client to the server that are not explicitly triggered by a user.
--- | A GHC session preloaded with all the dependencies
-type instance RuleResult GhcSessionDeps = HscEnvEq
+- Example: Whenever you modify a Haskell file, the client sends a notification informing HLS about the changes to the file.
--- | A ModSummary that has enough information to be used to get .hi and .hie files.
-type instance RuleResult GetModSummary = ModSummary
-```
+The `pluginCommands` are a special kind of user-initiated request sent to
+the server. These actions can be long-running and involve multiple modules.
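+
+To make these three fields concrete, here is a minimal sketch of a descriptor that wires them together. The helper `mkPluginNotificationHandler` and the method name `SMethod_TextDocumentDidSave` are assumptions made for illustration; check `Ide.Types` and `Language.LSP.Protocol.Message` for the exact API.
+
+```haskell ignore
+-- Sketch only: a hypothetical plugin using all three fields.
+examplePluginDescriptor :: PluginDescriptor IdeState
+examplePluginDescriptor =
+  (defaultPluginDescriptor "example" "An example plugin")
+    { -- Request handlers: must answer the client as quickly as possible.
+      pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeLens exampleLensHandler
+      -- Notification handlers: react to client events; no response is sent.
+    , pluginNotificationHandlers =
+        mkPluginNotificationHandler SMethod_TextDocumentDidSave exampleSaveHandler
+      -- Commands: user-initiated actions, referenced e.g. from code lenses.
+    , pluginCommands =
+        [PluginCommand "exampleCommand" "An example command" exampleCommandHandler]
+    }
+  where
+    -- Placeholders: in a real plugin these are the handlers you implement.
+    exampleLensHandler    = undefined
+    exampleSaveHandler    = undefined
+    exampleCommandHandler = undefined
+```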
-The `use` family of combinators allows to request rule results. For example, the following code is used in the `eval` plugin to request a GHC session and a module summary (for the imports) in order to set up an interactive evaluation environment
-```haskell
- let nfp = toNormalizedFilePath' fp
- session <- runAction "runEvalCmd.ghcSession" state $ use_ GhcSessionDeps nfp
- ms <- runAction "runEvalCmd.getModSummary" state $ use_ GetModSummary nfp
-```
+## The explicit imports plugin
-There are three flavours of `use` combinators:
+To achieve our plugin goals, we need to define:
-1. `use*` combinators block and propagate errors,
-2. `useWithStale*` combinators block and switch to stale data in case of an error,
-3. `useWithStaleFast*` combinators return immediately with stale data if any, or block otherwise.
+- a command handler (`importLensCommand`),
+- a code lens request handler (`lensProvider`).
-## LSP abstractions
+These will be assembled in the `descriptor` function of the plugin, which contains all the information wrapped in the `PluginDescriptor` datatype mentioned above.
-If you have used VSCode or any other LSP editor you are probably already familiar with the capabilities afforded by LSP. If not, check the [specification](https://microsoft.github.io/language-server-protocol/specification) for the full details.
-Another good source of information is the [haskell-lsp-types](https://hackage.haskell.org/package/haskell-lsp-types) package, which contains a Haskell encoding of the protocol.
+Using the convenience `defaultPluginDescriptor` function, we can bootstrap the plugin with the required parts:
-The [haskell-lsp-types](https://hackage.haskell.org/package/haskell-lsp-types-0.22.0.0/docs/Language-Haskell-LSP-Types.html#t:CodeLens) package encodes code lenses in Haskell as:
```haskell
-data CodeLens =
- CodeLens
- { _range :: Range
- , _command :: Maybe Command
- , _xdata :: Maybe A.Value
- } deriving (Read,Show,Eq)
-```
-That is, a code lens is a triple of a source range, maybe a command, and optionally some extra data. The [specification](https://microsoft.github.io/language-server-protocol/specification#textDocument_codeLens) clarifies the optionality:
-```
-/**
- * A code lens represents a command that should be shown along with
- * source text, like the number of references, a way to run tests, etc.
- *
- * A code lens is _unresolved_ when no command is associated to it. For performance
- * reasons the creation of a code lens and resolving should be done in two stages.
- */
+-- plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs
+
+data Log
+
+-- | The "main" function of a plugin.
+descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState
+descriptor recorder plId =
+ (defaultPluginDescriptor plId "A plugin for generating the minimal imports")
+ { pluginCommands = [importLensCommand], -- The plugin provides a command handler
+ pluginHandlers = mconcat -- The plugin provides request handlers
+ [ mkPluginHandler SMethod_TextDocumentCodeLens provider
+ ]
+ }
```
-To keep things simple our plugin won't make use of the unresolved facility, embedding the command directly in the code lens.
+We'll start with the command, since it's the simpler of the two.
-## The explicit imports plugin
+### The command handler
-To provide code lenses, our plugin must define a code lens provider as well as a command handler.
-The code at `Ide.Plugin.Example` shows how the convenience `defaultPluginDescriptor` function is used
-to bootstrap the plugin and how to add the desired providers:
+In short, LSP commands work like this:
-```haskell
-descriptor :: PluginId -> PluginDescriptor
-descriptor plId = (defaultPluginDescriptor plId) {
- -- This plugin provides code lenses
- pluginCodeLensProvider = Just provider,
- -- This plugin provides a command handler
- pluginCommands = [ importLensCommand ]
-}
-```
-
-### The command handler
+- The LSP server (HLS) initially sends a command descriptor to the client, in this case as part of a code lens.
+- When the user clicks on the code lens, the client asks HLS to execute the command with the given descriptor. The server then handles and executes the command; this latter part is implemented by the `commandFunc` field of our `PluginCommand` value.
-Our plugin provider has two components that need to be fleshed out. Let's start with the command provider, since it's the simplest of the two.
+> **Note**: Check the [LSP spec](https://microsoft.github.io/language-server-protocol/specification) for a deeper understanding of how commands work.
-```haskell
-importLensCommand :: PluginCommand
-```
+The command handler will be called `importLensCommand` and have the `PluginCommand` type, a type defined in `Ide.Types` as:
-`PluginCommand` is a data type defined in `LSP.Types` as:
+```haskell ignore
+-- hls-plugin-api/src/Ide/Types.hs
-```haskell
-data PluginCommand = forall a. (FromJSON a) =>
+data PluginCommand ideState = forall a. (FromJSON a) =>
PluginCommand { commandId :: CommandId
, commandDesc :: T.Text
- , commandFunc :: CommandFunction a
+ , commandFunc :: CommandFunction ideState a
}
```
-The meat is in the `commandFunc` field, which is of type `CommandFunction`, another type synonym from `LSP.Types`:
-```haskell
-type CommandFunction a =
- LSP.LspFuncs Config
- -> IdeState
- -> a
- -> IO (Either ResponseError Value, Maybe (ServerMethod, ApplyWorkspaceEditParams))
-```
-
-`CommandFunction` takes in the familiar `LspFuncs` and `IdeState` arguments, together with a JSON encoded argument.
-I recommend checking the LSP specifications in order to understand how commands work, but briefly the LSP server (us) initially sends a command descriptor to the client, in this case as part of a code lens. When the client decides to execute the command on behalf of a user action (in this case a click on the code lens), the client sends this descriptor back to the LSP server which then proceeds to handle and execute the command. The latter part is implemented by the `commandFunc` field of our `PluginCommand` value.
+Let's start by creating an unfinished command handler. We'll give it an ID and a description for now:
-For our command, we are going to have a very simple handler that receives a diff (`WorkspaceEdit`) and returns it to the client. The diff will be generated by our code lens provider and sent as part
-of the code lens to the LSP client, who will send it back to our command handler when the user activates
-the code lens:
```haskell
+-- | The command handler.
+importLensCommand :: PluginCommand IdeState
+importLensCommand =
+ PluginCommand
+ { commandId = importCommandId
+ , commandDesc = "Explicit import command"
+ , commandFunc = runImportCommand
+ }
+
importCommandId :: CommandId
importCommandId = "ImportLensCommand"
+```
-importLensCommand :: PluginCommand
-importLensCommand =
- PluginCommand importCommandId "Explicit import command" runImportCommand
+```haskell ignore
+-- | Not implemented yet.
+runImportCommand = undefined
+```
+
+The most important (and still `undefined`) field is `commandFunc :: CommandFunction`, a type synonym from `Ide.Types`:
+
+```haskell ignore
+-- hls-plugin-api/src/Ide/Types.hs
+type CommandFunction ideState a
+ = ideState
+ -> a
+ -> LspM Config (Either ResponseError Value)
+```
+
+`CommandFunction` takes an `ideState` and a JSON-encodable argument. `LspM` is a monad transformer with access to IO and to the language server's `Config` environment. The action evaluates to an `Either` value: `Left` indicates failure with a `ResponseError`, `Right` indicates success with a `Value`.
+
+Our handler will ignore the state argument and only use the `WorkspaceEdit` argument.
+
+```haskell
-- | The type of the parameters accepted by our command
-data ImportCommandParams = ImportCommandParams WorkspaceEdit
- deriving Generic
+newtype ImportCommandParams = ImportCommandParams WorkspaceEdit
+ deriving (Generic)
deriving anyclass (FromJSON, ToJSON)
-- | The actual command handler
-runImportCommand :: CommandFunction ImportCommandParams
-runImportCommand _lspFuncs _state (ImportCommandParams edit) = do
- return (Right Null, Just (WorkspaceApplyEdit, ApplyWorkspaceEditParams edit))
-
+runImportCommand :: CommandFunction IdeState ImportCommandParams
+runImportCommand _ _ (ImportCommandParams edit) = do
+ -- This command simply triggers a workspace edit!
+ _ <- lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ())
+ return $ InR JL.Null
```
+`runImportCommand` [sends a request](https://hackage.haskell.org/package/lsp/docs/Language-LSP-Server.html#v:sendRequest) to the client using the method `SMethod_WorkspaceApplyEdit` and the parameters `ApplyWorkspaceEditParams Nothing edit`, providing a response handler that does nothing. It then returns a `null` result (`InR JL.Null`).
+
### The code lens provider
The code lens provider implements all the steps of the algorithm described earlier:
-> 1. Request the type checking artefacts from the `ghcide` subsystem
-> 2. Extract the actual import lists from the type-checked AST,
-> 3. Ask GHC to produce the minimal import lists for this AST,
-> 4. For every import statement without an explicit import list, find out the minimal import list, and produce a code lens to display it together with a command to graft it on.
+> 1. Request the type checking artifacts.
+> 2. Extract the actual import lists from the type-checked AST.
+> 3. Ask GHC to produce the minimal import lists for this AST.
+> 4. For each import statement lacking an explicit list, determine its minimal import list and generate a code lens displaying this list along with a command to insert it.
-The provider takes the usual `LspFuncs` and `IdeState` argument, as well as a `CodeLensParams` value containing the URI
-for a file, and returns an IO action producing either an error or a list of code lenses for that file.
+The provider takes the usual `IdeState` and `PluginId` arguments, as well as a `CodeLensParams` value containing a file URI. It returns an action that produces either an error or a list of code lenses for that file.
```haskell
-provider :: CodeLensProvider
-provider _lspFuncs -- LSP functions, not used
- state -- ghcide state, used to retrieve typechecking artifacts
+provider :: PluginMethodHandler IdeState Method_TextDocumentCodeLens
+provider state -- ghcide state, used to retrieve typechecking artifacts
pId -- Plugin ID
- CodeLensParams{_textDocument = TextDocumentIdentifier{_uri}}
+ CodeLensParams{_textDocument = TextDocumentIdentifier{_uri}} = do
-- VSCode uses URIs instead of file paths
-- haskell-lsp provides conversion functions
- | Just nfp <- uriToNormalizedFilePath $ toNormalizedUri _uri
- = do
- -- Get the typechecking artifacts from the module
- tmr <- runAction "importLens" state $ use TypeCheck nfp
- -- We also need a GHC session with all the dependencies
- hsc <- runAction "importLens" state $ use GhcSessionDeps nfp
- -- Use the GHC API to extract the "minimal" imports
- (imports, mbMinImports) <- extractMinimalImports hsc tmr
-
- case mbMinImports of
- Just minImports -> do
- let minImportsMap =
- Map.fromList [ (srcSpanStart l, i) | L l i <- minImports ]
- lenses <- forM imports $
- -- for every import, maybe generate a code lens
- generateLens pId _uri minImportsMap
- return $ Right (List $ catMaybes lenses)
- _ ->
- return $ Right (List [])
- | otherwise
- = return $ Right (List [])
+ nfp <- getNormalizedFilePathE _uri
+ -- Get the typechecking artifacts from the module
+ tmr <- runActionE "importLens" state $ useE TypeCheck nfp
+ -- We also need a GHC session with all the dependencies
+ hsc <- runActionE "importLens" state $ useE GhcSessionDeps nfp
+ -- Use the GHC API to extract the "minimal" imports
+ (imports, mbMinImports) <- liftIO $ extractMinimalImports hsc tmr
+
+ case mbMinImports of
+ Just minImports -> do
+ let minImportsMap =
+ Map.fromList [ (realSrcLocToPosition loc, i)
+ | L l i <- minImports
+ , let RealSrcLoc loc _ = srcSpanStart (locA l)
+ ]
+ lenses <- forM imports $ \imp ->
+ -- for every import, maybe generate a code lens
+ liftIO (generateLens pId _uri minImportsMap imp)
+ return $ InL (catMaybes lenses)
+ _ ->
+ return $ InL []
```
-Note how simple it is to retrieve the type checking artifacts for the module as well as a fully setup GHC session via the `ghcide` rules.
+Note the simplicity of retrieving the type checking artifacts for the module, as well as a fully set up GHC session, via the `ghcide` rules.
The function `extractMinimalImports` extracts the import statements from the AST and generates the minimal import lists, implementing steps 2 and 3 of the algorithm.
+
The details of the GHC API are not relevant to this tutorial, but the code is terse and easy to read:
```haskell
extractMinimalImports
- :: Maybe HscEnvEq
- -> Maybe TcModuleResult
+ :: HscEnvEq
+ -> TcModuleResult
-> IO ([LImportDecl GhcRn], Maybe [LImportDecl GhcRn])
-extractMinimalImports (Just hsc)) (Just (tmrModule -> TypecheckedModule{..})) = do
+extractMinimalImports hsc TcModuleResult{..} = do
-- Extract the original imports and the typechecking environment
- let (tcEnv,_) = tm_internals_
- Just (_, imports, _, _) = tm_renamed_source
- ParsedModule{ pm_parsed_source = L loc _} = tm_parsed_module
+ let tcEnv = tmrTypechecked
+ (_, imports, _, _) = tmrRenamed
+ ParsedModule{ pm_parsed_source = L loc _} = tmrParsed
span = fromMaybe (error "expected real") $ realSpan loc
-- GHC is secretly full of mutable state
@@ -334,44 +346,44 @@ extractMinimalImports (Just hsc)) (Just (tmrModule -> TypecheckedModule{..})) =
-- getMinimalImports computes the minimal explicit import lists
initTcWithGbl (hscEnv hsc) tcEnv span $ getMinimalImports usage
return (imports, minimalImports)
-extractMinimalImports _ _ = return ([], Nothing)
```
-The function `generateLens` implements step 4 of the algorithm, producing a code lens for an import statement that lacks an import list. Note how the code lens includes an `ImportCommandParams` value
-that contains a workspace edit that rewrites the import statement, as expected by our command provider.
+The function `generateLens` implements step 4 of the algorithm, producing a code lens for an import statement that lacks an import list. The code lens includes an `ImportCommandParams` value containing a workspace edit that rewrites the import statement, as our command provider expects.
```haskell
-- | Given an import declaration, generate a code lens unless it has an explicit import list
generateLens :: PluginId
-> Uri
- -> Map SrcLoc (ImportDecl GhcRn)
+ -> Map Position (ImportDecl GhcRn)
-> LImportDecl GhcRn
-> IO (Maybe CodeLens)
generateLens pId uri minImports (L src imp)
-- Explicit import list case
- | ImportDecl{ideclHiding = Just (False,_)} <- imp
+ | ImportDecl{ideclImportList = Just _} <- imp
= return Nothing
-- No explicit import list
- | RealSrcSpan l <- src
- , Just explicit <- Map.lookup (srcSpanStart src) minImports
+ | RealSrcSpan l _ <- locA src
+ , let position = realSrcLocToPosition $ realSrcSpanStart l
+ , Just explicit <- Map.lookup position minImports
, L _ mn <- ideclName imp
-- (Almost) no one wants to see an explicit import list for Prelude
, mn /= moduleName pRELUDE
= do
-- The title of the command is just the minimal explicit import decl
- let title = T.pack $ prettyPrint explicit
+ let title = T.pack $ printWithoutUniques explicit
-- The range of the code lens is the span of the original import decl
_range :: Range = realSrcSpanToRange l
-- The code lens has no extra data
_xdata = Nothing
-- An edit that replaces the whole declaration with the explicit one
- edit = WorkspaceEdit (Just editsMap) Nothing
- editsMap = HashMap.fromList [(uri, List [importEdit])]
+ edit = WorkspaceEdit (Just editsMap) Nothing Nothing
+ editsMap = Map.fromList [(uri, [importEdit])]
importEdit = TextEdit _range title
-- The command argument is simply the edit
_arguments = Just [toJSON $ ImportCommandParams edit]
- -- Create the command
- _command <- Just <$> mkLspCommand pId importCommandId title _arguments
+ _data_ = Nothing
+ -- Create the command
+ _command = Just $ mkLspCommand pId importCommandId title _arguments
-- Create and return the code lens
return $ Just CodeLens{..}
| otherwise
@@ -381,14 +393,26 @@ generateLens pId uri minImports (L src imp)
## Wrapping up
There's only one Haskell code change left to do at this point: "link" the plugin in the `HlsPlugins` HLS module.
-However integrating the plugin in HLS itself will need some changes in configuration files. The best way is looking for the ID (f.e. `hls-class-plugin`) of an existing plugin:
-- `./cabal*.project` and `./stack*.yaml`: add the plugin package in the `packages` field,
-- `./haskell-language-server.cabal`: add a conditional block with the plugin package dependency,
-- `./.github/workflows/test.yml`: add a block to run the test suite of the plugin,
-- `./.github/workflows/hackage.yml`: add the plugin to the component list to release the plugin package to Hackage,
-- `./*.nix`: add the plugin to Nix builds.
-The full code as used in this tutorial, including imports, can be found in [this Gist](https://gist.github.com/pepeiborra/49b872b2e9ad112f61a3220cdb7db967) as well as in this [branch](https://github.com/pepeiborra/ide/blob/imports-lens/src/Ide/Plugin/ImportLens.hs)
+Integrating the plugin into HLS itself requires changes to several configuration files.
+
+A good approach is to search for the ID of an existing plugin (e.g., `hls-class-plugin`):
+
+- `./haskell-language-server.cabal`: Add a conditional block with the plugin package dependency.
+- `./.github/workflows/test.yml`: Add a block to run the plugin's test suite.
+- `./.github/workflows/hackage.yml`: Add the plugin to the component list for releasing the plugin package to Hackage.
+- `./*.nix`: Add the plugin to Nix builds.
+
+This plugin tutorial re-implements parts of the [`hls-explicit-imports-plugin`](https://github.com/haskell/haskell-language-server/tree/master/plugins/hls-explicit-imports-plugin), which is part of HLS.
+The plugin code additionally contains advanced concepts, such as `Rules`.
-I hope this has given you a taste of how easy and joyful it is to write plugins for HLS.
-If you are looking for ideas for contributing, here are some cool ones found in the HLS [issue tracker](https://github.com/haskell/haskell-language-server/issues?q=is%3Aopen+is%3Aissue+label%3A%22type%3A+possible+new+plugin%22).
+I hope this has given you a taste of how easy and joyful it is to write plugins for HLS. If you are looking for contribution ideas, here are some good ones listed in the HLS [issue tracker](https://github.com/haskell/haskell-language-server/issues).
+
+
+A placeholder `main`, unused, but required so that this literate Haskell file compiles:
+
+```haskell
+main :: IO ()
+main = putStrLn "Just here to silence the error!"
+```
+
diff --git a/docs/features.md b/docs/features.md
index cb7e6ecde7..1eab0054b4 100644
--- a/docs/features.md
+++ b/docs/features.md
@@ -346,7 +346,7 @@ Shows the type signature for bindings without type signatures, and adds it with
Provided by: `hls-eval-plugin`
-Evaluates code blocks in comments with a click. [Tutorial](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-eval-plugin/README.md).
+Evaluates code blocks in comments with a click. A code action is also provided. [Tutorial](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-eval-plugin/README.md).
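+
+For example, the plugin offers to evaluate `>>>` expressions written in comments and inserts the result as a comment below (a small illustration; the exact output formatting may differ):
+
+```haskell
+-- >>> fmap (+ 1) [1, 2, 3]
+-- [2,3,4]
+```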

@@ -411,6 +411,17 @@ Known limitations:
- Cross-module renaming requires all components to be indexed, which sometimes causes [partial renames in multi-component projects](https://github.com/haskell/haskell-language-server/issues/2193).
+To eagerly load all components, you need to
+
+- set `haskell.sessionLoading` to `multipleComponents`,
+- set `hie.yaml` to load all components (currently only cabal supports this),
+ ```yaml
+ cradle:
+ cabal:
+ component: all
+ ```
+- and enable tests and benchmarks in `cabal.project` with `tests: True` and `benchmarks: True`.
+
## Semantic tokens
Provided by: `hls-semantic-tokens-plugin`
diff --git a/docs/installation.md b/docs/installation.md
index 4d021e2040..4a1147ade5 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -120,14 +120,16 @@ built against the official Fedora ghc package.
## FreeBSD
-HLS is available for installation from official binary packages. Use
+HLS is available for installation via the [devel/hs-haskell-language-server](https://www.freshports.org/devel/hs-haskell-language-server)
+port or from official binary packages. Use
```bash
pkg install hs-haskell-language-server
```
-to install it. At the moment, HLS installed this way only supports the same GHC
-version as the ports one.
+to install it. HLS installed this way targets the same GHC version that the [lang/ghc](https://www.freshports.org/lang/ghc)
+port produces. Use the `pkg search haskell-language` command to list HLS packages
+for other GHCs.
## Gentoo
diff --git a/docs/support/ghc-version-support.md b/docs/support/ghc-version-support.md
index 5be5da694d..df0bc23494 100644
--- a/docs/support/ghc-version-support.md
+++ b/docs/support/ghc-version-support.md
@@ -15,41 +15,45 @@ Support status (see the support policy below for more details):
- "full support": this GHC version is currently actively supported, and most [tier 2 plugins](./plugin-support.md) work
- "deprecated": this GHC version was supported in the past, but is now deprecated
-| GHC version | Last supporting HLS version | Support status |
-|--------------|--------------------------------------------------------------------------------------|-----------------------------------------------------------------------------|
-| 9.10.1 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
-| 9.8.2 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
-| 9.8.1 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | full support |
-| 9.6.6 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
-| 9.6.5 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | full support |
-| 9.6.4 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | full support |
-| 9.6.3 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | full support |
-| 9.6.2 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated |
-| 9.6.1 | [2.0.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.0) | deprecated |
-| 9.4.8 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
-| 9.4.7 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | deprecated |
-| 9.4.6 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated |
-| 9.4.5 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated |
-| 9.4.4 | [1.10.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.10.0.0) | deprecated |
-| 9.4.3 | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated |
-| 9.4.(1,2) | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated |
-| 9.2.8 | [2.9.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.0) | deprecated |
-| 9.2.7 | [2.0.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.1) | deprecated |
-| 9.2.(5,6) | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated |
-| 9.2.(3,4) | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated |
-| 9.2.(1,2) | [1.7.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.7.0.0) | deprecated |
-| 9.0.2 | [2.4.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.4.0.0) | deprecated |
-| 9.0.1 | [1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated |
-| 8.10.7 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated |
-| 8.10.6 | [1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated |
-| 8.10.5 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/tag/1.5.1) | deprecated |
-| 8.10.(4,3,2) | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated |
-| 8.10.1 | [0.9.0](https://github.com/haskell/haskell-language-server/releases/tag/0.9.0) | deprecated |
-| 8.8.4 | [1.8.0](https://github.com/haskell/haskell-language-server/releases/1.8.0) | deprecated |
-| 8.8.3 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/1.5.1) | deprecated |
-| 8.8.2 | [1.2.0](https://github.com/haskell/haskell-language-server/releases/tag/1.2.0) | deprecated |
-| 8.6.5 | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated |
-| 8.6.4 | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated |
+| GHC version | Last supporting HLS version | Support status |
+| ------------ | ------------------------------------------------------------------------------------ | -------------- |
+| 9.12.2 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
+| 9.10.2 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
+| 9.10.1 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
+| 9.8.4 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
+| 9.8.2 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | deprecated |
+| 9.8.1 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | deprecated |
+| 9.6.7 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
+| 9.6.6 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | deprecated |
+| 9.6.5 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | deprecated |
+| 9.6.4 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | deprecated |
+| 9.6.3 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | deprecated |
+| 9.6.2 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated |
+| 9.6.1 | [2.0.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.0) | deprecated |
+| 9.4.8 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support |
+| 9.4.7 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | deprecated |
+| 9.4.6 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated |
+| 9.4.5 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated |
+| 9.4.4 | [1.10.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.10.0.0) | deprecated |
+| 9.4.3 | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated |
+| 9.4.(1,2) | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated |
+| 9.2.8 | [2.9.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.0) | deprecated |
+| 9.2.7 | [2.0.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.1) | deprecated |
+| 9.2.(5,6) | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated |
+| 9.2.(3,4) | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated |
+| 9.2.(1,2) | [1.7.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.7.0.0) | deprecated |
+| 9.0.2 | [2.4.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.4.0.0) | deprecated |
+| 9.0.1 | [1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated |
+| 8.10.7 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated |
+| 8.10.6 | [1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated |
+| 8.10.5 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/tag/1.5.1) | deprecated |
+| 8.10.(4,3,2) | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated |
+| 8.10.1 | [0.9.0](https://github.com/haskell/haskell-language-server/releases/tag/0.9.0) | deprecated |
+| 8.8.4 | [1.8.0](https://github.com/haskell/haskell-language-server/releases/1.8.0) | deprecated |
+| 8.8.3 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/1.5.1) | deprecated |
+| 8.8.2 | [1.2.0](https://github.com/haskell/haskell-language-server/releases/tag/1.2.0) | deprecated |
+| 8.6.5 | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated |
+| 8.6.4 | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated |
GHC versions not in the list have never been supported by HLS.
LTS stands for [Stackage](https://www.stackage.org/) Long Term Support.
@@ -88,7 +92,7 @@ HLS will support major versions of GHC until they are older than _both_
1. The major version of GHC used in the current Stackage LTS; and
2. The major version of GHC recommended by GHCup
-For example, if
+For example, if
1. Stackage LTS uses GHC 9.2; and
2. GHCUp recommends GHC 9.4
diff --git a/docs/support/plugin-support.md b/docs/support/plugin-support.md
index ee833347fd..4263f0d035 100644
--- a/docs/support/plugin-support.md
+++ b/docs/support/plugin-support.md
@@ -37,34 +37,34 @@ For example, a plugin to provide a formatter which has itself been abandoned has
## Current plugin support tiers
-| Plugin | Tier | Unsupported GHC versions |
-| ----------------------------------- | ---- | ------------------------ |
-| ghcide core plugins | 1 | |
-| `hls-call-hierarchy-plugin` | 1 | |
-| `hls-code-range-plugin` | 1 | |
-| `hls-explicit-imports-plugin` | 1 | |
-| `hls-pragmas-plugin` | 1 | |
-| `hls-refactor-plugin` | 1 | |
-| `hls-alternate-number-plugin` | 2 | |
-| `hls-cabal-fmt-plugin` | 2 | |
-| `hls-cabal-gild-plugin` | 2 | |
-| `hls-class-plugin` | 2 | |
-| `hls-change-type-signature-plugin` | 2 | |
-| `hls-eval-plugin` | 2 | |
-| `hls-explicit-fixity-plugin` | 2 | |
-| `hls-explicit-record-fields-plugin` | 2 | |
-| `hls-fourmolu-plugin` | 2 | |
-| `hls-gadt-plugin` | 2 | |
-| `hls-hlint-plugin` | 2 | 9.10.1 |
-| `hls-module-name-plugin` | 2 | |
-| `hls-notes-plugin` | 2 | |
-| `hls-qualify-imported-names-plugin` | 2 | |
-| `hls-ormolu-plugin` | 2 | |
-| `hls-rename-plugin` | 2 | |
-| `hls-stylish-haskell-plugin` | 2 | 9.10.1 |
-| `hls-overloaded-record-dot-plugin` | 2 | |
-| `hls-semantic-tokens-plugin` | 2 | |
-| `hls-floskell-plugin` | 3 | 9.10.1 |
-| `hls-stan-plugin` | 3 | |
-| `hls-retrie-plugin` | 3 | 9.10.1 |
-| `hls-splice-plugin` | 3 | 9.10.1 |
+| Plugin | Tier | Unsupported GHC versions |
+| ------------------------------------ | ---- | ------------------------ |
+| ghcide core plugins | 1 | |
+| `hls-call-hierarchy-plugin` | 1 | |
+| `hls-code-range-plugin` | 1 | |
+| `hls-explicit-imports-plugin` | 1 | |
+| `hls-pragmas-plugin` | 1 | |
+| `hls-refactor-plugin` | 2 | |
+| `hls-alternate-number-format-plugin` | 2 | |
+| `hls-cabal-fmt-plugin` | 2 | |
+| `hls-cabal-gild-plugin` | 2 | |
+| `hls-class-plugin` | 2 | |
+| `hls-change-type-signature-plugin` | 2 | |
+| `hls-eval-plugin` | 2 | |
+| `hls-explicit-fixity-plugin` | 2 | |
+| `hls-explicit-record-fields-plugin` | 2 | |
+| `hls-fourmolu-plugin` | 2 | |
+| `hls-gadt-plugin` | 2 | |
+| `hls-hlint-plugin` | 2 | |
+| `hls-module-name-plugin` | 2 | |
+| `hls-notes-plugin` | 2 | |
+| `hls-qualify-imported-names-plugin` | 2 | |
+| `hls-ormolu-plugin` | 2 | |
+| `hls-rename-plugin` | 2 | |
+| `hls-stylish-haskell-plugin` | 2 | |
+| `hls-overloaded-record-dot-plugin` | 2 | |
+| `hls-semantic-tokens-plugin` | 2 | |
+| `hls-floskell-plugin` | 3 | 9.10.1, 9.12.2 |
+| `hls-stan-plugin` | 3 | 9.12.2 |
+| `hls-retrie-plugin` | 3 | 9.10.1, 9.12.2 |
+| `hls-splice-plugin` | 3 | 9.10.1, 9.12.2 |
diff --git a/docutils.nix b/docutils.nix
deleted file mode 100644
index 1c47e1455d..0000000000
--- a/docutils.nix
+++ /dev/null
@@ -1,32 +0,0 @@
-{ stdenv, lib, fetchPypi, buildPythonPackage, isPy3k, python }:
-
-buildPythonPackage rec {
- pname = "docutils";
- version = "0.17.1";
-
- src = fetchPypi {
- inherit pname version;
- sha256 = "686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125";
- };
-
- # Only Darwin needs LANG, but we could set it in general.
- # It's done here conditionally to prevent mass-rebuilds.
- checkPhase = lib.optionalString (isPy3k && stdenv.isDarwin)
- ''LANG="en_US.UTF-8" LC_ALL="en_US.UTF-8" '' + ''
- ${python.interpreter} test/alltests.py
- '';
-
- # Create symlinks lacking a ".py" suffix, many programs depend on these names
- postFixup = ''
- for f in $out/bin/*.py; do
- ln -s $(basename $f) $out/bin/$(basename $f .py)
- done
- '';
-
- meta = with lib; {
- description = "Python Documentation Utilities";
- homepage = "http://docutils.sourceforge.net/";
- license = with licenses; [ publicDomain bsd2 psfl gpl3Plus ];
- maintainers = with maintainers; [ AndersonTorres ];
- };
-}
diff --git a/flake.lock b/flake.lock
index 3fb48889a5..352483a773 100644
--- a/flake.lock
+++ b/flake.lock
@@ -3,11 +3,11 @@
"flake-compat": {
"flake": false,
"locked": {
- "lastModified": 1733328505,
- "narHash": "sha256-NeCCThCEP3eCl2l/+27kNNK7QrwZB1IJCrXfrbv5oqU=",
+ "lastModified": 1747046372,
+ "narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=",
"owner": "edolstra",
"repo": "flake-compat",
- "rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec",
+ "rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
"type": "github"
},
"original": {
@@ -36,17 +36,17 @@
},
"nixpkgs": {
"locked": {
- "lastModified": 1739019272,
- "narHash": "sha256-7Fu7oazPoYCbDzb9k8D/DdbKrC3aU1zlnc39Y8jy/s8=",
+ "lastModified": 1748437873,
+ "narHash": "sha256-E2640ouB7VxooUQdCiDRo/rVXnr1ykgF9A7HrwWZVSo=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "fa35a3c8e17a3de613240fea68f876e5b4896aec",
+ "rev": "c742ae7908a82c9bf23ce27bfca92a00e9bcd541",
"type": "github"
},
"original": {
"owner": "NixOS",
- "ref": "nixpkgs-unstable",
"repo": "nixpkgs",
+ "rev": "c742ae7908a82c9bf23ce27bfca92a00e9bcd541",
"type": "github"
}
},
diff --git a/flake.nix b/flake.nix
index 934333cff0..1002eb87b5 100644
--- a/flake.nix
+++ b/flake.nix
@@ -2,7 +2,9 @@
description = "haskell-language-server development flake";
inputs = {
- nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
+ # Don't use nixpkgs-unstable as aarch64-darwin is currently broken there.
+ # Check again, when https://github.com/NixOS/nixpkgs/pull/414242 is resolved.
+ nixpkgs.url = "github:NixOS/nixpkgs/c742ae7908a82c9bf23ce27bfca92a00e9bcd541";
flake-utils.url = "github:numtide/flake-utils";
# For default.nix
flake-compat = {
@@ -13,7 +15,8 @@
outputs =
{ nixpkgs, flake-utils, ... }:
- flake-utils.lib.eachSystem [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ]
+ flake-utils.lib.eachSystem
+ [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ]
(system:
let
pkgs = import nixpkgs {
@@ -21,11 +24,18 @@
config = { allowBroken = true; };
};
- pythonWithPackages = pkgs.python3.withPackages (ps: [ps.sphinx ps.myst-parser ps.sphinx_rtd_theme ps.pip]);
+ pythonWithPackages = pkgs.python3.withPackages (ps:
+ [ ps.docutils
+ ps.myst-parser
+ ps.pip
+ ps.sphinx
+ ps.sphinx_rtd_theme
+ ]);
docs = pkgs.stdenv.mkDerivation {
name = "hls-docs";
- src = pkgs.lib.sourceFilesBySuffices ./. [ ".py" ".rst" ".md" ".png" ".gif" ".svg" ".cabal" ];
+ src = pkgs.lib.sourceFilesBySuffices ./.
+ [ ".py" ".rst" ".md" ".png" ".gif" ".svg" ".cabal" ];
buildInputs = [ pythonWithPackages ];
buildPhase = ''
cd docs
@@ -58,11 +68,14 @@
buildInputs = [
# Compiler toolchain
hpkgs.ghc
+ hpkgs.haskell-language-server
pkgs.haskellPackages.cabal-install
# Dependencies needed to build some parts of Hackage
gmp zlib ncurses
+ # for compatibility of curl with provided gcc
+ curl
# Changelog tooling
- (gen-hls-changelogs pkgs.haskellPackages)
+ (gen-hls-changelogs hpkgs)
# For the documentation
pythonWithPackages
(pkgs.haskell.lib.justStaticExecutables (pkgs.haskell.lib.dontCheck pkgs.haskellPackages.opentelemetry-extra))
@@ -90,21 +103,17 @@
'';
};
- in rec {
+ in {
# Developement shell with only dev tools
devShells = {
default = mkDevShell pkgs.haskellPackages;
- shell-ghc94 = mkDevShell pkgs.haskell.packages.ghc94;
shell-ghc96 = mkDevShell pkgs.haskell.packages.ghc96;
shell-ghc98 = mkDevShell pkgs.haskell.packages.ghc98;
shell-ghc910 = mkDevShell pkgs.haskell.packages.ghc910;
+ shell-ghc912 = mkDevShell pkgs.haskell.packages.ghc912;
};
packages = { inherit docs; };
-
- # The attributes for the default shell and package changed in recent versions of Nix,
- # these are here for backwards compatibility with the old versions.
- devShell = devShells.default;
});
nixConfig = {
diff --git a/ghcide-bench/src/Experiments.hs b/ghcide-bench/src/Experiments.hs
index e9da50c2c8..c53ffd0a7c 100644
--- a/ghcide-bench/src/Experiments.hs
+++ b/ghcide-bench/src/Experiments.hs
@@ -857,11 +857,9 @@ getBuildEdgesCount = tryCallTestPlugin GetBuildEdgesCount
getRebuildsCount :: Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) Int)
getRebuildsCount = tryCallTestPlugin GetRebuildsCount
--- Copy&paste from ghcide/test/Development.IDE.Test
getStoredKeys :: Session [Text]
getStoredKeys = callTestPlugin GetStoredKeys
--- Copy&paste from ghcide/test/Development.IDE.Test
tryCallTestPlugin :: (A.FromJSON b) => TestRequest -> Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) b)
tryCallTestPlugin cmd = do
let cm = SMethod_CustomMethod (Proxy @"test")
@@ -873,7 +871,6 @@ tryCallTestPlugin cmd = do
A.Success a -> Right a
A.Error e -> error e
--- Copy&paste from ghcide/test/Development.IDE.Test
callTestPlugin :: (A.FromJSON b) => TestRequest -> Session b
callTestPlugin cmd = do
res <- tryCallTestPlugin cmd
diff --git a/ghcide/test/LICENSE b/ghcide-test/LICENSE
similarity index 100%
rename from ghcide/test/LICENSE
rename to ghcide-test/LICENSE
diff --git a/ghcide/test/data/TH/THA.hs b/ghcide-test/data/TH/THA.hs
similarity index 100%
rename from ghcide/test/data/TH/THA.hs
rename to ghcide-test/data/TH/THA.hs
diff --git a/ghcide/test/data/TH/THB.hs b/ghcide-test/data/TH/THB.hs
similarity index 100%
rename from ghcide/test/data/TH/THB.hs
rename to ghcide-test/data/TH/THB.hs
diff --git a/ghcide/test/data/TH/THC.hs b/ghcide-test/data/TH/THC.hs
similarity index 100%
rename from ghcide/test/data/TH/THC.hs
rename to ghcide-test/data/TH/THC.hs
diff --git a/ghcide/test/data/TH/hie.yaml b/ghcide-test/data/TH/hie.yaml
similarity index 100%
rename from ghcide/test/data/TH/hie.yaml
rename to ghcide-test/data/TH/hie.yaml
diff --git a/ghcide/test/data/THCoreFile/THA.hs b/ghcide-test/data/THCoreFile/THA.hs
similarity index 100%
rename from ghcide/test/data/THCoreFile/THA.hs
rename to ghcide-test/data/THCoreFile/THA.hs
diff --git a/ghcide/test/data/THCoreFile/THB.hs b/ghcide-test/data/THCoreFile/THB.hs
similarity index 100%
rename from ghcide/test/data/THCoreFile/THB.hs
rename to ghcide-test/data/THCoreFile/THB.hs
diff --git a/ghcide/test/data/THCoreFile/THC.hs b/ghcide-test/data/THCoreFile/THC.hs
similarity index 100%
rename from ghcide/test/data/THCoreFile/THC.hs
rename to ghcide-test/data/THCoreFile/THC.hs
diff --git a/ghcide/test/data/THCoreFile/hie.yaml b/ghcide-test/data/THCoreFile/hie.yaml
similarity index 100%
rename from ghcide/test/data/THCoreFile/hie.yaml
rename to ghcide-test/data/THCoreFile/hie.yaml
diff --git a/ghcide/test/data/THLoading/A.hs b/ghcide-test/data/THLoading/A.hs
similarity index 100%
rename from ghcide/test/data/THLoading/A.hs
rename to ghcide-test/data/THLoading/A.hs
diff --git a/ghcide/test/data/THLoading/B.hs b/ghcide-test/data/THLoading/B.hs
similarity index 100%
rename from ghcide/test/data/THLoading/B.hs
rename to ghcide-test/data/THLoading/B.hs
diff --git a/ghcide/test/data/THLoading/THA.hs b/ghcide-test/data/THLoading/THA.hs
similarity index 100%
rename from ghcide/test/data/THLoading/THA.hs
rename to ghcide-test/data/THLoading/THA.hs
diff --git a/ghcide/test/data/THLoading/THB.hs b/ghcide-test/data/THLoading/THB.hs
similarity index 100%
rename from ghcide/test/data/THLoading/THB.hs
rename to ghcide-test/data/THLoading/THB.hs
diff --git a/ghcide/test/data/THLoading/hie.yaml b/ghcide-test/data/THLoading/hie.yaml
similarity index 100%
rename from ghcide/test/data/THLoading/hie.yaml
rename to ghcide-test/data/THLoading/hie.yaml
diff --git a/ghcide/test/data/THNewName/A.hs b/ghcide-test/data/THNewName/A.hs
similarity index 100%
rename from ghcide/test/data/THNewName/A.hs
rename to ghcide-test/data/THNewName/A.hs
diff --git a/ghcide/test/data/THNewName/B.hs b/ghcide-test/data/THNewName/B.hs
similarity index 100%
rename from ghcide/test/data/THNewName/B.hs
rename to ghcide-test/data/THNewName/B.hs
diff --git a/ghcide/test/data/THNewName/C.hs b/ghcide-test/data/THNewName/C.hs
similarity index 100%
rename from ghcide/test/data/THNewName/C.hs
rename to ghcide-test/data/THNewName/C.hs
diff --git a/ghcide/test/data/THNewName/hie.yaml b/ghcide-test/data/THNewName/hie.yaml
similarity index 100%
rename from ghcide/test/data/THNewName/hie.yaml
rename to ghcide-test/data/THNewName/hie.yaml
diff --git a/ghcide/test/data/THUnboxed/THA.hs b/ghcide-test/data/THUnboxed/THA.hs
similarity index 100%
rename from ghcide/test/data/THUnboxed/THA.hs
rename to ghcide-test/data/THUnboxed/THA.hs
diff --git a/ghcide/test/data/THUnboxed/THB.hs b/ghcide-test/data/THUnboxed/THB.hs
similarity index 100%
rename from ghcide/test/data/THUnboxed/THB.hs
rename to ghcide-test/data/THUnboxed/THB.hs
diff --git a/ghcide/test/data/THUnboxed/THC.hs b/ghcide-test/data/THUnboxed/THC.hs
similarity index 100%
rename from ghcide/test/data/THUnboxed/THC.hs
rename to ghcide-test/data/THUnboxed/THC.hs
diff --git a/ghcide/test/data/THUnboxed/hie.yaml b/ghcide-test/data/THUnboxed/hie.yaml
similarity index 100%
rename from ghcide/test/data/THUnboxed/hie.yaml
rename to ghcide-test/data/THUnboxed/hie.yaml
diff --git a/ghcide/test/data/boot/A.hs b/ghcide-test/data/boot/A.hs
similarity index 100%
rename from ghcide/test/data/boot/A.hs
rename to ghcide-test/data/boot/A.hs
diff --git a/ghcide/test/data/boot/A.hs-boot b/ghcide-test/data/boot/A.hs-boot
similarity index 100%
rename from ghcide/test/data/boot/A.hs-boot
rename to ghcide-test/data/boot/A.hs-boot
diff --git a/ghcide/test/data/boot/B.hs b/ghcide-test/data/boot/B.hs
similarity index 100%
rename from ghcide/test/data/boot/B.hs
rename to ghcide-test/data/boot/B.hs
diff --git a/ghcide/test/data/boot/C.hs b/ghcide-test/data/boot/C.hs
similarity index 100%
rename from ghcide/test/data/boot/C.hs
rename to ghcide-test/data/boot/C.hs
diff --git a/ghcide/test/data/boot/hie.yaml b/ghcide-test/data/boot/hie.yaml
similarity index 100%
rename from ghcide/test/data/boot/hie.yaml
rename to ghcide-test/data/boot/hie.yaml
diff --git a/ghcide/test/data/boot2/A.hs b/ghcide-test/data/boot2/A.hs
similarity index 100%
rename from ghcide/test/data/boot2/A.hs
rename to ghcide-test/data/boot2/A.hs
diff --git a/ghcide/test/data/boot2/B.hs b/ghcide-test/data/boot2/B.hs
similarity index 100%
rename from ghcide/test/data/boot2/B.hs
rename to ghcide-test/data/boot2/B.hs
diff --git a/ghcide/test/data/boot2/B.hs-boot b/ghcide-test/data/boot2/B.hs-boot
similarity index 100%
rename from ghcide/test/data/boot2/B.hs-boot
rename to ghcide-test/data/boot2/B.hs-boot
diff --git a/ghcide/test/data/boot2/C.hs b/ghcide-test/data/boot2/C.hs
similarity index 100%
rename from ghcide/test/data/boot2/C.hs
rename to ghcide-test/data/boot2/C.hs
diff --git a/ghcide/test/data/boot2/D.hs b/ghcide-test/data/boot2/D.hs
similarity index 100%
rename from ghcide/test/data/boot2/D.hs
rename to ghcide-test/data/boot2/D.hs
diff --git a/ghcide/test/data/boot2/E.hs b/ghcide-test/data/boot2/E.hs
similarity index 100%
rename from ghcide/test/data/boot2/E.hs
rename to ghcide-test/data/boot2/E.hs
diff --git a/ghcide/test/data/boot2/hie.yaml b/ghcide-test/data/boot2/hie.yaml
similarity index 100%
rename from ghcide/test/data/boot2/hie.yaml
rename to ghcide-test/data/boot2/hie.yaml
diff --git a/ghcide/test/data/cabal-exe/a/a.cabal b/ghcide-test/data/cabal-exe/a/a.cabal
similarity index 100%
rename from ghcide/test/data/cabal-exe/a/a.cabal
rename to ghcide-test/data/cabal-exe/a/a.cabal
diff --git a/ghcide/test/data/cabal-exe/a/src/Main.hs b/ghcide-test/data/cabal-exe/a/src/Main.hs
similarity index 100%
rename from ghcide/test/data/cabal-exe/a/src/Main.hs
rename to ghcide-test/data/cabal-exe/a/src/Main.hs
diff --git a/ghcide/test/data/cabal-exe/cabal.project b/ghcide-test/data/cabal-exe/cabal.project
similarity index 100%
rename from ghcide/test/data/cabal-exe/cabal.project
rename to ghcide-test/data/cabal-exe/cabal.project
diff --git a/ghcide/test/data/cabal-exe/hie.yaml b/ghcide-test/data/cabal-exe/hie.yaml
similarity index 100%
rename from ghcide/test/data/cabal-exe/hie.yaml
rename to ghcide-test/data/cabal-exe/hie.yaml
diff --git a/ghcide/test/data/hover/Bar.hs b/ghcide-test/data/hover/Bar.hs
similarity index 100%
rename from ghcide/test/data/hover/Bar.hs
rename to ghcide-test/data/hover/Bar.hs
diff --git a/ghcide/test/data/hover/Foo.hs b/ghcide-test/data/hover/Foo.hs
similarity index 100%
rename from ghcide/test/data/hover/Foo.hs
rename to ghcide-test/data/hover/Foo.hs
diff --git a/ghcide/test/data/hover/GotoHover.hs b/ghcide-test/data/hover/GotoHover.hs
similarity index 100%
rename from ghcide/test/data/hover/GotoHover.hs
rename to ghcide-test/data/hover/GotoHover.hs
diff --git a/ghcide/test/data/hover/GotoImplementation.hs b/ghcide-test/data/hover/GotoImplementation.hs
similarity index 100%
rename from ghcide/test/data/hover/GotoImplementation.hs
rename to ghcide-test/data/hover/GotoImplementation.hs
diff --git a/ghcide/test/data/hover/RecordDotSyntax.hs b/ghcide-test/data/hover/RecordDotSyntax.hs
similarity index 100%
rename from ghcide/test/data/hover/RecordDotSyntax.hs
rename to ghcide-test/data/hover/RecordDotSyntax.hs
diff --git a/ghcide/test/data/hover/hie.yaml b/ghcide-test/data/hover/hie.yaml
similarity index 100%
rename from ghcide/test/data/hover/hie.yaml
rename to ghcide-test/data/hover/hie.yaml
diff --git a/ghcide/test/data/ignore-fatal/IgnoreFatal.hs b/ghcide-test/data/ignore-fatal/IgnoreFatal.hs
similarity index 100%
rename from ghcide/test/data/ignore-fatal/IgnoreFatal.hs
rename to ghcide-test/data/ignore-fatal/IgnoreFatal.hs
diff --git a/ghcide/test/data/ignore-fatal/cabal.project b/ghcide-test/data/ignore-fatal/cabal.project
similarity index 100%
rename from ghcide/test/data/ignore-fatal/cabal.project
rename to ghcide-test/data/ignore-fatal/cabal.project
diff --git a/ghcide/test/data/ignore-fatal/hie.yaml b/ghcide-test/data/ignore-fatal/hie.yaml
similarity index 100%
rename from ghcide/test/data/ignore-fatal/hie.yaml
rename to ghcide-test/data/ignore-fatal/hie.yaml
diff --git a/ghcide/test/data/ignore-fatal/ignore-fatal.cabal b/ghcide-test/data/ignore-fatal/ignore-fatal.cabal
similarity index 100%
rename from ghcide/test/data/ignore-fatal/ignore-fatal.cabal
rename to ghcide-test/data/ignore-fatal/ignore-fatal.cabal
diff --git a/ghcide/test/data/multi-unit-reexport/a-1.0.0-inplace b/ghcide-test/data/multi-unit-reexport/a-1.0.0-inplace
similarity index 100%
rename from ghcide/test/data/multi-unit-reexport/a-1.0.0-inplace
rename to ghcide-test/data/multi-unit-reexport/a-1.0.0-inplace
diff --git a/ghcide/test/data/multi-unit-reexport/a/A.hs b/ghcide-test/data/multi-unit-reexport/a/A.hs
similarity index 100%
rename from ghcide/test/data/multi-unit-reexport/a/A.hs
rename to ghcide-test/data/multi-unit-reexport/a/A.hs
diff --git a/ghcide/test/data/multi-unit-reexport/b-1.0.0-inplace b/ghcide-test/data/multi-unit-reexport/b-1.0.0-inplace
similarity index 100%
rename from ghcide/test/data/multi-unit-reexport/b-1.0.0-inplace
rename to ghcide-test/data/multi-unit-reexport/b-1.0.0-inplace
diff --git a/ghcide/test/data/multi-unit-reexport/b/B.hs b/ghcide-test/data/multi-unit-reexport/b/B.hs
similarity index 100%
rename from ghcide/test/data/multi-unit-reexport/b/B.hs
rename to ghcide-test/data/multi-unit-reexport/b/B.hs
diff --git a/ghcide/test/data/multi-unit-reexport/c-1.0.0-inplace b/ghcide-test/data/multi-unit-reexport/c-1.0.0-inplace
similarity index 100%
rename from ghcide/test/data/multi-unit-reexport/c-1.0.0-inplace
rename to ghcide-test/data/multi-unit-reexport/c-1.0.0-inplace
diff --git a/ghcide/test/data/multi-unit-reexport/c/C.hs b/ghcide-test/data/multi-unit-reexport/c/C.hs
similarity index 100%
rename from ghcide/test/data/multi-unit-reexport/c/C.hs
rename to ghcide-test/data/multi-unit-reexport/c/C.hs
diff --git a/ghcide/test/data/multi-unit-reexport/cabal.project b/ghcide-test/data/multi-unit-reexport/cabal.project
similarity index 100%
rename from ghcide/test/data/multi-unit-reexport/cabal.project
rename to ghcide-test/data/multi-unit-reexport/cabal.project
diff --git a/ghcide/test/data/multi-unit-reexport/hie.yaml b/ghcide-test/data/multi-unit-reexport/hie.yaml
similarity index 100%
rename from ghcide/test/data/multi-unit-reexport/hie.yaml
rename to ghcide-test/data/multi-unit-reexport/hie.yaml
diff --git a/ghcide/test/data/multi-unit/a-1.0.0-inplace b/ghcide-test/data/multi-unit/a-1.0.0-inplace
similarity index 100%
rename from ghcide/test/data/multi-unit/a-1.0.0-inplace
rename to ghcide-test/data/multi-unit/a-1.0.0-inplace
diff --git a/ghcide/test/data/multi-unit/a/A.hs b/ghcide-test/data/multi-unit/a/A.hs
similarity index 100%
rename from ghcide/test/data/multi-unit/a/A.hs
rename to ghcide-test/data/multi-unit/a/A.hs
diff --git a/ghcide/test/data/multi-unit/b-1.0.0-inplace b/ghcide-test/data/multi-unit/b-1.0.0-inplace
similarity index 100%
rename from ghcide/test/data/multi-unit/b-1.0.0-inplace
rename to ghcide-test/data/multi-unit/b-1.0.0-inplace
diff --git a/ghcide/test/data/multi-unit/b/B.hs b/ghcide-test/data/multi-unit/b/B.hs
similarity index 100%
rename from ghcide/test/data/multi-unit/b/B.hs
rename to ghcide-test/data/multi-unit/b/B.hs
diff --git a/ghcide/test/data/multi-unit/c-1.0.0-inplace b/ghcide-test/data/multi-unit/c-1.0.0-inplace
similarity index 100%
rename from ghcide/test/data/multi-unit/c-1.0.0-inplace
rename to ghcide-test/data/multi-unit/c-1.0.0-inplace
diff --git a/ghcide/test/data/multi-unit/c/C.hs b/ghcide-test/data/multi-unit/c/C.hs
similarity index 100%
rename from ghcide/test/data/multi-unit/c/C.hs
rename to ghcide-test/data/multi-unit/c/C.hs
diff --git a/ghcide/test/data/multi-unit/cabal.project b/ghcide-test/data/multi-unit/cabal.project
similarity index 100%
rename from ghcide/test/data/multi-unit/cabal.project
rename to ghcide-test/data/multi-unit/cabal.project
diff --git a/ghcide/test/data/multi-unit/hie.yaml b/ghcide-test/data/multi-unit/hie.yaml
similarity index 100%
rename from ghcide/test/data/multi-unit/hie.yaml
rename to ghcide-test/data/multi-unit/hie.yaml
diff --git a/ghcide/test/data/multi/a/A.hs b/ghcide-test/data/multi/a/A.hs
similarity index 100%
rename from ghcide/test/data/multi/a/A.hs
rename to ghcide-test/data/multi/a/A.hs
diff --git a/ghcide/test/data/multi/a/a.cabal b/ghcide-test/data/multi/a/a.cabal
similarity index 100%
rename from ghcide/test/data/multi/a/a.cabal
rename to ghcide-test/data/multi/a/a.cabal
diff --git a/ghcide/test/data/multi/b/B.hs b/ghcide-test/data/multi/b/B.hs
similarity index 100%
rename from ghcide/test/data/multi/b/B.hs
rename to ghcide-test/data/multi/b/B.hs
diff --git a/ghcide/test/data/multi/b/b.cabal b/ghcide-test/data/multi/b/b.cabal
similarity index 100%
rename from ghcide/test/data/multi/b/b.cabal
rename to ghcide-test/data/multi/b/b.cabal
diff --git a/ghcide/test/data/multi/c/C.hs b/ghcide-test/data/multi/c/C.hs
similarity index 100%
rename from ghcide/test/data/multi/c/C.hs
rename to ghcide-test/data/multi/c/C.hs
diff --git a/ghcide/test/data/multi/c/c.cabal b/ghcide-test/data/multi/c/c.cabal
similarity index 100%
rename from ghcide/test/data/multi/c/c.cabal
rename to ghcide-test/data/multi/c/c.cabal
diff --git a/ghcide/test/data/multi/cabal.project b/ghcide-test/data/multi/cabal.project
similarity index 100%
rename from ghcide/test/data/multi/cabal.project
rename to ghcide-test/data/multi/cabal.project
diff --git a/ghcide/test/data/multi/hie.yaml b/ghcide-test/data/multi/hie.yaml
similarity index 100%
rename from ghcide/test/data/multi/hie.yaml
rename to ghcide-test/data/multi/hie.yaml
diff --git a/ghcide/test/data/plugin-knownnat/KnownNat.hs b/ghcide-test/data/plugin-knownnat/KnownNat.hs
similarity index 100%
rename from ghcide/test/data/plugin-knownnat/KnownNat.hs
rename to ghcide-test/data/plugin-knownnat/KnownNat.hs
diff --git a/ghcide/test/data/plugin-knownnat/cabal.project b/ghcide-test/data/plugin-knownnat/cabal.project
similarity index 100%
rename from ghcide/test/data/plugin-knownnat/cabal.project
rename to ghcide-test/data/plugin-knownnat/cabal.project
diff --git a/ghcide/test/data/plugin-knownnat/plugin.cabal b/ghcide-test/data/plugin-knownnat/plugin.cabal
similarity index 100%
rename from ghcide/test/data/plugin-knownnat/plugin.cabal
rename to ghcide-test/data/plugin-knownnat/plugin.cabal
diff --git a/ghcide/test/data/recomp/A.hs b/ghcide-test/data/recomp/A.hs
similarity index 100%
rename from ghcide/test/data/recomp/A.hs
rename to ghcide-test/data/recomp/A.hs
diff --git a/ghcide/test/data/recomp/B.hs b/ghcide-test/data/recomp/B.hs
similarity index 100%
rename from ghcide/test/data/recomp/B.hs
rename to ghcide-test/data/recomp/B.hs
diff --git a/ghcide/test/data/recomp/P.hs b/ghcide-test/data/recomp/P.hs
similarity index 100%
rename from ghcide/test/data/recomp/P.hs
rename to ghcide-test/data/recomp/P.hs
diff --git a/ghcide/test/data/recomp/hie.yaml b/ghcide-test/data/recomp/hie.yaml
similarity index 100%
rename from ghcide/test/data/recomp/hie.yaml
rename to ghcide-test/data/recomp/hie.yaml
diff --git a/ghcide/test/data/references/Main.hs b/ghcide-test/data/references/Main.hs
similarity index 100%
rename from ghcide/test/data/references/Main.hs
rename to ghcide-test/data/references/Main.hs
diff --git a/ghcide/test/data/references/OtherModule.hs b/ghcide-test/data/references/OtherModule.hs
similarity index 100%
rename from ghcide/test/data/references/OtherModule.hs
rename to ghcide-test/data/references/OtherModule.hs
diff --git a/ghcide/test/data/references/OtherOtherModule.hs b/ghcide-test/data/references/OtherOtherModule.hs
similarity index 100%
rename from ghcide/test/data/references/OtherOtherModule.hs
rename to ghcide-test/data/references/OtherOtherModule.hs
diff --git a/ghcide/test/data/references/References.hs b/ghcide-test/data/references/References.hs
similarity index 100%
rename from ghcide/test/data/references/References.hs
rename to ghcide-test/data/references/References.hs
diff --git a/ghcide/test/data/references/hie.yaml b/ghcide-test/data/references/hie.yaml
similarity index 100%
rename from ghcide/test/data/references/hie.yaml
rename to ghcide-test/data/references/hie.yaml
diff --git a/ghcide/test/data/rootUri/dirA/Foo.hs b/ghcide-test/data/rootUri/dirA/Foo.hs
similarity index 100%
rename from ghcide/test/data/rootUri/dirA/Foo.hs
rename to ghcide-test/data/rootUri/dirA/Foo.hs
diff --git a/ghcide/test/data/rootUri/dirA/foo.cabal b/ghcide-test/data/rootUri/dirA/foo.cabal
similarity index 100%
rename from ghcide/test/data/rootUri/dirA/foo.cabal
rename to ghcide-test/data/rootUri/dirA/foo.cabal
diff --git a/ghcide/test/data/rootUri/dirB/Foo.hs b/ghcide-test/data/rootUri/dirB/Foo.hs
similarity index 100%
rename from ghcide/test/data/rootUri/dirB/Foo.hs
rename to ghcide-test/data/rootUri/dirB/Foo.hs
diff --git a/ghcide/test/data/rootUri/dirB/foo.cabal b/ghcide-test/data/rootUri/dirB/foo.cabal
similarity index 100%
rename from ghcide/test/data/rootUri/dirB/foo.cabal
rename to ghcide-test/data/rootUri/dirB/foo.cabal
diff --git a/ghcide/test/data/symlink/hie.yaml b/ghcide-test/data/symlink/hie.yaml
similarity index 100%
rename from ghcide/test/data/symlink/hie.yaml
rename to ghcide-test/data/symlink/hie.yaml
diff --git a/ghcide/test/data/symlink/other_loc/.gitkeep b/ghcide-test/data/symlink/other_loc/.gitkeep
similarity index 100%
rename from ghcide/test/data/symlink/other_loc/.gitkeep
rename to ghcide-test/data/symlink/other_loc/.gitkeep
diff --git a/ghcide/test/data/symlink/some_loc/Sym.hs b/ghcide-test/data/symlink/some_loc/Sym.hs
similarity index 100%
rename from ghcide/test/data/symlink/some_loc/Sym.hs
rename to ghcide-test/data/symlink/some_loc/Sym.hs
diff --git a/ghcide/test/data/symlink/src/Foo.hs b/ghcide-test/data/symlink/src/Foo.hs
similarity index 100%
rename from ghcide/test/data/symlink/src/Foo.hs
rename to ghcide-test/data/symlink/src/Foo.hs
diff --git a/ghcide-test/data/watched-files/reload/reload.cabal b/ghcide-test/data/watched-files/reload/reload.cabal
new file mode 100644
index 0000000000..d9d5607a94
--- /dev/null
+++ b/ghcide-test/data/watched-files/reload/reload.cabal
@@ -0,0 +1,12 @@
+cabal-version: 2.4
+name: reload
+version: 0.1.0.0
+author: Lin Jian
+maintainer: me@linj.tech
+build-type: Simple
+
+library
+ exposed-modules: MyLib
+ build-depends: base
+ hs-source-dirs: src
+ default-language: Haskell2010
diff --git a/ghcide-test/data/watched-files/reload/src/MyLib.hs b/ghcide-test/data/watched-files/reload/src/MyLib.hs
new file mode 100644
index 0000000000..bbb506d001
--- /dev/null
+++ b/ghcide-test/data/watched-files/reload/src/MyLib.hs
@@ -0,0 +1,6 @@
+module MyLib (someFunc) where
+
+import Data.List.Split
+
+someFunc :: IO ()
+someFunc = putStrLn "someFunc"
diff --git a/ghcide/test/data/working-dir/a/A.hs b/ghcide-test/data/working-dir/a/A.hs
similarity index 100%
rename from ghcide/test/data/working-dir/a/A.hs
rename to ghcide-test/data/working-dir/a/A.hs
diff --git a/ghcide/test/data/working-dir/a/B.hs b/ghcide-test/data/working-dir/a/B.hs
similarity index 100%
rename from ghcide/test/data/working-dir/a/B.hs
rename to ghcide-test/data/working-dir/a/B.hs
diff --git a/ghcide/test/data/working-dir/a/a.cabal b/ghcide-test/data/working-dir/a/a.cabal
similarity index 100%
rename from ghcide/test/data/working-dir/a/a.cabal
rename to ghcide-test/data/working-dir/a/a.cabal
diff --git a/ghcide/test/data/working-dir/a/wdtest b/ghcide-test/data/working-dir/a/wdtest
similarity index 100%
rename from ghcide/test/data/working-dir/a/wdtest
rename to ghcide-test/data/working-dir/a/wdtest
diff --git a/ghcide/test/data/working-dir/cabal.project b/ghcide-test/data/working-dir/cabal.project
similarity index 100%
rename from ghcide/test/data/working-dir/cabal.project
rename to ghcide-test/data/working-dir/cabal.project
diff --git a/ghcide/test/data/working-dir/hie.yaml b/ghcide-test/data/working-dir/hie.yaml
similarity index 100%
rename from ghcide/test/data/working-dir/hie.yaml
rename to ghcide-test/data/working-dir/hie.yaml
diff --git a/ghcide/test/exe/AsyncTests.hs b/ghcide-test/exe/AsyncTests.hs
similarity index 100%
rename from ghcide/test/exe/AsyncTests.hs
rename to ghcide-test/exe/AsyncTests.hs
diff --git a/ghcide/test/exe/BootTests.hs b/ghcide-test/exe/BootTests.hs
similarity index 100%
rename from ghcide/test/exe/BootTests.hs
rename to ghcide-test/exe/BootTests.hs
diff --git a/ghcide/test/exe/CPPTests.hs b/ghcide-test/exe/CPPTests.hs
similarity index 100%
rename from ghcide/test/exe/CPPTests.hs
rename to ghcide-test/exe/CPPTests.hs
diff --git a/ghcide/test/exe/ClientSettingsTests.hs b/ghcide-test/exe/ClientSettingsTests.hs
similarity index 100%
rename from ghcide/test/exe/ClientSettingsTests.hs
rename to ghcide-test/exe/ClientSettingsTests.hs
diff --git a/ghcide/test/exe/CodeLensTests.hs b/ghcide-test/exe/CodeLensTests.hs
similarity index 53%
rename from ghcide/test/exe/CodeLensTests.hs
rename to ghcide-test/exe/CodeLensTests.hs
index 4ec5f3957c..fd821e37b6 100644
--- a/ghcide/test/exe/CodeLensTests.hs
+++ b/ghcide-test/exe/CodeLensTests.hs
@@ -10,7 +10,6 @@ import Control.Monad.IO.Class (liftIO)
import qualified Data.Aeson as A
import Data.Maybe
import qualified Data.Text as T
-import Data.Tuple.Extra
import Development.IDE.GHC.Compat (GhcVersion (..), ghcVersion)
import qualified Language.LSP.Protocol.Lens as L
import Language.LSP.Protocol.Types hiding
@@ -28,6 +27,25 @@ tests = testGroup "code lenses"
[ addSigLensesTests
]
+data TestSpec =
+ TestSpec
+ { mName :: Maybe TestName -- ^ Optional Test Name
+ , input :: T.Text -- ^ Input
+ , expected :: Maybe T.Text -- ^ Expected Type Sig
+ }
+
+mkT :: T.Text -> T.Text -> TestSpec
+mkT i e = TestSpec Nothing i (Just e)
+mkT' :: TestName -> T.Text -> T.Text -> TestSpec
+mkT' name i e = TestSpec (Just name) i (Just e)
+
+noExpected :: TestSpec -> TestSpec
+noExpected t = t { expected = Nothing }
+
+mkTestName :: TestSpec -> String
+mkTestName t = case mName t of
+ Nothing -> T.unpack $ T.replace "\n" "\\n" (input t)
+ Just name -> name
addSigLensesTests :: TestTree
addSigLensesTests =
@@ -41,14 +59,14 @@ addSigLensesTests =
, "data T1 a where"
, " MkT1 :: (Show b) => a -> b -> T1 a"
]
- before enableGHCWarnings exported (def, _) others =
- T.unlines $ [pragmas | enableGHCWarnings] <> [moduleH exported, def] <> others
- after' enableGHCWarnings exported (def, sig) others =
- T.unlines $ [pragmas | enableGHCWarnings] <> [moduleH exported] <> maybe [] pure sig <> [def] <> others
+ before enableGHCWarnings exported spec others =
+ T.unlines $ [pragmas | enableGHCWarnings] <> [moduleH exported, input spec] <> others
+ after' enableGHCWarnings exported spec others =
+ T.unlines $ [pragmas | enableGHCWarnings] <> [moduleH exported] <> maybe [] pure (expected spec) <> [input spec] <> others
createConfig mode = A.object ["plugin" A..= A.object ["ghcide-type-lenses" A..= A.object ["config" A..= A.object ["mode" A..= A.String mode]]]]
- sigSession testName enableGHCWarnings waitForDiags mode exported def others = testWithDummyPluginEmpty testName $ do
- let originalCode = before enableGHCWarnings exported def others
- let expectedCode = after' enableGHCWarnings exported def others
+ sigSession testName enableGHCWarnings waitForDiags mode exported spec others = testWithDummyPluginEmpty testName $ do
+ let originalCode = before enableGHCWarnings exported spec others
+ let expectedCode = after' enableGHCWarnings exported spec others
setConfigSection "haskell" (createConfig mode)
doc <- createDoc "Sigs.hs" "haskell" originalCode
-- Because the diagnostics mode is really relying only on diagnostics now
@@ -58,7 +76,7 @@ addSigLensesTests =
then void waitForDiagnostics
else waitForProgressDone
codeLenses <- getAndResolveCodeLenses doc
- if not $ null $ snd def
+ if isJust $ expected spec
then do
liftIO $ length codeLenses == 1 @? "Expected 1 code lens, but got: " <> show codeLenses
executeCommand $ fromJust $ head codeLenses ^. L.command
@@ -66,43 +84,46 @@ addSigLensesTests =
liftIO $ expectedCode @=? modifiedCode
else liftIO $ null codeLenses @? "Expected no code lens, but got: " <> show codeLenses
cases =
- [ ("abc = True", "abc :: Bool")
- , ("foo a b = a + b", "foo :: Num a => a -> a -> a")
- , ("bar a b = show $ a + b", "bar :: (Show a, Num a) => a -> a -> String")
- , ("(!!!) a b = a > b", "(!!!) :: Ord a => a -> a -> Bool")
- , ("a >>>> b = a + b", "(>>>>) :: Num a => a -> a -> a")
- , ("a `haha` b = a b", "haha :: (t1 -> t2) -> t1 -> t2")
- , ("pattern Some a = Just a", "pattern Some :: a -> Maybe a")
- , ("pattern Some a <- Just a", "pattern Some :: a -> Maybe a")
- , ("pattern Some a <- Just a\n where Some a = Just a", "pattern Some :: a -> Maybe a")
- , ("pattern Some a <- Just !a\n where Some !a = Just a", "pattern Some :: a -> Maybe a")
- , ("pattern Point{x, y} = (x, y)", "pattern Point :: a -> b -> (a, b)")
- , ("pattern Point{x, y} <- (x, y)", "pattern Point :: a -> b -> (a, b)")
- , ("pattern Point{x, y} <- (x, y)\n where Point x y = (x, y)", "pattern Point :: a -> b -> (a, b)")
- , ("pattern MkT1' b = MkT1 42 b", "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a")
- , ("pattern MkT1' b <- MkT1 42 b", "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a")
- , ("pattern MkT1' b <- MkT1 42 b\n where MkT1' b = MkT1 42 b", "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a")
- , ("qualifiedSigTest= C.realPart", "qualifiedSigTest :: C.Complex a -> a")
- , ("head = 233", "head :: Integer")
- , ("rank2Test (k :: forall a . a -> a) = (k 233 :: Int, k \"QAQ\")", "rank2Test :: (forall a. a -> a) -> (Int, String)")
- , ("symbolKindTest = Proxy @\"qwq\"", "symbolKindTest :: Proxy \"qwq\"")
- , ("promotedKindTest = Proxy @Nothing", if ghcVersion >= GHC96 then "promotedKindTest :: Proxy Nothing" else "promotedKindTest :: Proxy 'Nothing")
- , ("typeOperatorTest = Refl", "typeOperatorTest :: forall {k} {a :: k}. a :~: a")
- , ("notInScopeTest = mkCharType"
- , if ghcVersion < GHC910
+ [ mkT "abc = True" "abc :: Bool"
+ , mkT "foo a b = a + b" "foo :: Num a => a -> a -> a"
+ , mkT "bar a b = show $ a + b" "bar :: (Show a, Num a) => a -> a -> String"
+ , mkT "(!!!) a b = a > b" "(!!!) :: Ord a => a -> a -> Bool"
+ , mkT "a >>>> b = a + b" "(>>>>) :: Num a => a -> a -> a"
+ , mkT "a `haha` b = a b" "haha :: (t1 -> t2) -> t1 -> t2"
+ , mkT "pattern Some a = Just a" "pattern Some :: a -> Maybe a"
+ , mkT "pattern Some a <- Just a" "pattern Some :: a -> Maybe a"
+ , mkT "pattern Some a <- Just a\n where Some a = Just a" "pattern Some :: a -> Maybe a"
+ , mkT "pattern Some a <- Just !a\n where Some !a = Just a" "pattern Some :: a -> Maybe a"
+ , mkT "pattern Point{x, y} = (x, y)" "pattern Point :: a -> b -> (a, b)"
+ , mkT "pattern Point{x, y} <- (x, y)" "pattern Point :: a -> b -> (a, b)"
+ , mkT "pattern Point{x, y} <- (x, y)\n where Point x y = (x, y)" "pattern Point :: a -> b -> (a, b)"
+ , mkT "pattern MkT1' b = MkT1 42 b" "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a"
+ , mkT "pattern MkT1' b <- MkT1 42 b" "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a"
+ , mkT "pattern MkT1' b <- MkT1 42 b\n where MkT1' b = MkT1 42 b" "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a"
+ , mkT "qualifiedSigTest= C.realPart" "qualifiedSigTest :: C.Complex a -> a"
+ , mkT "head = 233" "head :: Integer"
+ , mkT "rank2Test (k :: forall a . a -> a) = (k 233 :: Int, k \"QAQ\")" "rank2Test :: (forall a. a -> a) -> (Int, String)"
+ , mkT "symbolKindTest = Proxy @\"qwq\"" "symbolKindTest :: Proxy \"qwq\""
+ , mkT "promotedKindTest = Proxy @Nothing" (if ghcVersion >= GHC96 then "promotedKindTest :: Proxy Nothing" else "promotedKindTest :: Proxy 'Nothing")
+ , mkT "typeOperatorTest = Refl" "typeOperatorTest :: forall {k} {a :: k}. a :~: a"
+ , mkT "notInScopeTest = mkCharType"
+ (if ghcVersion < GHC910
then "notInScopeTest :: String -> Data.Data.DataType"
else "notInScopeTest :: String -> GHC.Internal.Data.Data.DataType"
)
- , ("aVeryLongSignature a b c d e f g h i j k l m n = a && b && c && d && e && f && g && h && i && j && k && l && m && n", "aVeryLongSignature :: Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool")
+
+ , mkT' "aVeryLongSignature"
+ "aVeryLongSignature a b c d e f g h i j k l m n = a && b && c && d && e && f && g && h && i && j && k && l && m && n"
+ "aVeryLongSignature :: Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool"
]
in testGroup
"add signature"
- [ testGroup "signatures are correct" [sigSession (T.unpack $ T.replace "\n" "\\n" def) False False "always" "" (def, Just sig) [] | (def, sig) <- cases]
- , sigSession "exported mode works" False False "exported" "xyz" ("xyz = True", Just "xyz :: Bool") (fst <$> take 3 cases)
+ [ testGroup "signatures are correct" [sigSession (mkTestName spec) False False "always" "" spec [] | spec <- cases]
+ , sigSession "exported mode works" False False "exported" "xyz" (mkT "xyz = True" "xyz :: Bool") (input <$> take 3 cases)
, testGroup
"diagnostics mode works"
- [ sigSession "with GHC warnings" True True "diagnostics" "" (second Just $ head cases) []
- , sigSession "without GHC warnings" False False "diagnostics" "" (second (const Nothing) $ head cases) []
+ [ sigSession "with GHC warnings" True True "diagnostics" "" (head cases) []
+ , sigSession "without GHC warnings" False False "diagnostics" "" (noExpected $ head cases) []
]
, testWithDummyPluginEmpty "keep stale lens" $ do
let content = T.unlines
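-- A minimal sketch, assuming the TestSpec helpers defined in the hunk above:
-- the record-based cases replace the old (definition, signature) tuples, and
-- the test name is derived from the input unless one is given explicitly.
-- The first literal is taken from the cases list; the named case is shortened
-- here purely for illustration.
exampleSpecs :: [TestSpec]
exampleSpecs =
  [ mkT "abc = True" "abc :: Bool"                  -- unnamed: mkTestName yields "abc = True"
  , mkT' "shortNamedCase" "f a b = a && b" "f :: Bool -> Bool -> Bool"
  , noExpected (mkT "abc = True" "abc :: Bool")     -- a case that expects no code lens at all
  ]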
diff --git a/ghcide/test/exe/CompletionTests.hs b/ghcide-test/exe/CompletionTests.hs
similarity index 94%
rename from ghcide/test/exe/CompletionTests.hs
rename to ghcide-test/exe/CompletionTests.hs
index a980d47233..8c44173bd6 100644
--- a/ghcide/test/exe/CompletionTests.hs
+++ b/ghcide-test/exe/CompletionTests.hs
@@ -33,7 +33,6 @@ import Test.Hls.Util
import Test.Tasty
import Test.Tasty.HUnit
-
tests :: TestTree
tests
= testGroup "completion"
@@ -61,6 +60,7 @@ completionTest :: HasCallStack => String -> [T.Text] -> Position -> [(T.Text, Co
completionTest name src pos expected = testSessionSingleFile name "A.hs" (T.unlines src) $ do
docId <- openDoc "A.hs" "haskell"
_ <- waitForDiagnostics
+
compls <- getAndResolveCompletions docId pos
let compls' = [ (_label, _kind, _insertText, _additionalTextEdits) | CompletionItem{..} <- compls]
let emptyToMaybe x = if T.null x then Nothing else Just x
@@ -211,7 +211,38 @@ localCompletionTests = [
compls <- getCompletions doc (Position 0 15)
liftIO $ filter ("AAA" `T.isPrefixOf`) (mapMaybe _insertText compls) @?= ["AAAAA"]
- pure ()
+ pure (),
+ completionTest
+ "polymorphic record dot completion"
+ [ "{-# LANGUAGE OverloadedRecordDot #-}"
+ , "module A () where"
+ , "data Record = Record"
+ , " { field1 :: Int"
+ , " , field2 :: Int"
+ , " }"
+ , -- Without the following, this file doesn't trigger any diagnostics, so completionTest waits forever
+ "triggerDiag :: UnknownType"
+ , "foo record = record.f"
+ ]
+ (Position 7 21)
+ [("field1", CompletionItemKind_Function, "field1", True, False, Nothing)
+ ,("field2", CompletionItemKind_Function, "field2", True, False, Nothing)
+ ],
+ completionTest
+ "qualified polymorphic record dot completion"
+ [ "{-# LANGUAGE OverloadedRecordDot #-}"
+ , "module A () where"
+ , "data Record = Record"
+ , " { field1 :: Int"
+ , " , field2 :: Int"
+ , " }"
+ , "someValue = undefined"
+ , "foo = A.someValue.f"
+ ]
+ (Position 7 19)
+ [("field1", CompletionItemKind_Function, "field1", True, False, Nothing)
+ ,("field2", CompletionItemKind_Function, "field2", True, False, Nothing)
+ ]
]
nonLocalCompletionTests :: [TestTree]
@@ -276,8 +307,7 @@ nonLocalCompletionTests =
where
brokenForWinGhc = knownBrokenOnWindows "Windows has strange things in scope for some reason"
brokenForWinOldGhc =
- knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC94] "Windows (GHC == 9.4) has strange things in scope for some reason"
- . knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC96] "Windows (GHC == 9.6) has strange things in scope for some reason"
+ knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC96] "Windows (GHC == 9.6) has strange things in scope for some reason"
. knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC98] "Windows (GHC == 9.8) has strange things in scope for some reason"
otherCompletionTests :: [TestTree]
@@ -350,10 +380,11 @@ packageCompletionTests =
, _label == "fromList"
]
liftIO $ take 3 (sort compls') @?=
- map ("Defined in "<>) (
- [ "'Data.List.NonEmpty"
+ map ("Defined in "<>) [
+ "'Data.List.NonEmpty"
, "'GHC.Exts"
- ] ++ (["'GHC.IsList" | ghcVersion >= GHC94]))
+ , "'GHC.IsList"
+ ]
, testSessionEmptyWithCradle "Map" "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, containers, A]}}" $ do
doc <- createDoc "A.hs" "haskell" $ T.unlines
diff --git a/ghcide/test/exe/Config.hs b/ghcide-test/exe/Config.hs
similarity index 99%
rename from ghcide/test/exe/Config.hs
rename to ghcide-test/exe/Config.hs
index 19ae47c67b..c98023e90e 100644
--- a/ghcide/test/exe/Config.hs
+++ b/ghcide-test/exe/Config.hs
@@ -28,6 +28,7 @@ module Config(
, withLongTimeout
, lspTestCaps
, lspTestCapsNoFileWatches
+ , testDataDir
) where
import Control.Exception (bracket_)
@@ -47,7 +48,7 @@ import Test.Hls
import qualified Test.Hls.FileSystem as FS
testDataDir :: FilePath
-testDataDir = "ghcide" </> "test" </> "data"
+testDataDir = "ghcide-test" </> "data"
mkIdeTestFs :: [FS.FileTree] -> FS.VirtualFileTree
mkIdeTestFs = FS.mkVirtualFileTree testDataDir
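-- A minimal sketch, assuming the newly exported testDataDir above: test
-- modules can now build paths into the relocated ghcide-test/data tree from
-- one place instead of hard-coding "ghcide/test/data" (NonLspCommandLine.hs
-- further down in this patch is switched over to exactly this). The directory
-- used here is one of the test data folders renamed earlier in the patch.
multiUnitCabalProject :: FilePath
multiUnitCabalProject = testDataDir </> "multi-unit" </> "cabal.project"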
diff --git a/ghcide/test/exe/CradleTests.hs b/ghcide-test/exe/CradleTests.hs
similarity index 100%
rename from ghcide/test/exe/CradleTests.hs
rename to ghcide-test/exe/CradleTests.hs
diff --git a/ghcide/test/exe/DependentFileTest.hs b/ghcide-test/exe/DependentFileTest.hs
similarity index 100%
rename from ghcide/test/exe/DependentFileTest.hs
rename to ghcide-test/exe/DependentFileTest.hs
diff --git a/ghcide/test/exe/DiagnosticTests.hs b/ghcide-test/exe/DiagnosticTests.hs
similarity index 96%
rename from ghcide/test/exe/DiagnosticTests.hs
rename to ghcide-test/exe/DiagnosticTests.hs
index 615e6ad69e..52aba0b9b7 100644
--- a/ghcide/test/exe/DiagnosticTests.hs
+++ b/ghcide-test/exe/DiagnosticTests.hs
@@ -343,19 +343,9 @@ tests = testGroup "diagnostics"
expectDiagnostics
[ ( "Main.hs"
, [(DiagnosticSeverity_Error, (6, 9),
- if ghcVersion >= GHC96 then
- "Variable not in scope: ThisList.map"
- else if ghcVersion >= GHC94 then
- "Variable not in scope: map" -- See https://gitlab.haskell.org/ghc/ghc/-/issues/22130
- else
- "Not in scope: \8216ThisList.map\8217", Just "GHC-88464")
+ "Variable not in scope: ThisList.map", Just "GHC-88464")
,(DiagnosticSeverity_Error, (7, 9),
- if ghcVersion >= GHC96 then
- "Variable not in scope: BaseList.x"
- else if ghcVersion >= GHC94 then
- "Variable not in scope: x" -- See https://gitlab.haskell.org/ghc/ghc/-/issues/22130
- else
- "Not in scope: \8216BaseList.x\8217", Just "GHC-88464")
+ "Variable not in scope: BaseList.x", Just "GHC-88464")
]
)
]
@@ -373,7 +363,7 @@ tests = testGroup "diagnostics"
-- where appropriate. The warning should use an unqualified name 'Ord', not
-- something like 'GHC.Classes.Ord'. The choice of redundant-constraints to
-- test this is fairly arbitrary.
- , [(DiagnosticSeverity_Warning, (2, if ghcVersion >= GHC94 then 7 else 0), "Redundant constraint: Ord a", Just "GHC-30606")
+ , [(DiagnosticSeverity_Warning, (2, 7), "Redundant constraint: Ord a", Just "GHC-30606")
]
)
]
diff --git a/ghcide/test/exe/ExceptionTests.hs b/ghcide-test/exe/ExceptionTests.hs
similarity index 99%
rename from ghcide/test/exe/ExceptionTests.hs
rename to ghcide-test/exe/ExceptionTests.hs
index 756e7e0547..a95f91e97c 100644
--- a/ghcide/test/exe/ExceptionTests.hs
+++ b/ghcide-test/exe/ExceptionTests.hs
@@ -8,7 +8,7 @@ import Control.Monad.Error.Class (MonadError (throwError))
import Control.Monad.IO.Class (liftIO)
import qualified Data.Aeson as A
import Data.Default (Default (..))
-import Data.Text as T
+import qualified Data.Text as T
import Development.IDE.Core.Shake (IdeState (..))
import qualified Development.IDE.LSP.Notifications as Notifications
import Development.IDE.Plugin.HLS (toResponseError)
diff --git a/ghcide/test/exe/FindDefinitionAndHoverTests.hs b/ghcide-test/exe/FindDefinitionAndHoverTests.hs
similarity index 99%
rename from ghcide/test/exe/FindDefinitionAndHoverTests.hs
rename to ghcide-test/exe/FindDefinitionAndHoverTests.hs
index e46141df4e..7920ff4949 100644
--- a/ghcide/test/exe/FindDefinitionAndHoverTests.hs
+++ b/ghcide-test/exe/FindDefinitionAndHoverTests.hs
@@ -187,7 +187,7 @@ tests = let
holeL65 = Position 65 8 ; hleInfo2 = [ExpectHoverText ["_ :: a -> Maybe a"]]
cccL17 = Position 17 16 ; docLink = [ExpectHoverTextRegex "\\*Defined in 'GHC.Types'\\* \\*\\(ghc-prim-[0-9.]+\\)\\*\n\n"]
imported = Position 56 13 ; importedSig = getDocUri "Foo.hs" >>= \foo -> return [ExpectHoverText ["foo", "Foo", "Haddock"], mkL foo 5 0 5 3]
- reexported = Position 55 14 ; reexportedSig = getDocUri "Bar.hs" >>= \bar -> return [ExpectHoverText ["Bar", "Bar", "Haddock"], if ghcVersion >= GHC94 && ghcVersion < GHC910 then mkL bar 3 5 3 8 else mkL bar 3 0 3 14]
+ reexported = Position 55 14 ; reexportedSig = getDocUri "Bar.hs" >>= \bar -> return [ExpectHoverText ["Bar", "Bar", "Haddock"], if ghcVersion < GHC910 then mkL bar 3 5 3 8 else mkL bar 3 0 3 14]
thLocL57 = Position 59 10 ; thLoc = [ExpectHoverText ["Identity"]]
cmtL68 = Position 67 0 ; lackOfdEq = [ExpectHoverExcludeText ["$dEq"]]
import310 = Position 3 10; pkgTxt = [ExpectHoverText ["Data.Text\n\ntext-"]]
diff --git a/ghcide/test/exe/FindImplementationAndHoverTests.hs b/ghcide-test/exe/FindImplementationAndHoverTests.hs
similarity index 100%
rename from ghcide/test/exe/FindImplementationAndHoverTests.hs
rename to ghcide-test/exe/FindImplementationAndHoverTests.hs
diff --git a/ghcide-test/exe/FuzzySearch.hs b/ghcide-test/exe/FuzzySearch.hs
new file mode 100644
index 0000000000..1d2a5ac181
--- /dev/null
+++ b/ghcide-test/exe/FuzzySearch.hs
@@ -0,0 +1,52 @@
+module FuzzySearch (tests) where
+
+import Data.Maybe (isJust, mapMaybe)
+import Data.Text (Text)
+import qualified Data.Text as Text
+import Prelude hiding (filter)
+import Test.Tasty
+import Test.Tasty.HUnit
+import Test.Tasty.QuickCheck
+import Text.Fuzzy.Parallel
+
+tests :: TestTree
+tests =
+ testGroup
+ "Fuzzy search"
+ [ testGroup "match"
+ [ testCase "empty" $
+ match "" "" @?= Just 0
+ , testCase "camel case" $
+ match "myImportantField" "myImportantField" @?= Just 262124
+ , testCase "a" $
+ mapMaybe (matchInput "a") ["", "a", "aa", "aaa", "A", "AA", "aA", "Aa"]
+ @?= [("a",3),("aa",3),("aaa",3),("aA",3),("Aa",1)]
+ , testCase "lowercase words" $
+ mapMaybe (matchInput "abc") ["abc", "abcd", "axbc", "axbxc", "def"]
+ @?= [("abc", 25), ("abcd", 25), ("axbc", 7), ("axbxc", 5)]
+ , testCase "lower upper mix" $
+ mapMaybe (matchInput "abc") ["abc", "aBc", "axbC", "axBxC", "def"]
+ @?= [("abc", 25), ("aBc", 25), ("axbC", 7), ("axBxC", 5)]
+ , testCase "prefixes" $
+ mapMaybe (matchInput "alpha") (Text.inits "alphabet")
+ @?= [("alpha", 119), ("alphab", 119), ("alphabe", 119), ("alphabet", 119)]
+ , testProperty "x `isSubsequenceOf` y => match x y returns Just"
+ prop_matchIfSubsequence
+ ]
+ ]
+ where
+ matchInput :: Text -> Text -> Maybe (Text, Int)
+ matchInput needle candidate = (candidate,) <$> match needle candidate
+
+prop_matchIfSubsequence :: Property
+prop_matchIfSubsequence =
+ forAll genNonEmptyText $ \haystack ->
+ forAll (genSubsequence haystack) $ \needle ->
+ isJust (match needle haystack)
+ where
+ genNonEmptyText =
+ Text.pack <$> listOf1 (elements $ ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'])
+
+ genSubsequence :: Text -> Gen Text
+ genSubsequence =
+ fmap Text.pack . sublistOf . Text.unpack
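-- A small usage sketch for Text.Fuzzy.Parallel.match as exercised by the new
-- module above; the scores are the ones the tests assert, and Nothing is
-- returned when the pattern is not a subsequence of the candidate.
fuzzyExamples :: [Maybe Int]
fuzzyExamples =
  [ match "abc" "abc"   -- Just 25: a contiguous match scores highest
  , match "abc" "axbc"  -- Just 7 : gaps lower the score
  , match "abc" "def"   -- Nothing: "abc" is not a subsequence of "def"
  ]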
diff --git a/ghcide/test/exe/GarbageCollectionTests.hs b/ghcide-test/exe/GarbageCollectionTests.hs
similarity index 100%
rename from ghcide/test/exe/GarbageCollectionTests.hs
rename to ghcide-test/exe/GarbageCollectionTests.hs
diff --git a/ghcide/test/exe/HaddockTests.hs b/ghcide-test/exe/HaddockTests.hs
similarity index 100%
rename from ghcide/test/exe/HaddockTests.hs
rename to ghcide-test/exe/HaddockTests.hs
diff --git a/ghcide/test/exe/HieDbRetry.hs b/ghcide-test/exe/HieDbRetry.hs
similarity index 100%
rename from ghcide/test/exe/HieDbRetry.hs
rename to ghcide-test/exe/HieDbRetry.hs
diff --git a/ghcide/test/exe/HighlightTests.hs b/ghcide-test/exe/HighlightTests.hs
similarity index 100%
rename from ghcide/test/exe/HighlightTests.hs
rename to ghcide-test/exe/HighlightTests.hs
diff --git a/ghcide/test/exe/IfaceTests.hs b/ghcide-test/exe/IfaceTests.hs
similarity index 100%
rename from ghcide/test/exe/IfaceTests.hs
rename to ghcide-test/exe/IfaceTests.hs
diff --git a/ghcide/test/exe/InitializeResponseTests.hs b/ghcide-test/exe/InitializeResponseTests.hs
similarity index 100%
rename from ghcide/test/exe/InitializeResponseTests.hs
rename to ghcide-test/exe/InitializeResponseTests.hs
diff --git a/ghcide/test/exe/LogType.hs b/ghcide-test/exe/LogType.hs
similarity index 100%
rename from ghcide/test/exe/LogType.hs
rename to ghcide-test/exe/LogType.hs
diff --git a/ghcide/test/exe/Main.hs b/ghcide-test/exe/Main.hs
similarity index 100%
rename from ghcide/test/exe/Main.hs
rename to ghcide-test/exe/Main.hs
diff --git a/ghcide/test/exe/NonLspCommandLine.hs b/ghcide-test/exe/NonLspCommandLine.hs
similarity index 90%
rename from ghcide/test/exe/NonLspCommandLine.hs
rename to ghcide-test/exe/NonLspCommandLine.hs
index a0940625b5..b2b41071d4 100644
--- a/ghcide/test/exe/NonLspCommandLine.hs
+++ b/ghcide-test/exe/NonLspCommandLine.hs
@@ -14,6 +14,7 @@ import System.Process.Extra (CreateProcess (cwd), proc,
readCreateProcessWithExitCode)
import Test.Tasty
import Test.Tasty.HUnit
+import Config (testDataDir)
-- A test to ensure that the command line ghcide workflow stays working
@@ -44,7 +45,7 @@ withTempDir f = System.IO.Extra.withTempDir $ canonicalizePath >=> f
copyTestDataFiles :: FilePath -> FilePath -> IO ()
copyTestDataFiles dir prefix = do
-- Copy all the test data files to the temporary workspace
- testDataFiles <- getDirectoryFilesIO ("ghcide/test/data" </> prefix) ["//*"]
+ testDataFiles <- getDirectoryFilesIO (testDataDir </> prefix) ["//*"]
for_ testDataFiles $ \f -> do
createDirectoryIfMissing True $ dir </> takeDirectory f
- copyFile ("ghcide/test/data" </> prefix </> f) (dir </> f)
+ copyFile (testDataDir </> prefix </> f) (dir </> f)
diff --git a/ghcide/test/exe/OpenCloseTest.hs b/ghcide-test/exe/OpenCloseTest.hs
similarity index 100%
rename from ghcide/test/exe/OpenCloseTest.hs
rename to ghcide-test/exe/OpenCloseTest.hs
diff --git a/ghcide/test/exe/OutlineTests.hs b/ghcide-test/exe/OutlineTests.hs
similarity index 100%
rename from ghcide/test/exe/OutlineTests.hs
rename to ghcide-test/exe/OutlineTests.hs
diff --git a/ghcide/test/exe/PluginSimpleTests.hs b/ghcide-test/exe/PluginSimpleTests.hs
similarity index 100%
rename from ghcide/test/exe/PluginSimpleTests.hs
rename to ghcide-test/exe/PluginSimpleTests.hs
diff --git a/ghcide/test/exe/PositionMappingTests.hs b/ghcide-test/exe/PositionMappingTests.hs
similarity index 100%
rename from ghcide/test/exe/PositionMappingTests.hs
rename to ghcide-test/exe/PositionMappingTests.hs
diff --git a/ghcide/test/exe/PreprocessorTests.hs b/ghcide-test/exe/PreprocessorTests.hs
similarity index 100%
rename from ghcide/test/exe/PreprocessorTests.hs
rename to ghcide-test/exe/PreprocessorTests.hs
diff --git a/ghcide/test/exe/Progress.hs b/ghcide-test/exe/Progress.hs
similarity index 100%
rename from ghcide/test/exe/Progress.hs
rename to ghcide-test/exe/Progress.hs
diff --git a/ghcide/test/exe/ReferenceTests.hs b/ghcide-test/exe/ReferenceTests.hs
similarity index 95%
rename from ghcide/test/exe/ReferenceTests.hs
rename to ghcide-test/exe/ReferenceTests.hs
index 50c263c4fc..cdbf8e472d 100644
--- a/ghcide/test/exe/ReferenceTests.hs
+++ b/ghcide-test/exe/ReferenceTests.hs
@@ -115,7 +115,7 @@ tests = testGroup "references"
]
, testGroup "can get references to non FOIs"
- [ referenceTest "can get references to symbol defined in a module we import"
+ [ referenceTest "references to symbol defined in a module we import"
("References.hs", 22, 4)
YesIncludeDeclaration
[ ("References.hs", 22, 4)
@@ -123,7 +123,7 @@ tests = testGroup "references"
, ("OtherModule.hs", 4, 0)
]
- , referenceTest "can get references in modules that import us to symbols we define"
+ , referenceTest "references in modules that import us to symbols we define"
("OtherModule.hs", 4, 0)
YesIncludeDeclaration
[ ("References.hs", 22, 4)
@@ -131,7 +131,7 @@ tests = testGroup "references"
, ("OtherModule.hs", 4, 0)
]
- , referenceTest "can get references to symbol defined in a module we import transitively"
+ , referenceTest "references to symbol defined in a module we import transitively"
("References.hs", 24, 4)
YesIncludeDeclaration
[ ("References.hs", 24, 4)
@@ -139,7 +139,7 @@ tests = testGroup "references"
, ("OtherOtherModule.hs", 2, 0)
]
- , referenceTest "can get references in modules that import us transitively to symbols we define"
+ , referenceTest "references in modules that transitively use symbols we define"
("OtherOtherModule.hs", 2, 0)
YesIncludeDeclaration
[ ("References.hs", 24, 4)
@@ -147,7 +147,7 @@ tests = testGroup "references"
, ("OtherOtherModule.hs", 2, 0)
]
- , referenceTest "can get type references to other modules"
+ , referenceTest "type references to other modules"
("Main.hs", 12, 10)
YesIncludeDeclaration
[ ("Main.hs", 12, 7)
diff --git a/ghcide/test/exe/ResolveTests.hs b/ghcide-test/exe/ResolveTests.hs
similarity index 100%
rename from ghcide/test/exe/ResolveTests.hs
rename to ghcide-test/exe/ResolveTests.hs
diff --git a/ghcide/test/exe/RootUriTests.hs b/ghcide-test/exe/RootUriTests.hs
similarity index 100%
rename from ghcide/test/exe/RootUriTests.hs
rename to ghcide-test/exe/RootUriTests.hs
diff --git a/ghcide/test/exe/SafeTests.hs b/ghcide-test/exe/SafeTests.hs
similarity index 100%
rename from ghcide/test/exe/SafeTests.hs
rename to ghcide-test/exe/SafeTests.hs
diff --git a/ghcide/test/exe/SymlinkTests.hs b/ghcide-test/exe/SymlinkTests.hs
similarity index 100%
rename from ghcide/test/exe/SymlinkTests.hs
rename to ghcide-test/exe/SymlinkTests.hs
diff --git a/ghcide/test/exe/THTests.hs b/ghcide-test/exe/THTests.hs
similarity index 100%
rename from ghcide/test/exe/THTests.hs
rename to ghcide-test/exe/THTests.hs
diff --git a/ghcide/test/exe/UnitTests.hs b/ghcide-test/exe/UnitTests.hs
similarity index 100%
rename from ghcide/test/exe/UnitTests.hs
rename to ghcide-test/exe/UnitTests.hs
diff --git a/ghcide/test/exe/WatchedFileTests.hs b/ghcide-test/exe/WatchedFileTests.hs
similarity index 78%
rename from ghcide/test/exe/WatchedFileTests.hs
rename to ghcide-test/exe/WatchedFileTests.hs
index d89a4ca84b..1c2ded9109 100644
--- a/ghcide/test/exe/WatchedFileTests.hs
+++ b/ghcide-test/exe/WatchedFileTests.hs
@@ -3,11 +3,14 @@
module WatchedFileTests (tests) where
-import Config (testWithDummyPluginEmpty')
+import Config (mkIdeTestFs,
+ testWithDummyPlugin',
+ testWithDummyPluginEmpty')
import Control.Applicative.Combinators
import Control.Monad.IO.Class (liftIO)
import qualified Data.Aeson as A
import qualified Data.Text as T
+import qualified Data.Text.IO as T
import Development.IDE.Test (expectDiagnostics)
import Language.LSP.Protocol.Message
import Language.LSP.Protocol.Types hiding
@@ -18,6 +21,7 @@ import Language.LSP.Protocol.Types hiding
import Language.LSP.Test
import System.Directory
import System.FilePath
+import Test.Hls.FileSystem
import Test.Tasty
import Test.Tasty.HUnit
@@ -69,6 +73,17 @@ tests = testGroup "watched files"
sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams
[FileEvent (filePathToUri $ sessionDir </> "B.hs") FileChangeType_Changed ]
expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 4), "Couldn't match expected type '()' with actual type 'Int'", Just "GHC-83865")])]
+ , testWithDummyPlugin' "reload HLS after .cabal file changes" (mkIdeTestFs [copyDir ("watched-files" </> "reload")]) $ \sessionDir -> do
+ let hsFile = "src" </> "MyLib.hs"
+ _ <- openDoc hsFile "haskell"
+ expectDiagnostics [(hsFile, [(DiagnosticSeverity_Error, (2, 7), "Could not load module \8216Data.List.Split\8217", Nothing)])]
+ let cabalFile = "reload.cabal"
+ cabalContent <- liftIO $ T.readFile cabalFile
+ let fix = T.replace "build-depends: base" "build-depends: base, split"
+ liftIO $ T.writeFile cabalFile (fix cabalContent)
+ sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams
+ [ FileEvent (filePathToUri $ sessionDir </> cabalFile) FileChangeType_Changed ]
+ expectDiagnostics [(hsFile, [])]
]
]
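-- The crux of the reload test added above, as a stand-alone sketch using the
-- same names as that test: after rewriting the .cabal file on disk the client
-- must announce the change, otherwise the server never re-evaluates the
-- build-depends and the missing-module diagnostic would stay around.
notifyCabalChanged :: FilePath -> FilePath -> Session ()
notifyCabalChanged sessionDir cabalFile =
  sendNotification SMethod_WorkspaceDidChangeWatchedFiles $
    DidChangeWatchedFilesParams
      [ FileEvent (filePathToUri (sessionDir </> cabalFile)) FileChangeType_Changed ]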
diff --git a/ghcide/test/manual/lhs/Bird.lhs b/ghcide-test/manual/lhs/Bird.lhs
similarity index 100%
rename from ghcide/test/manual/lhs/Bird.lhs
rename to ghcide-test/manual/lhs/Bird.lhs
diff --git a/ghcide/test/manual/lhs/Main.hs b/ghcide-test/manual/lhs/Main.hs
similarity index 100%
rename from ghcide/test/manual/lhs/Main.hs
rename to ghcide-test/manual/lhs/Main.hs
diff --git a/ghcide/test/manual/lhs/Test.lhs b/ghcide-test/manual/lhs/Test.lhs
similarity index 100%
rename from ghcide/test/manual/lhs/Test.lhs
rename to ghcide-test/manual/lhs/Test.lhs
diff --git a/ghcide/test/preprocessor/Main.hs b/ghcide-test/preprocessor/Main.hs
similarity index 100%
rename from ghcide/test/preprocessor/Main.hs
rename to ghcide-test/preprocessor/Main.hs
diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal
index af9a191406..4d4b481c14 100644
--- a/ghcide/ghcide.cabal
+++ b/ghcide/ghcide.cabal
@@ -2,7 +2,7 @@ cabal-version: 3.4
build-type: Simple
category: Development
name: ghcide
-version: 2.9.0.1
+version: 2.11.0.0
license: Apache-2.0
license-file: LICENSE
author: Digital Asset and Ghcide contributors
@@ -14,15 +14,10 @@ homepage:
https://github.com/haskell/haskell-language-server/tree/master/ghcide#readme
bug-reports: https://github.com/haskell/haskell-language-server/issues
-tested-with: GHC ==9.10.1 || ==9.8.2 || ==9.6.5 || ==9.4.8
+tested-with: GHC == {9.12.2, 9.10.1, 9.8.4, 9.6.7}
extra-source-files:
CHANGELOG.md
README.md
- test/data/**/*.cabal
- test/data/**/*.hs
- test/data/**/*.hs-boot
- test/data/**/*.project
- test/data/**/*.yaml
source-repository head
type: git
@@ -62,7 +57,7 @@ library
, deepseq
, dependent-map
, dependent-sum
- , Diff ^>=0.5
+ , Diff ^>=0.5 || ^>=1.0.0
, directory
, dlist
, enummapset
@@ -78,11 +73,11 @@ library
, Glob
, haddock-library >=1.8 && <1.12
, hashable
- , hie-bios ^>=0.14.0
+ , hie-bios ^>=0.15.0
, hie-compat ^>=0.3.0.0
- , hiedb ^>= 0.6.0.0
- , hls-graph == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , hiedb ^>= 0.6.0.2
+ , hls-graph == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, implicit-hie >= 0.1.4.0 && < 0.1.5
, lens
, lens-aeson
@@ -92,6 +87,7 @@ library
, mtl
, opentelemetry >=0.6.1
, optparse-applicative
+ , os-string
, parallel
, prettyprinter >=1.7
, prettyprinter-ansi-terminal
@@ -212,20 +208,6 @@ library
ghc-options:
-Werror
-flag test-exe
- description: Build the ghcide-test-preprocessor executable
- default: True
-
-executable ghcide-test-preprocessor
- import: warnings
- default-language: GHC2021
- hs-source-dirs: test/preprocessor
- main-is: Main.hs
- build-depends: base >=4 && <5
-
- if !flag(test-exe)
- buildable: False
-
flag executable
description: Build the ghcide executable
default: True
diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs
index a1768be564..78bfb798af 100644
--- a/ghcide/session-loader/Development/IDE/Session.hs
+++ b/ghcide/session-loader/Development/IDE/Session.hs
@@ -124,6 +124,10 @@ import GHC.Types.Error (errMsgDiagnostic,
singleMessage)
import GHC.Unit.State
+#if MIN_VERSION_ghc(9,13,0)
+import GHC.Driver.Make (checkHomeUnitsClosed)
+#endif
+
data Log
= LogSettingInitialDynFlags
| LogGetInitialGhcLibDirDefaultCradleFail !CradleError !FilePath !(Maybe FilePath) !(Cradle Void)
@@ -694,7 +698,15 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do
let ncfp = toNormalizedFilePath' (toAbsolutePath file)
cachedHieYamlLocation <- HM.lookup ncfp <$> readVar filesMap
hieYaml <- cradleLoc file
- sessionOpts (join cachedHieYamlLocation <|> hieYaml, file) `Safe.catch` \e ->
+ let
+ -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action
+ -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes.
+ -- The GlobPattern of a FileSystemWatcher can be absolute or relative.
+ -- We use the absolute one because it is supported by more LSP clients.
+ -- Here we make sure deps are absolute and later we use those absolute deps as GlobPattern.
+ absolutePathsCradleDeps (eq, deps)
+ = (eq, fmap toAbsolutePath deps)
+ (absolutePathsCradleDeps <$> sessionOpts (join cachedHieYamlLocation <|> hieYaml, file)) `Safe.catch` \e ->
return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml)
returnWithVersion $ \file -> do
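-- Worked example for absolutePathsCradleDeps above (a sketch, assuming
-- relative deps such as those reported by a cabal cradle): only the
-- dependency list is rewritten, the session component is passed through,
--
--   absolutePathsCradleDeps (res, ["hie.yaml", "cabal.project"])
--     == (res, [toAbsolutePath "hie.yaml", toAbsolutePath "cabal.project"])
--
-- so the resulting paths can be used directly as the absolute GlobPatterns
-- for the FileSystemWatchers described in the comment.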
@@ -782,6 +794,11 @@ toFlagsMap TargetDetails{..} =
setNameCache :: NameCache -> HscEnv -> HscEnv
setNameCache nc hsc = hsc { hsc_NC = nc }
+#if MIN_VERSION_ghc(9,13,0)
+-- Moved back to implementation in GHC.
+checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages]
+checkHomeUnitsClosed' ue _ = checkHomeUnitsClosed ue
+#else
-- This function checks the important property that if both p and q are home units
-- then any dependency of p, which transitively depends on q is also a home unit.
-- GHC had an implementation of this function, but it was horribly inefficient
@@ -838,6 +855,7 @@ checkHomeUnitsClosed' ue home_id_set
Just depends ->
let todo'' = (depends OS.\\ done) `OS.union` todo'
in DigraphNode uid uid (OS.toList depends) : go (OS.insert uid done) todo''
+#endif
-- | Create a mapping from FilePaths to HscEnvEqs
-- This combines all the components we know about into
@@ -870,11 +888,7 @@ newComponentCache recorder exts _cfp hsc_env old_cis new_cis = do
ideErrorWithSource
(Just "cradle") (Just DiagnosticSeverity_Warning) _cfp
(T.pack (Compat.printWithoutUniques (singleMessage err)))
-#if MIN_VERSION_ghc(9,5,0)
(Just (fmap GhcDriverMessage err))
-#else
- Nothing
-#endif
multi_errs = map closure_err_to_multi_err closure_errs
bad_units = OS.fromList $ concat $ do
x <- map errMsgDiagnostic closure_errs
diff --git a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs
index ac98ae453d..2890c87966 100644
--- a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs
+++ b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs
@@ -28,7 +28,7 @@ data CradleErrorDetails =
Depicts the cradle error in a user-friendly way.
-}
renderCradleError :: CradleError -> Cradle a -> NormalizedFilePath -> FileDiagnostic
-renderCradleError (CradleError deps _ec ms) cradle nfp =
+renderCradleError cradleError cradle nfp =
let noDetails =
ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp (T.unlines $ map T.pack userFriendlyMessage) Nothing
in
@@ -36,7 +36,9 @@ renderCradleError (CradleError deps _ec ms) cradle nfp =
then noDetails & fdLspDiagnosticL %~ \diag -> diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}}
else noDetails
where
- absDeps = fmap (cradleRootDir cradle </>) deps
+ ms = cradleErrorStderr cradleError
+
+ absDeps = fmap (cradleRootDir cradle </>) (cradleErrorDependencies cradleError)
userFriendlyMessage :: [String]
userFriendlyMessage
| HieBios.isCabalCradle cradle = fromMaybe ms $ fileMissingMessage <|> mkUnknownModuleMessage
diff --git a/ghcide/src/Development/IDE/Core/Compile.hs b/ghcide/src/Development/IDE/Core/Compile.hs
index 47872b9255..3200f31f9e 100644
--- a/ghcide/src/Development/IDE/Core/Compile.hs
+++ b/ghcide/src/Development/IDE/Core/Compile.hs
@@ -36,80 +36,80 @@ module Development.IDE.Core.Compile
, sourceTypecheck
, sourceParser
, shareUsages
+ , setNonHomeFCHook
) where
-import Control.Concurrent.STM.Stats hiding (orElse)
-import Control.DeepSeq (NFData (..), force,
- rnf)
-import Control.Exception (evaluate)
+import Control.Concurrent.STM.Stats hiding (orElse)
+import Control.DeepSeq (NFData (..),
+ force, rnf)
+import Control.Exception (evaluate)
import Control.Exception.Safe
-import Control.Lens hiding (List, pre,
- (<.>))
+import Control.Lens hiding (List, pre,
+ (<.>))
import Control.Monad.Extra
import Control.Monad.IO.Class
import Control.Monad.Trans.Except
-import qualified Control.Monad.Trans.State.Strict as S
-import Data.Aeson (toJSON)
-import Data.Bifunctor (first, second)
+import qualified Control.Monad.Trans.State.Strict as S
+import Data.Aeson (toJSON)
+import Data.Bifunctor (first, second)
import Data.Binary
-import qualified Data.ByteString as BS
+import qualified Data.ByteString as BS
import Data.Coerce
-import qualified Data.DList as DL
+import qualified Data.DList as DL
import Data.Functor
import Data.Generics.Aliases
import Data.Generics.Schemes
-import qualified Data.HashMap.Strict as HashMap
-import Data.IntMap (IntMap)
+import qualified Data.HashMap.Strict as HashMap
+import Data.IntMap (IntMap)
import Data.IORef
import Data.List.Extra
-import qualified Data.Map.Strict as Map
+import qualified Data.Map.Strict as Map
import Data.Maybe
-import Data.Proxy (Proxy (Proxy))
-import qualified Data.Text as T
-import Data.Time (UTCTime (..))
-import Data.Tuple.Extra (dupe)
+import Data.Proxy (Proxy (Proxy))
+import qualified Data.Text as T
+import Data.Time (UTCTime (..))
+import Data.Tuple.Extra (dupe)
import Debug.Trace
-import Development.IDE.Core.FileStore (resetInterfaceStore)
+import Development.IDE.Core.FileStore (resetInterfaceStore)
import Development.IDE.Core.Preprocessor
-import Development.IDE.Core.ProgressReporting (progressUpdate)
+import Development.IDE.Core.ProgressReporting (progressUpdate)
import Development.IDE.Core.RuleTypes
import Development.IDE.Core.Shake
-import Development.IDE.Core.Tracing (withTrace)
-import Development.IDE.GHC.Compat hiding (assert,
- loadInterface,
- parseHeader,
- parseModule,
- tcRnModule,
- writeHieFile)
-import qualified Development.IDE.GHC.Compat as Compat
-import qualified Development.IDE.GHC.Compat as GHC
-import qualified Development.IDE.GHC.Compat.Util as Util
+import Development.IDE.Core.Tracing (withTrace)
+import qualified Development.IDE.GHC.Compat as Compat
+import qualified Development.IDE.GHC.Compat as GHC
+import Development.IDE.GHC.Compat.Driver (hscTypecheckRenameWithDiagnostics)
+import qualified Development.IDE.GHC.Compat.Util as Util
import Development.IDE.GHC.CoreFile
import Development.IDE.GHC.Error
-import Development.IDE.GHC.Orphans ()
+import Development.IDE.GHC.Orphans ()
import Development.IDE.GHC.Util
import Development.IDE.GHC.Warnings
+import Development.IDE.Import.DependencyInformation
import Development.IDE.Types.Diagnostics
import Development.IDE.Types.Location
import Development.IDE.Types.Options
-import GHC (ForeignHValue,
- GetDocsFailure (..),
- parsedSource, ModLocation (..))
-import qualified GHC.LanguageExtensions as LangExt
+import GHC (ForeignHValue,
+ GetDocsFailure (..),
+ ModLocation (..),
+ parsedSource)
+import qualified GHC.LanguageExtensions as LangExt
import GHC.Serialized
-import HieDb hiding (withHieDb)
-import qualified Language.LSP.Protocol.Message as LSP
-import Language.LSP.Protocol.Types (DiagnosticTag (..))
-import qualified Language.LSP.Server as LSP
-import Prelude hiding (mod)
+import HieDb hiding (withHieDb)
+import qualified Language.LSP.Protocol.Message as LSP
+import Language.LSP.Protocol.Types (DiagnosticTag (..))
+import qualified Language.LSP.Server as LSP
+import Prelude hiding (mod)
import System.Directory
import System.FilePath
-import System.IO.Extra (fixIO,
- newTempFileWithin)
+import System.IO.Extra (fixIO,
+ newTempFileWithin)
-import qualified Data.Set as Set
-import qualified GHC as G
-import qualified GHC.Runtime.Loader as Loader
+import qualified Data.Set as Set
+import qualified GHC as G
+import GHC.Core.Lint.Interactive
+import GHC.Driver.Config.CoreToStg.Prep
+import qualified GHC.Runtime.Loader as Loader
import GHC.Tc.Gen.Splice
import GHC.Types.Error
import GHC.Types.ForeignStubs
@@ -118,20 +118,38 @@ import GHC.Types.TypeEnv
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
-
-#if MIN_VERSION_ghc(9,5,0)
-import GHC.Core.Lint.Interactive
-import GHC.Driver.Config.CoreToStg.Prep
-#endif
-
#if MIN_VERSION_ghc(9,7,0)
-import Data.Foldable (toList)
+import Data.Foldable (toList)
import GHC.Unit.Module.Warnings
#else
-import Development.IDE.Core.FileStore (shareFilePath)
+import Development.IDE.Core.FileStore (shareFilePath)
+#endif
+
+#if MIN_VERSION_ghc(9,10,0)
+import Development.IDE.GHC.Compat hiding (assert,
+ loadInterface,
+ parseHeader,
+ parseModule,
+ tcRnModule,
+ writeHieFile)
+#else
+import Development.IDE.GHC.Compat hiding
+ (loadInterface,
+ parseHeader,
+ parseModule,
+ tcRnModule,
+ writeHieFile)
#endif
-import Development.IDE.GHC.Compat.Driver (hscTypecheckRenameWithDiagnostics)
+#if MIN_VERSION_ghc(9,11,0)
+import qualified Data.List.NonEmpty as NE
+import Data.Time (getCurrentTime)
+import GHC.Driver.Env (hsc_all_home_unit_ids)
+#endif
+
+#if MIN_VERSION_ghc(9,12,0)
+import Development.IDE.Import.FindImports
+#endif
--Simple constants to make sure the source is consistently named
sourceTypecheck :: T.Text
@@ -168,9 +186,10 @@ computePackageDeps env pkg = do
]
Just pkgInfo -> return $ Right $ unitDepends pkgInfo
-newtype TypecheckHelpers
+data TypecheckHelpers
= TypecheckHelpers
- { getLinkables :: [NormalizedFilePath] -> IO [LinkableResult] -- ^ hls-graph action to get linkables for files
+ { getLinkables :: [NormalizedFilePath] -> IO [LinkableResult] -- ^ hls-graph action to get linkables for files
+ , getModuleGraph :: IO DependencyInformation
}
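For orientation: TypecheckHelpers grows from a single-field newtype into a two-field record, so callers now also supply a way to fetch the per-file module graph. A minimal sketch of how this patch populates it, mirroring the typeCheckRuleDefinition change further down (unlift and fp are the names used there):

    dets = TypecheckHelpers
      { getLinkables   = unliftIO unlift . uses_ GetLinkable
      , getModuleGraph = unliftIO unlift $
          useWithSeparateFingerprintRule_
            GetModuleGraphTransDepsFingerprints GetModuleGraph fp
      }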
typecheckModule :: IdeDefer
@@ -271,6 +290,9 @@ captureSplicesAndDeps TypecheckHelpers{..} env k = do
(icInteractiveModule ictxt)
stg_expr
[] Nothing
+#if MIN_VERSION_ghc(9,11,0)
+ [] -- spt_entries
+#endif
-- Exclude wired-in names because we may not have read
-- their interface files, so getLinkDeps will fail
@@ -294,19 +316,21 @@ captureSplicesAndDeps TypecheckHelpers{..} env k = do
mods_transitive_list =
mapMaybe nodeKeyToInstalledModule $ Set.toList mods_transitive
- ; moduleLocs <- readIORef (fcModuleCache $ hsc_FC hsc_env)
- ; lbs <- getLinkables [toNormalizedFilePath' file
+ ; moduleLocs <- getModuleGraph
+ ; lbs <- getLinkables [file
| installedMod <- mods_transitive_list
- , let ifr = fromJust $ lookupInstalledModuleEnv moduleLocs installedMod
- file = case ifr of
- InstalledFound loc _ ->
- fromJust $ ml_hs_file loc
- _ -> panic "hscCompileCoreExprHook: module not found"
+ , let file = fromJust $ lookupModuleFile (installedMod { moduleUnit = RealUnit (Definite $ moduleUnit installedMod) }) moduleLocs
]
; let hsc_env' = loadModulesHome (map linkableHomeMod lbs) hsc_env
{- load it -}
+#if MIN_VERSION_ghc(9,11,0)
+ ; bco_time <- getCurrentTime
+ ; (fv_hvs, lbss, pkgs) <- loadDecls (hscInterp hsc_env') hsc_env' srcspan $
+ Linkable bco_time (icInteractiveModule ictxt) $ NE.singleton $ BCOs bcos
+#else
; (fv_hvs, lbss, pkgs) <- loadDecls (hscInterp hsc_env') hsc_env' srcspan bcos
+#endif
; let hval = (expectJust "hscCompileCoreExpr'" $ lookup (idName binding_id) fv_hvs, lbss, pkgs)
; modifyIORef' var (flip extendModuleEnvList [(mi_module $ hm_iface hm, linkableHash lb) | lb <- lbs, let hm = linkableHomeMod lb])
@@ -430,7 +454,14 @@ mkHiFileResultNoCompile session tcm = do
details <- makeSimpleDetails hsc_env_tmp tcGblEnv
sf <- finalSafeMode (ms_hspp_opts ms) tcGblEnv
iface' <- mkIfaceTc hsc_env_tmp sf details ms Nothing tcGblEnv
- let iface = iface' { mi_globals = Nothing, mi_usages = filterUsages (mi_usages iface') } -- See Note [Clearing mi_globals after generating an iface]
+ -- See Note [Clearing mi_globals after generating an iface]
+ let iface = iface'
+#if MIN_VERSION_ghc(9,11,0)
+ & set_mi_top_env Nothing
+ & set_mi_usages (filterUsages (mi_usages iface'))
+#else
+ { mi_globals = Nothing, mi_usages = filterUsages (mi_usages iface') }
+#endif
pure $! mkHiFileResult ms iface details (tmrRuntimeModules tcm) Nothing
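Both interface results in this file clear the same fields before being returned. A hedged sketch of the shared shape, using a hypothetical helper name (clearIfaceGlobals is not part of the patch; the two CPP'd updates above and below are the real code):

    -- Hypothetical helper summarising the pattern used at both call sites.
    clearIfaceGlobals iface =
    #if MIN_VERSION_ghc(9,11,0)
      iface & set_mi_top_env Nothing
            & set_mi_usages (filterUsages (mi_usages iface))
    #else
      iface { mi_globals = Nothing, mi_usages = filterUsages (mi_usages iface) }
    #endif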
mkHiFileResultCompile
@@ -451,18 +482,27 @@ mkHiFileResultCompile se session' tcm simplified_guts = catchErrs $ do
pure (details, guts)
let !partial_iface = force $ mkPartialIface session
-#if MIN_VERSION_ghc(9,5,0)
(cg_binds guts)
-#endif
details
ms
+#if MIN_VERSION_ghc(9,11,0)
+ (tcg_import_decls (tmrTypechecked tcm))
+#endif
simplified_guts
final_iface' <- mkFullIface session partial_iface Nothing
-#if MIN_VERSION_ghc(9,4,2)
Nothing
+#if MIN_VERSION_ghc(9,11,0)
+ NoStubs []
+#endif
+ -- See Note [Clearing mi_globals after generating an iface]
+ let final_iface = final_iface'
+#if MIN_VERSION_ghc(9,11,0)
+ & set_mi_top_env Nothing
+ & set_mi_usages (filterUsages (mi_usages final_iface'))
+#else
+ {mi_globals = Nothing, mi_usages = filterUsages (mi_usages final_iface')}
#endif
- let final_iface = final_iface' {mi_globals = Nothing, mi_usages = filterUsages (mi_usages final_iface')} -- See Note [Clearing mi_globals after generating an iface]
-- Write the core file now
core_file <- do
@@ -470,7 +510,7 @@ mkHiFileResultCompile se session' tcm simplified_guts = catchErrs $ do
core_file = codeGutsToCoreFile iface_hash guts
iface_hash = getModuleHash final_iface
core_hash1 <- atomicFileWrite se core_fp $ \fp ->
- writeBinCoreFile fp core_file
+ writeBinCoreFile (hsc_dflags session) fp core_file
-- We want to drop references to guts and read in a serialized, compact version
-- of the core file from disk (as it is deserialised lazily)
-- This is because we don't want to keep the guts in memory for every file in
@@ -492,17 +532,9 @@ mkHiFileResultCompile se session' tcm simplified_guts = catchErrs $ do
mod = ms_mod ms
data_tycons = filter isDataTyCon tycons
CgGuts{cg_binds = unprep_binds'} <- coreFileToCgGuts session final_iface details core
-
-#if MIN_VERSION_ghc(9,5,0)
cp_cfg <- initCorePrepConfig session
-#endif
-
let corePrep = corePrepPgm
-#if MIN_VERSION_ghc(9,5,0)
(hsc_logger session) cp_cfg (initCorePrepPgmConfig (hsc_dflags session) (interactiveInScope $ hsc_IC session))
-#else
- session
-#endif
mod (ms_location ms)
-- Run corePrep first as we want to test the final version of the program that will
@@ -615,22 +647,20 @@ generateObjectCode session summary guts = do
(Just dot_o)
$ hsc_dflags env'
session' = hscSetFlags newFlags session
-#if MIN_VERSION_ghc(9,4,2)
(outputFilename, _mStub, _foreign_files, _cinfos, _stgcinfos) <- hscGenHardCode session' guts
-#else
- (outputFilename, _mStub, _foreign_files, _cinfos) <- hscGenHardCode session' guts
-#endif
(ms_location summary)
fp
obj <- compileFile session' driverNoStop (outputFilename, Just (As False))
case obj of
Nothing -> throwGhcExceptionIO $ Panic "compileFile didn't generate object code"
Just x -> pure x
- let unlinked = DotO dot_o_fp
-- Need time to be the modification time for recompilation checking
t <- liftIO $ getModificationTime dot_o_fp
- let linkable = LM t mod [unlinked]
-
+#if MIN_VERSION_ghc(9,11,0)
+ let linkable = Linkable t mod (pure $ DotO dot_o_fp ModuleObject)
+#else
+ let linkable = LM t mod [DotO dot_o_fp]
+#endif
pure (map snd warnings, linkable)
newtype CoreFileTime = CoreFileTime UTCTime
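Several hunks in this file build linkables under CPP because GHC 9.11 replaces LM/Unlinked with Linkable/LinkablePart. A hedged sketch of the shim this patch relies on (the same local helpers are defined in getLinkableRule later in the diff):

    #if MIN_VERSION_ghc(9,11,0)
    mkLinkable t mod part = Linkable t mod (pure part)
    dotO o = DotO o ModuleObject
    #else
    mkLinkable t mod part = LM t mod [part]
    dotO = DotO
    #endif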
@@ -639,6 +669,16 @@ generateByteCode :: CoreFileTime -> HscEnv -> ModSummary -> CgGuts -> IO (IdeRes
generateByteCode (CoreFileTime time) hscEnv summary guts = do
fmap (either (, Nothing) (second Just)) $
catchSrcErrors (hsc_dflags hscEnv) "bytecode" $ do
+
+#if MIN_VERSION_ghc(9,11,0)
+ (warnings, (_, bytecode)) <-
+ withWarnings "bytecode" $ \_tweak -> do
+ let session = _tweak (hscSetFlags (ms_hspp_opts summary) hscEnv)
+ -- TODO: maybe setting ms_hspp_opts is unnecessary?
+ summary' = summary { ms_hspp_opts = hsc_dflags session }
+ hscInteractive session (mkCgInteractiveGuts guts)
+ (ms_location summary')
+#else
(warnings, (_, bytecode, sptEntries)) <-
withWarnings "bytecode" $ \_tweak -> do
let session = _tweak (hscSetFlags (ms_hspp_opts summary) hscEnv)
@@ -646,8 +686,14 @@ generateByteCode (CoreFileTime time) hscEnv summary guts = do
summary' = summary { ms_hspp_opts = hsc_dflags session }
hscInteractive session (mkCgInteractiveGuts guts)
(ms_location summary')
- let unlinked = BCOs bytecode sptEntries
- let linkable = LM time (ms_mod summary) [unlinked]
+#endif
+
+#if MIN_VERSION_ghc(9,11,0)
+ let linkable = Linkable time (ms_mod summary) (pure $ BCOs bytecode)
+#else
+ let linkable = LM time (ms_mod summary) [BCOs bytecode sptEntries]
+#endif
+
pure (map snd warnings, linkable)
demoteTypeErrorsToWarnings :: ParsedModule -> ParsedModule
@@ -741,28 +787,49 @@ addRelativeImport fp modu dflags = dflags
-- | Also resets the interface store
atomicFileWrite :: ShakeExtras -> FilePath -> (FilePath -> IO a) -> IO a
-atomicFileWrite se targetPath write = do
- let dir = takeDirectory targetPath
- createDirectoryIfMissing True dir
- (tempFilePath, cleanUp) <- newTempFileWithin dir
- (write tempFilePath >>= \x -> renameFile tempFilePath targetPath >> atomically (resetInterfaceStore se (toNormalizedFilePath' targetPath)) >> pure x)
- `onException` cleanUp
-
-generateHieAsts :: HscEnv -> TcModuleResult -> IO ([FileDiagnostic], Maybe (HieASTs Type))
+atomicFileWrite se targetPath write =
+ uninterruptibleMask_ $ do
+ let dir = takeDirectory targetPath
+ createDirectoryIfMissing True dir
+ (tempFilePath, cleanUp) <- newTempFileWithin dir
+ (write tempFilePath >>= \x -> renameFile tempFilePath targetPath >> atomically (resetInterfaceStore se (toNormalizedFilePath' targetPath)) >> pure x)
+ `onException` cleanUp
+
+generateHieAsts :: HscEnv -> TcModuleResult
+#if MIN_VERSION_ghc(9,11,0)
+ -> IO ([FileDiagnostic], Maybe (HieASTs Type, NameEntityInfo))
+#else
+ -> IO ([FileDiagnostic], Maybe (HieASTs Type))
+#endif
generateHieAsts hscEnv tcm =
handleGenerationErrors' dflags "extended interface generation" $ runHsc hscEnv $ do
-- These varBinds use unitDataConId but it could be anything as the id name is not used
-- during the hie file generation process. It's a workaround for the fact that the hie modules
-- don't export an interface which allows for additional information to be added to hie files.
- let fake_splice_binds = Util.listToBag (map (mkVarBind unitDataConId) (spliceExpressions $ tmrTopLevelSplices tcm))
+ let fake_splice_binds =
+#if !MIN_VERSION_ghc(9,11,0)
+ Util.listToBag $
+#endif
+ map (mkVarBind unitDataConId) (spliceExpressions $ tmrTopLevelSplices tcm)
real_binds = tcg_binds $ tmrTypechecked tcm
+ all_binds =
+#if MIN_VERSION_ghc(9,11,0)
+ fake_splice_binds ++ real_binds
+#else
+ fake_splice_binds `Util.unionBags` real_binds
+#endif
ts = tmrTypechecked tcm :: TcGblEnv
top_ev_binds = tcg_ev_binds ts :: Util.Bag EvBind
insts = tcg_insts ts :: [ClsInst]
tcs = tcg_tcs ts :: [TyCon]
+ hie_asts = GHC.enrichHie all_binds (tmrRenamed tcm) top_ev_binds insts tcs
pure $ Just $
- GHC.enrichHie (fake_splice_binds `Util.unionBags` real_binds) (tmrRenamed tcm) top_ev_binds insts tcs
+#if MIN_VERSION_ghc(9,11,0)
+ hie_asts (tcg_type_env ts)
+#else
+ hie_asts
+#endif
where
dflags = hsc_dflags hscEnv
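On GHC >= 9.11 generateHieAsts also returns the NameEntityInfo needed for the .hie file, so callers that only want the ASTs project out the first component. A small sketch mirroring the getHieAstRuleDefinition change later in this patch:

    (diags, masts') <- liftIO $ generateHieAsts hsc tmr
    #if MIN_VERSION_ghc(9,11,0)
    let masts = fst <$> masts'
    #else
    let masts = masts'
    #endif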
@@ -850,7 +917,14 @@ indexHieFile se mod_summary srcPath !hash hf = do
toJSON $ fromNormalizedFilePath srcPath
whenJust mdone $ \_ -> progressUpdate indexProgressReporting ProgressCompleted
-writeAndIndexHieFile :: HscEnv -> ShakeExtras -> ModSummary -> NormalizedFilePath -> [GHC.AvailInfo] -> HieASTs Type -> BS.ByteString -> IO [FileDiagnostic]
+writeAndIndexHieFile
+ :: HscEnv -> ShakeExtras -> ModSummary -> NormalizedFilePath -> [GHC.AvailInfo]
+#if MIN_VERSION_ghc(9,11,0)
+ -> (HieASTs Type, NameEntityInfo)
+#else
+ -> HieASTs Type
+#endif
+ -> BS.ByteString -> IO [FileDiagnostic]
writeAndIndexHieFile hscEnv se mod_summary srcPath exports ast source =
handleGenerationErrors dflags "extended interface write/compression" $ do
hf <- runHsc hscEnv $
@@ -898,14 +972,59 @@ handleGenerationErrors' dflags source action =
)
]
-
-- Merge the HPTs, module graphs and FinderCaches
-- See Note [GhcSessionDeps] in Development.IDE.Core.Rules
-- Add the current ModSummary to the graph, along with the
-- HomeModInfo's of all direct dependencies (by induction hypothesis all
-- transitive dependencies will be contained in envs)
-mergeEnvs :: HscEnv -> ModuleGraph -> ModSummary -> [HomeModInfo] -> [HscEnv] -> IO HscEnv
-mergeEnvs env mg ms extraMods envs = do
+#if MIN_VERSION_ghc(9,11,0)
+mergeEnvs :: HscEnv
+ -> ModuleGraph
+ -> DependencyInformation
+ -> ModSummary
+ -> [HomeModInfo]
+ -> [HscEnv]
+ -> IO HscEnv
+mergeEnvs env mg dep_info ms extraMods envs = do
+ return $! loadModulesHome extraMods $
+ let newHug = foldl' mergeHUG (hsc_HUG env) (map hsc_HUG envs) in
+ (hscUpdateHUG (const newHug) env){
+ hsc_mod_graph = mg,
+ hsc_FC = (hsc_FC env)
+ { addToFinderCache = \gwib@(GWIB im _) val ->
+ if moduleUnit im `elem` hsc_all_home_unit_ids env
+ then pure ()
+ else addToFinderCache (hsc_FC env) gwib val
+ , lookupFinderCache = \gwib@(GWIB im _) ->
+ if moduleUnit im `elem` hsc_all_home_unit_ids env
+ then case lookupModuleFile (im { moduleUnit = RealUnit (Definite $ moduleUnit im) }) dep_info of
+ Nothing -> pure Nothing
+ Just fs -> let ml = fromJust $ do
+ id <- lookupPathToId (depPathIdMap dep_info) fs
+ artifactModLocation (idToModLocation (depPathIdMap dep_info) id)
+ in pure $ Just $ InstalledFound ml im
+ else lookupFinderCache (hsc_FC env) gwib
+ }
+ }
+
+ where
+ mergeHUG (UnitEnvGraph a) (UnitEnvGraph b) = UnitEnvGraph $ Map.unionWith mergeHUE a b
+ mergeHUE a b = a { homeUnitEnv_hpt = mergeUDFM (homeUnitEnv_hpt a) (homeUnitEnv_hpt b) }
+ mergeUDFM = plusUDFM_C combineModules
+
+ combineModules a b
+ | HsSrcFile <- mi_hsc_src (hm_iface a) = a
+ | otherwise = b
+
+#else
+mergeEnvs :: HscEnv
+ -> ModuleGraph
+ -> DependencyInformation
+ -> ModSummary
+ -> [HomeModInfo]
+ -> [HscEnv]
+ -> IO HscEnv
+mergeEnvs env mg _dep_info ms extraMods envs = do
let im = Compat.installedModule (toUnitId $ moduleUnit $ ms_mod ms) (moduleName (ms_mod ms))
ifr = InstalledFound (ms_location ms) im
curFinderCache = Compat.extendInstalledModuleEnv Compat.emptyInstalledModuleEnv im ifr
@@ -939,6 +1058,7 @@ mergeEnvs env mg ms extraMods envs = do
fcModules' <- newIORef $! foldl' (plusInstalledModuleEnv combineModuleLocations) cur fcModules
fcFiles' <- newIORef $! Map.unions fcFiles
pure $ FinderCache fcModules' fcFiles'
+#endif
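A condensed sketch of what the overridden lookupFinderCache does for a home-unit module on GHC >= 9.11 (hypothetical helper name; the real logic is inlined above): resolve the module to a source file through ghcide's own DependencyInformation instead of GHC's on-disk finder.

    lookupHomeModule dep_info im = do
      fs  <- lookupModuleFile (im { moduleUnit = RealUnit (Definite $ moduleUnit im) }) dep_info
      pid <- lookupPathToId (depPathIdMap dep_info) fs
      ml  <- artifactModLocation (idToModLocation (depPathIdMap dep_info) pid)
      pure (InstalledFound ml im)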
withBootSuffix :: HscSource -> ModLocation -> ModLocation
@@ -1066,11 +1186,7 @@ parseHeader
=> DynFlags -- ^ flags to use
-> FilePath -- ^ the filename (for source locations)
-> Util.StringBuffer -- ^ Haskell module source text (full Unicode is supported)
-#if MIN_VERSION_ghc(9,5,0)
-> ExceptT [FileDiagnostic] m ([FileDiagnostic], Located (HsModule GhcPs))
-#else
- -> ExceptT [FileDiagnostic] m ([FileDiagnostic], Located HsModule)
-#endif
parseHeader dflags filename contents = do
let loc = mkRealSrcLoc (Util.mkFastString filename) 1 1
case unP Compat.parseHeader (initParserState (initParserOpts dflags) contents loc) of
@@ -1249,6 +1365,7 @@ data RecompilationInfo m
, old_value :: Maybe (HiFileResult, FileVersion)
, get_file_version :: NormalizedFilePath -> m (Maybe FileVersion)
, get_linkable_hashes :: [NormalizedFilePath] -> m [BS.ByteString]
+ , get_module_graph :: m DependencyInformation
, regenerate :: Maybe LinkableType -> m ([FileDiagnostic], Maybe HiFileResult) -- ^ Action to regenerate an interface
}
@@ -1331,7 +1448,7 @@ loadInterface session ms linkableNeeded RecompilationInfo{..} = do
| not (mi_used_th iface) = emptyModuleEnv
| otherwise = parseRuntimeDeps (md_anns details)
-- Peform the fine grained recompilation check for TH
- maybe_recomp <- checkLinkableDependencies session get_linkable_hashes runtime_deps
+ maybe_recomp <- checkLinkableDependencies get_linkable_hashes get_module_graph runtime_deps
case maybe_recomp of
Just msg -> do_regenerate msg
Nothing
@@ -1368,16 +1485,10 @@ parseRuntimeDeps anns = mkModuleEnv $ mapMaybe go anns
-- the runtime dependencies of the module, to check if any of them are out of date
-- Hopefully 'runtime_deps' will be empty if the module didn't actually use TH
-- See Note [Recompilation avoidance in the presence of TH]
-checkLinkableDependencies :: MonadIO m => HscEnv -> ([NormalizedFilePath] -> m [BS.ByteString]) -> ModuleEnv BS.ByteString -> m (Maybe RecompileRequired)
-checkLinkableDependencies hsc_env get_linkable_hashes runtime_deps = do
- moduleLocs <- liftIO $ readIORef (fcModuleCache $ hsc_FC hsc_env)
- let go (mod, hash) = do
- ifr <- lookupInstalledModuleEnv moduleLocs $ Compat.installedModule (toUnitId $ moduleUnit mod) (moduleName mod)
- case ifr of
- InstalledFound loc _ -> do
- hs <- ml_hs_file loc
- pure (toNormalizedFilePath' hs,hash)
- _ -> Nothing
+checkLinkableDependencies :: MonadIO m => ([NormalizedFilePath] -> m [BS.ByteString]) -> m DependencyInformation -> ModuleEnv BS.ByteString -> m (Maybe RecompileRequired)
+checkLinkableDependencies get_linkable_hashes get_module_graph runtime_deps = do
+ graph <- get_module_graph
+ let go (mod, hash) = (,hash) <$> lookupModuleFile mod graph
hs_files = mapM go (moduleEnvToList runtime_deps)
case hs_files of
Nothing -> error "invalid module graph"
@@ -1421,12 +1532,12 @@ coreFileToCgGuts session iface details core_file = do
-- Implicit binds aren't saved, so we need to regenerate them ourselves.
let _implicit_binds = concatMap getImplicitBinds tyCons -- only used if GHC < 9.6
tyCons = typeEnvTyCons (md_types details)
-#if MIN_VERSION_ghc(9,5,0)
-- In GHC 9.6, the implicit binds are tidied and part of core_binds
- pure $ CgGuts this_mod tyCons core_binds [] NoStubs [] mempty (emptyHpcInfo False) Nothing []
-#else
- pure $ CgGuts this_mod tyCons (_implicit_binds ++ core_binds) [] NoStubs [] mempty (emptyHpcInfo False) Nothing []
+ pure $ CgGuts this_mod tyCons core_binds [] NoStubs [] mempty
+#if !MIN_VERSION_ghc(9,11,0)
+ (emptyHpcInfo False)
#endif
+ Nothing []
coreFileToLinkable :: LinkableType -> HscEnv -> ModSummary -> ModIface -> ModDetails -> CoreFile -> UTCTime -> IO ([FileDiagnostic], Maybe HomeModInfo)
coreFileToLinkable linkableType session ms iface details core_file t = do
@@ -1499,6 +1610,22 @@ pathToModuleName = mkModuleName . map rep
rep ':' = '_'
rep c = c
+-- | Initialising plugins looks in the finder cache, but we know that the plugin doesn't come from a home module, so don't
+-- error out when we don't find it
+setNonHomeFCHook :: HscEnv -> HscEnv
+setNonHomeFCHook hsc_env =
+#if MIN_VERSION_ghc(9,11,0)
+ hsc_env { hsc_FC = (hsc_FC hsc_env)
+ { lookupFinderCache = \m@(GWIB im _) ->
+ if moduleUnit im `elem` hsc_all_home_unit_ids hsc_env
+ then pure (Just $ InstalledNotFound [] Nothing)
+ else lookupFinderCache (hsc_FC hsc_env) m
+ }
+ }
+#else
+ hsc_env
+#endif
+
{- Note [Guidelines For Using CPP In GHCIDE Import Statements]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHCIDE's interface with GHC is extensive, and unfortunately, because we have
@@ -1515,7 +1642,7 @@ pathToModuleName = mkModuleName . map rep
with negative if clauses coming before positive if clauses of the same
version. (If you think about which GHC version a clause activates for this
should make sense `!MIN_VERSION_GHC(9,0,0)` refers to 8.10 and lower which is
- a earlier version than `MIN_VERSION_GHC(9,0,0)` which refers to versions 9.0
+ an earlier version than `MIN_VERSION_GHC(9,0,0)` which refers to versions 9.0
and later). In addition there should be a space before and after each CPP
clause.
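As a concrete illustration of the ordering rule above (rearranging imports that already appear in this patch): the negative clause for a version comes first, the positive clause follows, and each clause gets a blank line before and after it.

    #if !MIN_VERSION_ghc(9,7,0)
    import Development.IDE.Core.FileStore (shareFilePath)
    #endif

    #if MIN_VERSION_ghc(9,7,0)
    import GHC.Unit.Module.Warnings
    #endif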
diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs
index 3de21e175d..7dad386ece 100644
--- a/ghcide/src/Development/IDE/Core/FileStore.hs
+++ b/ghcide/src/Development/IDE/Core/FileStore.hs
@@ -264,7 +264,7 @@ typecheckParents recorder state nfp = void $ shakeEnqueue (shakeExtras state) pa
typecheckParentsAction :: Recorder (WithPriority Log) -> NormalizedFilePath -> Action ()
typecheckParentsAction recorder nfp = do
- revs <- transitiveReverseDependencies nfp <$> useNoFile_ GetModuleGraph
+ revs <- transitiveReverseDependencies nfp <$> useWithSeparateFingerprintRule_ GetModuleGraphTransReverseDepsFingerprints GetModuleGraph nfp
case revs of
Nothing -> logWith recorder Info $ LogCouldNotIdentifyReverseDeps nfp
Just rs -> do
diff --git a/ghcide/src/Development/IDE/Core/OfInterest.hs b/ghcide/src/Development/IDE/Core/OfInterest.hs
index 2a594c1021..19e0f40e24 100644
--- a/ghcide/src/Development/IDE/Core/OfInterest.hs
+++ b/ghcide/src/Development/IDE/Core/OfInterest.hs
@@ -29,7 +29,6 @@ import Development.IDE.Graph
import Control.Concurrent.STM.Stats (atomically,
modifyTVar')
import Data.Aeson (toJSON)
-import qualified Data.Aeson as Aeson
import qualified Data.ByteString as BS
import Data.Maybe (catMaybes)
import Development.IDE.Core.ProgressReporting
diff --git a/ghcide/src/Development/IDE/Core/RuleTypes.hs b/ghcide/src/Development/IDE/Core/RuleTypes.hs
index fd6ef75cda..43b80be119 100644
--- a/ghcide/src/Development/IDE/Core/RuleTypes.hs
+++ b/ghcide/src/Development/IDE/Core/RuleTypes.hs
@@ -24,7 +24,8 @@ import qualified Data.Map as M
import Data.Time.Clock.POSIX
import Data.Typeable
import Development.IDE.GHC.Compat hiding
- (HieFileResult)
+ (HieFileResult,
+ assert)
import Development.IDE.GHC.Compat.Util
import Development.IDE.GHC.CoreFile
import Development.IDE.GHC.Util
@@ -73,6 +74,12 @@ type instance RuleResult GetParsedModuleWithComments = ParsedModule
type instance RuleResult GetModuleGraph = DependencyInformation
+-- | It only computes the fingerprint of the module graph for a file and its dependencies.
+-- We need this to trigger recompilation when the sub-module graph for a file changes.
+type instance RuleResult GetModuleGraphTransDepsFingerprints = Fingerprint
+type instance RuleResult GetModuleGraphTransReverseDepsFingerprints = Fingerprint
+type instance RuleResult GetModuleGraphImmediateReverseDepsFingerprints = Fingerprint
+
data GetKnownTargets = GetKnownTargets
deriving (Show, Generic, Eq, Ord)
instance Hashable GetKnownTargets
@@ -83,7 +90,7 @@ type instance RuleResult GetKnownTargets = KnownTargets
type instance RuleResult GenerateCore = ModGuts
data GenerateCore = GenerateCore
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GenerateCore
instance NFData GenerateCore
@@ -103,12 +110,12 @@ instance NFData LinkableResult where
rnf = rwhnf
data GetLinkable = GetLinkable
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetLinkable
instance NFData GetLinkable
data GetImportMap = GetImportMap
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetImportMap
instance NFData GetImportMap
@@ -282,6 +289,8 @@ type instance RuleResult GetFileContents = (FileVersion, Maybe Rope)
type instance RuleResult GetFileExists = Bool
+type instance RuleResult GetFileHash = Fingerprint
+
type instance RuleResult AddWatchedFile = Bool
@@ -332,16 +341,22 @@ instance Hashable GetFileContents
instance NFData GetFileContents
data GetFileExists = GetFileExists
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance NFData GetFileExists
instance Hashable GetFileExists
+data GetFileHash = GetFileHash
+ deriving (Eq, Show, Generic)
+
+instance NFData GetFileHash
+instance Hashable GetFileHash
+
data FileOfInterestStatus
= OnDisk
| Modified { firstOpen :: !Bool -- ^ was this file just opened
}
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable FileOfInterestStatus
instance NFData FileOfInterestStatus
@@ -349,7 +364,7 @@ instance Pretty FileOfInterestStatus where
pretty = viaShow
data IsFileOfInterestResult = NotFOI | IsFOI FileOfInterestStatus
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable IsFileOfInterestResult
instance NFData IsFileOfInterestResult
@@ -381,17 +396,17 @@ type instance RuleResult GetModSummary = ModSummaryResult
type instance RuleResult GetModSummaryWithoutTimestamps = ModSummaryResult
data GetParsedModule = GetParsedModule
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetParsedModule
instance NFData GetParsedModule
data GetParsedModuleWithComments = GetParsedModuleWithComments
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetParsedModuleWithComments
instance NFData GetParsedModuleWithComments
data GetLocatedImports = GetLocatedImports
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetLocatedImports
instance NFData GetLocatedImports
@@ -399,42 +414,57 @@ instance NFData GetLocatedImports
type instance RuleResult NeedsCompilation = Maybe LinkableType
data NeedsCompilation = NeedsCompilation
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable NeedsCompilation
instance NFData NeedsCompilation
data GetModuleGraph = GetModuleGraph
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetModuleGraph
instance NFData GetModuleGraph
+data GetModuleGraphTransDepsFingerprints = GetModuleGraphTransDepsFingerprints
+ deriving (Eq, Show, Generic)
+instance Hashable GetModuleGraphTransDepsFingerprints
+instance NFData GetModuleGraphTransDepsFingerprints
+
+data GetModuleGraphTransReverseDepsFingerprints = GetModuleGraphTransReverseDepsFingerprints
+ deriving (Eq, Show, Generic)
+instance Hashable GetModuleGraphTransReverseDepsFingerprints
+instance NFData GetModuleGraphTransReverseDepsFingerprints
+
+data GetModuleGraphImmediateReverseDepsFingerprints = GetModuleGraphImmediateReverseDepsFingerprints
+ deriving (Eq, Show, Generic)
+instance Hashable GetModuleGraphImmediateReverseDepsFingerprints
+instance NFData GetModuleGraphImmediateReverseDepsFingerprints
+
data ReportImportCycles = ReportImportCycles
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable ReportImportCycles
instance NFData ReportImportCycles
data TypeCheck = TypeCheck
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable TypeCheck
instance NFData TypeCheck
data GetDocMap = GetDocMap
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetDocMap
instance NFData GetDocMap
data GetHieAst = GetHieAst
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetHieAst
instance NFData GetHieAst
data GetBindings = GetBindings
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetBindings
instance NFData GetBindings
data GhcSession = GhcSession
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GhcSession
instance NFData GhcSession
@@ -443,7 +473,7 @@ newtype GhcSessionDeps = GhcSessionDeps_
-- Required for interactive evaluation, but leads to more cache invalidations
fullModSummary :: Bool
}
- deriving newtype (Eq, Typeable, Hashable, NFData)
+ deriving newtype (Eq, Hashable, NFData)
instance Show GhcSessionDeps where
show (GhcSessionDeps_ False) = "GhcSessionDeps"
@@ -453,45 +483,45 @@ pattern GhcSessionDeps :: GhcSessionDeps
pattern GhcSessionDeps = GhcSessionDeps_ False
data GetModIfaceFromDisk = GetModIfaceFromDisk
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetModIfaceFromDisk
instance NFData GetModIfaceFromDisk
data GetModIfaceFromDiskAndIndex = GetModIfaceFromDiskAndIndex
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetModIfaceFromDiskAndIndex
instance NFData GetModIfaceFromDiskAndIndex
data GetModIface = GetModIface
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetModIface
instance NFData GetModIface
data IsFileOfInterest = IsFileOfInterest
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable IsFileOfInterest
instance NFData IsFileOfInterest
data GetModSummaryWithoutTimestamps = GetModSummaryWithoutTimestamps
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetModSummaryWithoutTimestamps
instance NFData GetModSummaryWithoutTimestamps
data GetModSummary = GetModSummary
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetModSummary
instance NFData GetModSummary
-- See Note [Client configuration in Rules]
-- | Get the client config stored in the ide state
data GetClientSettings = GetClientSettings
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetClientSettings
instance NFData GetClientSettings
type instance RuleResult GetClientSettings = Hashed (Maybe Value)
-data AddWatchedFile = AddWatchedFile deriving (Eq, Show, Typeable, Generic)
+data AddWatchedFile = AddWatchedFile deriving (Eq, Show, Generic)
instance Hashable AddWatchedFile
instance NFData AddWatchedFile
@@ -511,7 +541,7 @@ data IdeGhcSession = IdeGhcSession
instance Show IdeGhcSession where show _ = "IdeGhcSession"
instance NFData IdeGhcSession where rnf !_ = ()
-data GhcSessionIO = GhcSessionIO deriving (Eq, Show, Typeable, Generic)
+data GhcSessionIO = GhcSessionIO deriving (Eq, Show, Generic)
instance Hashable GhcSessionIO
instance NFData GhcSessionIO
diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs
index 5650300a4c..f1b11d971b 100644
--- a/ghcide/src/Development/IDE/Core/Rules.hs
+++ b/ghcide/src/Development/IDE/Core/Rules.hs
@@ -159,10 +159,10 @@ import Ide.Plugin.Properties (HasProperty,
usePropertyByPath)
import Ide.Types (DynFlagsModifications (dynFlagsModifyGlobal, dynFlagsModifyParser),
PluginId)
+import qualified Language.LSP.Protocol.Lens as JL
import Language.LSP.Protocol.Message (SMethod (SMethod_CustomMethod, SMethod_WindowShowMessage))
import Language.LSP.Protocol.Types (MessageType (MessageType_Info),
ShowMessageParams (ShowMessageParams))
-import qualified Language.LSP.Protocol.Lens as JL
import Language.LSP.Server (LspT)
import qualified Language.LSP.Server as LSP
import Language.LSP.VFS
@@ -174,7 +174,6 @@ import System.Info.Extra (isWindows)
import qualified Data.IntMap as IM
import GHC.Fingerprint
-
data Log
= LogShake Shake.Log
| LogReindexingHieFile !NormalizedFilePath
@@ -471,7 +470,7 @@ rawDependencyInformation fs = do
reportImportCyclesRule :: Recorder (WithPriority Log) -> Rules ()
reportImportCyclesRule recorder =
defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \ReportImportCycles file -> fmap (\errs -> if null errs then (Just "1",([], Just ())) else (Nothing, (errs, Nothing))) $ do
- DependencyInformation{..} <- useNoFile_ GetModuleGraph
+ DependencyInformation{..} <- useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph file
case pathToId depPathIdMap file of
-- The header of the file does not parse, so it can't be part of any import cycles.
Nothing -> pure []
@@ -519,7 +518,12 @@ persistentHieFileRule recorder = addPersistentRule GetHieAst $ \file -> runMaybe
getHieAstRuleDefinition :: NormalizedFilePath -> HscEnv -> TcModuleResult -> Action (IdeResult HieAstResult)
getHieAstRuleDefinition f hsc tmr = do
- (diags, masts) <- liftIO $ generateHieAsts hsc tmr
+ (diags, masts') <- liftIO $ generateHieAsts hsc tmr
+#if MIN_VERSION_ghc(9,11,0)
+ let masts = fst <$> masts'
+#else
+ let masts = masts'
+#endif
se <- getShakeExtras
isFoi <- use_ IsFileOfInterest f
@@ -529,7 +533,7 @@ getHieAstRuleDefinition f hsc tmr = do
LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/reference/ready")) $
toJSON $ fromNormalizedFilePath f
pure []
- _ | Just asts <- masts -> do
+ _ | Just asts <- masts' -> do
source <- getSourceFileSource f
let exports = tcg_exports $ tmrTypechecked tmr
modSummary = tmrModSummary tmr
@@ -602,7 +606,7 @@ typeCheckRule recorder = define (cmapWithPrio LogShake recorder) $ \TypeCheck fi
-- very expensive.
when (foi == NotFOI) $
logWith recorder Logger.Warning $ LogTypecheckedFOI file
- typeCheckRuleDefinition hsc pm
+ typeCheckRuleDefinition hsc pm file
knownFilesRule :: Recorder (WithPriority Log) -> Rules ()
knownFilesRule recorder = defineEarlyCutOffNoFile (cmapWithPrio LogShake recorder) $ \GetKnownTargets -> do
@@ -610,6 +614,13 @@ knownFilesRule recorder = defineEarlyCutOffNoFile (cmapWithPrio LogShake recorde
fs <- knownTargets
pure (LBS.toStrict $ B.encode $ hash fs, unhashed fs)
+getFileHashRule :: Recorder (WithPriority Log) -> Rules ()
+getFileHashRule recorder =
+ defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetFileHash file -> do
+ void $ use_ GetModificationTime file
+ fileHash <- liftIO $ Util.getFileHash (fromNormalizedFilePath file)
+ return (Just (fingerprintToBS fileHash), ([], Just fileHash))
+
getModuleGraphRule :: Recorder (WithPriority Log) -> Rules ()
getModuleGraphRule recorder = defineEarlyCutOffNoFile (cmapWithPrio LogShake recorder) $ \GetModuleGraph -> do
fs <- toKnownFiles <$> useNoFile_ GetKnownTargets
@@ -630,7 +641,10 @@ dependencyInfoForFiles fs = do
go (Just ms) _ = Just $ ModuleNode [] ms
go _ _ = Nothing
mg = mkModuleGraph mns
- pure (fingerprintToBS $ Util.fingerprintFingerprints $ map (maybe fingerprint0 msrFingerprint) msrs, processDependencyInformation rawDepInfo bm mg)
+ let shallowFingers = IntMap.fromList $ foldr' (\(i, m) acc -> case m of
+ Just x -> (getFilePathId i,msrFingerprint x):acc
+ Nothing -> acc) [] $ zip _all_ids msrs
+ pure (fingerprintToBS $ Util.fingerprintFingerprints $ map (maybe fingerprint0 msrFingerprint) msrs, processDependencyInformation rawDepInfo bm mg shallowFingers)
-- This is factored out so it can be directly called from the GetModIface
-- rule. Directly calling this rule means that on the initial load we can
@@ -639,13 +653,15 @@ dependencyInfoForFiles fs = do
typeCheckRuleDefinition
:: HscEnv
-> ParsedModule
+ -> NormalizedFilePath
-> Action (IdeResult TcModuleResult)
-typeCheckRuleDefinition hsc pm = do
+typeCheckRuleDefinition hsc pm fp = do
IdeOptions { optDefer = defer } <- getIdeOptions
unlift <- askUnliftIO
let dets = TypecheckHelpers
{ getLinkables = unliftIO unlift . uses_ GetLinkable
+ , getModuleGraph = unliftIO unlift $ useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph fp
}
addUsageDependencies $ liftIO $
typecheckModule defer hsc dets pm
@@ -742,9 +758,10 @@ ghcSessionDepsDefinition fullModSummary GhcSessionDepsConfig{..} env file = do
depSessions <- map hscEnv <$> uses_ (GhcSessionDeps_ fullModSummary) deps
ifaces <- uses_ GetModIface deps
let inLoadOrder = map (\HiFileResult{..} -> HomeModInfo hirModIface hirModDetails emptyHomeModInfoLinkable) ifaces
+ de <- useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph file
mg <- do
if fullModuleGraph
- then depModuleGraph <$> useNoFile_ GetModuleGraph
+ then return $ depModuleGraph de
else do
let mgs = map hsc_mod_graph depSessions
-- On GHC 9.4+, the module graph contains not only ModSummary's but each `ModuleNode` in the graph
@@ -757,7 +774,7 @@ ghcSessionDepsDefinition fullModSummary GhcSessionDepsConfig{..} env file = do
nubOrdOn mkNodeKey (ModuleNode final_deps ms : concatMap mgModSummaries' mgs)
liftIO $ evaluate $ liftRnf rwhnf module_graph_nodes
return $ mkModuleGraph module_graph_nodes
- session' <- liftIO $ mergeEnvs hsc mg ms inLoadOrder depSessions
+ session' <- liftIO $ mergeEnvs hsc mg de ms inLoadOrder depSessions
-- Here we avoid a call to to `newHscEnvEqWithImportPaths`, which creates a new
-- ExportsMap when it is called. We only need to create the ExportsMap once per
@@ -786,9 +803,11 @@ getModIfaceFromDiskRule recorder = defineEarlyCutoff (cmapWithPrio LogShake reco
, old_value = m_old
, get_file_version = use GetModificationTime_{missingFileDiagnostics = False}
, get_linkable_hashes = \fs -> map (snd . fromJust . hirCoreFp) <$> uses_ GetModIface fs
+ , get_module_graph = useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph f
, regenerate = regenerateHiFile session f ms
}
- r <- loadInterface (hscEnv session) ms linkableType recompInfo
+ hsc_env' <- setFileCacheHook (hscEnv session)
+ r <- loadInterface hsc_env' ms linkableType recompInfo
case r of
(diags, Nothing) -> return (Nothing, (diags, Nothing))
(diags, Just x) -> do
@@ -856,7 +875,7 @@ getModSummaryRule displayTHWarning recorder = do
defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModSummary f -> do
session' <- hscEnv <$> use_ GhcSession f
modify_dflags <- getModifyDynFlags dynFlagsModifyGlobal
- let session = hscSetFlags (modify_dflags $ hsc_dflags session') session'
+ let session = setNonHomeFCHook $ hscSetFlags (modify_dflags $ hsc_dflags session') session' -- TODO wz1000
(modTime, mFileContent) <- getFileModTimeContents f
let fp = fromNormalizedFilePath f
modS <- liftIO $ runExceptT $
@@ -887,8 +906,9 @@ getModSummaryRule displayTHWarning recorder = do
generateCore :: RunSimplifier -> NormalizedFilePath -> Action (IdeResult ModGuts)
generateCore runSimplifier file = do
packageState <- hscEnv <$> use_ GhcSessionDeps file
+ hsc' <- setFileCacheHook packageState
tm <- use_ TypeCheck file
- liftIO $ compileModule runSimplifier packageState (tmrModSummary tm) (tmrTypechecked tm)
+ liftIO $ compileModule runSimplifier hsc' (tmrModSummary tm) (tmrTypechecked tm)
generateCoreRule :: Recorder (WithPriority Log) -> Rules ()
generateCoreRule recorder =
@@ -903,14 +923,15 @@ getModIfaceRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $
tmr <- use_ TypeCheck f
linkableType <- getLinkableType f
hsc <- hscEnv <$> use_ GhcSessionDeps f
+ hsc' <- setFileCacheHook hsc
let compile = fmap ([],) $ use GenerateCore f
se <- getShakeExtras
- (diags, !mbHiFile) <- writeCoreFileIfNeeded se hsc linkableType compile tmr
+ (diags, !mbHiFile) <- writeCoreFileIfNeeded se hsc' linkableType compile tmr
let fp = hiFileFingerPrint <$> mbHiFile
hiDiags <- case mbHiFile of
Just hiFile
| OnDisk <- status
- , not (tmrDeferredError tmr) -> liftIO $ writeHiFile se hsc hiFile
+ , not (tmrDeferredError tmr) -> liftIO $ writeHiFile se hsc' hiFile
_ -> pure []
return (fp, (diags++hiDiags, mbHiFile))
NotFOI -> do
@@ -934,12 +955,21 @@ incrementRebuildCount = do
count <- getRebuildCountVar <$> getIdeGlobalAction
liftIO $ atomically $ modifyTVar' count (+1)
+setFileCacheHook :: HscEnv -> Action HscEnv
+setFileCacheHook old_hsc_env = do
+#if MIN_VERSION_ghc(9,11,0)
+ unlift <- askUnliftIO
+ return $ old_hsc_env { hsc_FC = (hsc_FC old_hsc_env) { lookupFileCache = unliftIO unlift . use_ GetFileHash . toNormalizedFilePath' } }
+#else
+ return old_hsc_env
+#endif
+
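Together with getFileHashRule, this hook turns GHC's file-hash reads into tracked build dependencies on GHC >= 9.11. A hedged sketch of the intended call pattern (withTrackedFileHashes is a hypothetical name; the real call sites are the rules adjusted above and below, such as generateCore and regenerateHiFile):

    withTrackedFileHashes :: NormalizedFilePath -> (HscEnv -> Action a) -> Action a
    withTrackedFileHashes file k = do
      hsc  <- hscEnv <$> use_ GhcSessionDeps file
      hsc' <- setFileCacheHook hsc   -- identity on GHC < 9.11
      k hsc'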
-- | Also generates and indexes the `.hie` file, along with the `.o` file if needed
-- Invariant maintained is that if the `.hi` file was successfully written, then the
-- `.hie` and `.o` file (if needed) were also successfully written
regenerateHiFile :: HscEnvEq -> NormalizedFilePath -> ModSummary -> Maybe LinkableType -> Action ([FileDiagnostic], Maybe HiFileResult)
regenerateHiFile sess f ms compNeeded = do
- let hsc = hscEnv sess
+ hsc <- setFileCacheHook (hscEnv sess)
opt <- getIdeOptions
-- Embed haddocks in the interface file
@@ -949,7 +979,7 @@ regenerateHiFile sess f ms compNeeded = do
Just pm -> do
-- Invoke typechecking directly to update it without incurring a dependency
-- on the parsed module and the typecheck rules
- (diags', mtmr) <- typeCheckRuleDefinition hsc pm
+ (diags', mtmr) <- typeCheckRuleDefinition hsc pm f
case mtmr of
Nothing -> pure (diags', Nothing)
Just tmr -> do
@@ -1038,6 +1068,13 @@ getLinkableRule recorder =
HiFileResult{hirModSummary, hirModIface, hirModDetails, hirCoreFp} <- use_ GetModIface f
let obj_file = ml_obj_file (ms_location hirModSummary)
core_file = ml_core_file (ms_location hirModSummary)
+#if MIN_VERSION_ghc(9,11,0)
+ mkLinkable t mod l = Linkable t mod (pure l)
+ dotO o = DotO o ModuleObject
+#else
+ mkLinkable t mod l = LM t mod [l]
+ dotO = DotO
+#endif
case hirCoreFp of
Nothing -> error $ "called GetLinkable for a file without a linkable: " ++ show f
Just (bin_core, fileHash) -> do
@@ -1063,10 +1100,15 @@ getLinkableRule recorder =
else pure Nothing
case mobj_time of
Just obj_t
- | obj_t >= core_t -> pure ([], Just $ HomeModInfo hirModIface hirModDetails (justObjects $ LM (posixSecondsToUTCTime obj_t) (ms_mod hirModSummary) [DotO obj_file]))
+ | obj_t >= core_t -> pure ([], Just $ HomeModInfo hirModIface hirModDetails (justObjects $ mkLinkable (posixSecondsToUTCTime obj_t) (ms_mod hirModSummary) (dotO obj_file)))
_ -> liftIO $ coreFileToLinkable linkableType (hscEnv session) hirModSummary hirModIface hirModDetails bin_core (error "object doesn't have time")
-- Record the linkable so we know not to unload it, and unload old versions
- whenJust ((homeModInfoByteCode =<< hmi) <|> (homeModInfoObject =<< hmi)) $ \(LM time mod _) -> do
+ whenJust ((homeModInfoByteCode =<< hmi) <|> (homeModInfoObject =<< hmi))
+#if MIN_VERSION_ghc(9,11,0)
+ $ \(Linkable time mod _) -> do
+#else
+ $ \(LM time mod _) -> do
+#endif
compiledLinkables <- getCompiledLinkables <$> getIdeGlobalAction
liftIO $ modifyVar compiledLinkables $ \old -> do
let !to_keep = extendModuleEnv old mod time
@@ -1080,7 +1122,9 @@ getLinkableRule recorder =
--just before returning it to be loaded. This has a substantial effect on recompile
--times as the number of loaded modules and splices increases.
--
- unload (hscEnv session) (map (\(mod', time') -> LM time' mod' []) $ moduleEnvToList to_keep)
+ --We use a dummy DotA linkable part to fake a NativeCode linkable.
+ --The unload function doesn't care about the exact linkable parts.
+ unload (hscEnv session) (map (\(mod', time') -> mkLinkable time' mod' (DotA "dummy")) $ moduleEnvToList to_keep)
return (to_keep, ())
return (fileHash <$ hmi, (warns, LinkableResult <$> hmi <*> pure fileHash))
@@ -1093,7 +1137,7 @@ needsCompilationRule file
| "boot" `isSuffixOf` fromNormalizedFilePath file =
pure (Just $ encodeLinkableType Nothing, Just Nothing)
needsCompilationRule file = do
- graph <- useNoFile GetModuleGraph
+ graph <- useWithSeparateFingerprintRule GetModuleGraphImmediateReverseDepsFingerprints GetModuleGraph file
res <- case graph of
-- Treat as False if some reverse dependency header fails to parse
Nothing -> pure Nothing
@@ -1178,12 +1222,13 @@ mainRule recorder RulesConfig{..} = do
reportImportCyclesRule recorder
typeCheckRule recorder
getDocMapRule recorder
- loadGhcSession recorder GhcSessionDepsConfig{fullModuleGraph}
+ loadGhcSession recorder def{fullModuleGraph}
getModIfaceFromDiskRule recorder
getModIfaceFromDiskAndIndexRule recorder
getModIfaceRule recorder
getModSummaryRule templateHaskellWarning recorder
getModuleGraphRule recorder
+ getFileHashRule recorder
knownFilesRule recorder
getClientSettingsRule recorder
getHieAstsRule recorder
@@ -1204,6 +1249,19 @@ mainRule recorder RulesConfig{..} = do
persistentDocMapRule
persistentImportMapRule
getLinkableRule recorder
+ defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModuleGraphTransDepsFingerprints file -> do
+ di <- useNoFile_ GetModuleGraph
+ let finger = lookupFingerprint file di (depTransDepsFingerprints di)
+ return (fingerprintToBS <$> finger, ([], finger))
+ defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModuleGraphTransReverseDepsFingerprints file -> do
+ di <- useNoFile_ GetModuleGraph
+ let finger = lookupFingerprint file di (depTransReverseDepsFingerprints di)
+ return (fingerprintToBS <$> finger, ([], finger))
+ defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModuleGraphImmediateReverseDepsFingerprints file -> do
+ di <- useNoFile_ GetModuleGraph
+ let finger = lookupFingerprint file di (depImmediateReverseDepsFingerprints di)
+ return (fingerprintToBS <$> finger, ([], finger))
+
-- | Get HieFile for haskell file on NormalizedFilePath
getHieFile :: NormalizedFilePath -> Action (Maybe HieFile)
diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs
index ed27a2f608..6fc9a4d00e 100644
--- a/ghcide/src/Development/IDE/Core/Shake.hs
+++ b/ghcide/src/Development/IDE/Core/Shake.hs
@@ -31,6 +31,8 @@ module Development.IDE.Core.Shake(
shakeEnqueue,
newSession,
use, useNoFile, uses, useWithStaleFast, useWithStaleFast', delayedAction,
+ useWithSeparateFingerprintRule,
+ useWithSeparateFingerprintRule_,
FastResult(..),
use_, useNoFile_, uses_,
useWithStale, usesWithStale,
@@ -1148,6 +1150,23 @@ usesWithStale key files = do
-- whether the rule succeeded or not.
traverse (lastValue key) files
+-- We use a separate fingerprint rule to trigger rebuilds of the given rule.
+useWithSeparateFingerprintRule
+ :: (IdeRule k v, IdeRule k1 Fingerprint)
+ => k1 -> k -> NormalizedFilePath -> Action (Maybe v)
+useWithSeparateFingerprintRule fingerKey key file = do
+ _ <- use fingerKey file
+ useWithoutDependency key emptyFilePath
+
+-- We use a separate fingerprint rule to trigger rebuilds of the given rule.
+useWithSeparateFingerprintRule_
+ :: (IdeRule k v, IdeRule k1 Fingerprint)
+ => k1 -> k -> NormalizedFilePath -> Action v
+useWithSeparateFingerprintRule_ fingerKey key file = do
+ useWithSeparateFingerprintRule fingerKey key file >>= \case
+ Just v -> return v
+ Nothing -> liftIO $ throwIO $ BadDependency (show key)
+
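Call sites elsewhere in this patch migrate to the new helper as follows; a sketch of the pattern (file is the NormalizedFilePath the calling rule runs for):

    -- before: a dependency on the whole module graph
    --   di <- useNoFile_ GetModuleGraph
    -- after: depend only on the per-file fingerprint, then read the graph
    --        without registering it as a dependency
    di <- useWithSeparateFingerprintRule_
            GetModuleGraphTransDepsFingerprints GetModuleGraph file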
useWithoutDependency :: IdeRule k v
=> k -> NormalizedFilePath -> Action (Maybe v)
useWithoutDependency key file =
@@ -1246,7 +1265,7 @@ defineEarlyCutoff' doDiagnostics cmp key file mbOld mode action = do
(mbBs, (diags, mbRes)) <- actionCatch
(do v <- action staleV; liftIO $ evaluate $ force v) $
\(e :: SomeException) -> do
- pure (Nothing, ([ideErrorText file (T.pack $ show e) | not $ isBadDependency e],Nothing))
+ pure (Nothing, ([ideErrorText file (T.pack $ show (key, file) ++ show e) | not $ isBadDependency e],Nothing))
ver <- estimateFileVersionUnsafely key mbRes file
(bs, res) <- case mbRes of
diff --git a/ghcide/src/Development/IDE/GHC/CPP.hs b/ghcide/src/Development/IDE/GHC/CPP.hs
index 289794d2a5..c97afd90e7 100644
--- a/ghcide/src/Development/IDE/GHC/CPP.hs
+++ b/ghcide/src/Development/IDE/GHC/CPP.hs
@@ -19,15 +19,13 @@ import Development.IDE.GHC.Compat as Compat
import Development.IDE.GHC.Compat.Util
import GHC
import GHC.Settings
+import qualified GHC.SysTools.Cpp as Pipeline
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
-#if !MIN_VERSION_ghc(9,5,0)
-import qualified GHC.Driver.Pipeline.Execute as Pipeline
-#endif
-#if MIN_VERSION_ghc(9,5,0)
-import qualified GHC.SysTools.Cpp as Pipeline
+#if MIN_VERSION_ghc(9,10,2)
+import qualified GHC.SysTools.Tasks as Pipeline
#endif
#if MIN_VERSION_ghc(9,11,0)
@@ -45,24 +43,21 @@ addOptP f = alterToolSettings $ \s -> s
doCpp :: HscEnv -> FilePath -> FilePath -> IO ()
doCpp env input_fn output_fn =
- -- See GHC commit a2f53ac8d968723417baadfab5be36a020ea6850
- -- this function/Pipeline.doCpp previously had a raw parameter
- -- always set to True that corresponded to these settings
-
-#if MIN_VERSION_ghc(9,5,0)
+ -- See GHC commit a2f53ac8d968723417baadfab5be36a020ea6850
+ -- this function/Pipeline.doCpp previously had a raw parameter
+ -- always set to True that corresponded to these settings
let cpp_opts = Pipeline.CppOpts
{ cppLinePragmas = True
-#if MIN_VERSION_ghc(9,11,0)
+
+#if MIN_VERSION_ghc(9,10,2)
, sourceCodePreprocessor = Pipeline.SCPHsCpp
#elif MIN_VERSION_ghc(9,10,0)
, useHsCpp = True
#else
, cppUseCc = False
#endif
+
} in
-#else
- let cpp_opts = True in
-#endif
Pipeline.doCpp (hsc_logger env) (hsc_tmpfs env) (hsc_dflags env) (hsc_unit_env env) cpp_opts input_fn output_fn
diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs
index 5f66625ee5..ddf01c61c5 100644
--- a/ghcide/src/Development/IDE/GHC/Compat.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat.hs
@@ -102,9 +102,7 @@ module Development.IDE.GHC.Compat(
Dependencies(dep_direct_mods),
NameCacheUpdater,
-#if MIN_VERSION_ghc(9,5,0)
XModulePs(..),
-#endif
#if !MIN_VERSION_ghc(9,7,0)
liftZonkM,
@@ -167,8 +165,13 @@ import GHC.Types.Var.Env
import GHC.Builtin.Uniques
import GHC.ByteCode.Types
+import GHC.Core.Lint.Interactive (interactiveInScope)
import GHC.CoreToStg
import GHC.Data.Maybe
+import GHC.Driver.Config.Core.Lint.Interactive (lintInteractiveExpr)
+import GHC.Driver.Config.Core.Opt.Simplify (initSimplifyExprOpts)
+import GHC.Driver.Config.CoreToStg (initCoreToStgOpts)
+import GHC.Driver.Config.CoreToStg.Prep (initCorePrepConfig)
import GHC.Driver.Config.Stg.Pipeline
import GHC.Driver.Env as Env
import GHC.Iface.Env
@@ -188,18 +191,6 @@ import GHC.Unit.Module.ModIface
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
-#if !MIN_VERSION_ghc(9,5,0)
-import GHC.Core.Lint (lintInteractiveExpr)
-#endif
-
-#if MIN_VERSION_ghc(9,5,0)
-import GHC.Core.Lint.Interactive (interactiveInScope)
-import GHC.Driver.Config.Core.Lint.Interactive (lintInteractiveExpr)
-import GHC.Driver.Config.Core.Opt.Simplify (initSimplifyExprOpts)
-import GHC.Driver.Config.CoreToStg (initCoreToStgOpts)
-import GHC.Driver.Config.CoreToStg.Prep (initCorePrepConfig)
-#endif
-
#if MIN_VERSION_ghc(9,7,0)
import GHC.Tc.Zonk.TcType (tcInitTidyEnv)
#endif
@@ -230,11 +221,7 @@ myCoreToStgExpr logger dflags ictxt
binding for the stg2stg step) -}
let bco_tmp_id = mkSysLocal (fsLit "BCO_toplevel")
(mkPseudoUniqueE 0)
-#if MIN_VERSION_ghc(9,5,0)
ManyTy
-#else
- Many
-#endif
(exprType prepd_expr)
(stg_binds, prov_map, collected_ccs) <-
myCoreToStg logger
@@ -258,27 +245,17 @@ myCoreToStg logger dflags ictxt
let (stg_binds, denv, cost_centre_info)
= {-# SCC "Core2Stg" #-}
coreToStg
-#if MIN_VERSION_ghc(9,5,0)
(initCoreToStgOpts dflags)
-#else
- dflags
-#endif
this_mod ml prepd_binds
#if MIN_VERSION_ghc(9,8,0)
(unzip -> (stg_binds2,_),_)
-#elif MIN_VERSION_ghc(9,4,2)
- (stg_binds2,_)
#else
- stg_binds2
+ (stg_binds2,_)
#endif
<- {-# SCC "Stg2Stg" #-}
stg2stg logger
-#if MIN_VERSION_ghc(9,5,0)
(interactiveInScope ictxt)
-#else
- ictxt
-#endif
(initStgPipelineOpts dflags for_bytecode) this_mod stg_binds
return (stg_binds2, denv, cost_centre_info)
@@ -293,42 +270,21 @@ getDependentMods :: ModIface -> [ModuleName]
getDependentMods = map (gwib_mod . snd) . S.toList . dep_direct_mods . mi_deps
simplifyExpr :: DynFlags -> HscEnv -> CoreExpr -> IO CoreExpr
-#if MIN_VERSION_ghc(9,5,0)
simplifyExpr _ env = GHC.simplifyExpr (Development.IDE.GHC.Compat.Env.hsc_logger env) (ue_eps (Development.IDE.GHC.Compat.Env.hsc_unit_env env)) (initSimplifyExprOpts (hsc_dflags env) (hsc_IC env))
-#else
-simplifyExpr _ = GHC.simplifyExpr
-#endif
corePrepExpr :: DynFlags -> HscEnv -> CoreExpr -> IO CoreExpr
-#if MIN_VERSION_ghc(9,5,0)
corePrepExpr _ env expr = do
cfg <- initCorePrepConfig env
GHC.corePrepExpr (Development.IDE.GHC.Compat.Env.hsc_logger env) cfg expr
-#else
-corePrepExpr _ = GHC.corePrepExpr
-#endif
renderMessages :: PsMessages -> (Bag WarnMsg, Bag ErrMsg)
renderMessages msgs =
-#if MIN_VERSION_ghc(9,5,0)
let renderMsgs extractor = (fmap . fmap) GhcPsMessage . getMessages $ extractor msgs
in (renderMsgs psWarnings, renderMsgs psErrors)
-#else
- let renderMsgs extractor = (fmap . fmap) renderDiagnosticMessageWithHints . getMessages $ extractor msgs
- in (renderMsgs psWarnings, renderMsgs psErrors)
-#endif
-#if MIN_VERSION_ghc(9,5,0)
pattern PFailedWithErrorMessages :: forall a b. (b -> Bag (MsgEnvelope GhcMessage)) -> ParseResult a
-#else
-pattern PFailedWithErrorMessages :: forall a b. (b -> Bag (MsgEnvelope DecoratedSDoc)) -> ParseResult a
-#endif
pattern PFailedWithErrorMessages msgs
-#if MIN_VERSION_ghc(9,5,0)
<- PFailed (const . fmap (fmap GhcPsMessage) . getMessages . getPsErrorMessages -> msgs)
-#else
- <- PFailed (const . fmap (fmap renderDiagnosticMessageWithHints) . getMessages . getPsErrorMessages -> msgs)
-#endif
{-# COMPLETE POk, PFailedWithErrorMessages #-}
hieExportNames :: HieFile -> [(SrcSpan, Name)]
@@ -338,10 +294,20 @@ type NameCacheUpdater = NameCache
mkHieFile' :: ModSummary
-> [Avail.AvailInfo]
+#if MIN_VERSION_ghc(9,11,0)
+ -> (HieASTs Type, NameEntityInfo)
+#else
-> HieASTs Type
+#endif
-> BS.ByteString
-> Hsc HieFile
-mkHieFile' ms exports asts src = do
+mkHieFile' ms exports
+#if MIN_VERSION_ghc(9,11,0)
+ (asts, entityInfo)
+#else
+ asts
+#endif
+ src = do
let Just src_file = ml_hs_file $ ms_location ms
(asts',arr) = compressTypes asts
return $ HieFile
@@ -349,6 +315,9 @@ mkHieFile' ms exports asts src = do
, hie_module = ms_mod ms
, hie_types = arr
, hie_asts = asts'
+#if MIN_VERSION_ghc(9,11,0)
+ , hie_entity_infos = entityInfo
+#endif
-- mkIfaceExports sorts the AvailInfos for stability
, hie_exports = mkIfaceExports exports
, hie_hs_src = src
@@ -440,24 +409,24 @@ generatedNodeInfo :: HieAST a -> Maybe (NodeInfo a)
generatedNodeInfo = Map.lookup GeneratedInfo . getSourcedNodeInfo . sourcedNodeInfo
data GhcVersion
- = GHC94
- | GHC96
+ = GHC96
| GHC98
| GHC910
+ | GHC912
deriving (Eq, Ord, Show, Enum)
ghcVersionStr :: String
ghcVersionStr = VERSION_ghc
ghcVersion :: GhcVersion
-#if MIN_VERSION_GLASGOW_HASKELL(9,10,0,0)
+#if MIN_VERSION_GLASGOW_HASKELL(9,12,0,0)
+ghcVersion = GHC912
+#elif MIN_VERSION_GLASGOW_HASKELL(9,10,0,0)
ghcVersion = GHC910
#elif MIN_VERSION_GLASGOW_HASKELL(9,8,0,0)
ghcVersion = GHC98
-#elif MIN_VERSION_GLASGOW_HASKELL(9,6,0,0)
+#else
ghcVersion = GHC96
-#elif MIN_VERSION_GLASGOW_HASKELL(9,4,0,0)
-ghcVersion = GHC94
#endif
simpleNodeInfoCompat :: FastStringCompat -> FastStringCompat -> NodeInfo a
@@ -494,14 +463,8 @@ loadModulesHome mod_infos e =
recDotDot :: HsRecFields (GhcPass p) arg -> Maybe Int
recDotDot x =
-#if MIN_VERSION_ghc(9,5,0)
unRecFieldsDotDot <$>
-#endif
unLoc <$> rec_dotdot x
-#if MIN_VERSION_ghc(9,5,0)
-extract_cons (NewTypeCon x) = [x]
+extract_cons (NewTypeCon x) = [x]
extract_cons (DataTypeCons _ xs) = xs
-#else
-extract_cons = id
-#endif
diff --git a/ghcide/src/Development/IDE/GHC/Compat/Core.hs b/ghcide/src/Development/IDE/GHC/Compat/Core.hs
index 301aa980bd..42f654b609 100644
--- a/ghcide/src/Development/IDE/GHC/Compat/Core.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat/Core.hs
@@ -69,6 +69,11 @@ module Development.IDE.GHC.Compat.Core (
IfaceTyCon(..),
ModIface,
ModIface_(..),
+#if MIN_VERSION_ghc(9,11,0)
+ pattern ModIface,
+ set_mi_top_env,
+ set_mi_usages,
+#endif
HscSource(..),
WhereFrom(..),
loadInterface,
@@ -220,6 +225,7 @@ module Development.IDE.GHC.Compat.Core (
SrcLoc.noSrcSpan,
SrcLoc.noSrcLoc,
SrcLoc.noLoc,
+ SrcLoc.srcSpanToRealSrcSpan,
mapLoc,
-- * Finder
FindResult(..),
@@ -230,7 +236,11 @@ module Development.IDE.GHC.Compat.Core (
ModuleOrigin(..),
PackageName(..),
-- * Linker
+#if MIN_VERSION_ghc(9,11,0)
+ LinkablePart(..),
+#else
Unlinked(..),
+#endif
Linkable(..),
unload,
-- * Hooks
@@ -365,27 +375,13 @@ module Development.IDE.GHC.Compat.Core (
module GHC.Unit.Finder.Types,
module GHC.Unit.Env,
module GHC.Driver.Phases,
-#if !MIN_VERSION_ghc(9,4,0)
- pattern HsFieldBind,
- hfbAnn,
- hfbLHS,
- hfbRHS,
- hfbPun,
-#endif
-#if !MIN_VERSION_ghc_boot_th(9,4,1)
- Extension(.., NamedFieldPuns),
-#else
Extension(..),
-#endif
mkCgInteractiveGuts,
justBytecode,
justObjects,
emptyHomeModInfoLinkable,
homeModInfoByteCode,
homeModInfoObject,
-#if !MIN_VERSION_ghc(9,5,0)
- field_label,
-#endif
groupOrigin,
isVisibleFunArg,
#if MIN_VERSION_ghc(9,8,0)
@@ -452,7 +448,7 @@ import GHC.Tc.Types.Evidence hiding ((<.>))
import GHC.Tc.Utils.Env
import GHC.Tc.Utils.Monad hiding (Applicative (..), IORef,
MonadFix (..), MonadIO (..), allM,
- anyM, concatMapM, mapMaybeM,
+ anyM, concatMapM, mapMaybeM, foldMapM,
(<$>))
import GHC.Tc.Utils.TcType as TcType
import qualified GHC.Types.Avail as Avail
@@ -530,16 +526,29 @@ import GHC.Unit.Module.Graph
import GHC.Unit.Module.Imported
import GHC.Unit.Module.ModDetails
import GHC.Unit.Module.ModGuts
+#if !MIN_VERSION_ghc(9,9,0)
+import GHC.Hs (SrcSpanAnn')
+#endif
import GHC.Unit.Module.ModIface (IfaceExport, ModIface,
- ModIface_ (..), mi_fix)
+ ModIface_ (..), mi_fix
+#if MIN_VERSION_ghc(9,11,0)
+ , pattern ModIface
+ , set_mi_top_env
+ , set_mi_usages
+#endif
+ )
import GHC.Unit.Module.ModSummary (ModSummary (..))
import GHC.Utils.Error (mkPlainErrorMsgEnvelope)
import GHC.Utils.Panic
import GHC.Utils.TmpFs
import Language.Haskell.Syntax hiding (FunDep)
+
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
+#if MIN_VERSION_ghc(9,11,0)
+import System.OsPath
+#endif
#if !MIN_VERSION_ghc(9,7,0)
import GHC.Types.Avail (greNamePrintableName)
@@ -550,7 +559,13 @@ import GHC.Hs (SrcSpanAnn')
#endif
mkHomeModLocation :: DynFlags -> ModuleName -> FilePath -> IO Module.ModLocation
+#if MIN_VERSION_ghc(9,11,0)
+mkHomeModLocation df mn f =
+ let osf = unsafeEncodeUtf f
+ in pure $ GHC.mkHomeModLocation (GHC.initFinderOpts df) mn osf
+#else
mkHomeModLocation df mn f = pure $ GHC.mkHomeModLocation (GHC.initFinderOpts df) mn f
+#endif
pattern RealSrcSpan :: SrcLoc.RealSrcSpan -> Maybe BufSpan -> SrcLoc.SrcSpan
@@ -601,21 +616,11 @@ pattern ExposePackage s a mr = DynFlags.ExposePackage s a mr
#endif
isVisibleFunArg :: Development.IDE.GHC.Compat.Core.FunTyFlag -> Bool
-#if __GLASGOW_HASKELL__ >= 906
isVisibleFunArg = TypesVar.isVisibleFunArg
type FunTyFlag = TypesVar.FunTyFlag
-#else
-isVisibleFunArg VisArg = True
-isVisibleFunArg _ = False
-type FunTyFlag = TypesVar.AnonArgFlag
-#endif
pattern FunTy :: Development.IDE.GHC.Compat.Core.FunTyFlag -> Type -> Type -> Type
pattern FunTy af arg res <- TyCoRep.FunTy {ft_af = af, ft_arg = arg, ft_res = res}
-
--- type HasSrcSpan x a = (GenLocated SrcSpan a ~ x)
--- type HasSrcSpan x = () :: Constraint
-
class HasSrcSpan a where
getLoc :: a -> SrcSpan
@@ -674,7 +679,7 @@ initObjLinker env =
loadDLL :: HscEnv -> String -> IO (Maybe String)
loadDLL env str = do
res <- GHCi.loadDLL (GHCi.hscInterp env) str
-#if MIN_VERSION_ghc(9,11,0) || (MIN_VERSION_ghc(9, 8, 3) && !MIN_VERSION_ghc(9, 9, 0))
+#if MIN_VERSION_ghc(9,11,0) || (MIN_VERSION_ghc(9, 8, 3) && !MIN_VERSION_ghc(9, 9, 0)) || (MIN_VERSION_ghc(9, 10, 2) && !MIN_VERSION_ghc(9, 11, 0))
pure $
case res of
Left err_msg -> Just err_msg
@@ -709,7 +714,7 @@ pattern GRE{gre_name, gre_par, gre_lcl, gre_imp} <- RdrName.GRE
#endif
,gre_par, gre_lcl, gre_imp = (toList -> gre_imp)}
-collectHsBindsBinders :: CollectPass p => Bag (XRec p (HsBindLR p idR)) -> [IdP p]
+collectHsBindsBinders :: CollectPass p => LHsBindsLR p idR -> [IdP p]
collectHsBindsBinders x = GHC.collectHsBindsBinders CollNoDictBinders x
@@ -721,11 +726,7 @@ makeSimpleDetails hsc_env =
mkIfaceTc :: HscEnv -> GHC.SafeHaskellMode -> ModDetails -> ModSummary -> Maybe CoreProgram -> TcGblEnv -> IO ModIface
mkIfaceTc hscEnv shm md _ms _mcp =
-#if MIN_VERSION_ghc(9,5,0)
GHC.mkIfaceTc hscEnv shm md _ms _mcp -- mcp::Maybe CoreProgram is only used in GHC >= 9.6
-#else
- GHC.mkIfaceTc hscEnv shm md _ms -- ms::ModSummary is only used in GHC >= 9.4
-#endif
mkBootModDetailsTc :: HscEnv -> TcGblEnv -> IO ModDetails
mkBootModDetailsTc session = GHC.mkBootModDetailsTc
@@ -739,50 +740,10 @@ initTidyOpts =
driverNoStop :: StopPhase
driverNoStop = NoStop
-
-#if !MIN_VERSION_ghc(9,4,0)
-pattern HsFieldBind :: XHsRecField id -> id -> arg -> Bool -> HsRecField' id arg
-pattern HsFieldBind {hfbAnn, hfbLHS, hfbRHS, hfbPun} <- HsRecField hfbAnn (SrcLoc.unLoc -> hfbLHS) hfbRHS hfbPun where
- HsFieldBind ann lhs rhs pun = HsRecField ann (SrcLoc.noLoc lhs) rhs pun
-#endif
-
-#if !MIN_VERSION_ghc_boot_th(9,4,1)
-pattern NamedFieldPuns :: Extension
-pattern NamedFieldPuns = RecordPuns
-#endif
-
groupOrigin :: MatchGroup GhcRn body -> Origin
-#if MIN_VERSION_ghc(9,5,0)
mapLoc :: (a -> b) -> SrcLoc.GenLocated l a -> SrcLoc.GenLocated l b
mapLoc = fmap
groupOrigin = mg_ext
-#else
-mapLoc :: (a -> b) -> SrcLoc.GenLocated l a -> SrcLoc.GenLocated l b
-mapLoc = SrcLoc.mapLoc
-groupOrigin = mg_origin
-#endif
-
-
-#if !MIN_VERSION_ghc(9,5,0)
-mkCgInteractiveGuts :: CgGuts -> CgGuts
-mkCgInteractiveGuts = id
-
-emptyHomeModInfoLinkable :: Maybe Linkable
-emptyHomeModInfoLinkable = Nothing
-
-justBytecode :: Linkable -> Maybe Linkable
-justBytecode = Just
-
-justObjects :: Linkable -> Maybe Linkable
-justObjects = Just
-
-homeModInfoByteCode, homeModInfoObject :: HomeModInfo -> Maybe Linkable
-homeModInfoByteCode = hm_linkable
-homeModInfoObject = hm_linkable
-
-field_label :: a -> a
-field_label = id
-#endif
mkSimpleTarget :: DynFlags -> FilePath -> Target
mkSimpleTarget df fp = Target (TargetFile fp Nothing) True (homeUnitId_ df) Nothing
diff --git a/ghcide/src/Development/IDE/GHC/Compat/Driver.hs b/ghcide/src/Development/IDE/GHC/Compat/Driver.hs
index c88d0963d6..6ab1d26df2 100644
--- a/ghcide/src/Development/IDE/GHC/Compat/Driver.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat/Driver.hs
@@ -12,6 +12,12 @@ module Development.IDE.GHC.Compat.Driver
( hscTypecheckRenameWithDiagnostics
) where
+#if MIN_VERSION_ghc(9,11,0)
+
+import GHC.Driver.Main (hscTypecheckRenameWithDiagnostics)
+
+#else
+
import Control.Monad
import GHC.Core
import GHC.Data.FastString
@@ -73,11 +79,7 @@ hsc_typecheck keep_rn mod_summary mb_rdr_module = do
tc_result0 <- tcRnModule' mod_summary keep_rn' hpm
if hsc_src == HsigFile
then
-#if MIN_VERSION_ghc(9,5,0)
do (iface, _) <- liftIO $ hscSimpleIface hsc_env Nothing tc_result0 mod_summary
-#else
- do (iface, _) <- liftIO $ hscSimpleIface hsc_env tc_result0 mod_summary
-#endif
ioMsgMaybe $ hoistTcRnMessage $
tcRnMergeSignatures hsc_env hpm tc_result0 iface
else return tc_result0
@@ -129,7 +131,6 @@ extract_renamed_stuff mod_summary tc_result = do
-- ============================================================================
-- DO NOT EDIT - Refer to top of file
-- ============================================================================
-#if MIN_VERSION_ghc(9,5,0)
hscSimpleIface :: HscEnv
-> Maybe CoreProgram
-> TcGblEnv
@@ -137,11 +138,5 @@ hscSimpleIface :: HscEnv
-> IO (ModIface, ModDetails)
hscSimpleIface hsc_env mb_core_program tc_result summary
= runHsc hsc_env $ hscSimpleIface' mb_core_program tc_result summary
-#else
-hscSimpleIface :: HscEnv
- -> TcGblEnv
- -> ModSummary
- -> IO (ModIface, ModDetails)
-hscSimpleIface hsc_env tc_result summary
- = runHsc hsc_env $ hscSimpleIface' tc_result summary
+
#endif
diff --git a/ghcide/src/Development/IDE/GHC/Compat/Env.hs b/ghcide/src/Development/IDE/GHC/Compat/Env.hs
index 988739e3b8..cbccc1a3de 100644
--- a/ghcide/src/Development/IDE/GHC/Compat/Env.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat/Env.hs
@@ -105,22 +105,14 @@ hscHomeUnit =
setBytecodeLinkerOptions :: DynFlags -> DynFlags
setBytecodeLinkerOptions df = df {
ghcLink = LinkInMemory
-#if MIN_VERSION_ghc(9,5,0)
, backend = noBackend
-#else
- , backend = NoBackend
-#endif
, ghcMode = CompManager
}
setInterpreterLinkerOptions :: DynFlags -> DynFlags
setInterpreterLinkerOptions df = df {
ghcLink = LinkInMemory
-#if MIN_VERSION_ghc(9,5,0)
, backend = interpreterBackend
-#else
- , backend = Interpreter
-#endif
, ghcMode = CompManager
}
diff --git a/ghcide/src/Development/IDE/GHC/Compat/Error.hs b/ghcide/src/Development/IDE/GHC/Compat/Error.hs
index 06b6a9876b..0255886726 100644
--- a/ghcide/src/Development/IDE/GHC/Compat/Error.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat/Error.hs
@@ -19,9 +19,11 @@ module Development.IDE.GHC.Compat.Error (
Diagnostic(..),
-- * Prisms for error selection
_TcRnMessage,
+ _TcRnMessageWithCtx,
_GhcPsMessage,
_GhcDsMessage,
_GhcDriverMessage,
+ _TcRnMissingSignature,
) where
import Control.Lens
@@ -30,8 +32,20 @@ import GHC.HsToCore.Errors.Types
import GHC.Tc.Errors.Types
import GHC.Types.Error
-_TcRnMessage :: Prism' GhcMessage TcRnMessage
-_TcRnMessage = prism' GhcTcRnMessage (\case
+-- | Some 'TcRnMessage's are nested in other constructors for additional context.
+-- For example, 'TcRnWithHsDocContext' and 'TcRnMessageWithInfo'.
+-- However, in most cases you don't need the additional context and just want
+-- the underlying error message. @'_TcRnMessage'@ recursively unwraps these
+-- constructors until no context-carrying constructors remain.
+--
+-- Use @'_TcRnMessageWithCtx'@ if you need the additional context. You can always
+-- strip it later using @'stripTcRnMessageContext'@.
+--
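+-- A minimal usage sketch (the helper name is illustrative; the optics are the
+-- ones defined in this module, and 'Data.Maybe.isJust' is assumed in scope):
+--
+-- > isMissingSig :: GhcMessage -> Bool
+-- > isMissingSig msg = isJust (msg ^? _TcRnMessage . _TcRnMissingSignature)
+--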
+_TcRnMessage :: Fold GhcMessage TcRnMessage
+_TcRnMessage = _TcRnMessageWithCtx . to stripTcRnMessageContext
+
+_TcRnMessageWithCtx :: Prism' GhcMessage TcRnMessage
+_TcRnMessageWithCtx = prism' GhcTcRnMessage (\case
GhcTcRnMessage tcRnMsg -> Just tcRnMsg
_ -> Nothing)
@@ -66,3 +80,5 @@ stripTcRnMessageContext = \case
msgEnvelopeErrorL :: Lens' (MsgEnvelope e) e
msgEnvelopeErrorL = lens errMsgDiagnostic (\envelope e -> envelope { errMsgDiagnostic = e } )
+
+makePrisms ''TcRnMessage
diff --git a/ghcide/src/Development/IDE/GHC/Compat/Iface.hs b/ghcide/src/Development/IDE/GHC/Compat/Iface.hs
index e76de880d5..39cf9e0d45 100644
--- a/ghcide/src/Development/IDE/GHC/Compat/Iface.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat/Iface.hs
@@ -21,7 +21,11 @@ import GHC.Iface.Errors.Types (IfaceMessage)
#endif
writeIfaceFile :: HscEnv -> FilePath -> ModIface -> IO ()
+#if MIN_VERSION_ghc(9,11,0)
+writeIfaceFile env fp iface = Iface.writeIface (hsc_logger env) (targetProfile $ hsc_dflags env) (Iface.flagsToIfCompression $ hsc_dflags env) fp iface
+#else
writeIfaceFile env fp iface = Iface.writeIface (hsc_logger env) (targetProfile $ hsc_dflags env) fp iface
+#endif
cannotFindModule :: HscEnv -> ModuleName -> FindResult -> SDoc
cannotFindModule env modname fr =
diff --git a/ghcide/src/Development/IDE/GHC/Compat/Logger.hs b/ghcide/src/Development/IDE/GHC/Compat/Logger.hs
index 32ec11da4c..c3cc5247d0 100644
--- a/ghcide/src/Development/IDE/GHC/Compat/Logger.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat/Logger.hs
@@ -28,10 +28,8 @@ type LogActionCompat = LogFlags -> Maybe DiagnosticReason -> Maybe Severity -> S
logActionCompat :: LogActionCompat -> LogAction
#if MIN_VERSION_ghc(9,7,0)
logActionCompat logAction logFlags (MCDiagnostic severity (ResolvedDiagnosticReason wr) _) loc = logAction logFlags (Just wr) (Just severity) loc alwaysQualify
-#elif MIN_VERSION_ghc(9,5,0)
-logActionCompat logAction logFlags (MCDiagnostic severity wr _) loc = logAction logFlags (Just wr) (Just severity) loc alwaysQualify
#else
-logActionCompat logAction logFlags (MCDiagnostic severity wr) loc = logAction logFlags (Just wr) (Just severity) loc alwaysQualify
+logActionCompat logAction logFlags (MCDiagnostic severity wr _) loc = logAction logFlags (Just wr) (Just severity) loc alwaysQualify
#endif
logActionCompat logAction logFlags _cls loc = logAction logFlags Nothing Nothing loc alwaysQualify
diff --git a/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs b/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs
index d1053ebffc..ccec23c9c3 100644
--- a/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs
@@ -16,14 +16,12 @@ module Development.IDE.GHC.Compat.Outputable (
-- * Parser errors
PsWarning,
PsError,
-#if MIN_VERSION_ghc(9,5,0)
defaultDiagnosticOpts,
GhcMessage,
DriverMessage,
Messages,
initDiagOpts,
pprMessages,
-#endif
DiagnosticReason(..),
renderDiagnosticMessageWithHints,
pprMsgEnvelopeBagWithLoc,
@@ -51,6 +49,7 @@ module Development.IDE.GHC.Compat.Outputable (
import Data.Maybe
import GHC.Driver.Config.Diagnostic
import GHC.Driver.Env
+import GHC.Driver.Errors.Types (DriverMessage, GhcMessage)
import GHC.Driver.Ppr
import GHC.Driver.Session
import GHC.Parser.Errors.Types
@@ -66,17 +65,11 @@ import GHC.Utils.Panic
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
-#if MIN_VERSION_ghc(9,5,0)
-import GHC.Driver.Errors.Types (DriverMessage, GhcMessage)
-#endif
-
#if MIN_VERSION_ghc(9,7,0)
import GHC.Types.Error (defaultDiagnosticOpts)
#endif
-#if MIN_VERSION_ghc(9,5,0)
type PrintUnqualified = NamePprCtx
-#endif
-- | A compatible function to print `Outputable` instances
-- without unique symbols.
@@ -118,33 +111,19 @@ pprNoLocMsgEnvelope (MsgEnvelope { errMsgDiagnostic = e
-#if MIN_VERSION_ghc(9,5,0)
type ErrMsg = MsgEnvelope GhcMessage
type WarnMsg = MsgEnvelope GhcMessage
-#else
-type ErrMsg = MsgEnvelope DecoratedSDoc
-type WarnMsg = MsgEnvelope DecoratedSDoc
-#endif
mkPrintUnqualifiedDefault :: HscEnv -> GlobalRdrEnv -> PrintUnqualified
-#if MIN_VERSION_ghc(9,5,0)
mkPrintUnqualifiedDefault env =
mkNamePprCtx ptc (hsc_unit_env env)
where
ptc = initPromotionTickContext (hsc_dflags env)
-#else
-mkPrintUnqualifiedDefault env =
- -- GHC 9.2 version
- -- mkPrintUnqualified :: UnitEnv -> GlobalRdrEnv -> PrintUnqualified
- mkPrintUnqualified (hsc_unit_env env)
-#endif
renderDiagnosticMessageWithHints :: forall a. Diagnostic a => a -> DecoratedSDoc
renderDiagnosticMessageWithHints a = Error.unionDecoratedSDoc
(diagnosticMessage
-#if MIN_VERSION_ghc(9,5,0)
(defaultDiagnosticOpts @a)
-#endif
a) (mkDecorated $ map ppr $ diagnosticHints a)
mkWarnMsg :: DynFlags -> Maybe DiagnosticReason -> b -> SrcSpan -> PrintUnqualified -> SDoc -> MsgEnvelope DecoratedSDoc
diff --git a/ghcide/src/Development/IDE/GHC/Compat/Parser.hs b/ghcide/src/Development/IDE/GHC/Compat/Parser.hs
index 25d23bcad4..8e2967ed30 100644
--- a/ghcide/src/Development/IDE/GHC/Compat/Parser.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat/Parser.hs
@@ -16,7 +16,9 @@ module Development.IDE.GHC.Compat.Parser (
Development.IDE.GHC.Compat.Parser.pm_mod_summary,
Development.IDE.GHC.Compat.Parser.pm_extra_src_files,
-- * API Annotations
+#if !MIN_VERSION_ghc(9,11,0)
Anno.AnnKeywordId(..),
+#endif
pattern EpaLineComment,
pattern EpaBlockComment
) where
@@ -47,11 +49,7 @@ initParserState :: ParserOpts -> StringBuffer -> RealSrcLoc -> PState
initParserState =
Lexer.initParserState
-#if MIN_VERSION_ghc(9,5,0)
pattern HsParsedModule :: Located (HsModule GhcPs) -> [FilePath] -> GHC.HsParsedModule
-#else
-pattern HsParsedModule :: Located HsModule -> [FilePath] -> GHC.HsParsedModule
-#endif
pattern HsParsedModule
{ hpm_module
, hpm_src_files
diff --git a/ghcide/src/Development/IDE/GHC/CoreFile.hs b/ghcide/src/Development/IDE/GHC/CoreFile.hs
index f2b58ee02e..9977ad573b 100644
--- a/ghcide/src/Development/IDE/GHC/CoreFile.hs
+++ b/ghcide/src/Development/IDE/GHC/CoreFile.hs
@@ -10,7 +10,7 @@ module Development.IDE.GHC.CoreFile
, readBinCoreFile
, writeBinCoreFile
, getImplicitBinds
- , occNamePrefixes) where
+ ) where
import Control.Monad
import Control.Monad.IO.Class
@@ -26,6 +26,9 @@ import GHC.CoreToIface
import GHC.Fingerprint
import GHC.Iface.Binary
import GHC.Iface.Env
+#if MIN_VERSION_ghc(9,11,0)
+import qualified GHC.Iface.Load as Iface
+#endif
import GHC.Iface.Recomp.Binary (fingerprintBinMem)
import GHC.IfaceToCore
import GHC.Types.Id.Make
@@ -87,14 +90,20 @@ readBinCoreFile name_cache fat_hi_path = do
return (file, fp)
-- | Write a core file
-writeBinCoreFile :: FilePath -> CoreFile -> IO Fingerprint
-writeBinCoreFile core_path fat_iface = do
+writeBinCoreFile :: DynFlags -> FilePath -> CoreFile -> IO Fingerprint
+writeBinCoreFile _dflags core_path fat_iface = do
bh <- openBinMem initBinMemSize
let quietTrace =
QuietBinIFace
- putWithUserData quietTrace bh fat_iface
+ putWithUserData
+ quietTrace
+#if MIN_VERSION_ghc(9,11,0)
+ (Iface.flagsToIfCompression _dflags)
+#endif
+ bh
+ fat_iface
-- And send the result to the file
writeBinMem bh core_path
@@ -109,21 +118,8 @@ codeGutsToCoreFile
:: Fingerprint -- ^ Hash of the interface this was generated from
-> CgGuts
-> CoreFile
-#if MIN_VERSION_ghc(9,5,0)
-- In GHC 9.6, implicit binds are tidied and part of core binds
codeGutsToCoreFile hash CgGuts{..} = CoreFile (map (toIfaceTopBind1 cg_module) cg_binds) hash
-#else
-codeGutsToCoreFile hash CgGuts{..} = CoreFile (map (toIfaceTopBind1 cg_module) $ filter isNotImplictBind cg_binds) hash
-
--- | Implicit binds can be generated from the interface and are not tidied,
--- so we must filter them out
-isNotImplictBind :: CoreBind -> Bool
-isNotImplictBind bind = not . all isImplicitId $ bindBindings bind
-
-bindBindings :: CoreBind -> [Var]
-bindBindings (NonRec b _) = [b]
-bindBindings (Rec bnds) = map fst bnds
-#endif
getImplicitBinds :: TyCon -> [CoreBind]
getImplicitBinds tc = cls_binds ++ getTyConImplicitBinds tc
@@ -141,7 +137,11 @@ getClassImplicitBinds cls
| (op, val_index) <- classAllSelIds cls `zip` [0..] ]
get_defn :: Id -> CoreBind
-get_defn identifier = NonRec identifier (unfoldingTemplate (realIdUnfolding identifier))
+get_defn identifier = NonRec identifier templ
+ where
+ templ = case maybeUnfoldingTemplate (realIdUnfolding identifier) of
+ Nothing -> error "get_dfn: no unfolding template"
+ Just x -> x
toIfaceTopBndr1 :: Module -> Id -> IfaceId
toIfaceTopBndr1 mod identifier
@@ -210,44 +210,3 @@ tc_iface_bindings (TopIfaceRec vs) = do
vs' <- traverse (\(v, e) -> (v,) <$> tcIfaceExpr e) vs
pure $ Rec vs'
--- | Prefixes that can occur in a GHC OccName
-occNamePrefixes :: [T.Text]
-occNamePrefixes =
- [
- -- long ones
- "$con2tag_"
- , "$tag2con_"
- , "$maxtag_"
-
- -- four chars
- , "$sel:"
- , "$tc'"
-
- -- three chars
- , "$dm"
- , "$co"
- , "$tc"
- , "$cp"
- , "$fx"
-
- -- two chars
- , "$W"
- , "$w"
- , "$m"
- , "$b"
- , "$c"
- , "$d"
- , "$i"
- , "$s"
- , "$f"
- , "$r"
- , "C:"
- , "N:"
- , "D:"
- , "$p"
- , "$L"
- , "$f"
- , "$t"
- , "$c"
- , "$m"
- ]
diff --git a/ghcide/src/Development/IDE/GHC/Error.hs b/ghcide/src/Development/IDE/GHC/Error.hs
index 8f919a3bf2..048987f8ae 100644
--- a/ghcide/src/Development/IDE/GHC/Error.hs
+++ b/ghcide/src/Development/IDE/GHC/Error.hs
@@ -78,15 +78,9 @@ diagFromErrMsg diagSource dflags origErr =
-- The function signature changes based on the GHC version.
-- While this is not desirable, it avoids more CPP statements in code
-- that implements actual logic.
-#if MIN_VERSION_ghc(9,5,0)
diagFromGhcErrorMessages :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope GhcMessage) -> [FileDiagnostic]
diagFromGhcErrorMessages sourceParser dflags errs =
diagFromErrMsgs sourceParser dflags errs
-#else
-diagFromGhcErrorMessages :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope Compat.DecoratedSDoc) -> [FileDiagnostic]
-diagFromGhcErrorMessages sourceParser dflags errs =
- diagFromSDocErrMsgs sourceParser dflags errs
-#endif
diagFromErrMsgs :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope GhcMessage) -> [FileDiagnostic]
diagFromErrMsgs diagSource dflags = concatMap (diagFromErrMsg diagSource dflags) . Compat.bagToList
diff --git a/ghcide/src/Development/IDE/GHC/Orphans.hs b/ghcide/src/Development/IDE/GHC/Orphans.hs
index 2ee19beeb2..543c6f4387 100644
--- a/ghcide/src/Development/IDE/GHC/Orphans.hs
+++ b/ghcide/src/Development/IDE/GHC/Orphans.hs
@@ -32,11 +32,9 @@ import GHC.Types.SrcLoc
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
-#if MIN_VERSION_ghc(9,5,0)
import GHC.Unit.Home.ModInfo
import GHC.Unit.Module.Location (ModLocation (..))
import GHC.Unit.Module.WholeCoreBindings
-#endif
-- Orphan instance for Shake.hs
-- https://hub.darcs.net/ross/transformers/issue/86
@@ -51,22 +49,39 @@ instance Show ModDetails where show = const ""
instance NFData ModDetails where rnf = rwhnf
instance NFData SafeHaskellMode where rnf = rwhnf
instance Show Linkable where show = unpack . printOutputable
+#if MIN_VERSION_ghc(9,11,0)
+instance NFData Linkable where rnf (Linkable a b c) = rnf a `seq` rnf b `seq` rnf c
+instance NFData LinkableObjectSort where rnf = rwhnf
+instance NFData LinkablePart where
+ rnf (DotO a b) = rnf a `seq` rnf b
+ rnf (DotA f) = rnf f
+ rnf (DotDLL f) = rnf f
+ rnf (BCOs a) = seqCompiledByteCode a
+ rnf (CoreBindings wcb) = rnf wcb
+ rnf (LazyBCOs a b) = seqCompiledByteCode a `seq` rnf b
+#else
instance NFData Linkable where rnf (LM a b c) = rnf a `seq` rnf b `seq` rnf c
instance NFData Unlinked where
rnf (DotO f) = rnf f
rnf (DotA f) = rnf f
rnf (DotDLL f) = rnf f
rnf (BCOs a b) = seqCompiledByteCode a `seq` liftRnf rwhnf b
-#if MIN_VERSION_ghc(9,5,0)
rnf (CoreBindings wcb) = rnf wcb
rnf (LoadedBCOs us) = rnf us
+#endif
instance NFData WholeCoreBindings where
+#if MIN_VERSION_ghc(9,11,0)
+ rnf (WholeCoreBindings bs m ml f) = rnf bs `seq` rnf m `seq` rnf ml `seq` rnf f
+#else
rnf (WholeCoreBindings bs m ml) = rnf bs `seq` rnf m `seq` rnf ml
+#endif
instance NFData ModLocation where
+#if MIN_VERSION_ghc(9,11,0)
+ rnf (OsPathModLocation mf f1 f2 f3 f4 f5) = rnf mf `seq` rnf f1 `seq` rnf f2 `seq` rnf f3 `seq` rnf f4 `seq` rnf f5
+#else
rnf (ModLocation mf f1 f2 f3 f4 f5) = rnf mf `seq` rnf f1 `seq` rnf f2 `seq` rnf f3 `seq` rnf f4 `seq` rnf f5
-
#endif
instance Show PackageFlag where show = unpack . printOutputable
@@ -82,12 +97,6 @@ instance NFData SB.StringBuffer where rnf = rwhnf
instance Show Module where
show = moduleNameString . moduleName
-
-#if !MIN_VERSION_ghc(9,5,0)
-instance (NFData l, NFData e) => NFData (GenLocated l e) where
- rnf (L l e) = rnf l `seq` rnf e
-#endif
-
instance Show ModSummary where
show = show . ms_mod
@@ -170,11 +179,6 @@ instance NFData Type where
instance Show a => Show (Bag a) where
show = show . bagToList
-#if !MIN_VERSION_ghc(9,5,0)
-instance NFData HsDocString where
- rnf = rwhnf
-#endif
-
instance Show ModGuts where
show _ = "modguts"
instance NFData ModGuts where
@@ -183,11 +187,7 @@ instance NFData ModGuts where
instance NFData (ImportDecl GhcPs) where
rnf = rwhnf
-#if MIN_VERSION_ghc(9,5,0)
instance (NFData (HsModule a)) where
-#else
-instance (NFData HsModule) where
-#endif
rnf = rwhnf
instance Show OccName where show = unpack . printOutputable
@@ -218,10 +218,8 @@ instance NFData UnitId where
instance NFData NodeKey where
rnf = rwhnf
-#if MIN_VERSION_ghc(9,5,0)
instance NFData HomeModLinkable where
rnf = rwhnf
-#endif
instance NFData (HsExpr (GhcPass Renamed)) where
rnf = rwhnf
@@ -240,16 +238,3 @@ instance NFData Extension where
instance NFData (UniqFM Name [Name]) where
rnf (ufmToIntMap -> m) = rnf m
-
-#if !MIN_VERSION_ghc(9,5,0)
-instance NFData DuplicateRecordFields where
- rnf DuplicateRecordFields = ()
- rnf NoDuplicateRecordFields = ()
-
-instance NFData FieldSelectors where
- rnf FieldSelectors = ()
- rnf NoFieldSelectors = ()
-
-instance NFData FieldLabel where
- rnf (FieldLabel a b c d) = rnf a `seq` rnf b `seq` rnf c `seq` rnf d
-#endif
diff --git a/ghcide/src/Development/IDE/GHC/Util.hs b/ghcide/src/Development/IDE/GHC/Util.hs
index a6e0c10461..fb051bda5a 100644
--- a/ghcide/src/Development/IDE/GHC/Util.hs
+++ b/ghcide/src/Development/IDE/GHC/Util.hs
@@ -27,7 +27,8 @@ module Development.IDE.GHC.Util(
dontWriteHieFiles,
disableWarningsAsErrors,
printOutputable,
- getExtensions
+ getExtensions,
+ stripOccNamePrefix,
) where
import Control.Concurrent
@@ -62,6 +63,7 @@ import GHC.IO.Handle.Types
import Ide.PluginUtils (unescape)
import System.FilePath
+import Data.Monoid (First (..))
import GHC.Data.EnumSet
import GHC.Data.FastString
import GHC.Data.StringBuffer
@@ -271,3 +273,55 @@ printOutputable =
getExtensions :: ParsedModule -> [Extension]
getExtensions = toList . extensionFlags . ms_hspp_opts . pm_mod_summary
+
+-- | When e.g. DuplicateRecordFields is enabled, the compiler generates
+-- names like "$sel:accessor:One" and "$sel:accessor:Two" to
+-- disambiguate record selectors.
+-- See https://ghc.haskell.org/trac/ghc/wiki/Records/OverloadedRecordFields/DuplicateRecordFields#Implementation
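+--
+-- A rough sketch of the intended behaviour (literals are illustrative):
+-- @stripOccNamePrefix "$sel:accessor:One"@ yields @"accessor"@, while a plain
+-- name such as @"accessor"@ is returned unchanged.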
+stripOccNamePrefix :: T.Text -> T.Text
+stripOccNamePrefix name = T.takeWhile (/=':') $ fromMaybe name $
+ getFirst $ foldMap (First . (`T.stripPrefix` name))
+ occNamePrefixes
+
+-- | Prefixes that can occur in a GHC OccName
+occNamePrefixes :: [T.Text]
+occNamePrefixes =
+ [
+ -- long ones
+ "$con2tag_"
+ , "$tag2con_"
+ , "$maxtag_"
+
+ -- four chars
+ , "$sel:"
+ , "$tc'"
+
+ -- three chars
+ , "$dm"
+ , "$co"
+ , "$tc"
+ , "$cp"
+ , "$fx"
+
+ -- two chars
+ , "$W"
+ , "$w"
+ , "$m"
+ , "$b"
+ , "$c"
+ , "$d"
+ , "$i"
+ , "$s"
+ , "$f"
+ , "$r"
+ , "C:"
+ , "N:"
+ , "D:"
+ , "$p"
+ , "$L"
+ , "$f"
+ , "$t"
+ , "$c"
+ , "$m"
+ ]
+
diff --git a/ghcide/src/Development/IDE/Import/DependencyInformation.hs b/ghcide/src/Development/IDE/Import/DependencyInformation.hs
index 5372a1364a..471cf52eab 100644
--- a/ghcide/src/Development/IDE/Import/DependencyInformation.hs
+++ b/ghcide/src/Development/IDE/Import/DependencyInformation.hs
@@ -20,6 +20,7 @@ module Development.IDE.Import.DependencyInformation
, insertImport
, pathToId
, idToPath
+ , idToModLocation
, reachableModules
, processDependencyInformation
, transitiveDeps
@@ -28,6 +29,7 @@ module Development.IDE.Import.DependencyInformation
, lookupModuleFile
, BootIdMap
, insertBootId
+ , lookupFingerprint
) where
import Control.DeepSeq
@@ -48,6 +50,8 @@ import qualified Data.List.NonEmpty as NonEmpty
import Data.Maybe
import Data.Tuple.Extra hiding (first, second)
import Development.IDE.GHC.Compat
+import Development.IDE.GHC.Compat.Util (Fingerprint)
+import qualified Development.IDE.GHC.Compat.Util as Util
import Development.IDE.GHC.Orphans ()
import Development.IDE.Import.FindImports (ArtifactsLocation (..))
import Development.IDE.Types.Diagnostics
@@ -135,23 +139,35 @@ data RawDependencyInformation = RawDependencyInformation
data DependencyInformation =
DependencyInformation
- { depErrorNodes :: !(FilePathIdMap (NonEmpty NodeError))
+ { depErrorNodes :: !(FilePathIdMap (NonEmpty NodeError))
-- ^ Nodes that cannot be processed correctly.
- , depModules :: !(FilePathIdMap ShowableModule)
- , depModuleDeps :: !(FilePathIdMap FilePathIdSet)
+ , depModules :: !(FilePathIdMap ShowableModule)
+ , depModuleDeps :: !(FilePathIdMap FilePathIdSet)
-- ^ For a non-error node, this contains the set of module immediate dependencies
-- in the same package.
- , depReverseModuleDeps :: !(IntMap IntSet)
+ , depReverseModuleDeps :: !(IntMap IntSet)
-- ^ Contains a reverse mapping from a module to all those that immediately depend on it.
- , depPathIdMap :: !PathIdMap
+ , depPathIdMap :: !PathIdMap
-- ^ Map from FilePath to FilePathId
- , depBootMap :: !BootIdMap
+ , depBootMap :: !BootIdMap
-- ^ Map from hs-boot file to the corresponding hs file
- , depModuleFiles :: !(ShowableModuleEnv FilePathId)
+ , depModuleFiles :: !(ShowableModuleEnv FilePathId)
-- ^ Map from Module to the corresponding non-boot hs file
- , depModuleGraph :: !ModuleGraph
+ , depModuleGraph :: !ModuleGraph
+ , depTransDepsFingerprints :: !(FilePathIdMap Fingerprint)
+ -- ^ Map from FilePathId to the fingerprint of the transitive dependencies of the module.
+ , depTransReverseDepsFingerprints :: !(FilePathIdMap Fingerprint)
+ -- ^ Map from FilePathId to the fingerprint of the transitive reverse dependencies of the module.
+ , depImmediateReverseDepsFingerprints :: !(FilePathIdMap Fingerprint)
+ -- ^ Map from FilePathId to the fingerprint of the immediate reverse dependencies of the module.
} deriving (Show, Generic)
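+-- | Look up the precomputed fingerprint of a file in one of the fingerprint
+-- maps of 'DependencyInformation', e.g.
+-- @lookupFingerprint file depInfo (depTransDepsFingerprints depInfo)@
+-- (illustrative usage). Returns 'Nothing' if the file is not known to the
+-- path-id map or has no fingerprint recorded.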
+lookupFingerprint :: NormalizedFilePath -> DependencyInformation -> FilePathIdMap Fingerprint -> Maybe Fingerprint
+lookupFingerprint fileId DependencyInformation {..} depFingerprintMap =
+ do
+ FilePathId cur_id <- lookupPathToId depPathIdMap fileId
+ IntMap.lookup cur_id depFingerprintMap
+
newtype ShowableModule =
ShowableModule {showableModule :: Module}
deriving NFData
@@ -227,8 +243,8 @@ instance Semigroup NodeResult where
SuccessNode _ <> ErrorNode errs = ErrorNode errs
SuccessNode a <> SuccessNode _ = SuccessNode a
-processDependencyInformation :: RawDependencyInformation -> BootIdMap -> ModuleGraph -> DependencyInformation
-processDependencyInformation RawDependencyInformation{..} rawBootMap mg =
+processDependencyInformation :: RawDependencyInformation -> BootIdMap -> ModuleGraph -> FilePathIdMap Fingerprint -> DependencyInformation
+processDependencyInformation RawDependencyInformation{..} rawBootMap mg shallowFingerMap =
DependencyInformation
{ depErrorNodes = IntMap.fromList errorNodes
, depModuleDeps = moduleDeps
@@ -238,6 +254,9 @@ processDependencyInformation RawDependencyInformation{..} rawBootMap mg =
, depBootMap = rawBootMap
, depModuleFiles = ShowableModuleEnv reverseModuleMap
, depModuleGraph = mg
+ , depTransDepsFingerprints = buildTransDepsFingerprintMap moduleDeps shallowFingerMap
+ , depTransReverseDepsFingerprints = buildTransDepsFingerprintMap reverseModuleDeps shallowFingerMap
+ , depImmediateReverseDepsFingerprints = buildImmediateDepsFingerprintMap reverseModuleDeps shallowFingerMap
}
where resultGraph = buildResultGraph rawImports
(errorNodes, successNodes) = partitionNodeResults $ IntMap.toList resultGraph
@@ -397,3 +416,44 @@ instance NFData NamedModuleDep where
instance Show NamedModuleDep where
show NamedModuleDep{..} = show nmdFilePath
+
+
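+-- | Build a map from each file to a fingerprint combining its own shallow
+-- fingerprint with the shallow fingerprints of its immediate entries in the
+-- given dependency map; 'processDependencyInformation' applies it to the
+-- reverse-dependency map.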
+buildImmediateDepsFingerprintMap :: FilePathIdMap FilePathIdSet -> FilePathIdMap Fingerprint -> FilePathIdMap Fingerprint
+buildImmediateDepsFingerprintMap modulesDeps shallowFingers =
+ IntMap.fromList
+ $ map
+ ( \k ->
+ ( k,
+ Util.fingerprintFingerprints $
+ map
+ (shallowFingers IntMap.!)
+ (k : IntSet.toList (IntMap.findWithDefault IntSet.empty k modulesDeps))
+ )
+ )
+ $ IntMap.keys shallowFingers
+
+-- | Build a map from file path to its full fingerprint.
+-- The fingerprint depends on both the fingerprint of the file and the fingerprints of all its dependencies.
+-- This is used to determine if a file has changed and needs to be reloaded.
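+--
+-- A small worked example (illustrative): if A depends on B and B depends on C,
+-- then, writing @shallow X@ for the per-file fingerprint and @fp X@ for the
+-- combined one,
+--
+-- > fp C = Util.fingerprintFingerprints [shallow C]
+-- > fp B = Util.fingerprintFingerprints [shallow B, fp C]
+-- > fp A = Util.fingerprintFingerprints [shallow A, fp B]
+--
+-- so a change in C propagates to the fingerprints of B and A.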
+buildTransDepsFingerprintMap :: FilePathIdMap FilePathIdSet -> FilePathIdMap Fingerprint -> FilePathIdMap Fingerprint
+buildTransDepsFingerprintMap modulesDeps shallowFingers = go keys IntMap.empty
+ where
+ keys = IntMap.keys shallowFingers
+ go :: [IntSet.Key] -> FilePathIdMap Fingerprint -> FilePathIdMap Fingerprint
+ go keys acc =
+ case keys of
+ [] -> acc
+ k : ks ->
+ if IntMap.member k acc
+ -- already in the map, so we can skip
+ then go ks acc
+ -- not in the map, so we need to add it
+ else
+ let -- get the dependencies of the current key
+ deps = IntSet.toList $ IntMap.findWithDefault IntSet.empty k modulesDeps
+ -- add fingerprints of the dependencies to the accumulator
+ depFingerprints = go deps acc
+ -- combine the fingerprints of the dependencies with the current key
+ combinedFingerprints = Util.fingerprintFingerprints $ shallowFingers IntMap.! k : map (depFingerprints IntMap.!) deps
+ in -- add the combined fingerprints to the accumulator
+ go ks (IntMap.insert k combinedFingerprints depFingerprints)
diff --git a/ghcide/src/Development/IDE/Import/FindImports.hs b/ghcide/src/Development/IDE/Import/FindImports.hs
index 7fa287836b..7c4046a63a 100644
--- a/ghcide/src/Development/IDE/Import/FindImports.hs
+++ b/ghcide/src/Development/IDE/Import/FindImports.hs
@@ -29,6 +29,10 @@ import GHC.Unit.State
import System.FilePath
+#if MIN_VERSION_ghc(9,11,0)
+import GHC.Driver.DynFlags
+#endif
+
data Import
= FileImport !ArtifactsLocation
| PackageImport
@@ -96,7 +100,11 @@ locateModuleFile import_dirss exts targetFor isSource modName = do
-- current module. In particular, it will return Nothing for 'main' components
-- as they can never be imported into another package.
mkImportDirs :: HscEnv -> (UnitId, DynFlags) -> Maybe (UnitId, ([FilePath], S.Set ModuleName))
+#if MIN_VERSION_ghc(9,11,0)
+mkImportDirs _env (i, flags) = Just (i, (importPaths flags, S.fromList $ map reexportTo $ reexportedModules flags))
+#else
mkImportDirs _env (i, flags) = Just (i, (importPaths flags, reexportedModules flags))
+#endif
-- | locate a module in either the file system or the package database. Where we go from *daml to
-- Haskell
@@ -137,31 +145,23 @@ locateModule env comp_info exts targetFor modName mbPkgName isSource = do
dflags = hsc_dflags env
import_paths = mapMaybe (mkImportDirs env) comp_info
other_imports =
-#if MIN_VERSION_ghc(9,4,0)
- -- On 9.4+ instead of bringing all the units into scope, only bring into scope the units
- -- this one depends on
+ -- Instead of bringing all the units into scope, only bring into scope the units
+ -- this one depends on.
-- This way if you have multiple units with the same module names, we won't get confused
-- For example if unit a imports module M from unit B, when there is also a module M in unit C,
-- and unit a only depends on unit b, without this logic there is the potential to get confused
-- about which module unit a imports.
-- Without multi-component support it is hard to reconstruct the dependency environment so
-- unit a will have both unit b and unit c in scope.
+#if MIN_VERSION_ghc(9,11,0)
+ map (\uid -> let this_df = homeUnitEnv_dflags (ue_findHomeUnitEnv uid ue) in (uid, importPaths this_df, S.fromList $ map reexportTo $ reexportedModules this_df)) hpt_deps
+#else
map (\uid -> let this_df = homeUnitEnv_dflags (ue_findHomeUnitEnv uid ue) in (uid, importPaths this_df, reexportedModules this_df)) hpt_deps
+#endif
ue = hsc_unit_env env
units = homeUnitEnv_units $ ue_findHomeUnitEnv (homeUnitId_ dflags) ue
hpt_deps :: [UnitId]
hpt_deps = homeUnitDepends units
-#else
- _import_paths'
-#endif
-
- -- first try to find the module as a file. If we can't find it try to find it in the package
- -- database.
- -- Here the importPaths for the current modules are added to the front of the import paths from the other components.
- -- This is particularly important for Paths_* modules which get generated for every component but unless you use it in
- -- each component will end up being found in the wrong place and cause a multi-cradle match failure.
- _import_paths' = -- import_paths' is only used in GHC < 9.4
- import_paths
toModLocation uid file = liftIO $ do
loc <- mkHomeModLocation dflags (unLoc modName) (fromNormalizedFilePath file)
diff --git a/ghcide/src/Development/IDE/Main.hs b/ghcide/src/Development/IDE/Main.hs
index 62b71c3ab6..872e957364 100644
--- a/ghcide/src/Development/IDE/Main.hs
+++ b/ghcide/src/Development/IDE/Main.hs
@@ -15,8 +15,6 @@ import Control.Concurrent.Extra (withNumCapabilities)
import Control.Concurrent.MVar (newEmptyMVar,
putMVar, tryReadMVar)
import Control.Concurrent.STM.Stats (dumpSTMStats)
-import Control.Exception.Safe (SomeException,
- displayException)
import Control.Monad.Extra (concatMapM, unless,
when)
import Control.Monad.IO.Class (liftIO)
diff --git a/ghcide/src/Development/IDE/Plugin/Completions.hs b/ghcide/src/Development/IDE/Plugin/Completions.hs
index 0564855177..d92bf1da85 100644
--- a/ghcide/src/Development/IDE/Plugin/Completions.hs
+++ b/ghcide/src/Development/IDE/Plugin/Completions.hs
@@ -114,15 +114,10 @@ produceCompletions recorder = do
-- Drop any explicit imports in ImportDecl if not hidden
dropListFromImportDecl :: LImportDecl GhcPs -> LImportDecl GhcPs
dropListFromImportDecl iDecl = let
-#if MIN_VERSION_ghc(9,5,0)
f d@ImportDecl {ideclImportList} = case ideclImportList of
Just (Exactly, _) -> d {ideclImportList=Nothing}
-#else
- f d@ImportDecl {ideclHiding} = case ideclHiding of
- Just (False, _) -> d {ideclHiding=Nothing}
-#endif
-- if hiding or Nothing just return d
- _ -> d
+ _ -> d
f x = x
in f <$> iDecl
diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs
index 9fdc196cd5..a00705ba39 100644
--- a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs
+++ b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs
@@ -37,14 +37,12 @@ import Data.Aeson (ToJSON (toJSON))
import Data.Function (on)
import qualified Data.HashSet as HashSet
-import Data.Monoid (First (..))
import Data.Ord (Down (Down))
import qualified Data.Set as Set
import Development.IDE.Core.PositionMapping
import Development.IDE.GHC.Compat hiding (isQual, ppr)
import qualified Development.IDE.GHC.Compat as GHC
import Development.IDE.GHC.Compat.Util
-import Development.IDE.GHC.CoreFile (occNamePrefixes)
import Development.IDE.GHC.Error
import Development.IDE.GHC.Util
import Development.IDE.Plugin.Completions.Types
@@ -55,6 +53,7 @@ import Ide.PluginUtils (mkLspCommand)
import Ide.Types (CommandId (..),
IdePlugins (..),
PluginId)
+import Language.Haskell.Syntax.Basic
import qualified Language.LSP.Protocol.Lens as L
import Language.LSP.Protocol.Types
import qualified Language.LSP.VFS as VFS
@@ -74,9 +73,6 @@ import GHC.Plugins (Depth (AllTheWay),
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
-#if MIN_VERSION_ghc(9,5,0)
-import Language.Haskell.Syntax.Basic
-#endif
-- Chunk size used for parallelizing fuzzy matching
chunkSize :: Int
@@ -138,42 +134,23 @@ getCContext pos pm
| pos `isInsideSrcSpan` r = Just TypeContext
goInline _ = Nothing
-#if MIN_VERSION_ghc(9,5,0)
importGo :: GHC.LImportDecl GhcPs -> Maybe Context
importGo (L (locA -> r) impDecl)
| pos `isInsideSrcSpan` r
= importInline importModuleName (fmap (fmap reLoc) $ ideclImportList impDecl)
-#else
- importGo :: GHC.LImportDecl GhcPs -> Maybe Context
- importGo (L (locA -> r) impDecl)
- | pos `isInsideSrcSpan` r
- = importInline importModuleName (fmap (fmap reLoc) $ ideclHiding impDecl)
-#endif
<|> Just (ImportContext importModuleName)
| otherwise = Nothing
where importModuleName = moduleNameString $ unLoc $ ideclName impDecl
-- importInline :: String -> Maybe (Bool, GHC.Located [LIE GhcPs]) -> Maybe Context
-#if MIN_VERSION_ghc(9,5,0)
importInline modName (Just (EverythingBut, L r _))
| pos `isInsideSrcSpan` r = Just $ ImportHidingContext modName
| otherwise = Nothing
-#else
- importInline modName (Just (True, L r _))
- | pos `isInsideSrcSpan` r = Just $ ImportHidingContext modName
- | otherwise = Nothing
-#endif
-#if MIN_VERSION_ghc(9,5,0)
importInline modName (Just (Exactly, L r _))
| pos `isInsideSrcSpan` r = Just $ ImportListContext modName
| otherwise = Nothing
-#else
- importInline modName (Just (False, L r _))
- | pos `isInsideSrcSpan` r = Just $ ImportListContext modName
- | otherwise = Nothing
-#endif
importInline _ _ = Nothing
@@ -261,7 +238,7 @@ mkNameCompItem doc thingParent origName provenance isInfix !imp mod = CI {..}
compKind = occNameToComKind origName
isTypeCompl = isTcOcc origName
typeText = Nothing
- label = stripPrefix $ printOutputable origName
+ label = stripOccNamePrefix $ printOutputable origName
insertText = case isInfix of
Nothing -> label
Just LeftSide -> label <> "`"
@@ -801,17 +778,6 @@ openingBacktick line prefixModule prefixText Position { _character=(fromIntegral
-- ---------------------------------------------------------------------
--- | Under certain circumstance GHC generates some extra stuff that we
--- don't want in the autocompleted symbols
- {- When e.g. DuplicateRecordFields is enabled, compiler generates
- names like "$sel:accessor:One" and "$sel:accessor:Two" to disambiguate record selectors
- https://ghc.haskell.org/trac/ghc/wiki/Records/OverloadedRecordFields/DuplicateRecordFields#Implementation
- -}
--- TODO: Turn this into an alex lexer that discards prefixes as if they were whitespace.
-stripPrefix :: T.Text -> T.Text
-stripPrefix name = T.takeWhile (/=':') $ fromMaybe name $
- getFirst $ foldMap (First . (`T.stripPrefix` name)) occNamePrefixes
-
mkRecordSnippetCompItem :: Uri -> Maybe T.Text -> T.Text -> [T.Text] -> Provenance -> Maybe (LImportDecl GhcPs) -> CompItem
mkRecordSnippetCompItem uri parent ctxStr compl importedFrom imp = r
where
@@ -912,7 +878,9 @@ getCompletionPrefixFromRope pos@(Position l c) ropetext =
[] -> Nothing
(x:xs) -> do
let modParts = reverse $ filter (not .T.null) xs
- modName = T.intercalate "." modParts
+ -- Must check that the prefix is a valid module name; otherwise record-dot
+ -- accesses (e.g. @foo.bar@) would treat the record name as a module
+ -- qualifier when searching and generating imports.
+ modName = if all (isUpper . T.head) modParts then T.intercalate "." modParts else ""
return $ PosPrefixInfo { fullLine = curLine, prefixScope = modName, prefixText = x, cursorPos = pos }
completionPrefixPos :: PosPrefixInfo -> Position
diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Types.hs b/ghcide/src/Development/IDE/Plugin/Completions/Types.hs
index 2d950d66a9..338b969bab 100644
--- a/ghcide/src/Development/IDE/Plugin/Completions/Types.hs
+++ b/ghcide/src/Development/IDE/Plugin/Completions/Types.hs
@@ -16,7 +16,6 @@ import Data.Aeson
import Data.Aeson.Types
import Data.Hashable (Hashable)
import Data.Text (Text)
-import Data.Typeable (Typeable)
import Development.IDE.GHC.Compat
import Development.IDE.Graph (RuleResult)
import Development.IDE.Spans.Common ()
@@ -31,12 +30,12 @@ type instance RuleResult LocalCompletions = CachedCompletions
type instance RuleResult NonLocalCompletions = CachedCompletions
data LocalCompletions = LocalCompletions
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable LocalCompletions
instance NFData LocalCompletions
data NonLocalCompletions = NonLocalCompletions
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable NonLocalCompletions
instance NFData NonLocalCompletions
diff --git a/ghcide/src/Development/IDE/Plugin/TypeLenses.hs b/ghcide/src/Development/IDE/Plugin/TypeLenses.hs
index a1aa237de8..c596d1fb82 100644
--- a/ghcide/src/Development/IDE/Plugin/TypeLenses.hs
+++ b/ghcide/src/Development/IDE/Plugin/TypeLenses.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE TypeFamilies #-}
@@ -15,7 +16,7 @@ module Development.IDE.Plugin.TypeLenses (
import Control.Concurrent.STM.Stats (atomically)
import Control.DeepSeq (rwhnf)
-import Control.Lens ((?~))
+import Control.Lens (to, (?~), (^?))
import Control.Monad (mzero)
import Control.Monad.Extra (whenMaybe)
import Control.Monad.IO.Class (MonadIO (liftIO))
@@ -24,13 +25,17 @@ import Data.Aeson.Types (toJSON)
import qualified Data.Aeson.Types as A
import Data.List (find)
import qualified Data.Map as Map
-import Data.Maybe (catMaybes, maybeToList)
+import Data.Maybe (catMaybes, isJust,
+ maybeToList)
import qualified Data.Text as T
import Development.IDE (FileDiagnostic (..),
GhcSession (..),
HscEnvEq (hscEnv),
RuleResult, Rules, Uri,
- define, srcSpanToRange,
+ _SomeStructuredMessage,
+ define,
+ fdStructuredMessageL,
+ srcSpanToRange,
usePropertyAction)
import Development.IDE.Core.Compile (TcModuleResult (..))
import Development.IDE.Core.PluginUtils
@@ -44,6 +49,10 @@ import Development.IDE.Core.Shake (getHiddenDiagnostics,
use)
import qualified Development.IDE.Core.Shake as Shake
import Development.IDE.GHC.Compat
+import Development.IDE.GHC.Compat.Error (_TcRnMessage,
+ _TcRnMissingSignature,
+ msgEnvelopeErrorL,
+ stripTcRnMessageContext)
import Development.IDE.GHC.Util (printName)
import Development.IDE.Graph.Classes
import Development.IDE.Types.Location (Position (Position, _line),
@@ -128,9 +137,9 @@ codeLensProvider ideState pId CodeLensParams{_textDocument = TextDocumentIdentif
-- dummy type to make sure HLS resolves our lens
[ CodeLens _range Nothing (Just $ toJSON TypeLensesResolve)
| diag <- diags
- , let lspDiag@Diagnostic {_range} = fdLspDiagnostic diag
+ , let Diagnostic {_range} = fdLspDiagnostic diag
, fdFilePath diag == nfp
- , isGlobalDiagnostic lspDiag]
+ , isGlobalDiagnostic diag]
-- The second option is to generate lenses from the GlobalBindingTypeSig
-- rule. This is the only type that needs to have the range adjusted
-- with PositionMapping.
@@ -199,7 +208,7 @@ commandHandler _ideState _ wedit = do
pure $ InR Null
--------------------------------------------------------------------------------
-suggestSignature :: Bool -> Maybe GlobalBindingTypeSigsResult -> Diagnostic -> [(T.Text, TextEdit)]
+suggestSignature :: Bool -> Maybe GlobalBindingTypeSigsResult -> FileDiagnostic -> [(T.Text, TextEdit)]
suggestSignature isQuickFix mGblSigs diag =
maybeToList (suggestGlobalSignature isQuickFix mGblSigs diag)
@@ -207,14 +216,19 @@ suggestSignature isQuickFix mGblSigs diag =
-- works with a diagnostic, which then calls the secondary function with
-- whatever pieces of the diagnostic it needs. This allows the resolve function,
-- which no longer has the Diagnostic, to still call the secondary functions.
-suggestGlobalSignature :: Bool -> Maybe GlobalBindingTypeSigsResult -> Diagnostic -> Maybe (T.Text, TextEdit)
-suggestGlobalSignature isQuickFix mGblSigs diag@Diagnostic{_range}
+suggestGlobalSignature :: Bool -> Maybe GlobalBindingTypeSigsResult -> FileDiagnostic -> Maybe (T.Text, TextEdit)
+suggestGlobalSignature isQuickFix mGblSigs diag@FileDiagnostic {fdLspDiagnostic = Diagnostic {_range}}
| isGlobalDiagnostic diag =
suggestGlobalSignature' isQuickFix mGblSigs Nothing _range
| otherwise = Nothing
-isGlobalDiagnostic :: Diagnostic -> Bool
-isGlobalDiagnostic Diagnostic{_message} = _message =~ ("(Top-level binding|Pattern synonym) with no type signature" :: T.Text)
+isGlobalDiagnostic :: FileDiagnostic -> Bool
+isGlobalDiagnostic diag = diag ^? fdStructuredMessageL
+ . _SomeStructuredMessage
+ . msgEnvelopeErrorL
+ . _TcRnMessage
+ . _TcRnMissingSignature
+ & isJust
-- If a PositionMapping is supplied, this function will call
-- gblBindingTypeSigToEdit with it to create a TextEdit in the right location.
@@ -321,7 +335,11 @@ gblBindingType (Just hsc) (Just gblEnv) = do
let name = idName identifier
hasSig name $ do
env <- tcInitTidyEnv
+#if MIN_VERSION_ghc(9,11,0)
+ let ty = tidyOpenType env (idType identifier)
+#else
let (_, ty) = tidyOpenType env (idType identifier)
+#endif
pure $ GlobalBindingTypeSig name (printName name <> " :: " <> showDoc (pprSigmaType ty)) (name `elemNameSet` exports)
patToSig p = do
let name = patSynName p
diff --git a/ghcide/src/Development/IDE/Spans/AtPoint.hs b/ghcide/src/Development/IDE/Spans/AtPoint.hs
index 4fafa3e952..a577cae32e 100644
--- a/ghcide/src/Development/IDE/Spans/AtPoint.hs
+++ b/ghcide/src/Development/IDE/Spans/AtPoint.hs
@@ -36,6 +36,7 @@ import Language.LSP.Protocol.Types hiding
import Prelude hiding (mod)
-- compiler and infrastructure
+import Development.IDE.Core.Compile (setNonHomeFCHook)
import Development.IDE.Core.PositionMapping
import Development.IDE.Core.RuleTypes
import Development.IDE.GHC.Compat
@@ -306,7 +307,7 @@ atPoint IdeOptions{} (HAR _ (hf :: HieASTs a) rf _ (kind :: HieKind hietype)) (D
-- the package(with version) this `ModuleName` belongs to.
packageNameForImportStatement :: ModuleName -> IO T.Text
packageNameForImportStatement mod = do
- mpkg <- findImportedModule env mod :: IO (Maybe Module)
+ mpkg <- findImportedModule (setNonHomeFCHook env) mod :: IO (Maybe Module)
let moduleName = printOutputable mod
case mpkg >>= packageNameWithVersion of
Nothing -> pure moduleName
diff --git a/ghcide/src/Development/IDE/Spans/Common.hs b/ghcide/src/Development/IDE/Spans/Common.hs
index ee8a8c18bc..f3e86d792d 100644
--- a/ghcide/src/Development/IDE/Spans/Common.hs
+++ b/ghcide/src/Development/IDE/Spans/Common.hs
@@ -38,11 +38,7 @@ type DocMap = NameEnv SpanDoc
type TyThingMap = NameEnv TyThing
-- | Shows IEWrappedName, without any modifier, qualifier or unique identifier.
-#if MIN_VERSION_ghc(9,5,0)
unqualIEWrapName :: IEWrappedName GhcPs -> T.Text
-#else
-unqualIEWrapName :: IEWrappedName RdrName -> T.Text
-#endif
unqualIEWrapName = printOutputable . rdrNameOcc . ieWrappedName
-- From haskell-ide-engine/src/Haskell/Ide/Engine/Support/HieExtras.hs
diff --git a/ghcide/src/Development/IDE/Types/Diagnostics.hs b/ghcide/src/Development/IDE/Types/Diagnostics.hs
index cbd49a91f8..5072fa7ffa 100644
--- a/ghcide/src/Development/IDE/Types/Diagnostics.hs
+++ b/ghcide/src/Development/IDE/Types/Diagnostics.hs
@@ -24,9 +24,7 @@ module Development.IDE.Types.Diagnostics (
ideErrorFromLspDiag,
showDiagnostics,
showDiagnosticsColored,
-#if MIN_VERSION_ghc(9,5,0)
showGhcCode,
-#endif
IdeResultNoDiagnosticsEarlyCutoff,
attachReason,
attachedReason) where
@@ -37,7 +35,7 @@ import Control.Lens
import qualified Data.Aeson as JSON
import qualified Data.Aeson.Lens as JSON
import Data.ByteString (ByteString)
-import Data.List
+import Data.Foldable
import Data.Maybe as Maybe
import qualified Data.Text as T
import Development.IDE.GHC.Compat (GhcMessage, MsgEnvelope,
@@ -45,17 +43,11 @@ import Development.IDE.GHC.Compat (GhcMessage, MsgEnvelope,
flagSpecName, wWarningFlags)
import Development.IDE.Types.Location
import GHC.Generics
-#if MIN_VERSION_ghc(9,5,0)
import GHC.Types.Error (DiagnosticCode (..),
DiagnosticReason (..),
diagnosticCode,
diagnosticReason,
errMsgDiagnostic)
-#else
-import GHC.Types.Error (DiagnosticReason (..),
- diagnosticReason,
- errMsgDiagnostic)
-#endif
import Language.LSP.Diagnostics
import Language.LSP.Protocol.Lens (data_)
import Language.LSP.Protocol.Types as LSP
@@ -110,30 +102,25 @@ ideErrorFromLspDiag lspDiag fdFilePath mbOrigMsg =
fdLspDiagnostic =
lspDiag
& attachReason (fmap (diagnosticReason . errMsgDiagnostic) mbOrigMsg)
- & setGhcCode mbOrigMsg
+ & attachDiagnosticCode ((diagnosticCode . errMsgDiagnostic) =<< mbOrigMsg)
in
FileDiagnostic {..}
--- | Set the code of the 'LSP.Diagnostic' to the GHC diagnostic code which is linked
+-- | Set the code of the 'LSP.Diagnostic' to the GHC diagnostic code, and include the link
-- to https://errors.haskell.org/.
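+-- For instance (illustrative only): a diagnostic whose code renders as
+-- @"GHC-88464"@ ends up with @_code = Just (InR "GHC-88464")@ and a code
+-- description pointing at <https://errors.haskell.org/messages/GHC-88464>.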
-setGhcCode :: Maybe (MsgEnvelope GhcMessage) -> LSP.Diagnostic -> LSP.Diagnostic
-#if MIN_VERSION_ghc(9,5,0)
-setGhcCode mbOrigMsg diag =
- let mbGhcCode = do
- origMsg <- mbOrigMsg
- code <- diagnosticCode (errMsgDiagnostic origMsg)
- pure (InR (showGhcCode code))
- in
- diag { _code = mbGhcCode <|> _code diag }
-#else
-setGhcCode _ diag = diag
-#endif
+attachDiagnosticCode :: Maybe DiagnosticCode -> LSP.Diagnostic -> LSP.Diagnostic
+attachDiagnosticCode Nothing diag = diag
+attachDiagnosticCode (Just code) diag =
+ let
+ textualCode = showGhcCode code
+ codeDesc = LSP.CodeDescription{ _href = Uri $ "https://errors.haskell.org/messages/" <> textualCode }
+ in diag { _code = Just (InR textualCode), _codeDescription = Just codeDesc}
#if MIN_VERSION_ghc(9,9,0)
-- DiagnosticCode only got a show instance in 9.10.1
showGhcCode :: DiagnosticCode -> T.Text
showGhcCode = T.pack . show
-#elif MIN_VERSION_ghc(9,5,0)
+#else
showGhcCode :: DiagnosticCode -> T.Text
showGhcCode (DiagnosticCode prefix c) = T.pack $ prefix ++ "-" ++ printf "%05d" c
#endif
@@ -146,7 +133,10 @@ attachReason Nothing = id
attachReason (Just wr) = attachedReason .~ fmap JSON.toJSON (showReason wr)
where
showReason = \case
- WarningWithFlag flag -> showFlag flag
+ WarningWithFlag flag -> Just $ catMaybes [showFlag flag]
+#if MIN_VERSION_ghc(9,7,0)
+ WarningWithFlags flags -> Just $ catMaybes (fmap showFlag $ toList flags)
+#endif
_ -> Nothing
showFlag :: WarningFlag -> Maybe T.Text
diff --git a/ghcide/src/Development/IDE/Types/HscEnvEq.hs b/ghcide/src/Development/IDE/Types/HscEnvEq.hs
index 10dc1b8f9f..1c2ed1732f 100644
--- a/ghcide/src/Development/IDE/Types/HscEnvEq.hs
+++ b/ghcide/src/Development/IDE/Types/HscEnvEq.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE CPP #-}
module Development.IDE.Types.HscEnvEq
( HscEnvEq,
hscEnv, newHscEnvEq,
@@ -13,6 +14,8 @@ import Control.DeepSeq (force, rwhnf)
import Control.Exception (evaluate, mask, throwIO)
import Control.Monad.Extra (eitherM, join, mapMaybeM)
import Data.Either (fromRight)
+import Data.IORef
+import qualified Data.Map as M
import Data.Unique (Unique)
import qualified Data.Unique as Unique
import Development.IDE.GHC.Compat hiding (newUnique)
@@ -21,7 +24,11 @@ import Development.IDE.GHC.Error (catchSrcErrors)
import Development.IDE.GHC.Util (lookupPackageConfig)
import Development.IDE.Graph.Classes
import Development.IDE.Types.Exports (ExportsMap, createExportsMap)
+import GHC.Driver.Env (hsc_all_home_unit_ids)
+import Ide.PluginUtils (toAbsolute)
import OpenTelemetry.Eventlog (withSpan)
+import System.Directory (makeAbsolute)
+
-- | An 'HscEnv' with equality. Two values are considered equal
-- if they are created with the same call to 'newHscEnvEq' or
@@ -44,7 +51,32 @@ updateHscEnvEq oldHscEnvEq newHscEnv = do
-- | Wrap an 'HscEnv' into an 'HscEnvEq'.
newHscEnvEq :: HscEnv -> IO HscEnvEq
-newHscEnvEq hscEnv = do
+newHscEnvEq hscEnv' = do
+
+ mod_cache <- newIORef emptyInstalledModuleEnv
+ file_cache <- newIORef M.empty
+ -- This finder cache is for things which are outside of things which are tracked
+ -- by HLS. For example, non-home modules, dependent object files etc
+#if MIN_VERSION_ghc(9,11,0)
+ let hscEnv = hscEnv'
+ { hsc_FC = FinderCache
+ { flushFinderCaches = \_ -> error "GHC should never call flushFinderCaches outside the driver"
+ , addToFinderCache = \(GWIB im _) val -> do
+ if moduleUnit im `elem` hsc_all_home_unit_ids hscEnv'
+ then error "tried to add home module to FC"
+ else atomicModifyIORef' mod_cache $ \c -> (extendInstalledModuleEnv c im val, ())
+ , lookupFinderCache = \(GWIB im _) -> do
+ if moduleUnit im `elem` hsc_all_home_unit_ids hscEnv'
+ then error ("tried to lookup home module from FC" ++ showSDocUnsafe (ppr (im, hsc_all_home_unit_ids hscEnv')))
+ else lookupInstalledModuleEnv <$> readIORef mod_cache <*> pure im
+ , lookupFileCache = \fp -> error ("not used by HLS: " ++ fp)
+ }
+ }
+
+#else
+ let hscEnv = hscEnv'
+#endif
+
let dflags = hsc_dflags hscEnv
envUnique <- Unique.newUnique
diff --git a/ghcide/src/Development/IDE/Types/Shake.hs b/ghcide/src/Development/IDE/Types/Shake.hs
index 2083625c43..cc8f84e3b6 100644
--- a/ghcide/src/Development/IDE/Types/Shake.hs
+++ b/ghcide/src/Development/IDE/Types/Shake.hs
@@ -33,10 +33,9 @@ import GHC.Generics
import HieDb.Types (HieDb)
import qualified StmContainers.Map as STM
import Type.Reflection (SomeTypeRep (SomeTypeRep),
- pattern App, pattern Con,
- typeOf, typeRep,
- typeRepTyCon)
-import Unsafe.Coerce (unsafeCoerce)
+ eqTypeRep, pattern App,
+ type (:~~:) (HRefl),
+ typeOf, typeRep)
-- | Intended to represent HieDb calls wrapped with (currently) retry
-- functionality
@@ -86,11 +85,12 @@ fromKey (Key k)
-- | fromKeyType (Q (k,f)) = (typeOf k, f)
fromKeyType :: Key -> Maybe (SomeTypeRep, NormalizedFilePath)
-fromKeyType (Key k) = case typeOf k of
- App (Con tc) a | tc == typeRepTyCon (typeRep @Q)
- -> case unsafeCoerce k of
- Q (_ :: (), f) -> Just (SomeTypeRep a, f)
- _ -> Nothing
+fromKeyType (Key k)
+ | App tc a <- typeOf k
+ , Just HRefl <- tc `eqTypeRep` (typeRep @Q)
+ , Q (_, f) <- k
+ = Just (SomeTypeRep a, f)
+ | otherwise = Nothing
toNoFileKey :: (Show k, Typeable k, Eq k, Hashable k) => k -> Key
toNoFileKey k = newKey $ Q (k, emptyFilePath)
@@ -101,13 +101,11 @@ newtype Q k = Q (k, NormalizedFilePath)
instance Show k => Show (Q k) where
show (Q (k, file)) = show k ++ "; " ++ fromNormalizedFilePath file
--- | Invariant: the 'v' must be in normal form (fully evaluated).
+-- | Invariant: the @v@ must be in normal form (fully evaluated).
-- Otherwise we keep repeatedly 'rnf'ing values taken from the Shake database
newtype A v = A (Value v)
deriving Show
-instance NFData (A v) where rnf (A v) = v `seq` ()
-
-- In the Shake database we only store one type of key/result pairs,
-- namely Q (question) / A (answer).
type instance RuleResult (Q k) = A (RuleResult k)
diff --git a/ghcide/test/exe/FuzzySearch.hs b/ghcide/test/exe/FuzzySearch.hs
deleted file mode 100644
index 3bc3ecb4b1..0000000000
--- a/ghcide/test/exe/FuzzySearch.hs
+++ /dev/null
@@ -1,130 +0,0 @@
-module FuzzySearch (tests) where
-
-import Data.Char (toLower)
-import Data.Maybe (catMaybes)
-import qualified Data.Monoid.Textual as T
-import Data.Text (Text, inits, pack)
-import qualified Data.Text as Text
-import Prelude hiding (filter)
-import System.Directory (doesFileExist)
-import System.IO.Unsafe (unsafePerformIO)
-import Test.QuickCheck
-import Test.Tasty
-import Test.Tasty.ExpectedFailure
-import Test.Tasty.QuickCheck (testProperty)
-import qualified Text.Fuzzy as Fuzzy
-import Text.Fuzzy (Fuzzy (..))
-import Text.Fuzzy.Parallel
-
-tests :: TestTree
-tests =
- testGroup
- "Fuzzy search"
- [ needDictionary $
- testGroup
- "match works as expected on the english dictionary"
- [ testProperty "for legit words" propLegit,
- testProperty "for prefixes" propPrefix,
- testProperty "for typos" propTypo
- ]
- ]
-
-test :: Text -> Bool
-test candidate = do
- let previous =
- catMaybes
- [ (d,) . Fuzzy.score
- <$> referenceImplementation candidate d "" "" id
- | d <- dictionary
- ]
- new = catMaybes [(d,) <$> match candidate d | d <- dictionary]
- previous == new
-
-propLegit :: Property
-propLegit = forAll (elements dictionary) test
-
-propPrefix :: Property
-propPrefix = forAll (elements dictionary >>= elements . inits) test
-
-propTypo :: Property
-propTypo = forAll typoGen test
-
-typoGen :: Gen Text
-typoGen = do
- w <- elements dictionary
- l <- elements [0 .. Text.length w -1]
- let wl = Text.index w l
- c <- elements [ c | c <- ['a' .. 'z'], c /= wl]
- return $ replaceAt w l c
-
-replaceAt :: Text -> Int -> Char -> Text
-replaceAt t i c =
- let (l, r) = Text.splitAt i t
- in l <> Text.singleton c <> r
-
-dictionaryPath :: FilePath
-dictionaryPath = "/usr/share/dict/words"
-
-{-# ANN dictionary ("HLint: ignore Avoid restricted function" :: String) #-}
-{-# NOINLINE dictionary #-}
-dictionary :: [Text]
-dictionary = unsafePerformIO $ do
- existsDictionary <- doesFileExist dictionaryPath
- if existsDictionary
- then map pack . words <$> readFile dictionaryPath
- else pure []
-
-referenceImplementation :: forall s t.
- (T.TextualMonoid s) =>
- -- | Pattern in lowercase except for first character
- s ->
- -- | The value containing the text to search in.
- t ->
- -- | The text to add before each match.
- s ->
- -- | The text to add after each match.
- s ->
- -- | The function to extract the text from the container.
- (t -> s) ->
- -- | The original value, rendered string and score.
- Maybe (Fuzzy t s)
-referenceImplementation pat t pre post extract =
- if null pat then Just (Fuzzy t result totalScore) else Nothing
- where
- null :: (T.TextualMonoid s) => s -> Bool
- null = not . T.any (const True)
-
- s = extract t
- (totalScore, _currScore, result, pat, _) =
- T.foldl'
- undefined
- ( \(tot, cur, res, pat, isFirst) c ->
- case T.splitCharacterPrefix pat of
- Nothing -> (tot, 0, res <> T.singleton c, pat, isFirst)
- Just (x, xs) ->
- -- the case of the first character has to match
- -- otherwise use lower case since the pattern is assumed lower
- let !c' = if isFirst then c else toLower c
- in if x == c'
- then
- let cur' = cur * 2 + 1
- in ( tot + cur',
- cur',
- res <> pre <> T.singleton c <> post,
- xs,
- False
- )
- else (tot, 0, res <> T.singleton c, pat, isFirst)
- )
- ( 0,
- 1, -- matching at the start gives a bonus (cur = 1)
- mempty,
- pat,
- True
- )
- s
-
-needDictionary :: TestTree -> TestTree
-needDictionary
- | null dictionary = ignoreTestBecause ("not found: " <> dictionaryPath)
- | otherwise = id
diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal
index dcbb546733..f49c619ec1 100644
--- a/haskell-language-server.cabal
+++ b/haskell-language-server.cabal
@@ -1,7 +1,7 @@
cabal-version: 3.4
category: Development
name: haskell-language-server
-version: 2.9.0.1
+version: 2.11.0.0
synopsis: LSP server for GHC
description:
Please see the README on GitHub at
@@ -14,7 +14,7 @@ copyright: The Haskell IDE Team
license: Apache-2.0
license-file: LICENSE
build-type: Simple
-tested-with: GHC ==9.10.1 || ==9.8.2 || ==9.6.5 || ==9.4.8
+tested-with: GHC == {9.12.2, 9.10.1, 9.8.4, 9.6.7}
extra-source-files:
README.md
ChangeLog.md
@@ -32,6 +32,12 @@ extra-source-files:
plugins/**/*.txt
plugins/**/*.hs
+ ghcide-test/data/**/*.cabal
+ ghcide-test/data/**/*.hs
+ ghcide-test/data/**/*.hs-boot
+ ghcide-test/data/**/*.project
+ ghcide-test/data/**/*.yaml
+
bindist/wrapper.in
source-repository head
@@ -42,6 +48,8 @@ common defaults
default-language: GHC2021
-- Should have been in GHC2021, an oversight
default-extensions: ExplicitNamespaces
+ build-depends:
+ , base >=4.12 && <5
common test-defaults
ghc-options: -threaded -rtsopts -with-rtsopts=-N
@@ -109,7 +117,7 @@ flag cabalfmt
manual: True
common cabalfmt
- if flag(cabalfmt)
+ if flag(cabalfmt) && flag(cabal)
build-depends: haskell-language-server:hls-cabal-fmt-plugin
cpp-options: -Dhls_cabalfmt
@@ -121,16 +129,15 @@ flag isolateCabalfmtTests
library hls-cabal-fmt-plugin
import: defaults, pedantic, warnings
- if !flag(cabalfmt)
+ if !flag(cabalfmt) || !flag(cabal)
buildable: False
exposed-modules: Ide.Plugin.CabalFmt
hs-source-dirs: plugins/hls-cabal-fmt-plugin/src
build-depends:
- , base >=4.12 && <5
, directory
, filepath
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp-types
, mtl
@@ -140,19 +147,18 @@ library hls-cabal-fmt-plugin
-- The `hls-cabal-plugin` is needed for tests, as we need to install notification handlers
test-suite hls-cabal-fmt-plugin-tests
import: defaults, pedantic, test-defaults, warnings
- if !flag(cabalfmt)
+ if !flag(cabalfmt) || !flag(cabal)
buildable: False
type: exitcode-stdio-1.0
hs-source-dirs: plugins/hls-cabal-fmt-plugin/test
main-is: Main.hs
build-depends:
- , base
, directory
, filepath
, haskell-language-server:hls-cabal-plugin
, haskell-language-server:hls-cabal-fmt-plugin
- , hls-plugin-api == 2.9.0.1
- , hls-test-utils == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
+ , hls-test-utils == 2.11.0.0
if flag(isolateCabalfmtTests)
build-tool-depends: cabal-fmt:cabal-fmt ^>=0.1.12
@@ -168,7 +174,7 @@ flag cabalgild
manual: True
common cabalgild
- if flag(cabalgild)
+ if flag(cabalgild) && flag(cabal)
build-depends: haskell-language-server:hls-cabal-gild-plugin
cpp-options: -Dhls_cabalgild
@@ -180,16 +186,15 @@ flag isolateCabalGildTests
library hls-cabal-gild-plugin
import: defaults, pedantic, warnings
- if !flag(cabalgild)
+ if !flag(cabalgild) || !flag(cabal)
buildable: False
exposed-modules: Ide.Plugin.CabalGild
hs-source-dirs: plugins/hls-cabal-gild-plugin/src
build-depends:
- , base >=4.12 && <5
, directory
, filepath
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lsp-types
, text
, mtl
@@ -198,19 +203,18 @@ library hls-cabal-gild-plugin
-- The `hls-cabal-plugin` is needed for tests, as we need to install notification handlers
test-suite hls-cabal-gild-plugin-tests
import: defaults, pedantic, test-defaults, warnings
- if !flag(cabalgild)
+ if !flag(cabalgild) || !flag(cabal)
buildable: False
type: exitcode-stdio-1.0
hs-source-dirs: plugins/hls-cabal-gild-plugin/test
main-is: Main.hs
build-depends:
- , base
, directory
, filepath
, haskell-language-server:hls-cabal-plugin
, haskell-language-server:hls-cabal-gild-plugin
- , hls-plugin-api == 2.9.0.1
- , hls-test-utils == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
+ , hls-test-utils == 2.11.0.0
if flag(isolateCabalGildTests)
-- https://github.com/tfausak/cabal-gild/issues/89
@@ -258,7 +262,6 @@ library hls-cabal-plugin
build-depends:
- , base >=4.12 && <5
, bytestring
, Cabal-syntax >= 3.7
, containers
@@ -266,10 +269,10 @@ library hls-cabal-plugin
, directory
, filepath
, extra >=1.7.4
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, hashable
- , hls-plugin-api == 2.9.0.1
- , hls-graph == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
+ , hls-graph == 2.11.0.0
, lens
, lsp ^>=2.7
, lsp-types ^>=2.3
@@ -302,14 +305,13 @@ test-suite hls-cabal-plugin-tests
Outline
Utils
build-depends:
- , base
, bytestring
, Cabal-syntax >= 3.7
, extra
, filepath
, ghcide
, haskell-language-server:hls-cabal-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-types
, text
@@ -342,15 +344,14 @@ library hls-class-plugin
hs-source-dirs: plugins/hls-class-plugin/src
build-depends:
, aeson
- , base >=4.12 && <5
, containers
, deepseq
, extra
, ghc
- , ghc-exactprint >= 1.5 && < 1.10.0.0
- , ghcide == 2.9.0.1
+ , ghc-exactprint >= 1.5 && < 1.13.0.0
+ , ghcide == 2.11.0.0
, hls-graph
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp
, mtl
@@ -369,10 +370,9 @@ test-suite hls-class-plugin-tests
hs-source-dirs: plugins/hls-class-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, haskell-language-server:hls-class-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-types
, text
@@ -404,12 +404,11 @@ library hls-call-hierarchy-plugin
hs-source-dirs: plugins/hls-call-hierarchy-plugin/src
build-depends:
, aeson
- , base >=4.12 && <5
, containers
, extra
- , ghcide == 2.9.0.1
- , hiedb ^>= 0.6.0.0
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hiedb ^>= 0.6.0.2
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp >=2.7
, sqlite-simple
@@ -426,12 +425,11 @@ test-suite hls-call-hierarchy-plugin-tests
main-is: Main.hs
build-depends:
, aeson
- , base
, containers
, extra
, filepath
, haskell-language-server:hls-call-hierarchy-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp
, lsp-test
@@ -462,9 +460,9 @@ library hls-eval-plugin
hs-source-dirs: plugins/hls-eval-plugin/src
other-modules:
Ide.Plugin.Eval.Code
- Ide.Plugin.Eval.CodeLens
Ide.Plugin.Eval.Config
Ide.Plugin.Eval.GHC
+ Ide.Plugin.Eval.Handlers
Ide.Plugin.Eval.Parse.Comments
Ide.Plugin.Eval.Parse.Option
Ide.Plugin.Eval.Rules
@@ -472,19 +470,18 @@ library hls-eval-plugin
build-depends:
, aeson
- , base >=4.12 && <5
, bytestring
, containers
, deepseq
- , Diff ^>=0.5
+ , Diff ^>=0.5 || ^>=1.0.0
, dlist
, extra
, filepath
, ghc
, ghc-boot-th
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, hls-graph
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp
, lsp-types
@@ -510,13 +507,12 @@ test-suite hls-eval-plugin-tests
ghc-options: -fno-ignore-asserts
build-depends:
, aeson
- , base
, containers
, extra
, filepath
, haskell-language-server:hls-eval-plugin
, hls-plugin-api
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-types
, text
@@ -525,16 +521,16 @@ test-suite hls-eval-plugin-tests
-- import lens plugin
-----------------------------
+flag importLens
+ description: Enable importLens plugin
+ default: True
+ manual: False
+
common importLens
if flag(importLens)
build-depends: haskell-language-server:hls-explicit-imports-plugin
cpp-options: -Dhls_importLens
-flag importLens
- description: Enable importLens plugin
- default: True
- manual: True
-
library hls-explicit-imports-plugin
import: defaults, pedantic, warnings
if !flag(importlens)
@@ -543,13 +539,12 @@ library hls-explicit-imports-plugin
hs-source-dirs: plugins/hls-explicit-imports-plugin/src
build-depends:
, aeson
- , base >=4.12 && <5
, containers
, deepseq
, ghc
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, hls-graph
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp
, mtl
@@ -567,11 +562,10 @@ test-suite hls-explicit-imports-plugin-tests
hs-source-dirs: plugins/hls-explicit-imports-plugin/test
main-is: Main.hs
build-depends:
- , base
, extra
, filepath
, haskell-language-server:hls-explicit-imports-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-types
, text
@@ -597,13 +591,12 @@ library hls-rename-plugin
exposed-modules: Ide.Plugin.Rename
hs-source-dirs: plugins/hls-rename-plugin/src
build-depends:
- , base >=4.12 && <5
, containers
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, hashable
- , hiedb ^>= 0.6.0.0
+ , hiedb ^>= 0.6.0.2
, hie-compat
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, haskell-language-server:hls-refactor-plugin
, lens
, lsp-types
@@ -624,12 +617,11 @@ test-suite hls-rename-plugin-tests
main-is: Main.hs
build-depends:
, aeson
- , base
, containers
, filepath
, hls-plugin-api
, haskell-language-server:hls-rename-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-types
, text
@@ -644,26 +636,25 @@ flag retrie
manual: True
common retrie
- if flag(retrie) && impl(ghc < 9.10)
+ if flag(retrie) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))
build-depends: haskell-language-server:hls-retrie-plugin
cpp-options: -Dhls_retrie
library hls-retrie-plugin
import: defaults, pedantic, warnings
- if !(flag(retrie) && impl(ghc < 9.10))
+ if !(flag(retrie) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)))
buildable: False
exposed-modules: Ide.Plugin.Retrie
hs-source-dirs: plugins/hls-retrie-plugin/src
build-depends:
, aeson
- , base >=4.12 && <5
, bytestring
, containers
, extra
, ghc
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, hashable
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, haskell-language-server:hls-refactor-plugin
, lens
, lsp
@@ -682,18 +673,17 @@ library hls-retrie-plugin
test-suite hls-retrie-plugin-tests
import: defaults, pedantic, test-defaults, warnings
- if !(flag(retrie) && impl(ghc < 9.10))
+ if !(flag(retrie) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)))
buildable: False
type: exitcode-stdio-1.0
hs-source-dirs: plugins/hls-retrie-plugin/test
main-is: Main.hs
build-depends:
- , base
, containers
, filepath
, hls-plugin-api
, haskell-language-server:{hls-refactor-plugin, hls-retrie-plugin}
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, text
-----------------------------
@@ -713,28 +703,27 @@ flag hlint
manual: True
common hlint
- if flag(hlint) && impl(ghc < 9.10)
+ if flag(hlint)
build-depends: haskell-language-server:hls-hlint-plugin
cpp-options: -Dhls_hlint
library hls-hlint-plugin
import: defaults, pedantic, warnings
-- https://github.com/ndmitchell/hlint/pull/1594
- if !(flag(hlint) && impl(ghc < 9.10))
+ if !flag(hlint)
buildable: False
exposed-modules: Ide.Plugin.Hlint
hs-source-dirs: plugins/hls-hlint-plugin/src
build-depends:
, aeson
- , base >=4.12 && <5
, bytestring
, containers
, deepseq
, filepath
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, hashable
- , hlint >= 3.5 && < 3.9
- , hls-plugin-api == 2.9.0.1
+ , hlint >= 3.5 && < 3.11
+ , hls-plugin-api == 2.11.0.0
, lens
, mtl
, refact
@@ -746,10 +735,14 @@ library hls-hlint-plugin
, transformers
, unordered-containers
, ghc-lib-parser-ex
- , apply-refact
- --
, lsp-types
+ -- apply-refact doesn't work on 9.10, or even have a buildable
+ -- configuration
+ if impl(ghc >= 9.11) || impl(ghc < 9.10)
+ cpp-options: -DAPPLY_REFACT
+ build-depends: apply-refact
+
if flag(ghc-lib)
cpp-options: -DGHC_LIB
build-depends:
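The new conditional defines APPLY_REFACT only when apply-refact is actually depended upon, so the Hlint plugin can still build on GHC 9.10. A sketch of how such a define is typically consumed in the plugin source (an assumption for illustration, not copied from the patch):

{-# LANGUAGE CPP #-}

-- Code paths that need apply-refact are compiled only when the cabal
-- conditional above holds and passes -DAPPLY_REFACT.
#ifdef APPLY_REFACT
supportsRefactoring :: Bool
supportsRefactoring = True
#else
supportsRefactoring :: Bool
supportsRefactoring = False
#endif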
@@ -764,7 +757,7 @@ library hls-hlint-plugin
test-suite hls-hlint-plugin-tests
import: defaults, pedantic, test-defaults, warnings
- if !(flag(hlint) && impl(ghc < 9.10))
+ if !flag(hlint)
buildable: False
type: exitcode-stdio-1.0
hs-source-dirs: plugins/hls-hlint-plugin/test
@@ -772,14 +765,14 @@ test-suite hls-hlint-plugin-tests
-- Work around https://gitlab.haskell.org/ghc/ghc/-/issues/24648
if os(darwin)
ghc-options: -optl-Wl,-ld_classic
+
build-depends:
aeson
- , base
, containers
, filepath
, haskell-language-server:hls-hlint-plugin
, hls-plugin-api
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-types
, text
@@ -794,20 +787,17 @@ flag stan
manual: True
common stan
- if flag(stan)
+ if flag(stan) && (impl(ghc < 9.11) || flag(ignore-plugins-ghc-bounds))
build-depends: haskell-language-server:hls-stan-plugin
cpp-options: -Dhls_stan
library hls-stan-plugin
import: defaults, pedantic, warnings
- if flag(stan)
- buildable: True
- else
+ if !flag(stan) || (impl(ghc > 9.11) && !flag(ignore-plugins-ghc-bounds))
buildable: False
exposed-modules: Ide.Plugin.Stan
hs-source-dirs: plugins/hls-stan-plugin/src
build-depends:
- base
, deepseq
, hashable
, hie-compat
@@ -816,7 +806,7 @@ library hls-stan-plugin
, lsp-types
, text
, unordered-containers
- , stan >= 0.1.2.0
+ , stan >= 0.2.1.0
, trial
, directory
@@ -828,19 +818,16 @@ library hls-stan-plugin
test-suite hls-stan-plugin-tests
import: defaults, pedantic, test-defaults, warnings
- if flag(stan)
- buildable: True
- else
+ if !flag(stan) || (impl(ghc > 9.11) && !flag(ignore-plugins-ghc-bounds))
buildable: False
type: exitcode-stdio-1.0
hs-source-dirs: plugins/hls-stan-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, haskell-language-server:hls-stan-plugin
, hls-plugin-api
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-types
, text
@@ -869,11 +856,10 @@ library hls-module-name-plugin
hs-source-dirs: plugins/hls-module-name-plugin/src
build-depends:
, aeson
- , base >=4.12 && <5
, containers
, filepath
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lsp
, text
, text-rope
@@ -888,10 +874,9 @@ test-suite hls-module-name-plugin-tests
hs-source-dirs: plugins/hls-module-name-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, haskell-language-server:hls-module-name-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
-----------------------------
-- pragmas plugin
@@ -914,12 +899,11 @@ library hls-pragmas-plugin
exposed-modules: Ide.Plugin.Pragmas
hs-source-dirs: plugins/hls-pragmas-plugin/src
build-depends:
- , base >=4.12 && <5
, aeson
, extra
, fuzzy
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lens
, lens-aeson
, lsp
@@ -936,10 +920,9 @@ test-suite hls-pragmas-plugin-tests
main-is: Main.hs
build-depends:
, aeson
- , base
, filepath
, haskell-language-server:hls-pragmas-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-types
, text
@@ -954,13 +937,13 @@ flag splice
manual: True
common splice
- if flag(splice) && impl(ghc < 9.10)
+ if flag(splice) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))
build-depends: haskell-language-server:hls-splice-plugin
cpp-options: -Dhls_splice
library hls-splice-plugin
import: defaults, pedantic, warnings
- if !(flag(splice) && impl(ghc < 9.10))
+ if !(flag(splice) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)))
buildable: False
exposed-modules:
Ide.Plugin.Splice
@@ -969,12 +952,11 @@ library hls-splice-plugin
hs-source-dirs: plugins/hls-splice-plugin/src
build-depends:
, aeson
- , base >=4.12 && <5
, extra
, foldl
, ghc
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, haskell-language-server:hls-refactor-plugin
, lens
, lsp
@@ -989,16 +971,15 @@ library hls-splice-plugin
test-suite hls-splice-plugin-tests
import: defaults, pedantic, test-defaults, warnings
- if !(flag(splice) && impl(ghc < 9.10))
+ if !(flag(splice) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)))
buildable: False
type: exitcode-stdio-1.0
hs-source-dirs: plugins/hls-splice-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, haskell-language-server:hls-splice-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, text
-----------------------------
@@ -1023,13 +1004,12 @@ library hls-alternate-number-format-plugin
other-modules: Ide.Plugin.Literals
hs-source-dirs: plugins/hls-alternate-number-format-plugin/src
build-depends:
- , base >=4.12 && < 5
, containers
, extra
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, ghc-boot-th
, hls-graph
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp ^>=2.7
, mtl
@@ -1052,10 +1032,9 @@ test-suite hls-alternate-number-format-plugin-tests
main-is: Main.hs
ghc-options: -fno-ignore-asserts
build-depends:
- , base >=4.12 && < 5
, filepath
, haskell-language-server:hls-alternate-number-format-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, regex-tdfa
, tasty-quickcheck
, text
@@ -1086,10 +1065,9 @@ library hls-qualify-imported-names-plugin
exposed-modules: Ide.Plugin.QualifyImportedNames
hs-source-dirs: plugins/hls-qualify-imported-names-plugin/src
build-depends:
- , base >=4.12 && <5
, containers
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp
, text
@@ -1108,11 +1086,10 @@ test-suite hls-qualify-imported-names-plugin-tests
hs-source-dirs: plugins/hls-qualify-imported-names-plugin/test
main-is: Main.hs
build-depends:
- , base
, text
, filepath
, haskell-language-server:hls-qualify-imported-names-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
-----------------------------
-- code range plugin
@@ -1139,13 +1116,12 @@ library hls-code-range-plugin
Ide.Plugin.CodeRange.ASTPreProcess
hs-source-dirs: plugins/hls-code-range-plugin/src
build-depends:
- , base >=4.12 && <5
, containers
, deepseq
, extra
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, hashable
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp
, mtl
@@ -1164,11 +1140,10 @@ test-suite hls-code-range-plugin-tests
Ide.Plugin.CodeRangeTest
Ide.Plugin.CodeRange.RulesTest
build-depends:
- , base
, bytestring
, filepath
, haskell-language-server:hls-code-range-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp
, lsp-test
@@ -1196,9 +1171,8 @@ library hls-change-type-signature-plugin
exposed-modules: Ide.Plugin.ChangeTypeSignature
hs-source-dirs: plugins/hls-change-type-signature-plugin/src
build-depends:
- , base >=4.12 && < 5
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lsp-types
, regex-tdfa
, syb
@@ -1220,10 +1194,9 @@ test-suite hls-change-type-signature-plugin-tests
hs-source-dirs: plugins/hls-change-type-signature-plugin/test
main-is: Main.hs
build-depends:
- , base >=4.12 && < 5
, filepath
, haskell-language-server:hls-change-type-signature-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, regex-tdfa
, text
default-extensions:
@@ -1253,13 +1226,12 @@ library hls-gadt-plugin
hs-source-dirs: plugins/hls-gadt-plugin/src
build-depends:
, aeson
- , base >=4.12 && <5
, containers
, extra
, ghc
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, ghc-exactprint
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, haskell-language-server:hls-refactor-plugin
, lens
, lsp >=2.7
@@ -1277,10 +1249,9 @@ test-suite hls-gadt-plugin-tests
hs-source-dirs: plugins/hls-gadt-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, haskell-language-server:hls-gadt-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, text
-----------------------------
@@ -1304,13 +1275,12 @@ library hls-explicit-fixity-plugin
exposed-modules: Ide.Plugin.ExplicitFixity
hs-source-dirs: plugins/hls-explicit-fixity-plugin/src
build-depends:
- base >=4.12 && <5
, containers
, deepseq
, extra
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, hashable
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, lsp >=2.7
, text
@@ -1324,10 +1294,9 @@ test-suite hls-explicit-fixity-plugin-tests
hs-source-dirs: plugins/hls-explicit-fixity-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, haskell-language-server:hls-explicit-fixity-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, text
-----------------------------
@@ -1350,9 +1319,8 @@ library hls-explicit-record-fields-plugin
buildable: False
exposed-modules: Ide.Plugin.ExplicitFields
build-depends:
- , base >=4.12 && <5
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lsp
, lens
, hls-graph
@@ -1374,12 +1342,11 @@ test-suite hls-explicit-record-fields-plugin-tests
hs-source-dirs: plugins/hls-explicit-record-fields-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, text
, ghcide
, haskell-language-server:hls-explicit-record-fields-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
-----------------------------
-- overloaded record dot plugin
@@ -1401,7 +1368,6 @@ library hls-overloaded-record-dot-plugin
buildable: False
exposed-modules: Ide.Plugin.OverloadedRecordDot
build-depends:
- , base >=4.16 && <5
, aeson
, ghcide
, hls-plugin-api
@@ -1423,11 +1389,10 @@ test-suite hls-overloaded-record-dot-plugin-tests
hs-source-dirs: plugins/hls-overloaded-record-dot-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, text
, haskell-language-server:hls-overloaded-record-dot-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
-----------------------------
@@ -1440,22 +1405,21 @@ flag floskell
manual: True
common floskell
- if flag(floskell) && impl(ghc < 9.10)
+ if flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))
build-depends: haskell-language-server:hls-floskell-plugin
cpp-options: -Dhls_floskell
library hls-floskell-plugin
import: defaults, pedantic, warnings
-- https://github.com/ennocramer/floskell/pull/82
- if !(flag(floskell) && impl(ghc < 9.10))
+ if !(flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)))
buildable: False
exposed-modules: Ide.Plugin.Floskell
hs-source-dirs: plugins/hls-floskell-plugin/src
build-depends:
- , base >=4.12 && <5
, floskell ^>=0.11.0
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lsp-types ^>=2.3
, mtl
, text
@@ -1463,16 +1427,15 @@ library hls-floskell-plugin
test-suite hls-floskell-plugin-tests
import: defaults, pedantic, test-defaults, warnings
- if !(flag(floskell) && impl(ghc < 9.10))
+ if !(flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)))
buildable: False
type: exitcode-stdio-1.0
hs-source-dirs: plugins/hls-floskell-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, haskell-language-server:hls-floskell-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
-----------------------------
-- fourmolu plugin
@@ -1495,12 +1458,11 @@ library hls-fourmolu-plugin
exposed-modules: Ide.Plugin.Fourmolu
hs-source-dirs: plugins/hls-fourmolu-plugin/src
build-depends:
- , base >=4.12 && <5
, filepath
- , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16
+ , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 || ^>=0.17 || ^>=0.18
, ghc-boot-th
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp
, mtl
@@ -1522,12 +1484,11 @@ test-suite hls-fourmolu-plugin-tests
build-tool-depends:
fourmolu:fourmolu
build-depends:
- , base >=4.12 && <5
, aeson
, filepath
, haskell-language-server:hls-fourmolu-plugin
, hls-plugin-api
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lsp-test
-----------------------------
@@ -1551,16 +1512,15 @@ library hls-ormolu-plugin
exposed-modules: Ide.Plugin.Ormolu
hs-source-dirs: plugins/hls-ormolu-plugin/src
build-depends:
- , base >=4.12 && <5
, extra
, filepath
, ghc-boot-th
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lsp
, mtl
, process-extras >= 0.7.1
- , ormolu ^>=0.1.2 || ^>= 0.2 || ^>= 0.3 || ^>= 0.5 || ^>= 0.6 || ^>= 0.7
+ , ormolu ^>=0.5.3 || ^>= 0.6 || ^>= 0.7 || ^>=0.8
, text
, transformers
@@ -1578,12 +1538,11 @@ test-suite hls-ormolu-plugin-tests
build-tool-depends:
ormolu:ormolu
build-depends:
- , base
, aeson
, filepath
, haskell-language-server:hls-ormolu-plugin
, hls-plugin-api
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lsp-types
, ormolu
@@ -1597,42 +1556,40 @@ flag stylishHaskell
manual: True
common stylishHaskell
- if flag(stylishHaskell) && impl(ghc < 9.10)
+ if flag(stylishHaskell)
build-depends: haskell-language-server:hls-stylish-haskell-plugin
cpp-options: -Dhls_stylishHaskell
library hls-stylish-haskell-plugin
import: defaults, pedantic, warnings
-- https://github.com/haskell/stylish-haskell/issues/479
- if !(flag(stylishHaskell) && impl(ghc < 9.10))
+ if !flag(stylishHaskell)
buildable: False
exposed-modules: Ide.Plugin.StylishHaskell
hs-source-dirs: plugins/hls-stylish-haskell-plugin/src
build-depends:
- , base >=4.12 && <5
, directory
, filepath
, ghc-boot-th
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lsp-types
, mtl
- , stylish-haskell ^>=0.12 || ^>=0.13 || ^>=0.14
+ , stylish-haskell >=0.12 && <0.16
, text
test-suite hls-stylish-haskell-plugin-tests
import: defaults, pedantic, test-defaults, warnings
- if !(flag(stylishHaskell) && impl(ghc < 9.10))
+ if !flag(stylishHaskell)
buildable: False
type: exitcode-stdio-1.0
hs-source-dirs: plugins/hls-stylish-haskell-plugin/test
main-is: Main.hs
build-depends:
- , base
, filepath
, haskell-language-server:hls-stylish-haskell-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
-----------------------------
-- refactor plugin
@@ -1680,13 +1637,12 @@ library hls-refactor-plugin
ViewPatterns
hs-source-dirs: plugins/hls-refactor-plugin/src
build-depends:
- , base >=4.12 && <5
, ghc
, bytestring
, ghc-boot
, regex-tdfa
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lsp
, text
, text-rope
@@ -1718,14 +1674,13 @@ test-suite hls-refactor-plugin-tests
other-modules: Test.AddArgument
ghc-options: -O0
build-depends:
- , base
, data-default
, directory
, extra
, filepath
, ghcide:ghcide
, haskell-language-server:hls-refactor-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-test
, lsp-types
@@ -1768,13 +1723,12 @@ library hls-semantic-tokens-plugin
hs-source-dirs: plugins/hls-semantic-tokens-plugin/src
build-depends:
- , base >=4.12 && <5
, containers
, extra
, text-rope
, mtl >= 2.2
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp >=2.6
, text
@@ -1784,7 +1738,7 @@ library hls-semantic-tokens-plugin
, array
, deepseq
, dlist
- , hls-graph == 2.9.0.1
+ , hls-graph == 2.11.0.0
, template-haskell
, data-default
, stm
@@ -1802,14 +1756,13 @@ test-suite hls-semantic-tokens-plugin-tests
build-depends:
, aeson
- , base
, containers
, data-default
, filepath
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, haskell-language-server:hls-semantic-tokens-plugin
- , hls-plugin-api == 2.9.0.1
- , hls-test-utils == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
+ , hls-test-utils == 2.11.0.0
, lens
, lsp
, lsp-test
@@ -1838,11 +1791,10 @@ library hls-notes-plugin
Ide.Plugin.Notes
hs-source-dirs: plugins/hls-notes-plugin/src
build-depends:
- , base >=4.12 && <5
, array
- , ghcide == 2.9.0.1
- , hls-graph == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-graph == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp >=2.7
, mtl >= 2.2
@@ -1866,10 +1818,9 @@ test-suite hls-notes-plugin-tests
hs-source-dirs: plugins/hls-notes-plugin/test
main-is: NotesTest.hs
build-depends:
- , base
, filepath
, haskell-language-server:hls-notes-plugin
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
default-extensions: OverloadedStrings
----------------------------
@@ -1924,16 +1875,15 @@ library
hs-source-dirs: src
build-depends:
, aeson-pretty
- , base >=4.16 && <5
, data-default
, directory
, extra
, filepath
, ghc
- , ghcide == 2.9.0.1
+ , ghcide == 2.11.0.0
, githash >=0.1.6.1
, hie-bios
- , hls-plugin-api == 2.9.0.1
+ , hls-plugin-api == 2.11.0.0
, optparse-applicative
, optparse-simple
, prettyprinter >= 1.7
@@ -1970,7 +1920,6 @@ executable haskell-language-server
ghc-options: -dynamic
build-depends:
- , base >=4.16 && <5
, haskell-language-server
, hls-plugin-api
, lsp
@@ -1996,7 +1945,6 @@ executable haskell-language-server-wrapper
"-with-rtsopts=-I0 -A128M"
build-depends:
- , base >=4.16 && <5
, data-default
, directory
, extra
@@ -2027,11 +1975,9 @@ test-suite func-test
type: exitcode-stdio-1.0
build-tool-depends:
haskell-language-server:haskell-language-server,
- ghcide:ghcide-test-preprocessor
build-depends:
, aeson
- , base >=4.16 && <5
, bytestring
, containers
, deepseq
@@ -2040,7 +1986,7 @@ test-suite func-test
, ghcide:ghcide
, hashable
, hls-plugin-api
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, lens
, lsp-test
, lsp-types
@@ -2066,7 +2012,7 @@ test-suite func-test
if flag(eval)
cpp-options: -Dhls_eval
-- formatters
- if flag(floskell) && impl(ghc < 9.10)
+ if flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))
cpp-options: -Dhls_floskell
if flag(fourmolu)
cpp-options: -Dhls_fourmolu
@@ -2083,9 +2029,8 @@ test-suite wrapper-test
haskell-language-server:haskell-language-server
build-depends:
- , base >=4.16 && <5
, extra
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
, process
hs-source-dirs: test/wrapper
@@ -2100,7 +2045,7 @@ benchmark benchmark
hs-source-dirs: bench
build-tool-depends:
haskell-language-server:ghcide-bench,
- hp2pretty:hp2pretty,
+ eventlog2html:eventlog2html,
default-extensions:
LambdaCase
RecordWildCards
@@ -2108,7 +2053,6 @@ benchmark benchmark
build-depends:
, aeson
- , base >=4.16 && <5
, containers
, data-default
, directory
@@ -2124,26 +2068,37 @@ benchmark benchmark
, text
, yaml
+flag test-exe
+ description: Build the ghcide-test-preprocessor executable
+ default: True
-test-suite ghcide-tests
+executable ghcide-test-preprocessor
import: warnings
+ default-language: GHC2021
+ hs-source-dirs: ghcide-test/preprocessor
+ main-is: Main.hs
+ build-depends: base >=4 && <5
+
+ if !flag(test-exe)
+ buildable: False
+
+test-suite ghcide-tests
+ import: warnings, defaults
type: exitcode-stdio-1.0
default-language: GHC2021
build-tool-depends:
, ghcide:ghcide
- , ghcide:ghcide-test-preprocessor
+ , haskell-language-server:ghcide-test-preprocessor
, implicit-hie:gen-hie
build-depends:
, aeson
- , base
, containers
, data-default
, directory
, enummapset
, extra
, filepath
- , fuzzy
, ghcide
, hls-plugin-api
, lens
@@ -2151,7 +2106,6 @@ test-suite ghcide-tests
, lsp
, lsp-test ^>=0.17.1
, lsp-types
- , monoid-subclasses
, mtl
, network-uri
, QuickCheck
@@ -2169,12 +2123,12 @@ test-suite ghcide-tests
, text
, text-rope
, unordered-containers
- , hls-test-utils == 2.9.0.1
+ , hls-test-utils == 2.11.0.0
if impl(ghc <9.3)
build-depends: ghc-typelits-knownnat
- hs-source-dirs: ghcide/test/exe
+ hs-source-dirs: ghcide-test/exe
ghc-options: -threaded -O0
main-is: Main.hs
@@ -2223,12 +2177,16 @@ test-suite ghcide-tests
RecordWildCards
ViewPatterns
+flag ghcide-bench
+ description: Build the ghcide-bench executable
+ default: True
executable ghcide-bench
- default-language: GHC2021
+ import: defaults
+ if !flag(ghcide-bench)
+ buildable: False
build-depends:
aeson,
- base,
bytestring,
containers,
data-default,
@@ -2257,7 +2215,7 @@ executable ghcide-bench
ViewPatterns
library ghcide-bench-lib
- default-language: GHC2021
+ import: defaults
hs-source-dirs: ghcide-bench/src
ghc-options: -Wall -Wno-name-shadowing
exposed-modules:
@@ -2266,7 +2224,6 @@ library ghcide-bench-lib
build-depends:
aeson,
async,
- base == 4.*,
binary,
bytestring,
deepseq,
@@ -2293,8 +2250,8 @@ library ghcide-bench-lib
test-suite ghcide-bench-test
+ import: defaults
type: exitcode-stdio-1.0
- default-language: GHC2021
build-tool-depends:
ghcide:ghcide,
main-is: Main.hs
@@ -2302,7 +2259,6 @@ test-suite ghcide-bench-test
ghc-options: -Wunused-packages
ghc-options: -threaded -Wall
build-depends:
- base,
extra,
haskell-language-server:ghcide-bench-lib,
lsp-test ^>= 0.17,
@@ -2314,3 +2270,28 @@ test-suite ghcide-bench-test
OverloadedStrings
RecordWildCards
ViewPatterns
+
+executable plugin-tutorial
+ import: defaults
+ -- The plugin tutorial is only compatible with 9.6 and 9.8.
+ -- No particular reason, just to avoid excessive CPP.
+ if (impl(ghc >= 9.6) && impl(ghc < 9.10))
+ buildable: True
+ else
+ buildable: False
+ ghc-options: -pgmL markdown-unlit
+ main-is: docs/contributing/plugin-tutorial.lhs
+ build-tool-depends: markdown-unlit:markdown-unlit
+ build-depends:
+ base,
+ ghcide,
+ hls-plugin-api,
+ aeson,
+ lsp,
+ lsp-types,
+ markdown-unlit,
+ text,
+ unordered-containers,
+ containers,
+ transformers,
+ ghc,
diff --git a/hie-compat/hie-compat.cabal b/hie-compat/hie-compat.cabal
index bb96ab88fb..2b361df887 100644
--- a/hie-compat/hie-compat.cabal
+++ b/hie-compat/hie-compat.cabal
@@ -24,7 +24,7 @@ source-repository head
library
default-language: GHC2021
build-depends:
- base < 4.21, array, bytestring, containers, directory, filepath, transformers
+ base < 4.22, array, bytestring, containers, directory, filepath, transformers
build-depends: ghc >= 8.10, ghc-boot
ghc-options: -Wall -Wno-name-shadowing
diff --git a/hie-compat/src-ghc92/Compat/HieAst.hs b/hie-compat/src-ghc92/Compat/HieAst.hs
index f72b1283de..3445ff6213 100644
--- a/hie-compat/src-ghc92/Compat/HieAst.hs
+++ b/hie-compat/src-ghc92/Compat/HieAst.hs
@@ -72,7 +72,7 @@ import qualified Data.Array as A
import qualified Data.ByteString as BS
import qualified Data.Map as M
import qualified Data.Set as S
-import Data.Data ( Data, Typeable )
+import Data.Data ( Data )
import Data.Void ( Void, absurd )
import Control.Monad ( forM_ )
import Control.Monad.Trans.State.Strict
@@ -469,7 +469,7 @@ data PScoped a = PS (Maybe Span)
Scope -- ^ use site of the pattern
Scope -- ^ pattern to the right of a, not including a
a
- deriving (Typeable, Data) -- Pattern Scope
+ deriving (Data) -- Pattern Scope
{- Note [TyVar Scopes]
~~~~~~~~~~~~~~~~~~~
@@ -1040,10 +1040,6 @@ instance HiePass p => ToHie (PScoped (LocatedA (Pat (GhcPass p)))) where
in [ toHie $ L ospan wrap
, toHie $ PS rsp scope pscope $ (L ospan pat)
]
--- CHANGED: removed preprocessor stuff
--- #if __GLASGOW_HASKELL__ < 811
--- HieRn -> []
--- #endif
where
contextify :: a ~ LPat (GhcPass p) => HsConDetails (HsPatSigType (NoGhcTc (GhcPass p))) a (HsRecFields (GhcPass p) a)
-> HsConDetails (TScoped (HsPatSigType (NoGhcTc (GhcPass p)))) (PScoped a) (RContext (HsRecFields (GhcPass p) (PScoped a)))
@@ -1928,11 +1924,6 @@ instance HiePass p => ToHie (LocatedA (HsSplice (GhcPass p))) where
HsSpliced _ _ _ ->
[]
XSplice x -> case ghcPass @p of
--- CHANGED: removed preprocessor stuff
--- #if __GLASGOW_HASKELL__ < 811
--- GhcPs -> noExtCon x
--- GhcRn -> noExtCon x
--- #endif
GhcTc -> case x of
HsSplicedT _ -> []
diff --git a/hls-graph/hls-graph.cabal b/hls-graph/hls-graph.cabal
index d5a9f781de..5eccb4d75e 100644
--- a/hls-graph/hls-graph.cabal
+++ b/hls-graph/hls-graph.cabal
@@ -1,6 +1,6 @@
cabal-version: 2.4
name: hls-graph
-version: 2.9.0.1
+version: 2.11.0.0
synopsis: Haskell Language Server internal graph API
description:
Please see the README on GitHub at
diff --git a/hls-graph/src/Control/Concurrent/STM/Stats.hs b/hls-graph/src/Control/Concurrent/STM/Stats.hs
index 3b7c28b013..a6e7d0459b 100644
--- a/hls-graph/src/Control/Concurrent/STM/Stats.hs
+++ b/hls-graph/src/Control/Concurrent/STM/Stats.hs
@@ -20,7 +20,6 @@ import Control.Monad
import Data.IORef
import qualified Data.Map.Strict as M
import Data.Time (getCurrentTime)
-import Data.Typeable (Typeable)
import GHC.Conc (unsafeIOToSTM)
import System.IO
import System.IO.Unsafe
@@ -151,7 +150,6 @@ trackSTMConf (TrackSTMConf {..}) name txm = do
-- 'BlockedIndefinitelyOnNamedSTM', carrying the name of the transaction and
-- thus giving more helpful error messages.
newtype BlockedIndefinitelyOnNamedSTM = BlockedIndefinitelyOnNamedSTM String
- deriving (Typeable)
instance Show BlockedIndefinitelyOnNamedSTM where
showsPrec _ (BlockedIndefinitelyOnNamedSTM name) =
diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs
index c70cf6ff1c..34bed42391 100644
--- a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs
+++ b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs
@@ -227,7 +227,7 @@ data GraphException = forall e. Exception e => GraphException {
stack :: [String], -- ^ The stack of keys that led to this exception
inner :: e -- ^ The underlying exception
}
- deriving (Typeable, Exception)
+ deriving (Exception)
instance Show GraphException where
show GraphException{..} = unlines $
@@ -249,7 +249,7 @@ instance Show Stack where
show (Stack kk _) = "Stack: " <> intercalate " -> " (map show kk)
newtype StackException = StackException Stack
- deriving (Typeable, Show)
+ deriving (Show)
instance Exception StackException where
fromException = fromGraphException
diff --git a/hls-graph/test/Example.hs b/hls-graph/test/Example.hs
index c6a74e90a6..c20ea79328 100644
--- a/hls-graph/test/Example.hs
+++ b/hls-graph/test/Example.hs
@@ -38,7 +38,7 @@ ruleBool = addRule $ \Rule _old _mode -> do
data CondRule = CondRule
- deriving (Eq, Generic, Hashable, NFData, Show, Typeable)
+ deriving (Eq, Generic, Hashable, NFData, Show)
type instance RuleResult CondRule = Bool
@@ -48,7 +48,7 @@ ruleCond mv = addRule $ \CondRule _old _mode -> do
return $ RunResult ChangedRecomputeDiff "" r (return ())
data BranchedRule = BranchedRule
- deriving (Eq, Generic, Hashable, NFData, Show, Typeable)
+ deriving (Eq, Generic, Hashable, NFData, Show)
type instance RuleResult BranchedRule = Int
ruleWithCond :: Rules ()
@@ -61,7 +61,7 @@ ruleWithCond = addRule $ \BranchedRule _old _mode -> do
return $ RunResult ChangedRecomputeDiff "" (2 :: Int) (return ())
data SubBranchRule = SubBranchRule
- deriving (Eq, Generic, Hashable, NFData, Show, Typeable)
+ deriving (Eq, Generic, Hashable, NFData, Show)
type instance RuleResult SubBranchRule = Int
ruleSubBranch :: C.MVar Int -> Rules ()
@@ -70,5 +70,5 @@ ruleSubBranch mv = addRule $ \SubBranchRule _old _mode -> do
return $ RunResult ChangedRecomputeDiff "" r (return ())
data CountRule = CountRule
- deriving (Eq, Generic, Hashable, NFData, Show, Typeable)
+ deriving (Eq, Generic, Hashable, NFData, Show)
type instance RuleResult CountRule = Int
diff --git a/hls-plugin-api/hls-plugin-api.cabal b/hls-plugin-api/hls-plugin-api.cabal
index b177550f62..bad55992bb 100644
--- a/hls-plugin-api/hls-plugin-api.cabal
+++ b/hls-plugin-api/hls-plugin-api.cabal
@@ -1,6 +1,6 @@
cabal-version: 2.4
name: hls-plugin-api
-version: 2.9.0.1
+version: 2.11.0.0
synopsis: Haskell Language Server API for plugin communication
description:
Please see the README on GitHub at
@@ -60,13 +60,13 @@ library
, data-default
, dependent-map
, dependent-sum >=0.7
- , Diff ^>=0.5
+ , Diff ^>=0.5 || ^>=1.0.0
, dlist
, extra
, filepath
, ghc
, hashable
- , hls-graph == 2.9.0.1
+ , hls-graph == 2.11.0.0
, lens
, lens-aeson
, lsp ^>=2.7
diff --git a/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs b/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs
index 8ee6110d29..a7350ab344 100644
--- a/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs
+++ b/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs
@@ -3,7 +3,11 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
-module Ide.Plugin.ConfigUtils where
+module Ide.Plugin.ConfigUtils (
+ pluginsToDefaultConfig,
+ pluginsToVSCodeExtensionSchema,
+ pluginsCustomConfigToMarkdownTables
+ ) where
import Control.Lens (at, (&), (?~))
import qualified Data.Aeson as A
@@ -15,8 +19,15 @@ import qualified Data.Dependent.Sum as DSum
import Data.List.Extra (nubOrd)
import Data.String (IsString (fromString))
import qualified Data.Text as T
+import GHC.TypeLits (symbolVal)
import Ide.Plugin.Config
-import Ide.Plugin.Properties (toDefaultJSON,
+import Ide.Plugin.Properties (KeyNameProxy, MetaData (..),
+ PluginCustomConfig (..),
+ PluginCustomConfigParam (..),
+ Properties (..),
+ SPropertyKey (..),
+ SomePropertyKeyWithMetaData (..),
+ toDefaultJSON,
toVSCodeExtensionSchema)
import Ide.Types
import Language.LSP.Protocol.Message
@@ -31,10 +42,10 @@ pluginsToDefaultConfig :: IdePlugins a -> A.Value
pluginsToDefaultConfig IdePlugins {..} =
-- Use '_Object' and 'at' to get at the "plugin" key
-- and actually set it.
- A.toJSON defaultConfig & _Object . at "plugin" ?~ elems
+ A.toJSON defaultConfig & _Object . at "plugin" ?~ pluginSpecificDefaultConfigs
where
- defaultConfig@Config {} = def
- elems = A.object $ mconcat $ singlePlugin <$> ipMap
+ defaultConfig = def :: Config
+ pluginSpecificDefaultConfigs = A.object $ mconcat $ singlePlugin <$> ipMap
-- Splice genericDefaultConfig and dedicatedDefaultConfig
-- Example:
--
@@ -48,6 +59,7 @@ pluginsToDefaultConfig IdePlugins {..} =
-- }
-- }
-- }
+ singlePlugin :: PluginDescriptor ideState -> [A.Pair]
singlePlugin PluginDescriptor {pluginConfigDescriptor = ConfigDescriptor {..}, ..} =
let x = genericDefaultConfig <> dedicatedDefaultConfig
in [fromString (T.unpack pId) A..= A.object x | not $ null x]
@@ -66,8 +78,8 @@ pluginsToDefaultConfig IdePlugins {..} =
<> nubOrd (mconcat
(handlersToGenericDefaultConfig configInitialGenericConfig <$> handlers))
in case x of
- -- if the plugin has only one capability, we produce globalOn instead of the specific one;
- -- otherwise we don't produce globalOn at all
+ -- If the plugin has only one capability, we produce globalOn instead of the specific one;
+ -- otherwise we omit globalOn
[_] -> ["globalOn" A..= plcGlobalOn configInitialGenericConfig]
_ -> x
-- Example:
@@ -139,3 +151,92 @@ pluginsToVSCodeExtensionSchema IdePlugins {..} = A.object $ mconcat $ singlePlug
]
withIdPrefix x = "haskell.plugin." <> pId <> "." <> x
toKey' = fromString . T.unpack . withIdPrefix
+
+
+-- | Generates markdown tables for custom config
+pluginsCustomConfigToMarkdownTables :: IdePlugins a -> T.Text
+pluginsCustomConfigToMarkdownTables IdePlugins {..} = T.unlines
+ $ map renderCfg
+ $ filter (\(PluginCustomConfig _ params) -> not $ null params)
+ $ map toPluginCustomConfig ipMap
+ where
+ toPluginCustomConfig :: PluginDescriptor ideState -> PluginCustomConfig
+ toPluginCustomConfig PluginDescriptor {pluginConfigDescriptor = ConfigDescriptor {configCustomConfig = c}, pluginId = PluginId pId} =
+ PluginCustomConfig { pcc'Name = pId, pcc'Params = toPluginCustomConfigParams c}
+ toPluginCustomConfigParams :: CustomConfig -> [PluginCustomConfigParam]
+ toPluginCustomConfigParams (CustomConfig p) = toPluginCustomConfigParams' p
+ toPluginCustomConfigParams' :: Properties r -> [PluginCustomConfigParam]
+ toPluginCustomConfigParams' EmptyProperties = []
+ toPluginCustomConfigParams' (ConsProperties (keyNameProxy :: KeyNameProxy s) (k :: SPropertyKey k) (m :: MetaData t) xs) =
+ toEntry (SomePropertyKeyWithMetaData k m) : toPluginCustomConfigParams' xs
+ where
+ toEntry :: SomePropertyKeyWithMetaData -> PluginCustomConfigParam
+ toEntry (SomePropertyKeyWithMetaData SNumber MetaData {..}) =
+ PluginCustomConfigParam {
+ pccp'Name = T.pack $ symbolVal keyNameProxy,
+ pccp'Description = description,
+ pccp'Default = T.pack $ show defaultValue,
+ pccp'EnumValues = []
+ }
+ toEntry (SomePropertyKeyWithMetaData SInteger MetaData {..}) =
+ PluginCustomConfigParam {
+ pccp'Name = T.pack $ symbolVal keyNameProxy,
+ pccp'Description = description,
+ pccp'Default = T.pack $ show defaultValue,
+ pccp'EnumValues = []
+ }
+ toEntry (SomePropertyKeyWithMetaData SString MetaData {..}) =
+ PluginCustomConfigParam {
+ pccp'Name = T.pack $ symbolVal keyNameProxy,
+ pccp'Description = description,
+ pccp'Default = T.pack $ show defaultValue,
+ pccp'EnumValues = []
+ }
+ toEntry (SomePropertyKeyWithMetaData SBoolean MetaData {..}) =
+ PluginCustomConfigParam {
+ pccp'Name = T.pack $ symbolVal keyNameProxy,
+ pccp'Description = description,
+ pccp'Default = T.pack $ show defaultValue,
+ pccp'EnumValues = []
+ }
+ toEntry (SomePropertyKeyWithMetaData (SObject _) MetaData {..}) =
+ PluginCustomConfigParam {
+ pccp'Name = T.pack $ symbolVal keyNameProxy,
+ pccp'Description = description,
+ pccp'Default = "TODO: nested object", -- T.pack $ show defaultValue,
+ pccp'EnumValues = []
+ }
+ toEntry (SomePropertyKeyWithMetaData (SArray _) MetaData {..}) =
+ PluginCustomConfigParam {
+ pccp'Name = T.pack $ symbolVal keyNameProxy,
+ pccp'Description = description,
+ pccp'Default = "TODO: Array values", -- T.pack $ show defaultValue,
+ pccp'EnumValues = []
+ }
+ toEntry (SomePropertyKeyWithMetaData (SEnum _) EnumMetaData {..}) =
+ PluginCustomConfigParam {
+ pccp'Name = T.pack $ symbolVal keyNameProxy,
+ pccp'Description = description,
+ pccp'Default = T.pack $ show defaultValue,
+ pccp'EnumValues = map (T.pack . show) enumValues
+ }
+ toEntry (SomePropertyKeyWithMetaData SProperties PropertiesMetaData {..}) =
+ PluginCustomConfigParam {
+ pccp'Name = T.pack $ symbolVal keyNameProxy,
+ pccp'Description = description,
+ pccp'Default = T.pack $ show defaultValue,
+ pccp'EnumValues = []
+ }
+ renderCfg :: PluginCustomConfig -> T.Text
+ renderCfg (PluginCustomConfig pId pccParams) =
+ T.unlines (pluginHeader : tableHeader : rows pccParams)
+ where
+ pluginHeader = "## " <> pId
+ tableHeader =
+ "| Property | Description | Default | Allowed values |" <> "\n" <>
+ "| --- | --- | --- | --- |"
+ rows = map renderRow
+ renderRow PluginCustomConfigParam {..} =
+ "| `" <> pccp'Name <> "` | " <> pccp'Description <> " | `" <> pccp'Default <> "` | " <> renderEnum pccp'EnumValues <> " |"
+        renderEnum [] = " &nbsp; " -- Placeholder to prevent missing cells
+        renderEnum vs = " <ul> " <> (T.intercalate " " $ map (\x -> "<li>" <> x <> "</li>") vs) <> " </ul> "
diff --git a/hls-plugin-api/src/Ide/Plugin/Properties.hs b/hls-plugin-api/src/Ide/Plugin/Properties.hs
index dda2bb7e33..49a45721b4 100644
--- a/hls-plugin-api/src/Ide/Plugin/Properties.hs
+++ b/hls-plugin-api/src/Ide/Plugin/Properties.hs
@@ -21,9 +21,10 @@ module Ide.Plugin.Properties
MetaData (..),
PropertyKey (..),
SPropertyKey (..),
+ SomePropertyKeyWithMetaData (..),
KeyNameProxy (..),
KeyNamePath (..),
- Properties,
+ Properties(..),
HasProperty,
HasPropertyByPath,
emptyProperties,
@@ -42,6 +43,8 @@ module Ide.Plugin.Properties
usePropertyByPathEither,
usePropertyByPath,
(&),
+ PluginCustomConfig(..),
+ PluginCustomConfigParam(..),
)
where
@@ -516,3 +519,15 @@ toVSCodeExtensionSchema' ps = case ps of
]
(SomePropertyKeyWithMetaData SProperties PropertiesMetaData {..}) ->
map (first Just) $ toVSCodeExtensionSchema' childrenProperties
+
+data PluginCustomConfig = PluginCustomConfig {
+ pcc'Name :: T.Text,
+ pcc'Params :: [PluginCustomConfigParam]
+}
+data PluginCustomConfigParam = PluginCustomConfigParam {
+ pccp'Name :: T.Text,
+ pccp'Description :: T.Text,
+ pccp'Default :: T.Text,
+ pccp'EnumValues :: [T.Text]
+}
+
diff --git a/hls-plugin-api/src/Ide/Types.hs b/hls-plugin-api/src/Ide/Types.hs
index c84fe15345..3a06656a77 100644
--- a/hls-plugin-api/src/Ide/Types.hs
+++ b/hls-plugin-api/src/Ide/Types.hs
@@ -727,7 +727,7 @@ instance PluginRequestMethod Method_TextDocumentPrepareRename where
combineResponses _ _ _ _ (x :| _) = x
instance PluginRequestMethod Method_TextDocumentHover where
- combineResponses _ _ _ _ (mapMaybe nullToMaybe . toList -> hs :: [Hover]) =
+ combineResponses _ _ _ _ (mapMaybe nullToMaybe . toList -> (hs :: [Hover])) =
if null hs
then InR Null
else InL $ Hover (InL mcontent) r
diff --git a/hls-test-utils/hls-test-utils.cabal b/hls-test-utils/hls-test-utils.cabal
index 49f58d82c4..084de98534 100644
--- a/hls-test-utils/hls-test-utils.cabal
+++ b/hls-test-utils/hls-test-utils.cabal
@@ -1,6 +1,6 @@
cabal-version: 2.4
name: hls-test-utils
-version: 2.9.0.1
+version: 2.11.0.0
synopsis: Utilities used in the tests of Haskell Language Server
description:
Please see the README on GitHub at
@@ -43,8 +43,8 @@ library
, directory
, extra
, filepath
- , ghcide == 2.9.0.1
- , hls-plugin-api == 2.9.0.1
+ , ghcide == 2.11.0.0
+ , hls-plugin-api == 2.11.0.0
, lens
, lsp
, lsp-test ^>=0.17
diff --git a/hls-test-utils/src/Test/Hls/Util.hs b/hls-test-utils/src/Test/Hls/Util.hs
index d0621ebe3a..98c795f8e0 100644
--- a/hls-test-utils/src/Test/Hls/Util.hs
+++ b/hls-test-utils/src/Test/Hls/Util.hs
@@ -36,6 +36,7 @@ module Test.Hls.Util
, inspectCodeAction
, inspectCommand
, inspectDiagnostic
+ , inspectDiagnosticAny
, waitForDiagnosticsFrom
, waitForDiagnosticsFromSource
, waitForDiagnosticsFromSourceWithTimeout
@@ -247,6 +248,10 @@ inspectDiagnostic :: [Diagnostic] -> [T.Text] -> IO Diagnostic
inspectDiagnostic diags s = onMatch diags (\ca -> all (`T.isInfixOf` (ca ^. L.message)) s) err
where err = "expected diagnostic matching '" ++ show s ++ "' but did not find one"
+inspectDiagnosticAny :: [Diagnostic] -> [T.Text] -> IO Diagnostic
+inspectDiagnosticAny diags s = onMatch diags (\ca -> any (`T.isInfixOf` (ca ^. L.message)) s) err
+  where err = "expected diagnostic matching one of '" ++ show s ++ "' but did not find one"
+
expectDiagnostic :: [Diagnostic] -> [T.Text] -> IO ()
expectDiagnostic diags s = void $ inspectDiagnostic diags s
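A hypothetical use of the new inspectDiagnosticAny in a plugin test, accepting either of two wordings GHC may emit (the message texts are illustrative, not taken from the patch):

{-# LANGUAGE OverloadedStrings #-}

import Control.Monad (void)
import Language.LSP.Protocol.Types (Diagnostic)
import Test.Hls.Util (inspectDiagnosticAny)

-- Succeeds if some diagnostic's message contains either wording.
expectNotInScope :: [Diagnostic] -> IO ()
expectNotInScope diags =
  void $ inspectDiagnosticAny diags
    [ "Variable not in scope"
    , "Not in scope:"
    ]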
diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs
index 2abee54b5c..9a56467f3f 100644
--- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs
+++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs
@@ -20,11 +20,11 @@ import qualified Data.HashMap.Strict as HashMap
import qualified Data.List as List
import qualified Data.List.NonEmpty as NE
import qualified Data.Maybe as Maybe
+import Data.Proxy
import qualified Data.Text ()
import qualified Data.Text as T
import qualified Data.Text.Encoding as Encoding
import Data.Text.Utf16.Rope.Mixed as Rope
-import Data.Typeable
import Development.IDE as D
import Development.IDE.Core.FileStore (getVersionedTextDoc)
import Development.IDE.Core.PluginUtils
@@ -249,10 +249,12 @@ cabalRules recorder plId = do
let warningDiags = fmap (Diagnostics.warningDiagnostic file) pWarnings
case pm of
Left (_cabalVersion, pErrorNE) -> do
- let regex :: T.Text
+ let regexUnknownCabalBefore310 :: T.Text
-- We don't support the cabal version, this should not be an error, as the
-- user did not do anything wrong. Instead we cast it to a warning
- regex = "Unsupported cabal-version [0-9]+.[0-9]*"
+ regexUnknownCabalBefore310 = "Unsupported cabal-version [0-9]+.[0-9]*"
+ regexUnknownCabalVersion :: T.Text
+ regexUnknownCabalVersion = "Unsupported cabal format version in cabal-version field: [0-9]+.[0-9]+"
unsupportedCabalHelpText = unlines
[ "The used `cabal-version` is not fully supported by this `HLS` binary."
, "Either the `cabal-version` is unknown, or too new for this executable."
@@ -267,7 +269,10 @@ cabalRules recorder plId = do
NE.toList $
NE.map
( \pe@(PError pos text) ->
- if text =~ regex
+ if any (text =~)
+ [ regexUnknownCabalBefore310
+ , regexUnknownCabalVersion
+ ]
then Diagnostics.warningDiagnostic file (Syntax.PWarning Syntax.PWTOther pos $
unlines
[ text
@@ -437,14 +442,14 @@ newtype OfInterestCabalVar = OfInterestCabalVar (Var (HashMap NormalizedFilePath
instance Shake.IsIdeGlobal OfInterestCabalVar
data IsCabalFileOfInterest = IsCabalFileOfInterest
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable IsCabalFileOfInterest
instance NFData IsCabalFileOfInterest
type instance RuleResult IsCabalFileOfInterest = CabalFileOfInterestResult
data CabalFileOfInterestResult = NotCabalFOI | IsCabalFOI FileOfInterestStatus
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable CabalFileOfInterestResult
instance NFData CabalFileOfInterestResult
diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs
index ed43099998..3b46eec128 100644
--- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs
+++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs
@@ -190,6 +190,12 @@ addDependencySuggestCodeAction plId verTxtDocId suggestions haskellFilePath caba
-- > It is a member of the hidden package ‘split-0.2.5’.
-- > Perhaps you need to add ‘split’ to the build-depends in your .cabal file."
--
+-- or this if PackageImports extension is used:
+--
+-- > "Could not find module ‘Data.List.Split’
+-- > Perhaps you meant
+-- > Data.List.Split (needs flag -package-id split-0.2.5)"
+--
-- It extracts mentioned package names and version numbers.
-- In this example, it will be @[("split", "0.2.5")]@
--
@@ -204,13 +210,18 @@ hiddenPackageSuggestion diag = getMatch (msg =~ regex)
msg :: T.Text
msg = _message diag
regex :: T.Text -- TODO: Support multiple packages suggestion
- regex = "It is a member of the hidden package [\8216']([a-zA-Z0-9-]*[a-zA-Z0-9])(-([0-9\\.]*))?[\8217']"
+ regex =
+ let regex' = "([a-zA-Z0-9-]*[a-zA-Z0-9])(-([0-9\\.]*))?"
+ in "It is a member of the hidden package [\8216']" <> regex' <> "[\8217']"
+ <> "|"
+ <> "needs flag -package-id " <> regex'
-- Have to do this matching because `Regex.TDFA` doesn't(?) support
-- not-capturing groups like (?:message)
getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [(T.Text, T.Text)]
getMatch (_, _, _, []) = []
- getMatch (_, _, _, [dependency, _, cleanVersion]) = [(dependency, cleanVersion)]
- getMatch (_, _, _, _) = error "Impossible pattern matching case"
+ getMatch (_, _, _, [dependency, _, cleanVersion, "", "", ""]) = [(dependency, cleanVersion)]
+ getMatch (_, _, _, ["", "", "", dependency, _, cleanVersion]) = [(dependency, cleanVersion)]
+ getMatch (_, _, _, _) = []
command :: Recorder (WithPriority Log) -> CommandFunction IdeState CabalAddCommandParams
command recorder state _ params@(CabalAddCommandParams {cabalPath = path, verTxtDocId = verTxtDocId, buildTarget = target, dependency = dep, version = mbVer}) = do
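Because the widened pattern is an alternation of two three-group branches, a match always yields six capture groups, with the groups of the unmatched branch left empty; that is why getMatch now dispatches on six-element lists. A standalone sketch of that behaviour, assuming regex-tdfa's Text instances as the plugin itself does (example messages are illustrative):

{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Text as T
import Text.Regex.TDFA ((=~))

groupsFor :: T.Text -> [T.Text]
groupsFor msg = gs
  where
    sub = "([a-zA-Z0-9-]*[a-zA-Z0-9])(-([0-9\\.]*))?"
    regex = "It is a member of the hidden package [\8216']" <> sub <> "[\8217']"
            <> "|" <> "needs flag -package-id " <> sub :: T.Text
    (_, _, _, gs) = msg =~ regex :: (T.Text, T.Text, T.Text, [T.Text])

main :: IO ()
main = do
  -- hidden-package wording: the first three groups are filled
  print (groupsFor "It is a member of the hidden package \8216split-0.2.5\8217.")
  -- PackageImports wording: the last three groups are filled
  print (groupsFor "Data.List.Split (needs flag -package-id split-0.2.5)")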
diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs
index 5defdbbe63..0e1053453b 100644
--- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs
+++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs
@@ -7,6 +7,7 @@ import Distribution.PackageDescription (Benchmark (..),
BuildInfo (..),
CondTree (condTreeData),
Executable (..),
+ ForeignLib (..),
GenericPackageDescription (..),
Library (..),
UnqualComponentName,
@@ -118,6 +119,10 @@ sourceDirsExtractionTestSuite name gpd = extractRelativeDirsFromStanza name gpd
sourceDirsExtractionBenchmark :: Maybe StanzaName -> GenericPackageDescription -> [FilePath]
sourceDirsExtractionBenchmark name gpd = extractRelativeDirsFromStanza name gpd condBenchmarks benchmarkBuildInfo
+-- | Extracts the source directories of foreign-lib stanza with the given name.
+sourceDirsExtractionForeignLib :: Maybe StanzaName -> GenericPackageDescription -> [FilePath]
+sourceDirsExtractionForeignLib name gpd = extractRelativeDirsFromStanza name gpd condForeignLibs foreignLibBuildInfo
+
{- | Takes a possible stanza name, a GenericPackageDescription,
a function to access the stanza information we are interested in
and a function to access the build info from the specific stanza.
diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs
index c27568d692..03e517eae2 100644
--- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs
+++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs
@@ -1,7 +1,4 @@
{-# LANGUAGE OverloadedStrings #-}
-{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-}
-
-{-# HLINT ignore "Redundant bracket" #-}
module Ide.Plugin.Cabal.Completion.Data where
@@ -19,6 +16,17 @@ import Ide.Plugin.Cabal.Completion.Completer.Types (Completer)
import Ide.Plugin.Cabal.Completion.Types
import Ide.Plugin.Cabal.LicenseSuggest (licenseNames)
+-- | Ad-hoc data type for modelling the available top-level stanzas.
+-- Currently used only to avoid string comparisons in
+-- 'stanzaKeywordMap' and 'libExecTestBenchCommons'.
+data TopLevelStanza
+ = Library
+ | Executable
+ | TestSuite
+ | Benchmark
+ | ForeignLib
+ | Common
+
-- ----------------------------------------------------------------
-- Completion Data
-- ----------------------------------------------------------------
@@ -71,12 +79,12 @@ cabalKeywords =
stanzaKeywordMap :: Map StanzaType (Map KeyWordName Completer)
stanzaKeywordMap =
Map.fromList
- [ ("library", libraryFields <> libExecTestBenchCommons),
- ("executable", executableFields <> libExecTestBenchCommons),
- ("test-suite", testSuiteFields <> libExecTestBenchCommons),
- ("benchmark", benchmarkFields <> libExecTestBenchCommons),
- ("foreign-library", foreignLibraryFields <> libExecTestBenchCommons),
- ("common", libExecTestBenchCommons),
+ [ ("library", libraryFields <> libExecTestBenchCommons Library),
+ ("executable", executableFields <> libExecTestBenchCommons Executable),
+ ("test-suite", testSuiteFields <> libExecTestBenchCommons TestSuite),
+ ("benchmark", benchmarkFields <> libExecTestBenchCommons Benchmark),
+ ("foreign-library", foreignLibraryFields <> libExecTestBenchCommons ForeignLib),
+ ("common", libExecTestBenchCommons Library),
+ ("common", libExecTestBenchCommons Common),
("flag", flagFields),
("source-repository", sourceRepositoryFields)
]
@@ -162,8 +171,8 @@ flagFields =
("lib-version-linux:", noopCompleter)
]
-libExecTestBenchCommons :: Map KeyWordName Completer
-libExecTestBenchCommons =
+libExecTestBenchCommons :: TopLevelStanza -> Map KeyWordName Completer
+libExecTestBenchCommons st =
Map.fromList
[ ("import:", importCompleter),
("build-depends:", noopCompleter),
@@ -183,6 +192,8 @@ libExecTestBenchCommons =
("includes:", filePathCompleter),
("install-includes:", filePathCompleter),
("include-dirs:", directoryCompleter),
+ ("autogen-includes:", filePathCompleter),
+ ("autogen-modules:", moduleCompleterByTopLevelStanza),
("c-sources:", filePathCompleter),
("cxx-sources:", filePathCompleter),
("asm-sources:", filePathCompleter),
@@ -203,6 +214,26 @@ libExecTestBenchCommons =
("extra-framework-dirs:", directoryCompleter),
("mixins:", noopCompleter)
]
+ where
+ --
+ moduleCompleterByTopLevelStanza = case st of
+ Library -> modulesCompleter sourceDirsExtractionLibrary
+ Executable -> modulesCompleter sourceDirsExtractionExecutable
+ TestSuite -> modulesCompleter sourceDirsExtractionTestSuite
+ Benchmark -> modulesCompleter sourceDirsExtractionBenchmark
+ ForeignLib -> modulesCompleter sourceDirsExtractionForeignLib
+ Common ->
+ -- TODO: We can't provide a module completer because we provide
+ -- module completions based on the "hs-source-dirs" after parsing the file,
+ -- i.e. based on the 'PackageDescription'.
+ -- "common" stanzas are erased in the 'PackageDescription' representation,
+ -- thus we can't provide accurate module completers right now, as we don't
+ -- know what the 'hs-source-dirs' in the "common" stanza are.
+ --
+ -- A potential fix would be to introduce an intermediate representation that
+ -- parses the '.cabal' file such that we have access to the 'hs-source-dirs',
+ -- but does not erase the "common" stanza.
+ noopCompleter
-- | Contains a map of the most commonly used licenses, weighted by their popularity.
--
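The `hs-source-dirs` comment above is the heart of the `Common` case: module completion candidates are derived from the source directories recorded in the parsed package description, and erased `common` stanzas simply have none. A rough, hypothetical sketch of that derivation (the helper below is not part of the plugin) for context:

```haskell
import Data.List (intercalate, stripPrefix)
import System.FilePath (dropExtension, isExtensionOf, splitDirectories)

-- Turn Haskell files found under a stanza's hs-source-dirs into module-name
-- completion candidates; with no source dirs there is nothing to offer.
moduleCandidates :: [FilePath] -> [FilePath] -> [String]
moduleCandidates sourceDirs files =
  [ toModuleName rel
  | file <- files
  , "hs" `isExtensionOf` file
  , dir <- sourceDirs
  , Just rel <- [stripPrefix (dir <> "/") file]
  ]
  where
    toModuleName = intercalate "." . splitDirectories . dropExtension

-- moduleCandidates ["src"] ["src/Foo/Bar.hs", "test/Spec.hs"] == ["Foo.Bar"]
```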
diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs
index 2655fbcaa6..59796afe2b 100644
--- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs
+++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs
@@ -8,7 +8,6 @@ import Control.DeepSeq (NFData)
import Control.Lens ((^.))
import Data.Hashable
import qualified Data.Text as T
-import Data.Typeable
import Development.IDE as D
import qualified Distribution.Fields as Syntax
import qualified Distribution.PackageDescription as PD
@@ -44,7 +43,7 @@ instance Pretty Log where
type instance RuleResult ParseCabalFile = PD.GenericPackageDescription
data ParseCabalFile = ParseCabalFile
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable ParseCabalFile
@@ -53,7 +52,7 @@ instance NFData ParseCabalFile
type instance RuleResult ParseCabalFields = [Syntax.Field Syntax.Position]
data ParseCabalFields = ParseCabalFields
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable ParseCabalFields
@@ -62,7 +61,7 @@ instance NFData ParseCabalFields
type instance RuleResult ParseCabalCommonSections = [Syntax.Field Syntax.Position]
data ParseCabalCommonSections = ParseCabalCommonSections
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable ParseCabalCommonSections
diff --git a/plugins/hls-cabal-plugin/test/CabalAdd.hs b/plugins/hls-cabal-plugin/test/CabalAdd.hs
index 3b36f82bc2..6517c811fe 100644
--- a/plugins/hls-cabal-plugin/test/CabalAdd.hs
+++ b/plugins/hls-cabal-plugin/test/CabalAdd.hs
@@ -33,6 +33,8 @@ cabalAddTests =
(generateAddDependencyTestSession "cabal-add-lib.cabal" ("src" > "MyLib.hs") "split" [348])
, runHaskellTestCaseSession "Code Actions - Can add hidden package to a test" ("cabal-add-testdata" > "cabal-add-tests")
(generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" > "Main.hs") "split" [478])
+ , runHaskellTestCaseSession "Code Actions - Can add hidden package to a test with PackageImports" ("cabal-add-testdata" > "cabal-add-tests")
+ (generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" > "MainPackageImports.hs") "split" [731])
, runHaskellTestCaseSession "Code Actions - Can add hidden package to a benchmark" ("cabal-add-testdata" > "cabal-add-bench")
(generateAddDependencyTestSession "cabal-add-bench.cabal" ("bench" > "Main.hs") "split" [403])
@@ -122,6 +124,23 @@ cabalAddTests =
[ ("3d-graphics-examples", T.empty)
, ("3d-graphics-examples", "1.1.6")
]
+ , testHiddenPackageSuggestions "Check CabalAdd's parser, with version, with PackageImports"
+ [ "(needs flag -package-id base-0.1.0.0)"
+ , "(needs flag -package-id Blammo-wai-0.11.0)"
+ , "(needs flag -package-id BlastHTTP-2.6.4.3)"
+ , "(needs flag -package-id CC-delcont-ref-tf-0.0.0.2)"
+ , "(needs flag -package-id 3d-graphics-examples-1.1.6)"
+ , "(needs flag -package-id AAI-0.1)"
+ , "(needs flag -package-id AWin32Console-1.19.1)"
+ ]
+ [ ("base","0.1.0.0")
+ , ("Blammo-wai", "0.11.0")
+ , ("BlastHTTP", "2.6.4.3")
+ , ("CC-delcont-ref-tf", "0.0.0.2")
+ , ("3d-graphics-examples", "1.1.6")
+ , ("AAI", "0.1")
+ , ("AWin32Console", "1.19.1")
+ ]
]
where
generateAddDependencyTestSession :: FilePath -> FilePath -> T.Text -> [Int] -> Session ()
diff --git a/plugins/hls-cabal-plugin/test/Completer.hs b/plugins/hls-cabal-plugin/test/Completer.hs
index 6b1f772af0..ab7165b1ac 100644
--- a/plugins/hls-cabal-plugin/test/Completer.hs
+++ b/plugins/hls-cabal-plugin/test/Completer.hs
@@ -7,6 +7,7 @@ module Completer where
import Control.Lens ((^.), (^?))
import Control.Lens.Prism
+import Control.Monad (forM_)
import qualified Data.ByteString as ByteString
import qualified Data.ByteString.Char8 as BS8
import Data.Maybe (mapMaybe)
@@ -40,7 +41,8 @@ completerTests =
completionHelperTests,
filePathExposedModulesTests,
exposedModuleCompleterTests,
- importCompleterTests
+ importCompleterTests,
+ autogenFieldCompletionTests
]
basicCompleterTests :: TestTree
@@ -336,6 +338,26 @@ importCompleterTests =
[Syntax.SecArgName (Syntax.Position row (col + 7)) (BS8.pack name)]
[]
+autogenFieldCompletionTests :: TestTree
+autogenFieldCompletionTests =
+ testGroup "Autogen Field Completer Tests"
+ [ testAutogenField "library" "completion/autogen-completion.cabal" (Position 6 9) ["autogen-modules:", "autogen-includes:"]
+ , testAutogenField "executable" "completion/autogen-completion.cabal" (Position 11 9) ["autogen-modules:", "autogen-includes:"]
+ , testAutogenField "test-suite" "completion/autogen-completion.cabal" (Position 16 9) ["autogen-modules:", "autogen-includes:"]
+ , testAutogenField "benchmark" "completion/autogen-completion.cabal" (Position 21 9) ["autogen-modules:", "autogen-includes:"]
+ , testAutogenField "common" "completion/autogen-completion.cabal" (Position 24 9) ["autogen-modules:", "autogen-includes:"]
+ ]
+
+ where
+ testAutogenField :: String -> FilePath -> Position -> [T.Text] -> TestTree
+ testAutogenField section file pos expected = runCabalTestCaseSession ("autogen-modules completion in " <> section) "" $ do
+ doc <- openDoc file "cabal"
+ items <- getCompletions doc pos
+ let labels = map (^. L.label) items
+ liftIO $ forM_ expected $ \expect ->
+ assertBool (T.unpack expect <> " not found in " <> section) $
+ any (expect `T.isInfixOf`) labels
+
simpleCompleterData :: Maybe StanzaName -> FilePath -> T.Text -> CompleterData
simpleCompleterData sName dir pref = do
CompleterData
diff --git a/plugins/hls-cabal-plugin/test/Main.hs b/plugins/hls-cabal-plugin/test/Main.hs
index cec2d36a53..fcb85a081e 100644
--- a/plugins/hls-cabal-plugin/test/Main.hs
+++ b/plugins/hls-cabal-plugin/test/Main.hs
@@ -110,7 +110,7 @@ pluginTests =
, runCabalTestCaseSession "Publishes Diagnostics on unsupported cabal version as Warning" "" $ do
_ <- openDoc "unsupportedVersion.cabal" "cabal"
diags <- cabalCaptureKick
- unknownVersionDiag <- liftIO $ inspectDiagnostic diags ["Unsupported cabal-version 99999.0"]
+ unknownVersionDiag <- liftIO $ inspectDiagnosticAny diags ["Unsupported cabal-version 99999.0", "Unsupported cabal format version in cabal-version field: 99999.0"]
liftIO $ do
length diags @?= 1
unknownVersionDiag ^. L.range @?= Range (Position 0 0) (Position 1 0)
@@ -131,29 +131,6 @@ pluginTests =
expectNoMoreDiagnostics 1 hsDoc "typechecking"
cabalDoc <- openDoc "simple-cabal.cabal" "cabal"
expectNoMoreDiagnostics 1 cabalDoc "parsing"
- , runCabalTestCaseSession "Diagnostics in .hs files from invalid .cabal file" "simple-cabal" $ do
- hsDoc <- openDoc "A.hs" "haskell"
- expectNoMoreDiagnostics 1 hsDoc "typechecking"
- cabalDoc <- openDoc "simple-cabal.cabal" "cabal"
- expectNoMoreDiagnostics 1 cabalDoc "parsing"
- let theRange = Range (Position 3 20) (Position 3 23)
- -- Invalid license
- changeDoc
- cabalDoc
- [ TextDocumentContentChangeEvent $
- InL TextDocumentContentChangePartial
- { _range = theRange
- , _rangeLength = Nothing
- , _text = "MIT3"
- }
- ]
- cabalDiags <- waitForDiagnosticsFrom cabalDoc
- unknownLicenseDiag <- liftIO $ inspectDiagnostic cabalDiags ["Unknown SPDX license identifier: 'MIT3'"]
- expectNoMoreDiagnostics 1 hsDoc "typechecking"
- liftIO $ do
- length cabalDiags @?= 1
- unknownLicenseDiag ^. L.range @?= Range (Position 3 24) (Position 4 0)
- unknownLicenseDiag ^. L.severity @?= Just DiagnosticSeverity_Error
]
]
-- ----------------------------------------------------------------------------
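The switch to `inspectDiagnosticAny` above accepts whichever of the two wordings the installed Cabal library produces for an unsupported `cabal-version`. A toy version of that helper (name and `Either`-based shape are assumptions, not the real test-utils API) captures the idea:

```haskell
import qualified Data.Text as T

-- Accept a diagnostic if it contains any of the expected phrasings, so the
-- test does not pin itself to a single Cabal release's wording.
findDiagnosticAny :: [T.Text] -> [T.Text] -> Either String T.Text
findDiagnosticAny diags wanted =
  case [ d | d <- diags, any (`T.isInfixOf` d) wanted ] of
    (d : _) -> Right d
    []      -> Left "no diagnostic matched any of the expected messages"
```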
diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal
index d217f8c4d5..9adc498231 100644
--- a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal
+++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal
@@ -16,3 +16,11 @@ test-suite cabal-add-tests-test
hs-source-dirs: test
main-is: Main.hs
build-depends: base
+
+test-suite cabal-add-tests-test-package-imports
+ import: warnings
+ default-language: Haskell2010
+ type: exitcode-stdio-1.0
+ hs-source-dirs: test
+ main-is: MainPackageImports.hs
+ build-depends: base
diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/MainPackageImports.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/MainPackageImports.hs
new file mode 100644
index 0000000000..753dd165dd
--- /dev/null
+++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/MainPackageImports.hs
@@ -0,0 +1,8 @@
+{-# LANGUAGE PackageImports #-}
+
+module Main (main) where
+
+import "split" Data.List.Split
+
+main :: IO ()
+main = putStrLn "Test suite not yet implemented."
diff --git a/plugins/hls-cabal-plugin/test/testdata/completion/autogen-completion.cabal b/plugins/hls-cabal-plugin/test/testdata/completion/autogen-completion.cabal
new file mode 100644
index 0000000000..dd5c86d339
--- /dev/null
+++ b/plugins/hls-cabal-plugin/test/testdata/completion/autogen-completion.cabal
@@ -0,0 +1,25 @@
+cabal-version: 3.0
+name: autogen-completion
+version: 0.1.0.0
+
+library
+ hs-source-dirs: src
+ autogen-
+
+executable autoexe
+ main-is: Main.hs
+ hs-source-dirs: src
+ autogen-
+
+test-suite autotest
+ type: exitcode-stdio-1.0
+ hs-source-dirs: src
+ autogen-
+
+benchmark autobench
+ type: exitcode-stdio-1.0
+ hs-source-dirs: src
+ autogen-
+
+common defaults
+ autogen-
diff --git a/plugins/hls-call-hierarchy-plugin/test/Main.hs b/plugins/hls-call-hierarchy-plugin/test/Main.hs
index f356a0e278..31dad633e6 100644
--- a/plugins/hls-call-hierarchy-plugin/test/Main.hs
+++ b/plugins/hls-call-hierarchy-plugin/test/Main.hs
@@ -114,13 +114,13 @@ prepareCallHierarchyTests =
[ testCase "1" $ do
let contents = T.unlines ["{-# LANGUAGE TypeFamilies #-}", "data family A"]
-- Since GHC 9.10 the range also includes the family name (and its parameters if any)
- range = mkRange 1 0 1 (if ghcVersion == GHC910 then 13 else 11)
+ range = mkRange 1 0 1 (if ghcVersion >= GHC910 then 13 else 11)
selRange = mkRange 1 12 1 13
expected = mkCallHierarchyItemT "A" SymbolKind_Function range selRange
oneCaseWithCreate contents 1 12 expected
, testCase "2" $ do
let contents = T.unlines [ "{-# LANGUAGE TypeFamilies #-}" , "data family A a"]
- range = mkRange 1 0 1 (if ghcVersion == GHC910 then 15 else 11)
+ range = mkRange 1 0 1 (if ghcVersion >= GHC910 then 15 else 11)
selRange = mkRange 1 12 1 13
expected = mkCallHierarchyItemT "A" SymbolKind_Function range selRange
oneCaseWithCreate contents 1 12 expected
diff --git a/plugins/hls-change-type-signature-plugin/test/Main.hs b/plugins/hls-change-type-signature-plugin/test/Main.hs
index 3a45058a57..cd1b152c0b 100644
--- a/plugins/hls-change-type-signature-plugin/test/Main.hs
+++ b/plugins/hls-change-type-signature-plugin/test/Main.hs
@@ -39,7 +39,7 @@ test :: TestTree
test = testGroup "changeTypeSignature" [
testRegexes
, codeActionTest "TExpectedActual" 4 11
- , knownBrokenForGhcVersions [GHC94 .. GHC910] "Error Message in 9.2+ does not provide enough info" $
+ , knownBrokenForGhcVersions [GHC96 .. GHC912] "Error Message in 9.2+ does not provide enough info" $
codeActionTest "TRigidType" 4 14
, codeActionTest "TRigidType2" 4 6
, codeActionTest "TLocalBinding" 7 22
diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs
index 5ff79e2e37..ecbd495246 100644
--- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs
+++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs
@@ -1,10 +1,15 @@
{-# LANGUAGE GADTs #-}
+{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
-module Ide.Plugin.Class.CodeAction where
+module Ide.Plugin.Class.CodeAction (
+ addMethodPlaceholders,
+ codeAction,
+) where
+import Control.Arrow ((>>>))
import Control.Lens hiding (List, use)
import Control.Monad.Error.Class (MonadError (throwError))
import Control.Monad.Extra
@@ -13,8 +18,6 @@ import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except (ExceptT)
import Control.Monad.Trans.Maybe
import Data.Aeson hiding (Null)
-import Data.Bifunctor (second)
-import Data.Either.Extra (rights)
import Data.List
import Data.List.Extra (nubOrdOn)
import qualified Data.Map.Strict as Map
@@ -23,11 +26,14 @@ import Data.Maybe (isNothing, listToMaybe,
import qualified Data.Set as Set
import qualified Data.Text as T
import Development.IDE
-import Development.IDE.Core.Compile (sourceTypecheck)
import Development.IDE.Core.FileStore (getVersionedTextDoc)
import Development.IDE.Core.PluginUtils
import Development.IDE.Core.PositionMapping (fromCurrentRange)
import Development.IDE.GHC.Compat
+import Development.IDE.GHC.Compat.Error (TcRnMessage (..),
+ _TcRnMessage,
+ msgEnvelopeErrorL,
+ stripTcRnMessageContext)
import Development.IDE.GHC.Compat.Util
import Development.IDE.Spans.AtPoint (pointCommand)
import Ide.Plugin.Class.ExactPrint
@@ -80,23 +86,25 @@ addMethodPlaceholders _ state _ param@AddMinimalMethodsParams{..} = do
-- This implementation is ad-hoc in a sense that the diagnostic detection mechanism is
-- sensitive to the format of diagnostic messages from GHC.
codeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentCodeAction
-codeAction recorder state plId (CodeActionParams _ _ docId _ context) = do
+codeAction recorder state plId (CodeActionParams _ _ docId caRange _) = do
verTxtDocId <- liftIO $ runAction "classplugin.codeAction.getVersionedTextDoc" state $ getVersionedTextDoc docId
nfp <- getNormalizedFilePathE (verTxtDocId ^. L.uri)
- actions <- join <$> mapM (mkActions nfp verTxtDocId) methodDiags
- pure $ InL actions
+ activeDiagnosticsInRange (shakeExtras state) nfp caRange
+ >>= \case
+ Nothing -> pure $ InL []
+ Just fileDiags -> do
+ actions <- join <$> mapM (mkActions nfp verTxtDocId) (methodDiags fileDiags)
+ pure $ InL actions
where
- diags = context ^. L.diagnostics
-
- ghcDiags = filter (\d -> d ^. L.source == Just sourceTypecheck) diags
- methodDiags = filter (\d -> isClassMethodWarning (d ^. L.message)) ghcDiags
+ methodDiags fileDiags =
+ mapMaybe (\d -> (d,) <$> isClassMethodWarning (d ^. fdStructuredMessageL)) fileDiags
mkActions
:: NormalizedFilePath
-> VersionedTextDocumentIdentifier
- -> Diagnostic
+ -> (FileDiagnostic, ClassMinimalDef)
-> ExceptT PluginError (HandlerM Ide.Plugin.Config.Config) [Command |? CodeAction]
- mkActions docPath verTxtDocId diag = do
+ mkActions docPath verTxtDocId (diag, classMinDef) = do
(HAR {hieAst = ast}, pmap) <- runActionE "classplugin.findClassIdentifier.GetHieAst" state
$ useWithStaleE GetHieAst docPath
instancePosition <- handleMaybe (PluginInvalidUserState "fromCurrentRange") $
@@ -108,21 +116,19 @@ codeAction recorder state plId (CodeActionParams _ _ docId _ context) = do
$ useE GetInstanceBindTypeSigs docPath
(tmrTypechecked -> gblEnv ) <- runActionE "classplugin.codeAction.TypeCheck" state $ useE TypeCheck docPath
(hscEnv -> hsc) <- runActionE "classplugin.codeAction.GhcSession" state $ useE GhcSession docPath
- implemented <- findImplementedMethods ast instancePosition
- logWith recorder Info (LogImplementedMethods cls implemented)
+ logWith recorder Debug (LogImplementedMethods (hsc_dflags hsc) cls classMinDef)
pure
$ concatMap mkAction
$ nubOrdOn snd
$ filter ((/=) mempty . snd)
- $ fmap (second (filter (\(bind, _) -> bind `notElem` implemented)))
- $ mkMethodGroups hsc gblEnv range sigs cls
+ $ mkMethodGroups hsc gblEnv range sigs classMinDef
where
- range = diag ^. L.range
+ range = diag ^. fdLspDiagnosticL . L.range
- mkMethodGroups :: HscEnv -> TcGblEnv -> Range -> [InstanceBindTypeSig] -> Class -> [MethodGroup]
- mkMethodGroups hsc gblEnv range sigs cls = minimalDef <> [allClassMethods]
+ mkMethodGroups :: HscEnv -> TcGblEnv -> Range -> [InstanceBindTypeSig] -> ClassMinimalDef -> [MethodGroup]
+ mkMethodGroups hsc gblEnv range sigs classMinDef = minimalDef <> [allClassMethods]
where
- minimalDef = minDefToMethodGroups hsc gblEnv range sigs $ classMinimalDef cls
+ minimalDef = minDefToMethodGroups hsc gblEnv range sigs classMinDef
allClassMethods = ("all missing methods", makeMethodDefinitions hsc gblEnv range sigs)
mkAction :: MethodGroup -> [Command |? CodeAction]
@@ -163,25 +169,6 @@ codeAction recorder state plId (CodeActionParams _ _ docId _ context) = do
<=< nodeChildren
)
- findImplementedMethods
- :: HieASTs a
- -> Position
- -> ExceptT PluginError (HandlerM Ide.Plugin.Config.Config) [T.Text]
- findImplementedMethods asts instancePosition = do
- pure
- $ concat
- $ pointCommand asts instancePosition
- $ map (T.pack . getOccString) . rights . findInstanceValBindIdentifiers
-
- -- | Recurses through the given AST to find identifiers which are
- -- 'InstanceValBind's.
- findInstanceValBindIdentifiers :: HieAST a -> [Identifier]
- findInstanceValBindIdentifiers ast =
- let valBindIds = Map.keys
- . Map.filter (any isInstanceValBind . identInfo)
- $ getNodeIds ast
- in valBindIds <> concatMap findInstanceValBindIdentifiers (nodeChildren ast)
-
findClassFromIdentifier docPath (Right name) = do
(hscEnv -> hscenv, _) <- runActionE "classplugin.findClassFromIdentifier.GhcSessionDeps" state
$ useWithStaleE GhcSessionDeps docPath
@@ -203,12 +190,15 @@ isClassNodeIdentifier :: Identifier -> IdentifierDetails a -> Bool
isClassNodeIdentifier (Right i) ident | 'C':':':_ <- unpackFS $ occNameFS $ occName i = (isNothing . identType) ident && Use `Set.member` identInfo ident
isClassNodeIdentifier _ _ = False
-isClassMethodWarning :: T.Text -> Bool
-isClassMethodWarning = T.isPrefixOf "• No explicit implementation for"
+isClassMethodWarning :: StructuredMessage -> Maybe ClassMinimalDef
+isClassMethodWarning message = case message ^? _SomeStructuredMessage . msgEnvelopeErrorL . _TcRnMessage of
+ Nothing -> Nothing
+ Just tcRnMessage -> isUnsatisfiedMinimalDefWarning tcRnMessage
-isInstanceValBind :: ContextInfo -> Bool
-isInstanceValBind (ValBind InstanceBind _ _) = True
-isInstanceValBind _ = False
+isUnsatisfiedMinimalDefWarning :: TcRnMessage -> Maybe ClassMinimalDef
+isUnsatisfiedMinimalDefWarning = stripTcRnMessageContext >>> \case
+ TcRnUnsatisfiedMinimalDef classMinDef -> Just classMinDef
+ _ -> Nothing
type MethodSignature = T.Text
type MethodName = T.Text
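To spell out the shape of the change above: the handler no longer scrapes the rendered message text but matches the diagnostic's constructor and reuses the payload it carries. A toy model (the `Toy*` types stand in for GHC's real `TcRnMessage`; they are not part of the patch):

```haskell
{-# LANGUAGE LambdaCase #-}

-- Stand-ins for the structured diagnostics attached to a FileDiagnostic.
data ToyTcRnMessage
  = ToyUnsatisfiedMinimalDef [String] -- carries the class methods still missing
  | ToyOtherMessage String

-- Mirrors 'isUnsatisfiedMinimalDefWarning': succeed only on the constructor we
-- care about and hand back the minimal definition it already contains.
unsatisfiedMinimalDef :: ToyTcRnMessage -> Maybe [String]
unsatisfiedMinimalDef = \case
  ToyUnsatisfiedMinimalDef missing -> Just missing
  _                                -> Nothing
```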
diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs
index 11afcfd1c4..bb0994442a 100644
--- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs
+++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs
@@ -30,7 +30,11 @@ makeEditText pm df AddMinimalMethodsParams{..} = do
pm_parsed_source pm
old = T.pack $ exactPrint ps
+#if MIN_VERSION_ghc_exactprint(1,10,0)
+ ps' = addMethodDecls ps mDecls range withSig
+#else
(ps', _, _) = runTransform (addMethodDecls ps mDecls range withSig)
+#endif
new = T.pack $ exactPrint ps'
pure (old, new)
@@ -40,22 +44,30 @@ makeMethodDecl df (mName, sig) = do
sig' <- eitherToMaybe $ parseDecl df (T.unpack sig) $ T.unpack sig
pure (name, sig')
-#if MIN_VERSION_ghc(9,5,0)
-addMethodDecls :: ParsedSource -> [(LHsDecl GhcPs, LHsDecl GhcPs)] -> Range -> Bool -> TransformT Identity (Located (HsModule GhcPs))
+#if MIN_VERSION_ghc_exactprint(1,10,0)
+addMethodDecls :: ParsedSource -> [(LHsDecl GhcPs, LHsDecl GhcPs)] -> Range -> Bool -> Located (HsModule GhcPs)
#else
-addMethodDecls :: ParsedSource -> [(LHsDecl GhcPs, LHsDecl GhcPs)] -> Range -> Bool -> TransformT Identity (Located HsModule)
+addMethodDecls :: ParsedSource -> [(LHsDecl GhcPs, LHsDecl GhcPs)] -> Range -> Bool -> TransformT Identity (Located (HsModule GhcPs))
#endif
addMethodDecls ps mDecls range withSig
| withSig = go (concatMap (\(decl, sig) -> [sig, decl]) mDecls)
| otherwise = go (map fst mDecls)
where
go inserting = do
+#if MIN_VERSION_ghc_exactprint(1,10,0)
+ let allDecls = hsDecls ps
+#else
allDecls <- hsDecls ps
+#endif
case break (inRange range . getLoc) allDecls of
(before, L l inst : after) ->
let
instSpan = realSrcSpan $ getLoc l
+#if MIN_VERSION_ghc(9,11,0)
+ instCol = srcSpanStartCol instSpan - 1
+#else
instCol = srcSpanStartCol instSpan
+#endif
#if MIN_VERSION_ghc(9,9,0)
instRow = srcSpanEndLine instSpan
methodEpAnn = noAnnSrcSpanDP $ deltaPos 1 (instCol + defaultIndent)
@@ -91,7 +103,17 @@ addMethodDecls ps mDecls range withSig
addWhere :: HsDecl GhcPs -> HsDecl GhcPs
addWhere instd@(InstD xInstD (ClsInstD ext decl@ClsInstDecl{..})) =
case cid_ext of
-#if MIN_VERSION_ghc(9,9,0)
+#if MIN_VERSION_ghc(9,11,0)
+ (warnings, anns, key)
+ | EpTok _ <- acid_where anns -> instd
+ | otherwise ->
+ InstD xInstD (ClsInstD ext decl {
+ cid_ext = ( warnings
+ , anns { acid_where = EpTok d1 }
+ , key
+ )
+ })
+#elif MIN_VERSION_ghc(9,9,0)
(warnings, anns, key)
| any (\(AddEpAnn kw _ )-> kw == AnnWhere) anns -> instd
| otherwise ->
diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs
index 18c9dbae26..1669aba43d 100644
--- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs
+++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs
@@ -112,15 +112,15 @@ instance NFData InstanceBindLensResult where
type instance RuleResult GetInstanceBindLens = InstanceBindLensResult
data Log
- = LogImplementedMethods Class [T.Text]
+ = LogImplementedMethods DynFlags Class ClassMinimalDef
| LogShake Shake.Log
instance Pretty Log where
pretty = \case
- LogImplementedMethods cls methods ->
- pretty ("Detected implemented methods for class" :: String)
+ LogImplementedMethods dflags cls methods ->
+ pretty ("The following methods are missing" :: String)
<+> pretty (show (getOccString cls) <> ":") -- 'show' is used here to add quotes around the class name
- <+> pretty methods
+ <+> pretty (showSDoc dflags $ ppr methods)
LogShake log -> pretty log
data BindInfo = BindInfo
@@ -176,7 +176,11 @@ getInstanceBindLensRule recorder = do
getBindSpanWithoutSig :: ClsInstDecl GhcRn -> [BindInfo]
getBindSpanWithoutSig ClsInstDecl{..} =
- let bindNames = mapMaybe go (bagToList cid_binds)
+ let bindNames = mapMaybe go $
+#if !MIN_VERSION_ghc(9,11,0)
+ bagToList
+#endif
+ cid_binds
go (L l bind) = case bind of
FunBind{..}
-- `Generated` tagged for Template Haskell,
@@ -221,5 +225,10 @@ getInstanceBindTypeSigsRule recorder = do
let name = idName id
whenMaybe (isBindingName name) $ do
env <- tcInitTidyEnv
- let (_, ty) = tidyOpenType env (idType id)
+#if MIN_VERSION_ghc(9,11,0)
+ let ty =
+#else
+ let (_, ty) =
+#endif
+ tidyOpenType env (idType id)
pure $ InstanceBindTypeSig name ty
diff --git a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs
index 2c0adc9ca5..86d5923011 100644
--- a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs
+++ b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs
@@ -29,7 +29,6 @@ import Control.Monad.Trans.Maybe (MaybeT (MaybeT),
maybeToExceptT)
import Control.Monad.Trans.Writer.CPS
import Data.Coerce (coerce)
-import Data.Data (Typeable)
import Data.Foldable (traverse_)
import Data.Function (on, (&))
import Data.Hashable
@@ -158,7 +157,7 @@ simplify r =
withChildrenSimplified = r { _codeRange_children = simplify <$> _codeRange_children r }
data GetCodeRange = GetCodeRange
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetCodeRange
instance NFData GetCodeRange
diff --git a/plugins/hls-eval-plugin/README.md b/plugins/hls-eval-plugin/README.md
index 5f134d154b..d2b39498cb 100644
--- a/plugins/hls-eval-plugin/README.md
+++ b/plugins/hls-eval-plugin/README.md
@@ -40,7 +40,7 @@ A test is composed by a sequence of contiguous lines, the result of their evalua
"CDAB"
```
-You execute a test by clicking on the _Evaluate_ code lens that appears above it (or _Refresh_, if the test has been run previously).
+You execute a test by clicking on the _Evaluate_ code lens that appears above it (or _Refresh_, if the test has been run previously). The same evaluation is also available as a code action.
All tests in the same comment block are executed together.
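For readers skimming only the diff: a block the plugin acts on is an ordinary doctest-style comment, for example (illustrative, not copied from the README):

```haskell
-- >>> reverse "hello"
-- "olleh"
```

Triggering _Evaluate_, via the lens or, after this change, the code action, inserts or refreshes the result comment below the expression.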
diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs
index eaf97e4a58..30d43de005 100644
--- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs
+++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs
@@ -13,8 +13,8 @@ module Ide.Plugin.Eval (
import Development.IDE (IdeState)
import Ide.Logger (Recorder, WithPriority)
-import qualified Ide.Plugin.Eval.CodeLens as CL
import Ide.Plugin.Eval.Config
+import qualified Ide.Plugin.Eval.Handlers as Handlers
import Ide.Plugin.Eval.Rules (rules)
import qualified Ide.Plugin.Eval.Types as Eval
import Ide.Types (ConfigDescriptor (..),
@@ -27,9 +27,12 @@ import Language.LSP.Protocol.Message
-- |Plugin descriptor
descriptor :: Recorder (WithPriority Eval.Log) -> PluginId -> PluginDescriptor IdeState
descriptor recorder plId =
- (defaultPluginDescriptor plId "Provies a code lens to evaluate expressions in doctest comments")
- { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeLens (CL.codeLens recorder)
- , pluginCommands = [CL.evalCommand recorder plId]
+ (defaultPluginDescriptor plId "Provides code action and lens to evaluate expressions in doctest comments")
+ { pluginHandlers = mconcat
+ [ mkPluginHandler SMethod_TextDocumentCodeAction (Handlers.codeAction recorder)
+ , mkPluginHandler SMethod_TextDocumentCodeLens (Handlers.codeLens recorder)
+ ]
+ , pluginCommands = [Handlers.evalCommand recorder plId]
, pluginRules = rules recorder
, pluginConfigDescriptor = defaultConfigDescriptor
{ configCustomConfig = mkCustomConfig properties
diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/CodeLens.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs
similarity index 90%
rename from plugins/hls-eval-plugin/src/Ide/Plugin/Eval/CodeLens.hs
rename to plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs
index 800980ae4a..1f19b5b476 100644
--- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/CodeLens.hs
+++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs
@@ -12,7 +12,8 @@ A plugin inspired by the REPLoid feature of
For a full example see the "Ide.Plugin.Eval.Tutorial" module.
-}
-module Ide.Plugin.Eval.CodeLens (
+module Ide.Plugin.Eval.Handlers (
+ codeAction,
codeLens,
evalCommand,
) where
@@ -40,15 +41,10 @@ import Data.String (IsString)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Utf16.Rope.Mixed as Rope
-import Data.Typeable (Typeable)
-import Development.IDE.Core.FileStore (getUriContents)
+import Development.IDE.Core.FileStore (getUriContents, setSomethingModified)
import Development.IDE.Core.Rules (IdeState,
runAction)
-import Development.IDE.Core.RuleTypes (LinkableResult (linkableHomeMod),
- TypeCheck (..),
- tmrTypechecked)
-import Development.IDE.Core.Shake (useNoFile_, use_,
- uses_)
+import Development.IDE.Core.Shake (use_, uses_, VFSModified (VFSUnmodified), useWithSeparateFingerprintRule_)
import Development.IDE.GHC.Compat hiding (typeKind,
unitState)
import Development.IDE.GHC.Compat.Util (OverridingBool (..))
@@ -76,16 +72,18 @@ import GHC (ClsInst,
import Development.IDE.Core.RuleTypes (GetLinkable (GetLinkable),
GetModSummary (GetModSummary),
- GetModuleGraph (GetModuleGraph),
+ GetModuleGraphTransDepsFingerprints (GetModuleGraphTransDepsFingerprints),
GhcSessionDeps (GhcSessionDeps),
- ModSummaryResult (msrModSummary))
-import Development.IDE.Core.Shake (VFSModified (VFSUnmodified))
+ ModSummaryResult (msrModSummary),
+ LinkableResult (linkableHomeMod),
+ TypeCheck (..),
+ tmrTypechecked, GetModuleGraphTransDepsFingerprints(..), GetModuleGraph(..))
import qualified Development.IDE.GHC.Compat.Core as Compat (InteractiveImport (IIModule))
import qualified Development.IDE.GHC.Compat.Core as SrcLoc (unLoc)
import Development.IDE.Types.HscEnvEq (HscEnvEq (hscEnv))
import qualified GHC.LanguageExtensions.Type as LangExt (Extension (..))
-import Development.IDE.Core.FileStore (setSomethingModified)
+import Data.List.Extra (unsnoc)
import Development.IDE.Core.PluginUtils
import Development.IDE.Types.Shake (toKey)
import GHC.Types.SrcLoc (UnhelpfulSpanReason (UnhelpfulInteractive))
@@ -122,17 +120,39 @@ import qualified Language.LSP.Protocol.Lens as L
import Language.LSP.Protocol.Message
import Language.LSP.Protocol.Types
import Language.LSP.Server
+#if MIN_VERSION_ghc(9,11,0)
+import GHC.Unit.Module.ModIface (IfaceTopEnv (..))
+#endif
+
+codeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentCodeAction
+codeAction recorder st plId CodeActionParams{_textDocument,_range} = do
+ rangeCommands <- mkRangeCommands recorder st plId _textDocument
+ pure
+ $ InL
+ [ InL command
+ | (testRange, command) <- rangeCommands
+ , _range `isSubrangeOf` testRange
+ ]
{- | Code Lens provider
NOTE: Invoked every time the document is modified, not just when the document is saved.
-}
codeLens :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentCodeLens
-codeLens recorder st plId CodeLensParams{_textDocument} =
+codeLens recorder st plId CodeLensParams{_textDocument} = do
+ rangeCommands <- mkRangeCommands recorder st plId _textDocument
+ pure
+ $ InL
+ [ CodeLens range (Just command) Nothing
+ | (range, command) <- rangeCommands
+ ]
+
+mkRangeCommands :: Recorder (WithPriority Log) -> IdeState -> PluginId -> TextDocumentIdentifier -> ExceptT PluginError (HandlerM Config) [(Range, Command)]
+mkRangeCommands recorder st plId textDocument =
let dbg = logWith recorder Debug
perf = timed (\lbl duration -> dbg $ LogExecutionTime lbl duration)
- in perf "codeLens" $
+ in perf "evalMkRangeCommands" $
do
- let TextDocumentIdentifier uri = _textDocument
+ let TextDocumentIdentifier uri = textDocument
fp <- uriToFilePathE uri
let nfp = toNormalizedFilePath' fp
isLHS = isLiterate fp
@@ -145,11 +165,11 @@ codeLens recorder st plId CodeLensParams{_textDocument} =
let Sections{..} = commentsToSections isLHS comments
tests = testsBySection nonSetupSections
cmd = mkLspCommand plId evalCommandName "Evaluate=..." (Just [])
- let lenses =
- [ CodeLens testRange (Just cmd') Nothing
+ let rangeCommands =
+ [ (testRange, cmd')
| (section, ident, test) <- tests
, let (testRange, resultRange) = testRanges test
- args = EvalParams (setupSections ++ [section]) _textDocument ident
+ args = EvalParams (setupSections ++ [section]) textDocument ident
cmd' =
(cmd :: Command)
{ _arguments = Just [toJSON args]
@@ -165,9 +185,9 @@ codeLens recorder st plId CodeLensParams{_textDocument} =
(length tests)
(length nonSetupSections)
(length setupSections)
- (length lenses)
+ (length rangeCommands)
- return $ InL lenses
+ pure rangeCommands
where
trivial (Range p p') = p == p'
@@ -233,24 +253,29 @@ initialiseSessionForEval needs_quickcheck st nfp = do
ms <- msrModSummary <$> use_ GetModSummary nfp
deps_hsc <- hscEnv <$> use_ GhcSessionDeps nfp
- linkables_needed <- transitiveDeps <$> useNoFile_ GetModuleGraph <*> pure nfp
+ linkables_needed <- transitiveDeps <$> useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph nfp <*> pure nfp
linkables <- uses_ GetLinkable (nfp : maybe [] transitiveModuleDeps linkables_needed)
-- We unset the global rdr env in mi_globals when we generate interfaces
-- See Note [Clearing mi_globals after generating an iface]
-- However, the eval plugin (setContext specifically) requires the rdr_env
-- for the current module - so get it from the Typechecked Module and add
-- it back to the iface for the current module.
- rdr_env <- tcg_rdr_env . tmrTypechecked <$> use_ TypeCheck nfp
+ tm <- tmrTypechecked <$> use_ TypeCheck nfp
+ let rdr_env = tcg_rdr_env tm
let linkable_hsc = loadModulesHome (map (addRdrEnv . linkableHomeMod) linkables) deps_hsc
addRdrEnv hmi
| iface <- hm_iface hmi
, ms_mod ms == mi_module iface
+#if MIN_VERSION_ghc(9,11,0)
+ = hmi { hm_iface = set_mi_top_env (Just $ IfaceTopEnv (forceGlobalRdrEnv (globalRdrEnvLocal rdr_env)) (mkIfaceImports $ tcg_import_decls tm)) iface}
+#else
= hmi { hm_iface = iface { mi_globals = Just $!
#if MIN_VERSION_ghc(9,8,0)
forceGlobalRdrEnv
#endif
rdr_env
}}
+#endif
| otherwise = hmi
return (ms, linkable_hsc)
@@ -271,6 +296,15 @@ initialiseSessionForEval needs_quickcheck st nfp = do
getSession
return env2
+#if MIN_VERSION_ghc(9,11,0)
+mkIfaceImports :: [ImportUserSpec] -> [IfaceImport]
+mkIfaceImports = map go
+ where
+ go (ImpUserSpec decl ImpUserAll) = IfaceImport decl ImpIfaceAll
+ go (ImpUserSpec decl (ImpUserExplicit env)) = IfaceImport decl (ImpIfaceExplicit (forceGlobalRdrEnv env))
+ go (ImpUserSpec decl (ImpUserEverythingBut ns)) = IfaceImport decl (ImpIfaceEverythingBut ns)
+#endif
+
addFinalReturn :: Text -> [TextEdit] -> [TextEdit]
addFinalReturn mdlText edits
| not (null edits) && not (T.null mdlText) && T.last mdlText /= '\n' =
@@ -281,7 +315,7 @@ finalReturn :: Text -> TextEdit
finalReturn txt =
let ls = T.lines txt
l = fromIntegral $ length ls -1
- c = fromIntegral $ T.length . last $ ls
+ c = fromIntegral $ T.length $ maybe T.empty snd (unsnoc ls)
p = Position l c
in TextEdit (Range p p) "\n"
@@ -497,7 +531,7 @@ singleLine s = [T.pack s]
errorLines :: String -> [Text]
errorLines =
dropWhileEnd T.null
- . takeWhile (not . ("CallStack" `T.isPrefixOf`))
+ . takeWhile (not . (\x -> "CallStack" `T.isPrefixOf` x || "HasCallStack" `T.isPrefixOf` x))
. T.lines
. T.pack
@@ -637,7 +671,6 @@ data GhciLikeCmdException = GhciLikeCmdNotImplemented
{ ghciCmdName :: Text
, ghciCmdArg :: Text
}
- deriving (Typeable)
instance Show GhciLikeCmdException where
showsPrec _ GhciLikeCmdNotImplemented{..} =
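The split between `codeLens` and `codeAction` above comes down to how the shared `(Range, Command)` list is consumed: every entry becomes a lens, while only entries whose test range encloses the requested range become code actions. A toy one-dimensional sketch (the real handlers use the LSP `Range` type and `isSubrangeOf`):

```haskell
-- Toy single-axis spans instead of LSP line/character positions.
data Span = Span { spanStart :: Int, spanEnd :: Int }

isSubspanOf :: Span -> Span -> Bool
isSubspanOf inner outer =
  spanStart inner >= spanStart outer && spanEnd inner <= spanEnd outer

-- Code lens: one entry per test. Code action: only the tests around the request.
lensesFor :: [(Span, cmd)] -> [cmd]
lensesFor = map snd

actionsFor :: Span -> [(Span, cmd)] -> [cmd]
actionsFor requested rangeCommands =
  [ cmd | (testSpan, cmd) <- rangeCommands, requested `isSubspanOf` testSpan ]
```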
diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs
index 6990c4a6e5..d01ddbc55c 100644
--- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs
+++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs
@@ -65,13 +65,15 @@ unqueueForEvaluation ide nfp = do
apiAnnComments' :: ParsedModule -> [SrcLoc.RealLocated EpaCommentTok]
apiAnnComments' pm = do
L span (EpaComment c _) <- getEpaComments $ pm_parsed_source pm
- pure (L (anchor span) c)
- where
-#if MIN_VERSION_ghc(9,5,0)
- getEpaComments :: Development.IDE.GHC.Compat.Located (HsModule GhcPs) -> [LEpaComment]
+ pure (L (
+#if MIN_VERSION_ghc(9,11,0)
+ epaLocationRealSrcSpan
#else
- getEpaComments :: Development.IDE.GHC.Compat.Located HsModule -> [LEpaComment]
+ anchor
#endif
+ span) c)
+ where
+ getEpaComments :: Development.IDE.GHC.Compat.Located (HsModule GhcPs) -> [LEpaComment]
getEpaComments = toListOf biplate
pattern RealSrcSpanAlready :: SrcLoc.RealSrcSpan -> SrcLoc.RealSrcSpan
diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs
index 43ea57c956..1753ab4e6c 100644
--- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs
+++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs
@@ -157,14 +157,14 @@ data Test
deriving (Eq, Show, Generic, FromJSON, ToJSON, NFData)
data IsEvaluating = IsEvaluating
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable IsEvaluating
instance NFData IsEvaluating
type instance RuleResult IsEvaluating = Bool
data GetEvalComments = GetEvalComments
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetEvalComments
instance NFData GetEvalComments
diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs
index 77b133ef92..9498076511 100644
--- a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs
+++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs
@@ -77,9 +77,7 @@ showErr e =
$ bagToList
$ fmap (vcat . unDecorated
. diagnosticMessage
-#if MIN_VERSION_ghc(9,5,0)
(defaultDiagnosticOpts @GhcMessage)
-#endif
. errMsgDiagnostic)
$ getMessages msgs
_ ->
diff --git a/plugins/hls-eval-plugin/test/Main.hs b/plugins/hls-eval-plugin/test/Main.hs
index f2adf6cb85..03416c6902 100644
--- a/plugins/hls-eval-plugin/test/Main.hs
+++ b/plugins/hls-eval-plugin/test/Main.hs
@@ -6,13 +6,15 @@ module Main
) where
import Control.Lens (_Just, folded, preview, view, (^.),
- (^..))
+ (^..), (^?))
+import Control.Monad (join)
import Data.Aeson (Value (Object), fromJSON, object,
(.=))
import Data.Aeson.Types (Pair, Result (Success))
import Data.List (isInfixOf)
import Data.List.Extra (nubOrdOn)
import qualified Data.Map as Map
+import qualified Data.Maybe as Maybe
import qualified Data.Text as T
import Ide.Plugin.Config (Config)
import qualified Ide.Plugin.Config as Plugin
@@ -59,6 +61,9 @@ tests =
lenses <- getCodeLenses doc
liftIO $ map (view range) lenses @?= [Range (Position 4 0) (Position 5 0)]
+ , goldenWithEvalForCodeAction "Evaluation of expressions via code action" "T1" "hs"
+ , goldenWithEvalForCodeAction "Reevaluation of expressions via code action" "T2" "hs"
+
, goldenWithEval "Evaluation of expressions" "T1" "hs"
, goldenWithEval "Reevaluation of expressions" "T2" "hs"
, goldenWithEval "Evaluation of expressions w/ imports" "T3" "hs"
@@ -75,12 +80,11 @@ tests =
else
"-- No instance for (Num String) arising from a use of `+'\n-- In the expression: \"a\" + \"bc\"\n-- In an equation for `res': res = \"a\" + \"bc\""
- evalInFile "T8.hs" "-- >>> \"" "-- lexical error in string/character literal at end of input"
+ evalInFile "T8.hs" "-- >>> \"" (if ghcVersion >= GHC912 then "-- lexical error at end of input" else "-- lexical error in string/character literal at end of input")
evalInFile "T8.hs" "-- >>> 3 `div` 0" "-- divide by zero" -- The default for marking exceptions is False
, goldenWithEval "Applies file LANGUAGE extensions" "T9" "hs"
, goldenWithEval "Evaluate a type with :kind!" "T10" "hs"
- , goldenWithEval' "Reports an error for an incorrect type with :kind!" "T11" "hs"
- (if ghcVersion >= GHC94 then "ghc94.expected" else "expected")
+ , goldenWithEval "Reports an error for an incorrect type with :kind!" "T11" "hs"
, goldenWithEval "Shows a kind with :kind" "T12" "hs"
, goldenWithEval "Reports an error for an incorrect type with :kind" "T13" "hs"
, goldenWithEval' "Returns a fully-instantiated type for :type" "T14" "hs" (if ghcVersion >= GHC98 then "ghc98.expected" else "expected") -- See https://gitlab.haskell.org/ghc/ghc/-/issues/24069
@@ -126,13 +130,13 @@ tests =
, goldenWithEval "The default language extensions for the eval plugin are the same as those for ghci" "TSameDefaultLanguageExtensionsAsGhci" "hs"
, goldenWithEval "IO expressions are supported, stdout/stderr output is ignored" "TIO" "hs"
, goldenWithEvalAndFs "Property checking" cabalProjectFS "TProperty" "hs"
- , knownBrokenInEnv [HostOS Windows] "The output has path separators in it, which on Windows look different. Just skip it there" $
+ , knownBrokenInWindowsBeforeGHC912 "The output has path separators in it, which on Windows look different. Just skip it there" $
goldenWithEvalAndFs' "Property checking with exception" cabalProjectFS "TPropertyError" "hs" $
case ghcVersion of
+ GHC912 -> "ghc912.expected"
GHC910 -> "ghc910.expected"
GHC98 -> "ghc98.expected"
GHC96 -> "ghc96.expected"
- GHC94 -> "ghc94.expected"
, goldenWithEval "Prelude has no special treatment, it is imported as stated in the module" "TPrelude" "hs"
, goldenWithEval "Don't panic on {-# UNPACK #-} pragma" "TUNPACK" "hs"
, goldenWithEval "Can handle eval inside nested comment properly" "TNested" "hs"
@@ -209,11 +213,21 @@ tests =
let ifaceKeys = filter ("GetModIface" `T.isPrefixOf`) keys
liftIO $ ifaceKeys @?= []
]
+ where
+ knownBrokenInWindowsBeforeGHC912 msg =
+ foldl (.) id
+ [ knownBrokenInSpecificEnv [GhcVer ghcVer, HostOS Windows] msg
+ | ghcVer <- [GHC96 .. GHC910]
+ ]
goldenWithEval :: TestName -> FilePath -> FilePath -> TestTree
goldenWithEval title path ext =
goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs $ FS.directProject (path <.> ext)) path "expected" ext executeLensesBackwards
+goldenWithEvalForCodeAction :: TestName -> FilePath -> FilePath -> TestTree
+goldenWithEvalForCodeAction title path ext =
+ goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs $ FS.directProject (path <.> ext)) path "expected" ext executeCodeActionsBackwards
+
goldenWithEvalAndFs :: TestName -> [FS.FileTree] -> FilePath -> FilePath -> TestTree
goldenWithEvalAndFs title tree path ext =
goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs tree) path "expected" ext executeLensesBackwards
@@ -232,14 +246,24 @@ goldenWithEvalAndFs' title tree path ext expected =
-- | Execute lenses backwards, to avoid affecting their position in the source file
executeLensesBackwards :: TextDocumentIdentifier -> Session ()
executeLensesBackwards doc = do
- codeLenses <- reverse <$> getCodeLenses doc
+ codeLenses <- getCodeLenses doc
-- liftIO $ print codeLenses
+ executeCmdsBackwards [c | CodeLens{_command = Just c} <- codeLenses]
+
+executeCodeActionsBackwards :: TextDocumentIdentifier -> Session ()
+executeCodeActionsBackwards doc = do
+ codeLenses <- getCodeLenses doc
+ let ranges = [_range | CodeLens{_range} <- codeLenses]
+ -- getAllCodeActions cannot get our code actions because they have no diagnostics
+ codeActions <- join <$> traverse (getCodeActions doc) ranges
+ let cmds = Maybe.mapMaybe (^? _L) codeActions
+ executeCmdsBackwards cmds
- -- Execute sequentially, nubbing elements to avoid
- -- evaluating the same section with multiple tests
- -- more than twice
- mapM_ executeCmd $
- nubOrdOn actSectionId [c | CodeLens{_command = Just c} <- codeLenses]
+-- Execute commands backwards, nubbing elements to avoid
+-- evaluating the same section with multiple tests
+-- more than twice
+executeCmdsBackwards :: [Command] -> Session ()
+executeCmdsBackwards = mapM_ executeCmd . nubOrdOn actSectionId . reverse
actSectionId :: Command -> Int
actSectionId Command{_arguments = Just [fromJSON -> Success EvalParams{..}]} = evalId
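The ordering in `executeCmdsBackwards` matters: evaluating the bottom-most command first keeps the ranges of the commands still to run stable while results are inserted, and `nubOrdOn` then keeps a single command per eval section. A tiny worked example (positions and section ids are made up):

```haskell
import Data.List.Extra (nubOrdOn)

-- (line the command edits, eval section id it belongs to)
demoCommands :: [(Int, Int)]
demoCommands = [(10, 1), (25, 1), (40, 2)]

executionOrder :: [Int]
executionOrder = map fst (nubOrdOn snd (reverse demoCommands))
-- == [40, 25]: bottom-most first, and section 1 is executed only once
```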
diff --git a/plugins/hls-eval-plugin/test/testdata/T11.ghc94.expected.hs b/plugins/hls-eval-plugin/test/testdata/T11.ghc94.expected.hs
deleted file mode 100644
index 63d0ed8a07..0000000000
--- a/plugins/hls-eval-plugin/test/testdata/T11.ghc94.expected.hs
+++ /dev/null
@@ -1,4 +0,0 @@
-module T11 where
-
--- >>> :kind! A
--- Not in scope: type constructor or class `A'
diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc912.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc912.expected.hs
new file mode 100644
index 0000000000..46359c86ab
--- /dev/null
+++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc912.expected.hs
@@ -0,0 +1,6 @@
+-- Support for property checking
+module TProperty where
+
+-- prop> \(l::[Bool]) -> head l
+-- *** Failed! Exception: 'Prelude.head: empty list' (after 1 test):
+-- []
diff --git a/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs b/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs
index 7ed9a67e97..92bc37f743 100644
--- a/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs
+++ b/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
@@ -60,7 +61,11 @@ hover state _ (HoverParams (TextDocumentIdentifier uri) pos _) = do
in Just $ Hover (InL (mkPlainText contents')) Nothing
fixityText :: (Name, Fixity) -> T.Text
+#if MIN_VERSION_GLASGOW_HASKELL(9,12,0,0)
+ fixityText (name, Fixity precedence direction) =
+#else
fixityText (name, Fixity _ precedence direction) =
+#endif
printOutputable direction <> " " <> printOutputable precedence <> " `" <> printOutputable name <> "`"
newtype FixityMap = FixityMap (M.Map Name Fixity)
diff --git a/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs b/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs
index 611c02fc78..17634491fe 100644
--- a/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs
+++ b/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs
@@ -25,7 +25,6 @@ import Control.Monad.Trans.Maybe
import qualified Data.Aeson as A (ToJSON (toJSON))
import Data.Aeson.Types (FromJSON)
import Data.Char (isSpace)
-import Data.Functor ((<&>))
import qualified Data.IntMap as IM (IntMap, elems,
fromList, (!?))
import Data.IORef (readIORef)
@@ -86,7 +85,7 @@ instance Pretty Log where
descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState
descriptor recorder =
-- (almost) no one wants to see an explicit import list for Prelude
- descriptorForModules recorder (/= moduleName pRELUDE)
+ descriptorForModules recorder (/= pRELUDE_NAME)
descriptorForModules
:: Recorder (WithPriority Log)
@@ -218,16 +217,18 @@ inlayHintProvider _ state _ InlayHintParams {_textDocument = TextDocumentIdentif
-- |^-_paddingLeft
-- ^-_position
generateInlayHints :: Range -> ImportEdit -> PositionMapping -> Maybe InlayHint
- generateInlayHints (Range _ end) ie pm = mkLabel ie <&> \label ->
- InlayHint { _position = end
- , _label = InL label
- , _kind = Nothing -- neither a type nor a parameter
- , _textEdits = fmap singleton $ toTEdit pm ie
- , _tooltip = Just $ InL "Make this import explicit" -- simple enough, no need to resolve
- , _paddingLeft = Just True -- show an extra space before the inlay hint
- , _paddingRight = Nothing
- , _data_ = Nothing
- }
+ generateInlayHints (Range _ end) ie pm = do
+ label <- mkLabel ie
+ currentEnd <- toCurrentPosition pm end
+ return InlayHint { _position = currentEnd
+ , _label = InL label
+ , _kind = Nothing -- neither a type nor a parameter
+ , _textEdits = fmap singleton $ toTEdit pm ie
+ , _tooltip = Just $ InL "Make this import explicit" -- simple enough, no need to resolve
+ , _paddingLeft = Just True -- show an extra space before the inlay hint
+ , _paddingRight = Nothing
+ , _data_ = Nothing
+ }
mkLabel :: ImportEdit -> Maybe T.Text
mkLabel (ImportEdit{ieResType, ieText}) =
let title ExplicitImport = Just $ abbreviateImportTitleWithoutModule ieText
@@ -403,7 +404,7 @@ minimalImportsRule recorder modFilter = defineNoDiagnostics (cmapWithPrio LogSha
-- for every minimal imports
| (location, origImport, minImport@(ImportDecl{ideclName = L _ mn})) <- locationImportWithMinimal
-- (almost) no one wants to see an refine import list for Prelude
- , mn /= moduleName pRELUDE
+ , mn /= pRELUDE_NAME
-- we check for the inner imports
, Just innerImports <- [Map.lookup mn import2Map]
-- and only get those symbols used
@@ -471,11 +472,7 @@ extractMinimalImports hsc TcModuleResult {..} = runMaybeT $ do
not $ any (\e -> ("module " ++ moduleNameString name) == e) exports
isExplicitImport :: ImportDecl GhcRn -> Bool
-#if MIN_VERSION_ghc(9,5,0)
isExplicitImport ImportDecl {ideclImportList = Just (Exactly, _)} = True
-#else
-isExplicitImport ImportDecl {ideclHiding = Just (False, _)} = True
-#endif
isExplicitImport _ = False
-- This number is somewhat arbitrarily chosen. Ideally the protocol would tell us these things,
@@ -527,11 +524,7 @@ abbreviateImportTitleWithoutModule = abbreviateImportTitle . T.dropWhile (/= '('
filterByImport :: ImportDecl GhcRn -> Map.Map ModuleName [AvailInfo] -> Maybe (Map.Map ModuleName [AvailInfo])
-#if MIN_VERSION_ghc(9,5,0)
filterByImport (ImportDecl{ideclImportList = Just (_, L _ names)})
-#else
-filterByImport (ImportDecl{ideclHiding = Just (_, L _ names)})
-#endif
avails =
-- if there is a function defined in the current module and is used
-- i.e. if a function is not reexported but defined in current
@@ -548,22 +541,12 @@ filterByImport (ImportDecl{ideclHiding = Just (_, L _ names)})
filterByImport _ _ = Nothing
constructImport :: ImportDecl GhcRn -> ImportDecl GhcRn -> (ModuleName, [AvailInfo]) -> ImportDecl GhcRn
-#if MIN_VERSION_ghc(9,5,0)
constructImport ImportDecl{ideclQualified = qualified, ideclImportList = origHiding} imd@ImportDecl{ideclImportList = Just (hiding, L _ names)}
-#else
-constructImport ImportDecl{ideclQualified = qualified, ideclHiding = origHiding} imd@ImportDecl{ideclHiding = Just (hiding, L _ names)}
-#endif
(newModuleName, avails) = imd
{ ideclName = noLocA newModuleName
-#if MIN_VERSION_ghc(9,5,0)
, ideclImportList = if isNothing origHiding && qualified /= NotQualified
then Nothing
else Just (hiding, noLocA newNames)
-#else
- , ideclHiding = if isNothing origHiding && qualified /= NotQualified
- then Nothing
- else Just (hiding, noLocA newNames)
-#endif
}
where newNames = filter (\n -> any (n `containsAvail`) avails) names
-- Check if a name is exposed by AvailInfo (the available information of a module)
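The recurring `toCurrentPosition pm …` calls above exist because inlay hints are computed from a possibly stale typecheck result, so each position has to be translated into the editor's current coordinates, and the hint is dropped when the mapping fails. A toy, line-only model of the idea (the real `PositionMapping` comes from Development.IDE.Core.PositionMapping):

```haskell
-- Toy mapping over line numbers only; the real one maps full Positions.
newtype LineMapping = LineMapping (Int -> Maybe Int)

toCurrentLine :: LineMapping -> Int -> Maybe Int
toCurrentLine (LineMapping f) = f

-- Example: two lines were inserted at the top of the file since the last
-- typecheck, so every stale line is shifted down by two.
shiftedByTwo :: LineMapping
shiftedByTwo = LineMapping (\line -> Just (line + 2))

-- Nothing means "this position no longer exists"; the provider then emits no
-- hint rather than a misplaced one, mirroring the Maybe-returning code above.
placeHint :: LineMapping -> Int -> Maybe Int
placeHint = toCurrentLine
```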
diff --git a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs
index ff436c61fc..a761f648af 100644
--- a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs
+++ b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs
@@ -43,7 +43,9 @@ import Development.IDE (IdeState,
srcSpanToLocation,
srcSpanToRange, viaShow)
import Development.IDE.Core.PluginUtils
-import Development.IDE.Core.PositionMapping (toCurrentRange)
+import Development.IDE.Core.PositionMapping (PositionMapping,
+ toCurrentPosition,
+ toCurrentRange)
import Development.IDE.Core.RuleTypes (TcModuleResult (..),
TypeCheck (..))
import qualified Development.IDE.Core.Shake as Shake
@@ -56,6 +58,7 @@ import Development.IDE.GHC.Compat (FieldLabel (flSelector),
HsExpr (HsApp, HsVar, XExpr),
HsFieldBind (hfbLHS),
HsRecFields (..),
+ HsWrap (HsWrap),
Identifier, LPat,
Located,
NamedThing (getName),
@@ -79,7 +82,8 @@ import Development.IDE.GHC.Compat.Core (Extension (NamedFieldPuns
pattern RealSrcSpan,
plusUFM_C, unitUFM)
import Development.IDE.GHC.Util (getExtensions,
- printOutputable)
+ printOutputable,
+ stripOccNamePrefix)
import Development.IDE.Graph (RuleResult)
import Development.IDE.Graph.Classes (Hashable, NFData)
import Development.IDE.Spans.Pragmas (NextPragmaInfo (..),
@@ -148,10 +152,17 @@ descriptor recorder plId =
codeActionProvider :: PluginMethodHandler IdeState 'Method_TextDocumentCodeAction
codeActionProvider ideState _ (CodeActionParams _ _ docId range _) = do
nfp <- getNormalizedFilePathE (docId ^. L.uri)
- CRR {crCodeActions, enabledExtensions} <- runActionE "ExplicitFields.CollectRecords" ideState $ useE CollectRecords nfp
+ CRR {crCodeActions, crCodeActionResolve, enabledExtensions} <- runActionE "ExplicitFields.CollectRecords" ideState $ useE CollectRecords nfp
-- All we need to build a code action is the list of extensions, and a int to
-- allow us to resolve it later.
- let actions = map (mkCodeAction enabledExtensions) (RangeMap.filterByRange range crCodeActions)
+ let recordUids = [ uid
+ | uid <- RangeMap.filterByRange range crCodeActions
+ , Just record <- [IntMap.lookup uid crCodeActionResolve]
+ -- Only fully saturated constructor applications can be
+ -- converted to the record syntax through the code action
+ , isConvertible record
+ ]
+ let actions = map (mkCodeAction enabledExtensions) recordUids
pure $ InL actions
where
mkCodeAction :: [Extension] -> Int -> Command |? CodeAction
@@ -166,6 +177,11 @@ codeActionProvider ideState _ (CodeActionParams _ _ docId range _) = do
, _data_ = Just $ toJSON uid
}
+ isConvertible :: RecordInfo -> Bool
+ isConvertible = \case
+ RecordInfoApp _ (RecordAppExpr Unsaturated _ _) -> False
+ _ -> True
+
codeActionResolveProvider :: ResolveFunction IdeState Int 'Method_CodeActionResolve
codeActionResolveProvider ideState pId ca uri uid = do
nfp <- getNormalizedFilePathE uri
@@ -203,19 +219,19 @@ inlayHintDotdotProvider _ state pId InlayHintParams {_textDocument = TextDocumen
| record <- records
, pos <- maybeToList $ fmap _start $ recordInfoToDotDotRange record ]
defnLocsList <- lift $ sequence locations
- pure $ InL $ mapMaybe (mkInlayHint crr pragma) defnLocsList
+ pure $ InL $ mapMaybe (mkInlayHint crr pragma pm) defnLocsList
where
- mkInlayHint :: CollectRecordsResult -> NextPragmaInfo -> (Maybe [(Location, Identifier)], RecordInfo) -> Maybe InlayHint
- mkInlayHint CRR {enabledExtensions, nameMap} pragma (defnLocs, record) =
+ mkInlayHint :: CollectRecordsResult -> NextPragmaInfo -> PositionMapping -> (Maybe [(Location, Identifier)], RecordInfo) -> Maybe InlayHint
+ mkInlayHint CRR {enabledExtensions, nameMap} pragma pm (defnLocs, record) =
let range = recordInfoToDotDotRange record
textEdits = maybeToList (renderRecordInfoAsTextEdit nameMap record)
<> maybeToList (pragmaEdit enabledExtensions pragma)
names = renderRecordInfoAsDotdotLabelName record
in do
- end <- fmap _end range
+ currentEnd <- range >>= toCurrentPosition pm . _end
names' <- names
defnLocs' <- defnLocs
- let excludeDotDot (Location _ (Range _ end')) = end' /= end
+ let excludeDotDot (Location _ (Range _ end)) = end /= currentEnd
-- find location from dotdot definitions that name equal to label name
findLocation name locations =
let -- filter locations not within dotdot range
@@ -223,10 +239,10 @@ inlayHintDotdotProvider _ state pId InlayHintParams {_textDocument = TextDocumen
-- checks if 'a' is equal to 'Name' if the 'Either' is 'Right a', otherwise return 'False'
nameEq = either (const False) ((==) name)
in fmap fst $ find (nameEq . snd) filteredLocations
- valueWithLoc = [ (T.pack $ printName name, findLocation name defnLocs') | name <- names' ]
+ valueWithLoc = [ (stripOccNamePrefix $ T.pack $ printName name, findLocation name defnLocs') | name <- names' ]
-- use `, ` to separate labels with definition location
label = intersperse (mkInlayHintLabelPart (", ", Nothing)) $ fmap mkInlayHintLabelPart valueWithLoc
- pure $ InlayHint { _position = end -- at the end of dotdot
+ pure $ InlayHint { _position = currentEnd -- at the end of dotdot
, _label = InR label
, _kind = Nothing -- neither a type nor a parameter
, _textEdits = Just textEdits -- same as CodeAction
@@ -247,20 +263,22 @@ inlayHintPosRecProvider _ state _pId InlayHintParams {_textDocument = TextDocume
| Just range <- [toCurrentRange pm visibleRange]
, uid <- RangeMap.elementsInRange range crCodeActions
, Just record <- [IntMap.lookup uid crCodeActionResolve] ]
- pure $ InL (concatMap (mkInlayHints nameMap) records)
+ pure $ InL (concatMap (mkInlayHints nameMap pm) records)
where
- mkInlayHints :: UniqFM Name [Name] -> RecordInfo -> [InlayHint]
- mkInlayHints nameMap record@(RecordInfoApp _ (RecordAppExpr _ fla)) =
+ mkInlayHints :: UniqFM Name [Name] -> PositionMapping -> RecordInfo -> [InlayHint]
+ mkInlayHints nameMap pm record@(RecordInfoApp _ (RecordAppExpr _ _ fla)) =
let textEdits = renderRecordInfoAsTextEdit nameMap record
- in mapMaybe (mkInlayHint textEdits) fla
- mkInlayHints _ _ = []
- mkInlayHint :: Maybe TextEdit -> (Located FieldLabel, HsExpr GhcTc) -> Maybe InlayHint
- mkInlayHint te (label, _) =
+ in mapMaybe (mkInlayHint textEdits pm) fla
+ mkInlayHints _ _ _ = []
+
+ mkInlayHint :: Maybe TextEdit -> PositionMapping -> (Located FieldLabel, HsExpr GhcTc) -> Maybe InlayHint
+ mkInlayHint te pm (label, _) =
let (name, loc) = ((flSelector . unLoc) &&& (srcSpanToLocation . getLoc)) label
fieldDefLoc = srcSpanToLocation (nameSrcSpan name)
in do
(Location _ recRange) <- loc
- pure InlayHint { _position = _start recRange
+ currentStart <- toCurrentPosition pm (_start recRange)
+ pure InlayHint { _position = currentStart
, _label = InR $ pure (mkInlayHintLabelPart name fieldDefLoc)
, _kind = Nothing -- neither a type nor a parameter
, _textEdits = Just (maybeToList te) -- same as CodeAction
@@ -269,7 +287,8 @@ inlayHintPosRecProvider _ state _pId InlayHintParams {_textDocument = TextDocume
, _paddingRight = Nothing
, _data_ = Nothing
}
- mkInlayHintLabelPart name loc = InlayHintLabelPart (printOutputable (pprNameUnqualified name) <> "=") Nothing loc Nothing
+
+ mkInlayHintLabelPart name loc = InlayHintLabelPart (printFieldName (pprNameUnqualified name) <> "=") Nothing loc Nothing
mkTitle :: [Extension] -> Text
mkTitle exts = "Expand record wildcard"
@@ -373,7 +392,16 @@ instance Show CollectNamesResult where
type instance RuleResult CollectNames = CollectNamesResult
-data RecordAppExpr = RecordAppExpr (LHsExpr GhcTc) [(Located FieldLabel, HsExpr GhcTc)]
+data Saturated = Saturated | Unsaturated
+ deriving (Generic)
+
+instance NFData Saturated
+
+data RecordAppExpr
+ = RecordAppExpr
+ Saturated -- ^ Is the DataCon application fully saturated or partially applied?
+ (LHsExpr GhcTc)
+ [(Located FieldLabel, HsExpr GhcTc)]
deriving (Generic)
data RecordInfo
@@ -383,10 +411,10 @@ data RecordInfo
deriving (Generic)
instance Pretty RecordInfo where
- pretty (RecordInfoPat ss p) = pretty (printOutputable ss) <> ":" <+> pretty (printOutputable p)
- pretty (RecordInfoCon ss e) = pretty (printOutputable ss) <> ":" <+> pretty (printOutputable e)
- pretty (RecordInfoApp ss (RecordAppExpr _ fla))
- = pretty (printOutputable ss) <> ":" <+> hsep (map (pretty . printOutputable) fla)
+ pretty (RecordInfoPat ss p) = pretty (printFieldName ss) <> ":" <+> pretty (printOutputable p)
+ pretty (RecordInfoCon ss e) = pretty (printFieldName ss) <> ":" <+> pretty (printOutputable e)
+ pretty (RecordInfoApp ss (RecordAppExpr _ _ fla))
+ = pretty (printFieldName ss) <> ":" <+> hsep (map (pretty . printOutputable) fla)
recordInfoToRange :: RecordInfo -> Range
recordInfoToRange (RecordInfoPat ss _) = realSrcSpanToRange ss
@@ -493,7 +521,7 @@ processRecordFlds flds = flds { rec_dotdot = Nothing , rec_flds = puns' }
showRecordPat :: Outputable (Pat GhcTc) => UniqFM Name [Name] -> Pat GhcTc -> Maybe Text
-showRecordPat names = fmap printOutputable . mapConPatDetail (\case
+showRecordPat names = fmap printFieldName . mapConPatDetail (\case
RecCon flds -> Just $ RecCon (preprocessRecordPat names flds)
_ -> Nothing)
@@ -505,7 +533,11 @@ showRecordPatFlds (ConPat _ _ args) = do
where
processRecCon (RecCon flds) = Just $ processRecordFlds flds
processRecCon _ = Nothing
+#if __GLASGOW_HASKELL__ < 911
getOccName (FieldOcc x _) = Just $ getName x
+#else
+ getOccName (FieldOcc _ x) = Just $ getName (unLoc x)
+#endif
getOccName _ = Nothing
getFieldName = getOccName . unLoc . hfbLHS . unLoc
showRecordPatFlds _ = Nothing
@@ -526,11 +558,11 @@ showRecordConFlds (RecordCon _ _ flds) =
showRecordConFlds _ = Nothing
showRecordApp :: RecordAppExpr -> Maybe Text
-showRecordApp (RecordAppExpr recConstr fla)
+showRecordApp (RecordAppExpr _ recConstr fla)
= Just $ printOutputable recConstr <> " { "
<> T.intercalate ", " (showFieldWithArg <$> fla)
<> " }"
- where showFieldWithArg (field, arg) = printOutputable field <> " = " <> printOutputable arg
+ where showFieldWithArg (field, arg) = printFieldName field <> " = " <> printOutputable arg
collectRecords :: GenericQ [RecordInfo]
collectRecords = everythingBut (<>) (([], False) `mkQ` getRecPatterns `extQ` getRecCons)
@@ -577,13 +609,29 @@ getRecCons expr@(unLoc -> app@(HsApp _ _ _)) =
[ RecordInfoApp realSpan' appExpr | RealSrcSpan realSpan' _ <- [ getLoc expr ] ]
getFields :: HsExpr GhcTc -> [LHsExpr GhcTc] -> Maybe RecordAppExpr
- getFields (HsApp _ constr@(unLoc -> (XExpr (ConLikeTc (conLikeFieldLabels -> fls) _ _))) arg) args
- | not (null fls)
- = Just (RecordAppExpr constr labelWithArgs)
- where labelWithArgs = zipWith mkLabelWithArg fls (arg : args)
+ getFields (HsApp _ constr@(unLoc -> expr) arg) args
+ | not (null fls) = Just $
+ -- Code action is only valid if the constructor application is fully
+ -- saturated, but we still want to display the inlay hints for partially
+ -- applied constructors
+ RecordAppExpr
+ (if length fls <= length args + 1 then Saturated else Unsaturated)
+ constr
+ labelWithArgs
+ where fls = getExprFields expr
+ labelWithArgs = zipWith mkLabelWithArg fls (arg : args)
mkLabelWithArg label arg = (L (getLoc arg) label, unLoc arg)
getFields (HsApp _ constr arg) args = getFields (unLoc constr) (arg : args)
getFields _ _ = Nothing
+
+ getExprFields :: HsExpr GhcTc -> [FieldLabel]
+ getExprFields (XExpr (ConLikeTc (conLikeFieldLabels -> fls) _ _)) = fls
+#if __GLASGOW_HASKELL__ >= 911
+ getExprFields (XExpr (WrapExpr _ expr)) = getExprFields expr
+#else
+ getExprFields (XExpr (WrapExpr (HsWrap _ expr))) = getExprFields expr
+#endif
+ getExprFields _ = []
getRecCons _ = ([], False)
getRecPatterns :: LPat GhcTc -> ([RecordInfo], Bool)
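
getFields decides saturation by comparing the constructor's field count with the number of arguments collected while unwinding nested HsApp nodes, plus the one argument attached to the innermost application (the `+ 1`). A standalone sketch of that arithmetic, using plain values instead of GHC AST nodes:

```haskell
data Saturation = Saturated | Unsaturated deriving (Eq, Show)

-- `fieldCount` is the arity of the record constructor, `args` the arguments
-- seen so far while walking nested applications, and `_arg` the one attached
-- to the innermost application (hence the + 1 in the plugin).
saturation :: Int -> [a] -> a -> Saturation
saturation fieldCount args _arg
  | fieldCount <= length args + 1 = Saturated
  | otherwise                     = Unsaturated

main :: IO ()
main = do
  print (saturation 2 ["'x'"] "1")  -- Saturated:   MkT 1 'x'
  print (saturation 2 [] "1")       -- Unsaturated: MkT 1
```
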
@@ -594,3 +642,7 @@ getRecPatterns conPat@(conPatDetails . unLoc -> Just (RecCon flds))
mkRecInfo pat =
[ RecordInfoPat realSpan' (unLoc pat) | RealSrcSpan realSpan' _ <- [ getLoc pat ]]
getRecPatterns _ = ([], False)
+
+printFieldName :: Outputable a => a -> Text
+printFieldName = stripOccNamePrefix . printOutputable
+
diff --git a/plugins/hls-explicit-record-fields-plugin/test/Main.hs b/plugins/hls-explicit-record-fields-plugin/test/Main.hs
index a2d980ab50..82ef449a25 100644
--- a/plugins/hls-explicit-record-fields-plugin/test/Main.hs
+++ b/plugins/hls-explicit-record-fields-plugin/test/Main.hs
@@ -36,6 +36,8 @@ test = testGroup "explicit-fields"
, mkTestNoAction "Puns" "Puns" 12 10 12 31
, mkTestNoAction "Infix" "Infix" 11 11 11 31
, mkTestNoAction "Prefix" "Prefix" 10 11 10 28
+ , mkTestNoAction "PartiallyAppliedCon" "PartiallyAppliedCon" 7 8 7 12
+ , mkTest "PolymorphicRecordConstruction" "PolymorphicRecordConstruction" 15 5 15 15
]
, testGroup "inlay hints"
[ mkInlayHintsTest "Construction" Nothing 16 $ \ih -> do
@@ -55,6 +57,24 @@ test = testGroup "explicit-fields"
, _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)"
, _paddingLeft = Just True
}]
+ , mkInlayHintsTest "ConstructionDuplicateRecordFields" Nothing 16 $ \ih -> do
+ let mkLabelPart' = mkLabelPartOffsetLength "ConstructionDuplicateRecordFields"
+ foo <- mkLabelPart' 13 6 "foo"
+ bar <- mkLabelPart' 14 6 "bar"
+ baz <- mkLabelPart' 15 6 "baz"
+ (@?=) ih
+ [defInlayHint { _position = Position 16 14
+ , _label = InR [ foo, commaPart
+ , bar, commaPart
+ , baz
+ ]
+ , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar, baz}" 16 5 15
+ , mkPragmaTextEdit 3 -- Not 2, because of the DuplicateRecordFields pragma
+ ]
+ , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)"
+ , _paddingLeft = Just True
+ }]
+
, mkInlayHintsTest "PositionalConstruction" Nothing 15 $ \ih -> do
let mkLabelPart' = mkLabelPartOffsetLengthSub1 "PositionalConstruction"
foo <- mkLabelPart' 5 4 "foo="
@@ -80,6 +100,31 @@ test = testGroup "explicit-fields"
, _paddingLeft = Nothing
}
]
+ , mkInlayHintsTest "PositionalConstructionDuplicateRecordFields" Nothing 15 $ \ih -> do
+ let mkLabelPart' = mkLabelPartOffsetLengthSub1 "PositionalConstructionDuplicateRecordFields"
+ foo <- mkLabelPart' 5 4 "foo="
+ bar <- mkLabelPart' 6 4 "bar="
+ baz <- mkLabelPart' 7 4 "baz="
+ (@?=) ih
+ [ defInlayHint { _position = Position 15 11
+ , _label = InR [ foo ]
+ , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ]
+ , _tooltip = Just $ InL "Expand positional record"
+ , _paddingLeft = Nothing
+ }
+ , defInlayHint { _position = Position 15 13
+ , _label = InR [ bar ]
+ , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ]
+ , _tooltip = Just $ InL "Expand positional record"
+ , _paddingLeft = Nothing
+ }
+ , defInlayHint { _position = Position 15 15
+ , _label = InR [ baz ]
+ , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ]
+ , _tooltip = Just $ InL "Expand positional record"
+ , _paddingLeft = Nothing
+ }
+ ]
, mkInlayHintsTest "HsExpanded1" Nothing 17 $ \ih -> do
let mkLabelPart' = mkLabelPartOffsetLength "HsExpanded1"
foo <- mkLabelPart' 11 4 "foo"
@@ -100,6 +145,16 @@ test = testGroup "explicit-fields"
, _tooltip = Just $ InL "Expand positional record"
, _paddingLeft = Nothing
}]
+ , mkInlayHintsTest "HsExpanded1DuplicateRecordFields" (Just " (positional)") 13 $ \ih -> do
+ let mkLabelPart' = mkLabelPartOffsetLengthSub1 "HsExpanded1DuplicateRecordFields"
+ foo <- mkLabelPart' 11 4 "foo="
+ (@?=) ih
+ [defInlayHint { _position = Position 13 21
+ , _label = InR [ foo ]
+ , _textEdits = Just [ mkLineTextEdit "MyRec { foo = 5 }" 13 15 22 ]
+ , _tooltip = Just $ InL "Expand positional record"
+ , _paddingLeft = Nothing
+ }]
, mkInlayHintsTest "HsExpanded2" Nothing 23 $ \ih -> do
let mkLabelPart' = mkLabelPartOffsetLength "HsExpanded2"
bar <- mkLabelPart' 14 4 "bar"
@@ -212,6 +267,31 @@ test = testGroup "explicit-fields"
, _tooltip = Just $ InL "Expand record wildcard"
, _paddingLeft = Just True
}]
+ , mkInlayHintsTest "PolymorphicRecordConstruction" Nothing 15 $ \ih -> do
+ let mkLabelPart' = mkLabelPartOffsetLengthSub1 "PolymorphicRecordConstruction"
+ foo <- mkLabelPart' 5 4 "foo="
+ bar <- mkLabelPart' 6 4 "bar="
+ baz <- mkLabelPart' 7 4 "baz="
+ (@?=) ih
+ [ defInlayHint { _position = Position 15 11
+ , _label = InR [ foo ]
+ , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ]
+ , _tooltip = Just $ InL "Expand positional record"
+ , _paddingLeft = Nothing
+ }
+ , defInlayHint { _position = Position 15 13
+ , _label = InR [ bar ]
+ , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ]
+ , _tooltip = Just $ InL "Expand positional record"
+ , _paddingLeft = Nothing
+ }
+ , defInlayHint { _position = Position 15 15
+ , _label = InR [ baz ]
+ , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ]
+ , _tooltip = Just $ InL "Expand positional record"
+ , _paddingLeft = Nothing
+ }
+ ]
]
]
@@ -285,10 +365,10 @@ mkLabelPart offset fp line start value = do
uri = canonicalizeUri $ toUri (testDataDir </> (fp ++ ".hs"))
location uri line char = Location uri (Range (Position line char) (Position line (char + offset value)))
-mkLabelPartOffsetLength ::FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart
+mkLabelPartOffsetLength :: FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart
mkLabelPartOffsetLength = mkLabelPart (fromIntegral . T.length)
-mkLabelPartOffsetLengthSub1 ::FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart
+mkLabelPartOffsetLengthSub1 :: FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart
mkLabelPartOffsetLengthSub1 = mkLabelPart (fromIntegral . subtract 1 . T.length)
commaPart :: InlayHintLabelPart
diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/ConstructionDuplicateRecordFields.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/ConstructionDuplicateRecordFields.hs
new file mode 100644
index 0000000000..420711f0da
--- /dev/null
+++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/ConstructionDuplicateRecordFields.hs
@@ -0,0 +1,17 @@
+{-# LANGUAGE Haskell2010 #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+module Construction where
+
+data MyRec = MyRec
+ { foo :: Int
+ , bar :: Int
+ , baz :: Char
+ }
+
+convertMe :: () -> MyRec
+convertMe _ =
+ let foo = 3
+ bar = 5
+ baz = 'a'
+ in MyRec {..}
diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1DuplicateRecordFields.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1DuplicateRecordFields.hs
new file mode 100644
index 0000000000..1e37d14668
--- /dev/null
+++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1DuplicateRecordFields.hs
@@ -0,0 +1,19 @@
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE RebindableSyntax #-}
+{-# LANGUAGE NamedFieldPuns #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+module HsExpanded1DuplicateRecordFields where
+import Prelude
+
+ifThenElse :: Int -> Int -> Int -> Int
+ifThenElse x y z = x + y + z
+
+data MyRec = MyRec
+ { foo :: Int }
+
+myRecExample = MyRec 5
+
+convertMe :: Int
+convertMe =
+ if (let MyRec {..} = myRecExample
+ in foo) then 1 else 2
diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.expected.hs
new file mode 100644
index 0000000000..f289508524
--- /dev/null
+++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.expected.hs
@@ -0,0 +1,16 @@
+{-# LANGUAGE Haskell2010 #-}
+
+module PolymorphicRecordConstruction where
+
+data MyRec m = MyRec
+ { foo :: Int
+ , bar :: Int
+ , baz :: Char
+ }
+
+convertMe :: () -> MyRec ()
+convertMe _ =
+ let a = 3
+ b = 5
+ c = 'a'
+ in MyRec { foo = a, bar = b, baz = c }
diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.hs
new file mode 100644
index 0000000000..f8b9791da5
--- /dev/null
+++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.hs
@@ -0,0 +1,16 @@
+{-# LANGUAGE Haskell2010 #-}
+
+module PolymorphicRecordConstruction where
+
+data MyRec m = MyRec
+ { foo :: Int
+ , bar :: Int
+ , baz :: Char
+ }
+
+convertMe :: () -> MyRec ()
+convertMe _ =
+ let a = 3
+ b = 5
+ c = 'a'
+ in MyRec a b c
diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstructionDuplicateRecordFields.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstructionDuplicateRecordFields.hs
new file mode 100644
index 0000000000..5227af9a83
--- /dev/null
+++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstructionDuplicateRecordFields.hs
@@ -0,0 +1,17 @@
+{-# LANGUAGE Haskell2010 #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+module PositionalConstruction where
+
+data MyRec = MyRec
+ { foo :: Int
+ , bar :: Int
+ , baz :: Char
+ }
+
+convertMe :: () -> MyRec
+convertMe _ =
+ let a = 3
+ b = 5
+ c = 'a'
+ in MyRec a b c
+
diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/PartiallyAppliedCon.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/PartiallyAppliedCon.hs
new file mode 100644
index 0000000000..2f6f52e30b
--- /dev/null
+++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/PartiallyAppliedCon.hs
@@ -0,0 +1,8 @@
+{-# LANGUAGE Haskell2010 #-}
+
+module PartiallyAppliedCon where
+
+data T = MkT { fa :: Int, fb :: Char }
+
+foo :: Int -> Char -> T
+foo x = MkT x
diff --git a/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs b/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs
index a85a449704..f5687a9db3 100644
--- a/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs
+++ b/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs
@@ -7,33 +7,32 @@
{-# OPTIONS_GHC -Wno-overlapping-patterns #-}
module Ide.Plugin.GHC where
+#if !MIN_VERSION_ghc(9,11,0)
import Data.Functor ((<&>))
+#endif
import Data.List.Extra (stripInfix)
import qualified Data.Text as T
import Development.IDE
import Development.IDE.GHC.Compat
import Development.IDE.GHC.Compat.ExactPrint
-import GHC.Parser.Annotation (AddEpAnn (..),
- DeltaPos (..),
+import GHC.Parser.Annotation (DeltaPos (..),
EpAnn (..),
EpAnnComments (EpaComments))
+#if MIN_VERSION_ghc(9,11,0)
+import GHC.Parser.Annotation (EpToken (..))
+#endif
import Ide.PluginUtils (subRange)
import Language.Haskell.GHC.ExactPrint.Parsers (parseDecl)
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
-#if MIN_VERSION_ghc(9,5,0)
import qualified Data.List.NonEmpty as NE
-#endif
-
-#if MIN_VERSION_ghc(9,5,0) && !MIN_VERSION_ghc(9,9,0)
-import GHC.Parser.Annotation (TokenLocation (..))
-#endif
#if !MIN_VERSION_ghc(9,9,0)
import GHC.Parser.Annotation (Anchor (Anchor),
AnchorOperation (MovedAnchor),
SrcSpanAnn' (SrcSpanAnn),
+ TokenLocation (..),
spanAsAnchor)
#endif
@@ -44,6 +43,11 @@ import GHC.Parser.Annotation (EpUniToken (..),
import Language.Haskell.GHC.ExactPrint.Utils (showAst)
#endif
+#if MIN_VERSION_ghc(9,11,0)
+import GHC.Types.SrcLoc (UnhelpfulSpanReason (..))
+#else
+import GHC.Parser.Annotation (AddEpAnn (..))
+#endif
type GP = GhcPass Parsed
@@ -97,18 +101,18 @@ h98ToGADTConDecl ::
h98ToGADTConDecl dataName tyVars ctxt = \case
ConDeclH98{..} ->
ConDeclGADT
-#if MIN_VERSION_ghc(9,9,0)
+
+#if MIN_VERSION_ghc(9,11,0)
+ (AnnConDeclGADT [] [] NoEpUniTok)
+#elif MIN_VERSION_ghc(9,9,0)
(NoEpUniTok, con_ext)
#else
con_ext
#endif
-#if MIN_VERSION_ghc(9,5,0)
+
(NE.singleton con_name)
-#else
- [con_name]
-#endif
-#if MIN_VERSION_ghc(9,5,0) && !MIN_VERSION_ghc(9,9,0)
+#if !MIN_VERSION_ghc(9,9,0)
(L NoTokenLoc HsNormalTok)
#endif
-- Ignore all existential type variable since GADT not needed
@@ -209,7 +213,11 @@ prettyGADTDecl df decl =
adjustDataDecl DataDecl{..} = DataDecl
{ tcdDExt = adjustWhere tcdDExt
, tcdDataDefn = tcdDataDefn
- { dd_cons =
+ {
+#if MIN_VERSION_ghc(9,11,0)
+ dd_ext = adjustDefnWhere (dd_ext tcdDataDefn),
+#endif
+ dd_cons =
fmap adjustCon (dd_cons tcdDataDefn)
}
, ..
@@ -218,7 +226,11 @@ prettyGADTDecl df decl =
-- Make every data constructor start with a new line and 2 spaces
adjustCon :: LConDecl GP -> LConDecl GP
-#if MIN_VERSION_ghc(9,9,0)
+#if MIN_VERSION_ghc(9,11,0)
+ adjustCon (L _ r) =
+ let delta = EpaDelta (UnhelpfulSpan UnhelpfulNoLocationInfo) (DifferentLine 1 2) []
+ in L (EpAnn delta (AnnListItem []) (EpaComments [])) r
+#elif MIN_VERSION_ghc(9,9,0)
adjustCon (L _ r) =
let delta = EpaDelta (DifferentLine 1 3) []
in L (EpAnn delta (AnnListItem []) (EpaComments [])) r
@@ -229,6 +241,10 @@ prettyGADTDecl df decl =
#endif
-- Adjust where annotation to the same line of the type constructor
+#if MIN_VERSION_ghc(9,11,0)
+ -- tcdDExt is just a placeholder in ghc-9.12
+ adjustWhere = id
+#else
adjustWhere tcdDExt = tcdDExt <&>
#if !MIN_VERSION_ghc(9,9,0)
map
@@ -238,7 +254,16 @@ prettyGADTDecl df decl =
then AddEpAnn AnnWhere d1
else AddEpAnn ann l
)
+#endif
+#if MIN_VERSION_ghc(9,11,0)
+ adjustDefnWhere annDataDefn
+ | andd_where annDataDefn == NoEpTok = annDataDefn
+ | otherwise = annDataDefn {andd_where = andd_where'}
+ where
+ (EpTok (EpaSpan aw)) = andd_where annDataDefn
+ andd_where' = EpTok (EpaDelta aw (SameLine 1) [])
+#endif
-- Remove the first extra line if it exists
removeExtraEmptyLine s = case stripInfix "\n\n" s of
Just (x, xs) -> x <> "\n" <> xs
@@ -257,6 +282,10 @@ noUsed = EpAnnNotUsed
#endif
pattern UserTyVar' :: LIdP pass -> HsTyVarBndr flag pass
+#if MIN_VERSION_ghc(9,11,0)
+pattern UserTyVar' s <- HsTvb _ _ (HsBndrVar _ s) _
+#else
pattern UserTyVar' s <- UserTyVar _ _ s
+#endif
implicitTyVars = wrapXRec @GP mkHsOuterImplicit
diff --git a/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs
index 14c43f8db8..5a72455eb5 100644
--- a/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs
+++ b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs
@@ -5,7 +5,6 @@
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE PackageImports #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
@@ -54,8 +53,15 @@ import Development.IDE.Core.FileStore (getVersione
import Development.IDE.Core.Rules (defineNoFile,
getParsedModuleWithComments)
import Development.IDE.Core.Shake (getDiagnostics)
+
+#if APPLY_REFACT
import qualified Refact.Apply as Refact
import qualified Refact.Types as Refact
+#if !MIN_VERSION_apply_refact(0,12,0)
+import System.Environment (setEnv,
+ unsetEnv)
+#endif
+#endif
import Development.IDE.GHC.Compat (DynFlags,
WarningFlag (Opt_WarnUnrecognisedPragmas),
@@ -105,6 +111,7 @@ import Language.LSP.Protocol.Types hiding
(Null)
import qualified Language.LSP.Protocol.Types as LSP
+import Development.IDE.Core.PluginUtils as PluginUtils
import qualified Development.IDE.Core.Shake as Shake
import Development.IDE.Spans.Pragmas (LineSplitTextEdits (LineSplitTextEdits),
NextPragmaInfo (NextPragmaInfo),
@@ -114,11 +121,6 @@ import Development.IDE.Spans.Pragmas (LineSplitTe
lineSplitTextEdits,
nextPragmaLine)
import GHC.Generics (Generic)
-#if !MIN_VERSION_apply_refact(0,12,0)
-import System.Environment (setEnv,
- unsetEnv)
-#endif
-import Development.IDE.Core.PluginUtils as PluginUtils
import Text.Regex.TDFA.Text ()
-- ---------------------------------------------------------------------
@@ -126,7 +128,9 @@ import Text.Regex.TDFA.Text ()
data Log
= LogShake Shake.Log
| LogApplying NormalizedFilePath (Either String WorkspaceEdit)
+#if APPLY_REFACT
| LogGeneratedIdeas NormalizedFilePath [[Refact.Refactoring Refact.SrcSpan]]
+#endif
| LogGetIdeas NormalizedFilePath
| LogUsingExtensions NormalizedFilePath [String] -- Extension is only imported conditionally, so we just stringify them
| forall a. (Pretty a) => LogResolve a
@@ -135,7 +139,9 @@ instance Pretty Log where
pretty = \case
LogShake log -> pretty log
LogApplying fp res -> "Applying hint(s) for" <+> viaShow fp <> ":" <+> viaShow res
+#if APPLY_REFACT
LogGeneratedIdeas fp ideas -> "Generated hlint ideas for" <+> viaShow fp <> ":" <+> viaShow ideas
+#endif
LogUsingExtensions fp exts -> "Using extensions for " <+> viaShow fp <> ":" <> line <> indent 4 (pretty exts)
LogGetIdeas fp -> "Getting hlint ideas for " <+> viaShow fp
LogResolve msg -> pretty msg
@@ -178,7 +184,7 @@ descriptor recorder plId =
-- This rule only exists for generating file diagnostics
-- so the RuleResult is empty
data GetHlintDiagnostics = GetHlintDiagnostics
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetHlintDiagnostics
instance NFData GetHlintDiagnostics
@@ -331,7 +337,7 @@ getExtensions nfp = do
-- ---------------------------------------------------------------------
data GetHlintSettings = GetHlintSettings
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetHlintSettings
instance NFData GetHlintSettings
instance NFData Hint where rnf = rwhnf
@@ -413,12 +419,19 @@ resolveProvider recorder ideState _plId ca uri resolveValue = do
edit <- ExceptT $ liftIO $ ignoreHint recorder ideState file verTxtDocId hintTitle
pure $ ca & LSP.edit ?~ edit
+applyRefactAvailable :: Bool
+#if APPLY_REFACT
+applyRefactAvailable = True
+#else
+applyRefactAvailable = False
+#endif
+
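
The APPLY_REFACT CPP flag both compiles out the apply-refact imports and is surfaced as a runtime Bool so that "Apply hint" actions are simply not offered when refactoring support is unavailable. A minimal sketch of the same pattern, assuming the flag is set with -DAPPLY_REFACT (the module and action names here are illustrative, not the plugin's own):

```haskell
{-# LANGUAGE CPP #-}
module Main (main) where

-- Compile-time feature flag surfaced as a runtime Bool, so callers can
-- filter out code actions instead of failing when the feature is missing.
featureAvailable :: Bool
#if defined(APPLY_REFACT)
featureAvailable = True
#else
featureAvailable = False
#endif

offeredActions :: [String]
offeredActions =
  [ "Ignore hint in this module" ]
  ++ [ "Apply hint" | featureAvailable ]

main :: IO ()
main = mapM_ putStrLn offeredActions
```
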
-- | Convert a hlint diagnostic into an apply and an ignore code action
-- if applicable
diagnosticToCodeActions :: VersionedTextDocumentIdentifier -> LSP.Diagnostic -> [LSP.CodeAction]
diagnosticToCodeActions verTxtDocId diagnostic
| LSP.Diagnostic{ _source = Just "hlint", _code = Just (InR code), _range = LSP.Range start _ } <- diagnostic
- , let isHintApplicable = "refact:" `T.isPrefixOf` code
+ , let isHintApplicable = "refact:" `T.isPrefixOf` code && applyRefactAvailable
, let hint = T.replace "refact:" "" code
, let suppressHintTitle = "Ignore hint \"" <> hint <> "\" in this module"
, let suppressHintArguments = IgnoreHint verTxtDocId hint
@@ -506,6 +519,11 @@ data OneHint =
} deriving (Generic, Eq, Show, ToJSON, FromJSON)
applyHint :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> Maybe OneHint -> VersionedTextDocumentIdentifier -> IO (Either PluginError WorkspaceEdit)
+#if !APPLY_REFACT
+applyHint _ _ _ _ _ =
+ -- https://github.com/ndmitchell/hlint/pull/1594#issuecomment-2338898673
+ evaluate $ error "Cannot apply refactoring: apply-refact does not work on GHC 9.10"
+#else
applyHint recorder ide nfp mhint verTxtDocId =
runExceptT $ do
let runAction' :: Action a -> IO a
@@ -607,7 +625,7 @@ applyRefactorings ::
-- with the @LANGUAGE@ pragmas, pragmas win.
[String] ->
IO String
-applyRefactorings =
+applyRefactorings =
#if MIN_VERSION_apply_refact(0,12,0)
Refact.applyRefactorings
#else
@@ -624,3 +642,4 @@ applyRefactorings =
withRuntimeLibdir libdir = bracket_ (setEnv key libdir) (unsetEnv key)
where key = "GHC_EXACTPRINT_GHC_LIBDIR"
#endif
+#endif
diff --git a/plugins/hls-hlint-plugin/test/Main.hs b/plugins/hls-hlint-plugin/test/Main.hs
index 5db5d485a4..4eea2a803a 100644
--- a/plugins/hls-hlint-plugin/test/Main.hs
+++ b/plugins/hls-hlint-plugin/test/Main.hs
@@ -45,7 +45,7 @@ getApplyHintText :: T.Text -> T.Text
getApplyHintText name = "Apply hint \"" <> name <> "\""
resolveTests :: TestTree
-resolveTests = testGroup "hlint resolve tests"
+resolveTests = knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testGroup "hlint resolve tests"
[
ignoreHintGoldenResolveTest
"Resolve version of: Ignore hint in this module inserts -Wno-unrecognised-pragmas and hlint ignore pragma if warn unrecognized pragmas is off"
@@ -76,7 +76,7 @@ ignoreHintTests = testGroup "hlint ignore hint tests"
]
applyHintTests :: TestTree
-applyHintTests = testGroup "hlint apply hint tests"
+applyHintTests = knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testGroup "hlint apply hint tests"
[
applyHintGoldenTest
"[#2612] Apply hint works when operator fixities go right-to-left"
@@ -88,7 +88,7 @@ applyHintTests = testGroup "hlint apply hint tests"
suggestionsTests :: TestTree
suggestionsTests =
testGroup "hlint suggestions" [
- testCase "provides 3.8 code actions including apply all" $ runHlintSession "" $ do
+ knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "provides 3.8 code actions including apply all" $ runHlintSession "" $ do
doc <- openDoc "Base.hs" "haskell"
diags@(reduceDiag:_) <- hlintCaptureKick
@@ -120,7 +120,7 @@ suggestionsTests =
contents <- skipManyTill anyMessage $ getDocumentEdit doc
liftIO $ contents @?= "main = undefined\nfoo x = x\n"
- , testCase "falls back to pre 3.8 code actions" $
+ , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "falls back to pre 3.8 code actions" $
runSessionWithTestConfig def
{ testConfigCaps = noLiteralCaps
, testDirLocation = Left testDir
@@ -179,15 +179,15 @@ suggestionsTests =
doc <- openDoc "CppHeader.hs" "haskell"
testHlintDiagnostics doc
- , testCase "[#590] apply-refact works with -XLambdaCase argument" $ runHlintSession "lambdacase" $ do
+ , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "[#590] apply-refact works with -XLambdaCase argument" $ runHlintSession "lambdacase" $ do
testRefactor "LambdaCase.hs" "Redundant bracket"
expectedLambdaCase
- , testCase "[#1242] apply-refact works with -XTypeApplications argument" $ runHlintSession "typeapps" $ do
+ , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "[#1242] apply-refact works with -XTypeApplications argument" $ runHlintSession "typeapps" $ do
testRefactor "TypeApplication.hs" "Redundant bracket"
expectedTypeApp
- , testCase "apply hints works with LambdaCase via language pragma" $ runHlintSession "" $ do
+ , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "apply hints works with LambdaCase via language pragma" $ runHlintSession "" $ do
testRefactor "LambdaCase.hs" "Redundant bracket"
("{-# LANGUAGE LambdaCase #-}" : expectedLambdaCase)
@@ -213,10 +213,10 @@ suggestionsTests =
doc <- openDoc "IgnoreAnnHlint.hs" "haskell"
testNoHlintDiagnostics doc
- , testCase "apply-refact preserve regular comments" $ runHlintSession "" $ do
+ , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "apply-refact preserve regular comments" $ runHlintSession "" $ do
testRefactor "Comments.hs" "Redundant bracket" expectedComments
- , testCase "[#2290] apply all hints works with a trailing comment" $ runHlintSession "" $ do
+ , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "[#2290] apply all hints works with a trailing comment" $ runHlintSession "" $ do
testRefactor "TwoHintsAndComment.hs" "Apply all hints" expectedComments2
, testCase "applyAll is shown only when there is at least one diagnostic in range" $ runHlintSession "" $ do
@@ -276,14 +276,24 @@ suggestionsTests =
, "g = 2"
, "#endif", ""
]
- expectedComments = [ "-- comment before header"
- , "module Comments where", ""
- , "{-# standalone annotation #-}", ""
- , "-- standalone comment", ""
- , "-- | haddock comment"
- , "f = {- inline comment -} {- inline comment inside refactored code -}1 -- ending comment", ""
- , "-- final comment"
- ]
+ expectedComments = case ghcVersion of
+ GHC912 -> [ "-- comment before header"
+ , "module Comments where", ""
+ , "{-# standalone annotation #-}", ""
+ , "-- standalone comment", ""
+ , "-- | haddock comment"
+ , "f = {- inline comment -}{- inline comment inside refactored code -} 1 -- ending comment", ""
+ , "-- final comment"
+ ]
+
+ _ -> [ "-- comment before header"
+ , "module Comments where", ""
+ , "{-# standalone annotation #-}", ""
+ , "-- standalone comment", ""
+ , "-- | haddock comment"
+ , "f = {- inline comment -} {- inline comment inside refactored code -}1 -- ending comment", ""
+ , "-- final comment"
+ ]
expectedComments2 = [ "module TwoHintsAndComment where"
, "biggest = foldr1 max -- the line above will show two hlint hints, \"eta reduce\" and \"use maximum\""
]
diff --git a/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs
index 1c40ea76b3..db1696d94b 100644
--- a/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs
+++ b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs
@@ -1,17 +1,21 @@
module Ide.Plugin.Notes (descriptor, Log) where
import Control.Lens ((^.))
-import Control.Monad.Except (throwError)
+import Control.Monad.Except (ExceptT, MonadError,
+ throwError)
import Control.Monad.IO.Class (liftIO)
import qualified Data.Array as A
+import Data.Foldable (foldl')
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
+import Data.List (uncons)
import Data.Maybe (catMaybes, listToMaybe,
mapMaybe)
import Data.Text (Text, intercalate)
import qualified Data.Text as T
import qualified Data.Text.Utf16.Rope.Mixed as Rope
+import Data.Traversable (for)
import Development.IDE hiding (line)
import Development.IDE.Core.PluginUtils (runActionE, useE)
import Development.IDE.Core.Shake (toKnownFiles)
@@ -21,8 +25,8 @@ import GHC.Generics (Generic)
import Ide.Plugin.Error (PluginError (..))
import Ide.Types
import qualified Language.LSP.Protocol.Lens as L
-import Language.LSP.Protocol.Message (Method (Method_TextDocumentDefinition),
- SMethod (SMethod_TextDocumentDefinition))
+import Language.LSP.Protocol.Message (Method (Method_TextDocumentDefinition, Method_TextDocumentReferences),
+ SMethod (SMethod_TextDocumentDefinition, SMethod_TextDocumentReferences))
import Language.LSP.Protocol.Types
import Text.Regex.TDFA (Regex, caseSensitive,
defaultCompOpt,
@@ -31,25 +35,39 @@ import Text.Regex.TDFA (Regex, caseSensitive,
data Log
= LogShake Shake.Log
- | LogNotesFound NormalizedFilePath [(Text, Position)]
+ | LogNotesFound NormalizedFilePath [(Text, [Position])]
+ | LogNoteReferencesFound NormalizedFilePath [(Text, [Position])]
deriving Show
data GetNotesInFile = MkGetNotesInFile
deriving (Show, Generic, Eq, Ord)
deriving anyclass (Hashable, NFData)
-type instance RuleResult GetNotesInFile = HM.HashMap Text Position
+-- The GetNotesInFile action scans the source file and extracts a map of note
+-- definitions (note name -> position) and a map of note references
+-- (note name -> [position]).
+type instance RuleResult GetNotesInFile = (HM.HashMap Text Position, HM.HashMap Text [Position])
data GetNotes = MkGetNotes
deriving (Show, Generic, Eq, Ord)
deriving anyclass (Hashable, NFData)
+-- GetNotes collects all note definitions across all files in the
+-- project. It returns a map from note name to a pair of (filepath, position).
type instance RuleResult GetNotes = HashMap Text (NormalizedFilePath, Position)
+data GetNoteReferences = MkGetNoteReferences
+ deriving (Show, Generic, Eq, Ord)
+ deriving anyclass (Hashable, NFData)
+-- GetNoteReferences collects all note references across all files in the
+-- project. It returns a map from note name to a list of (filepath, position).
+type instance RuleResult GetNoteReferences = HashMap Text [(NormalizedFilePath, Position)]
+
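
The per-file rule now yields a pair of maps (note definitions and note references keyed by title), and GetNoteReferences folds the per-file reference maps together with HM.unionWith (<>). A small, self-contained sketch of that fold, using (file, line) pairs as a stand-in for Position:

```haskell
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM
import Data.List (foldl')

type NoteTitle = String
type FileRefs  = HashMap NoteTitle [(FilePath, Int)]  -- (file, line) stand-in

-- Combine per-file reference maps into one project-wide map, concatenating
-- the positions when the same note is referenced from several files.
combineRefs :: [FileRefs] -> FileRefs
combineRefs = foldl' (HM.unionWith (<>)) HM.empty

main :: IO ()
main = print (combineRefs [a, b])
  where
    a = HM.fromList [("Single line comments", [("NoteDef.hs", 9)])]
    b = HM.fromList [("Single line comments", [("Other.hs", 6)])]
```
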
instance Pretty Log where
pretty = \case
- LogShake l -> pretty l
- LogNotesFound file notes ->
- "Found notes in " <> pretty (show file) <> ": ["
- <> pretty (intercalate ", " (fmap (\(s, p) -> "\"" <> s <> "\" at " <> T.pack (show p)) notes)) <> "]"
+ LogShake l -> pretty l
+ LogNoteReferencesFound file refs -> "Found note references in " <> prettyNotes file refs
+ LogNotesFound file notes -> "Found notes in " <> prettyNotes file notes
+ where prettyNotes file hm = pretty (show file) <> ": ["
+ <> pretty (intercalate ", " (fmap (\(s, p) -> "\"" <> s <> "\" at " <> intercalate ", " (map (T.pack . show) p)) hm)) <> "]"
{-
The first time the user requests a jump-to-definition on a note reference, the
@@ -59,7 +77,9 @@ title is then saved in the HLS database to be retrieved for all future requests.
descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState
descriptor recorder plId = (defaultPluginDescriptor plId "Provides goto definition support for GHC-style notes")
{ Ide.Types.pluginRules = findNotesRules recorder
- , Ide.Types.pluginHandlers = mkPluginHandler SMethod_TextDocumentDefinition jumpToNote
+ , Ide.Types.pluginHandlers =
+ mkPluginHandler SMethod_TextDocumentDefinition jumpToNote
+ <> mkPluginHandler SMethod_TextDocumentReferences listReferences
}
findNotesRules :: Recorder (WithPriority Log) -> Rules ()
@@ -69,20 +89,59 @@ findNotesRules recorder = do
defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \MkGetNotes _ -> do
targets <- toKnownFiles <$> useNoFile_ GetKnownTargets
- definedNotes <- catMaybes <$> mapM (\nfp -> fmap (HM.map (nfp,)) <$> use MkGetNotesInFile nfp) (HS.toList targets)
+ definedNotes <- catMaybes <$> mapM (\nfp -> fmap (HM.map (nfp,) . fst) <$> use MkGetNotesInFile nfp) (HS.toList targets)
pure $ Just $ HM.unions definedNotes
+ defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \MkGetNoteReferences _ -> do
+ targets <- toKnownFiles <$> useNoFile_ GetKnownTargets
+ definedReferences <- catMaybes <$> for (HS.toList targets) (\nfp -> do
+ references <- fmap snd <$> use MkGetNotesInFile nfp
+ pure $ fmap (HM.map (fmap (nfp,))) references
+ )
+ pure $ Just $ foldl' (HM.unionWith (<>)) HM.empty definedReferences
+
+err :: MonadError PluginError m => Text -> Maybe a -> m a
+err s = maybe (throwError $ PluginInternalError s) pure
+
+getNote :: NormalizedFilePath -> IdeState -> Position -> ExceptT PluginError (HandlerM c) (Maybe Text)
+getNote nfp state (Position l c) = do
+ contents <-
+ err "Error getting file contents"
+ =<< liftIO (runAction "notes.getfileContents" state (getFileContents nfp))
+ line <- err "Line not found in file" (listToMaybe $ Rope.lines $ fst
+ (Rope.splitAtLine 1 $ snd $ Rope.splitAtLine (fromIntegral l) contents))
+ pure $ listToMaybe $ mapMaybe (atPos $ fromIntegral c) $ matchAllText noteRefRegex line
+ where
+ atPos c arr = case arr A.! 0 of
+ -- We check if the line we are currently at contains a note
+ -- reference. However, we need to know if the cursor is within the
+ -- match or somewhere else. The second entry of the array contains
+ -- the title of the note as extracted by the regex.
+ (_, (c', len)) -> if c' <= c && c <= c' + len
+ then Just (fst (arr A.! 1)) else Nothing
+
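
getNote keeps a regex match only when the cursor column falls inside the matched span, and returns the captured note title. A minimal version of that predicate with the regex machinery elided (the offsets and titles below are illustrative):

```haskell
-- Given the cursor column and regex matches as (title, (start, length)),
-- return the title of the match the cursor is inside, if any.
noteUnderCursor :: Int -> [(String, (Int, Int))] -> Maybe String
noteUnderCursor cursor matches =
  case [ title | (title, (start, len)) <- matches
               , start <= cursor, cursor <= start + len ] of
    (t:_) -> Just t
    []    -> Nothing

main :: IO ()
main = do
  print (noteUnderCursor 45 [("Returning zero from foo", (41, 33))]) -- Just ...
  print (noteUnderCursor 3  [("Returning zero from foo", (41, 33))]) -- Nothing
```
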
+listReferences :: PluginMethodHandler IdeState Method_TextDocumentReferences
+listReferences state _ param
+ | Just nfp <- uriToNormalizedFilePath uriOrig
+ = do
+ let pos@(Position l _) = param ^. L.position
+ noteOpt <- getNote nfp state pos
+ case noteOpt of
+ Nothing -> pure (InR Null)
+ Just note -> do
+ notes <- runActionE "notes.definedNoteReferences" state $ useE MkGetNoteReferences nfp
+ poss <- err ("Note reference (a comment of the form `{- Note [" <> note <> "] -}`) not found") (HM.lookup note notes)
+ pure $ InL (mapMaybe (\(noteFp, pos@(Position l' _)) -> if l' == l then Nothing else Just (
+ Location (fromNormalizedUri $ normalizedFilePathToUri noteFp) (Range pos pos))) poss)
+ where
+ uriOrig = toNormalizedUri $ param ^. (L.textDocument . L.uri)
+listReferences _ _ _ = throwError $ PluginInternalError "conversion to normalized file path failed"
+
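
listReferences then looks the title up in the project-wide reference map and reports every reference except the one on the line the request came from. A tiny sketch of that filter over (file, line) pairs, with Location construction elided:

```haskell
-- Return every reference to a note except the one on the requesting line,
-- mirroring the filter in listReferences.
otherReferences :: Int -> [(FilePath, Int)] -> [(FilePath, Int)]
otherReferences requestLine refs =
  [ ref | ref@(_, l) <- refs, l /= requestLine ]

main :: IO ()
main = print (otherReferences 21
         [ ("Other.hs", 6), ("NoteDef.hs", 9), ("NoteDef.hs", 21) ])
  -- [("Other.hs",6),("NoteDef.hs",9)]
```
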
jumpToNote :: PluginMethodHandler IdeState Method_TextDocumentDefinition
jumpToNote state _ param
| Just nfp <- uriToNormalizedFilePath uriOrig
= do
- let Position l c = param ^. L.position
- contents <-
- err "Error getting file contents"
- =<< liftIO (runAction "notes.getfileContents" state (getFileContents nfp))
- line <- err "Line not found in file" (listToMaybe $ Rope.lines $ fst
- (Rope.splitAtLine 1 $ snd $ Rope.splitAtLine (fromIntegral l) contents))
- let noteOpt = listToMaybe $ mapMaybe (atPos $ fromIntegral c) $ matchAllText noteRefRegex line
+ noteOpt <- getNote nfp state (param ^. L.position)
case noteOpt of
Nothing -> pure (InR (InR Null))
Just note -> do
@@ -93,17 +152,9 @@ jumpToNote state _ param
))
where
uriOrig = toNormalizedUri $ param ^. (L.textDocument . L.uri)
- err s = maybe (throwError $ PluginInternalError s) pure
- atPos c arr = case arr A.! 0 of
- -- We check if the line we are currently at contains a note
- -- reference. However, we need to know if the cursor is within the
- -- match or somewhere else. The second entry of the array contains
- -- the title of the note as extracted by the regex.
- (_, (c', len)) -> if c' <= c && c <= c' + len
- then Just (fst (arr A.! 1)) else Nothing
jumpToNote _ _ _ = throwError $ PluginInternalError "conversion to normalized file path failed"
-findNotesInFile :: NormalizedFilePath -> Recorder (WithPriority Log) -> Action (Maybe (HM.HashMap Text Position))
+findNotesInFile :: NormalizedFilePath -> Recorder (WithPriority Log) -> Action (Maybe (HM.HashMap Text Position, HM.HashMap Text [Position]))
findNotesInFile file recorder = do
-- GetFileContents only returns a value if the file is open in the editor of
-- the user. If not, we need to read it from disk.
@@ -111,10 +162,13 @@ findNotesInFile file recorder = do
content <- case contentOpt of
Just x -> pure $ Rope.toText x
Nothing -> liftIO $ readFileUtf8 $ fromNormalizedFilePath file
- let matches = (A.! 1) <$> matchAllText noteRegex content
- m = toPositions matches content
- logWith recorder Debug $ LogNotesFound file (HM.toList m)
- pure $ Just m
+ let noteMatches = (A.! 1) <$> matchAllText noteRegex content
+ notes = toPositions noteMatches content
+ logWith recorder Debug $ LogNotesFound file (HM.toList notes)
+ let refMatches = (A.! 1) <$> matchAllText noteRefRegex content
+ refs = toPositions refMatches content
+ logWith recorder Debug $ LogNoteReferencesFound file (HM.toList refs)
+ pure $ Just (HM.mapMaybe (fmap fst . uncons) notes, refs)
where
uint = fromIntegral . toInteger
-- the regex library returns the character index of the match. However
@@ -129,7 +183,7 @@ findNotesInFile file recorder = do
let !c' = c + 1
(!n', !nc') = if char' == '\n' then (n + 1, c') else (n, nc)
p@(!_, !_) = if char == c then
- (xs, HM.insert name (Position (uint n') (uint (char - nc'))) m)
+ (xs, HM.insertWith (<>) name [Position (uint n') (uint (char - nc'))] m)
else (x:xs, m)
in (p, (n', nc', c'))
) ((matches, HM.empty), (0, 0, 0))
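
toPositions turns the character offsets reported by the regex into zero-based (line, column) positions by scanning the content once while tracking the current line and the offset at which it starts. A standalone sketch of the same conversion, simplified to a single offset:

```haskell
import qualified Data.Text as T

-- Convert a character offset into a zero-based (line, column) pair by
-- scanning the text once, as findNotesInFile does for every match offset.
offsetToPos :: T.Text -> Int -> (Int, Int)
offsetToPos txt offset = go 0 0 0 (T.unpack (T.take offset txt))
  where
    go line lineStart i [] = (line, i - lineStart)
    go line lineStart i (ch:rest)
      | ch == '\n' = go (line + 1) (i + 1) (i + 1) rest
      | otherwise  = go line lineStart (i + 1) rest

main :: IO ()
main = print (offsetToPos (T.pack "foo\nbar Note [X]\n") 8) -- (1,4)
```
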
diff --git a/plugins/hls-notes-plugin/test/NotesTest.hs b/plugins/hls-notes-plugin/test/NotesTest.hs
index f87cf98a98..f84bed9731 100644
--- a/plugins/hls-notes-plugin/test/NotesTest.hs
+++ b/plugins/hls-notes-plugin/test/NotesTest.hs
@@ -11,6 +11,7 @@ main :: IO ()
main = defaultTestRunner $
testGroup "Notes"
[ gotoNoteTests
+ , noteReferenceTests
]
runSessionWithServer' :: FilePath -> (FilePath -> Session a) -> IO a
@@ -21,6 +22,21 @@ runSessionWithServer' fp act =
, testDirLocation = Left fp
} act
+noteReferenceTests :: TestTree
+noteReferenceTests = testGroup "Note References"
+ [
+ testCase "multi_file" $ runSessionWithServer' testDataDir $ \dir -> do
+ doc <- openDoc "NoteDef.hs" "haskell"
+ waitForKickDone
+ refs <- getReferences doc (Position 21 15) False
+ let fp = dir </> "NoteDef.hs"
+ liftIO $ refs @?= [
+ Location (filePathToUri (dir </> "Other.hs")) (Range (Position 6 13) (Position 6 13)),
+ Location (filePathToUri fp) (Range (Position 9 9) (Position 9 9)),
+ Location (filePathToUri fp) (Range (Position 5 67) (Position 5 67))
+ ]
+ ]
+
gotoNoteTests :: TestTree
gotoNoteTests = testGroup "Goto Note Definition"
[
@@ -29,13 +45,13 @@ gotoNoteTests = testGroup "Goto Note Definition"
waitForKickDone
defs <- getDefinitions doc (Position 3 41)
let fp = dir </> "NoteDef.hs"
- liftIO $ defs @?= InL (Definition (InR [Location (filePathToUri fp) (Range (Position 8 9) (Position 8 9))]))
+ liftIO $ defs @?= InL (Definition (InR [Location (filePathToUri fp) (Range (Position 11 9) (Position 11 9))]))
, testCase "liberal_format" $ runSessionWithServer' testDataDir $ \dir -> do
doc <- openDoc "NoteDef.hs" "haskell"
waitForKickDone
defs <- getDefinitions doc (Position 5 64)
let fp = dir </> "NoteDef.hs"
- liftIO $ defs @?= InL (Definition (InR [Location (filePathToUri fp) (Range (Position 18 11) (Position 18 11))]))
+ liftIO $ defs @?= InL (Definition (InR [Location (filePathToUri fp) (Range (Position 21 11) (Position 21 11))]))
, testCase "invalid_note" $ runSessionWithServer' testDataDir $ const $ do
doc <- openDoc "NoteDef.hs" "haskell"
@@ -54,7 +70,7 @@ gotoNoteTests = testGroup "Goto Note Definition"
waitForKickDone
defs <- getDefinitions doc (Position 5 20)
let fp = dir </> "NoteDef.hs"
- liftIO $ defs @?= InL (Definition (InR [Location (filePathToUri fp) (Range (Position 12 6) (Position 12 6))]))
+ liftIO $ defs @?= InL (Definition (InR [Location (filePathToUri fp) (Range (Position 15 6) (Position 15 6))]))
]
testDataDir :: FilePath
diff --git a/plugins/hls-notes-plugin/test/testdata/NoteDef.hs b/plugins/hls-notes-plugin/test/testdata/NoteDef.hs
index 56b1f6e72a..c4b450ced4 100644
--- a/plugins/hls-notes-plugin/test/testdata/NoteDef.hs
+++ b/plugins/hls-notes-plugin/test/testdata/NoteDef.hs
@@ -6,6 +6,9 @@ foo _ = 0 -- We always return zero, see Note [Returning zero from foo]
-- The plugin is more liberal with the note definitions, see Note [Single line comments]
-- It does not work on wrong note definitions, see Note [Not a valid Note]
+-- We can also have multiple references to the same note, see
+-- Note [Single line comments]
+
{- Note [Returning zero from foo]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a big long form note, with very important info
diff --git a/plugins/hls-notes-plugin/test/testdata/Other.hs b/plugins/hls-notes-plugin/test/testdata/Other.hs
index 65f9a483aa..aa64e19a79 100644
--- a/plugins/hls-notes-plugin/test/testdata/Other.hs
+++ b/plugins/hls-notes-plugin/test/testdata/Other.hs
@@ -4,3 +4,4 @@ import NoteDef
bar :: Int
bar = 4 -- See @Note [Multiple notes in comment]@ in NoteDef
+-- See Note [Single line comments]
diff --git a/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs b/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs
index c37bba6359..8ead286b67 100644
--- a/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs
+++ b/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs
@@ -291,12 +291,20 @@ getRecSels (unLoc -> XExpr (HsExpanded a _)) = (collectRecordSelectors a, True)
#endif
-- applied record selection: "selector record" or "selector (record)" or
-- "selector selector2.record2"
+#if __GLASGOW_HASKELL__ >= 911
+getRecSels e@(unLoc -> HsApp _ se@(unLoc -> XExpr (HsRecSelRn _)) re) =
+#else
getRecSels e@(unLoc -> HsApp _ se@(unLoc -> HsRecSel _ _) re) =
+#endif
( [ RecordSelectorExpr (realSrcSpanToRange realSpan') se re
| RealSrcSpan realSpan' _ <- [ getLoc e ] ], False )
-- Record selection where the field is being applied with the "$" operator:
-- "selector $ record"
+#if __GLASGOW_HASKELL__ >= 911
+getRecSels e@(unLoc -> OpApp _ se@(unLoc -> XExpr (HsRecSelRn _))
+#else
getRecSels e@(unLoc -> OpApp _ se@(unLoc -> HsRecSel _ _)
+#endif
(unLoc -> HsVar _ (unLoc -> d)) re) | d == dollarName =
( [ RecordSelectorExpr (realSrcSpanToRange realSpan') se re
| RealSrcSpan realSpan' _ <- [ getLoc e ] ], False )
diff --git a/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs b/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs
index bd265b74db..23bfd727cf 100644
--- a/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs
+++ b/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs
@@ -19,6 +19,7 @@ import Control.Lens hiding (List)
import Control.Monad.IO.Class (MonadIO (liftIO))
import qualified Data.Aeson as JSON
import Data.Char (isAlphaNum)
+import qualified Data.Foldable as Foldable
import Data.List.Extra (nubOrdOn)
import qualified Data.Map as M
import Data.Maybe (mapMaybe)
@@ -122,10 +123,13 @@ suggest dflags diag =
suggestDisableWarning :: Diagnostic -> [PragmaEdit]
suggestDisableWarning diagnostic
- | Just (Just (JSON.String attachedReason)) <- diagnostic ^? attachedReason
- , Just w <- T.stripPrefix "-W" attachedReason
- , w `notElem` warningBlacklist =
- pure ("Disable \"" <> w <> "\" warnings", OptGHC w)
+ | Just (Just (JSON.Array attachedReasons)) <- diagnostic ^? attachedReason
+ =
+ [ ("Disable \"" <> w <> "\" warnings", OptGHC w)
+ | JSON.String attachedReason <- Foldable.toList attachedReasons
+ , Just w <- [T.stripPrefix "-W" attachedReason]
+ , w `notElem` warningBlacklist
+ ]
| otherwise = []
warningBlacklist :: [T.Text]
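
suggestDisableWarning now reads the diagnostic's attachedReason as a JSON array of flag strings, producing one "Disable ... warnings" suggestion per -W flag that is not blacklisted. A self-contained sketch of that extraction over an aeson Value (the blacklist contents below are just an example):

```haskell
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Aeson as JSON
import qualified Data.Foldable as Foldable
import qualified Data.Text as T

-- Pull every "-W<flag>" string out of the attached-reason array, dropping
-- flags we never want to disable (the blacklist here is only illustrative).
disableSuggestions :: JSON.Value -> [T.Text]
disableSuggestions (JSON.Array reasons) =
  [ "Disable \"" <> w <> "\" warnings"
  | JSON.String reason <- Foldable.toList reasons
  , Just w <- [T.stripPrefix "-W" reason]
  , w `notElem` ["deferred-type-errors"]
  ]
disableSuggestions _ = []

main :: IO ()
main = mapM_ print (disableSuggestions
         (JSON.toJSON (["-Wunused-imports", "-Wdeferred-type-errors"] :: [T.Text])))
```
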
diff --git a/plugins/hls-pragmas-plugin/test/Main.hs b/plugins/hls-pragmas-plugin/test/Main.hs
index 9b1eb10181..1e38e439ab 100644
--- a/plugins/hls-pragmas-plugin/test/Main.hs
+++ b/plugins/hls-pragmas-plugin/test/Main.hs
@@ -73,10 +73,10 @@ codeActionTests =
, codeActionTestWithPragmasSuggest "adds TypeApplications pragma" "TypeApplications" [("Add \"TypeApplications\"", "Contains TypeApplications code action")]
, codeActionTestWithPragmasSuggest "after shebang" "AfterShebang" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")]
, codeActionTestWithPragmasSuggest "append to existing pragmas" "AppendToExisting" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")]
- , codeActionTestWithPragmasSuggest "before doc comments" "BeforeDocComment" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")]
+ , codeActionTestWithPragmasSuggest "before doc comments NamedFieldPuns" "BeforeDocComment" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")]
, codeActionTestWithPragmasSuggest "adds TypeSynonymInstances pragma" "NeedsPragmas" [("Add \"TypeSynonymInstances\"", "Contains TypeSynonymInstances code action"), ("Add \"FlexibleInstances\"", "Contains FlexibleInstances code action")]
- , codeActionTestWithDisableWarning "before doc comments" "MissingSignatures" [("Disable \"missing-signatures\" warnings", "Contains missing-signatures code action")]
- , codeActionTestWithDisableWarning "before doc comments" "UnusedImports" [("Disable \"unused-imports\" warnings", "Contains unused-imports code action")]
+ , codeActionTestWithDisableWarning "before doc comments missing-signatures" "MissingSignatures" [("Disable \"missing-signatures\" warnings", "Contains missing-signatures code action")]
+ , codeActionTestWithDisableWarning "before doc comments unused-imports" "UnusedImports" [("Disable \"unused-imports\" warnings", "Contains unused-imports code action")]
]
codeActionTestWithPragmasSuggest :: String -> FilePath -> [(T.Text, String)] -> TestTree
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs
index 6a157c4948..638d14c51d 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs
@@ -42,9 +42,13 @@ showAstDataHtml a0 = html $
generic
`ext1Q` list
`extQ` string `extQ` fastString `extQ` srcSpan `extQ` realSrcSpan
+#if !MIN_VERSION_ghc(9,11,0)
`extQ` annotation
+#endif
`extQ` annotationModule
+#if !MIN_VERSION_ghc(9,11,0)
`extQ` annotationAddEpAnn
+#endif
`extQ` annotationGrhsAnn
`extQ` annotationEpAnnHsCase
`extQ` annotationEpAnnHsLet
@@ -53,7 +57,9 @@ showAstDataHtml a0 = html $
`extQ` annotationAnnParen
`extQ` annotationTrailingAnn
`extQ` annotationEpaLocation
+#if !MIN_VERSION_ghc(9,11,0)
`extQ` addEpAnn
+#endif
`extQ` lit `extQ` litr `extQ` litt
`extQ` sourceText
`extQ` deltaPos
@@ -121,6 +127,7 @@ showAstDataHtml a0 = html $
sourceText :: SourceText -> SDoc
sourceText NoSourceText = text "NoSourceText"
+
#if MIN_VERSION_ghc(9,7,0)
sourceText (SourceText src) = text "SourceText" <+> ftext src
#else
@@ -128,14 +135,18 @@ showAstDataHtml a0 = html $
#endif
epaAnchor :: EpaLocation -> SDoc
+
#if MIN_VERSION_ghc(9,9,0)
epaAnchor (EpaSpan s) = parens $ text "EpaSpan" <+> srcSpan s
-#elif MIN_VERSION_ghc(9,5,0)
- epaAnchor (EpaSpan r _) = text "EpaSpan" <+> realSrcSpan r
#else
- epaAnchor (EpaSpan r) = text "EpaSpan" <+> realSrcSpan r
+ epaAnchor (EpaSpan r _) = text "EpaSpan" <+> realSrcSpan r
#endif
+
+#if MIN_VERSION_ghc(9,11,0)
+ epaAnchor (EpaDelta s d cs) = text "EpaDelta" <+> srcSpan s <+> deltaPos d <+> showAstDataHtml' cs
+#else
epaAnchor (EpaDelta d cs) = text "EpaDelta" <+> deltaPos d <+> showAstDataHtml' cs
+#endif
#if !MIN_VERSION_ghc(9,9,0)
anchorOp :: AnchorOperation -> SDoc
@@ -169,8 +180,10 @@ showAstDataHtml a0 = html $
-- TODO: show annotations here
(text "")
+#if !MIN_VERSION_ghc(9,11,0)
addEpAnn :: AddEpAnn -> SDoc
addEpAnn (AddEpAnn a s) = text "AddEpAnn" <+> ppr a <+> epaAnchor s
+#endif
var :: Var -> SDoc
var v = braces $ text "Var:" <+> ppr v
@@ -208,14 +221,18 @@ showAstDataHtml a0 = html $
-- -------------------------
+#if !MIN_VERSION_ghc(9,11,0)
annotation :: EpAnn [AddEpAnn] -> SDoc
annotation = annotation' (text "EpAnn [AddEpAnn]")
+#endif
annotationModule :: EpAnn AnnsModule -> SDoc
annotationModule = annotation' (text "EpAnn AnnsModule")
+#if !MIN_VERSION_ghc(9,11,0)
annotationAddEpAnn :: EpAnn AddEpAnn -> SDoc
annotationAddEpAnn = annotation' (text "EpAnn AddEpAnn")
+#endif
annotationGrhsAnn :: EpAnn GrhsAnn -> SDoc
annotationGrhsAnn = annotation' (text "EpAnn GrhsAnn")
@@ -223,15 +240,14 @@ showAstDataHtml a0 = html $
annotationEpAnnHsCase :: EpAnn EpAnnHsCase -> SDoc
annotationEpAnnHsCase = annotation' (text "EpAnn EpAnnHsCase")
-#if MIN_VERSION_ghc(9,4,0)
annotationEpAnnHsLet :: EpAnn NoEpAnns -> SDoc
annotationEpAnnHsLet = annotation' (text "EpAnn NoEpAnns")
-#else
- annotationEpAnnHsLet :: EpAnn AnnsLet -> SDoc
- annotationEpAnnHsLet = annotation' (text "EpAnn AnnsLet")
-#endif
+#if MIN_VERSION_ghc(9,11,0)
+ annotationAnnList :: EpAnn (AnnList ()) -> SDoc
+#else
annotationAnnList :: EpAnn AnnList -> SDoc
+#endif
annotationAnnList = annotation' (text "EpAnn AnnList")
annotationEpAnnImportDecl :: EpAnn EpAnnImportDecl -> SDoc
@@ -256,7 +272,11 @@ showAstDataHtml a0 = html $
srcSpanAnnA :: EpAnn AnnListItem -> SDoc
srcSpanAnnA = locatedAnn'' (text "SrcSpanAnnA")
+#if MIN_VERSION_ghc(9,11,0)
+ srcSpanAnnL :: EpAnn (AnnList ()) -> SDoc
+#else
srcSpanAnnL :: EpAnn AnnList -> SDoc
+#endif
srcSpanAnnL = locatedAnn'' (text "SrcSpanAnnL")
srcSpanAnnP :: EpAnn AnnPragma -> SDoc
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs
index 38080ca4e5..666de9a6f2 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs
@@ -90,7 +90,7 @@ import GHC (DeltaPos (..),
#if !MIN_VERSION_ghc(9,9,0)
import Data.Default (Default)
-import GHC (Anchor (..),
+import GHC ( Anchor (..),
AnchorOperation,
EpAnn (..),
NameAdornment (NameParens),
@@ -106,13 +106,22 @@ import GHC.Parser.Annotation (AnnContext (..),
deltaPos)
import GHC.Types.SrcLoc (generatedSrcSpan)
#endif
+#if MIN_VERSION_ghc(9,11,0)
+import GHC.Types.SrcLoc (UnhelpfulSpanReason(..))
+#endif
#if MIN_VERSION_ghc(9,9,0)
-import GHC (Anchor,
+import GHC (
+#if !MIN_VERSION_ghc(9,11,0)
+ Anchor,
+#endif
AnnContext (..),
EpAnn (..),
EpaLocation,
EpaLocation' (..),
+#if MIN_VERSION_ghc(9,11,0)
+ EpToken (..),
+#endif
NameAdornment (..),
NameAnn (..),
SrcSpanAnnA,
@@ -121,7 +130,6 @@ import GHC (Anchor,
emptyComments,
spanAsAnchor)
#endif
-
setPrecedingLines ::
#if !MIN_VERSION_ghc(9,9,0)
Default t =>
@@ -137,19 +145,14 @@ instance Pretty Log where
LogShake shakeLog -> pretty shakeLog
data GetAnnotatedParsedSource = GetAnnotatedParsedSource
- deriving (Eq, Show, Typeable, GHC.Generic)
+ deriving (Eq, Show, GHC.Generic)
instance Hashable GetAnnotatedParsedSource
instance NFData GetAnnotatedParsedSource
type instance RuleResult GetAnnotatedParsedSource = ParsedSource
-#if MIN_VERSION_ghc(9,5,0)
instance Show (HsModule GhcPs) where
show _ = ""
-#else
-instance Show HsModule where
- show _ = ""
-#endif
-- | Get the latest version of the annotated parse source with comments.
getAnnotatedParsedSourceRule :: Recorder (WithPriority Log) -> Rules ()
@@ -165,6 +168,10 @@ annotateParsedSource (ParsedModule _ ps _) =
(makeDeltaAst ps)
#endif
+#if MIN_VERSION_ghc(9,11,0)
+type Anchor = EpaLocation
+#endif
+
------------------------------------------------------------------------------
{- | A transformation for grafting source trees together. Use the semigroup
@@ -463,7 +470,10 @@ modifySmallestDeclWithM validSpan f a = do
False -> first (DL.singleton ldecl <>) <$> modifyMatchingDecl rest
modifyDeclsT' (fmap (first DL.toList) . modifyMatchingDecl) a
-#if MIN_VERSION_ghc(9,9,0)
+#if MIN_VERSION_ghc(9,11,0)
+generatedAnchor :: DeltaPos -> Anchor
+generatedAnchor dp = EpaDelta (UnhelpfulSpan UnhelpfulNoLocationInfo) dp []
+#elif MIN_VERSION_ghc(9,9,0)
generatedAnchor :: DeltaPos -> Anchor
generatedAnchor dp = EpaDelta dp []
#else
@@ -578,9 +588,17 @@ modifyDeclsT' :: (HasDecls t, HasTransform m)
=> ([LHsDecl GhcPs] -> m ([LHsDecl GhcPs], r))
-> t -> m (t, r)
modifyDeclsT' action t = do
+#if MIN_VERSION_ghc_exactprint(1,10,0)
+ decls <- pure $ hsDecls t
+#else
decls <- liftT $ hsDecls t
+#endif
(decls', r) <- action decls
+#if MIN_VERSION_ghc_exactprint(1,10,0)
+ t' <- pure $ replaceDecls t decls'
+#else
t' <- liftT $ replaceDecls t decls'
+#endif
pure (t', r)
-- | Modify each LMatch in a MatchGroup
@@ -599,17 +617,10 @@ modifyMgMatchesT' ::
r ->
(r -> r -> m r) ->
TransformT m (MatchGroup GhcPs (LHsExpr GhcPs), r)
-#if MIN_VERSION_ghc(9,5,0)
modifyMgMatchesT' (MG xMg (L locMatches matches)) f def combineResults = do
(unzip -> (matches', rs)) <- mapM f matches
r' <- TransformT $ lift $ foldM combineResults def rs
pure (MG xMg (L locMatches matches'), r')
-#else
-modifyMgMatchesT' (MG xMg (L locMatches matches) originMg) f def combineResults = do
- (unzip -> (matches', rs)) <- mapM f matches
- r' <- lift $ foldM combineResults def rs
- pure (MG xMg (L locMatches matches') originMg, r')
-#endif
graftSmallestDeclsWithM ::
forall a.
@@ -712,26 +723,16 @@ annotate :: ASTElement l ast
annotate dflags needs_space ast = do
uniq <- show <$> uniqueSrcSpanT
let rendered = render dflags ast
-#if MIN_VERSION_ghc(9,4,0)
expr' <- TransformT $ lift $ mapLeft (showSDoc dflags . ppr) $ parseAST dflags uniq rendered
pure $ setPrecedingLines expr' 0 (bool 0 1 needs_space)
-#else
- expr' <- lift $ mapLeft show $ parseAST dflags uniq rendered
- pure $ setPrecedingLines expr' 0 (bool 0 1 needs_space)
-#endif
-- | Given an 'LHsDecl', compute its exactprint annotations.
annotateDecl :: DynFlags -> LHsDecl GhcPs -> TransformT (Either String) (LHsDecl GhcPs)
annotateDecl dflags ast = do
uniq <- show <$> uniqueSrcSpanT
let rendered = render dflags ast
-#if MIN_VERSION_ghc(9,4,0)
expr' <- TransformT $ lift $ mapLeft (showSDoc dflags . ppr) $ parseDecl dflags uniq rendered
pure $ setPrecedingLines expr' 1 0
-#else
- expr' <- lift $ mapLeft show $ parseDecl dflags uniq rendered
- pure $ setPrecedingLines expr' 1 0
-#endif
------------------------------------------------------------------------------
@@ -755,15 +756,28 @@ eqSrcSpan l r = leftmost_smallest l r == EQ
addParensToCtxt :: Maybe EpaLocation -> AnnContext -> AnnContext
addParensToCtxt close_dp = addOpen . addClose
where
+#if MIN_VERSION_ghc(9,11,0)
+ addOpen it@AnnContext{ac_open = []} = it{ac_open = [EpTok (epl 0)]}
+#else
addOpen it@AnnContext{ac_open = []} = it{ac_open = [epl 0]}
+#endif
addOpen other = other
addClose it
+#if MIN_VERSION_ghc(9,11,0)
+ | Just c <- close_dp = it{ac_close = [EpTok c]}
+ | AnnContext{ac_close = []} <- it = it{ac_close = [EpTok (epl 0)]}
+#else
| Just c <- close_dp = it{ac_close = [c]}
| AnnContext{ac_close = []} <- it = it{ac_close = [epl 0]}
+#endif
| otherwise = it
epl :: Int -> EpaLocation
+#if MIN_VERSION_ghc(9,11,0)
+epl n = EpaDelta (UnhelpfulSpan UnhelpfulNoLocationInfo) (SameLine n) []
+#else
epl n = EpaDelta (SameLine n) []
+#endif
epAnn :: SrcSpan -> ann -> EpAnn ann
epAnn srcSpan anns = EpAnn (spanAsAnchor srcSpan) anns emptyComments
@@ -792,14 +806,25 @@ removeComma (SrcSpanAnn (EpAnn anc (AnnListItem as) cs) l)
#endif
addParens :: Bool -> GHC.NameAnn -> GHC.NameAnn
+#if MIN_VERSION_ghc(9,11,0)
+addParens True it@NameAnn{} =
+ it{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)) }
+addParens True it@NameAnnCommas{} =
+ it{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)) }
+addParens True it@NameAnnOnly{} =
+ it{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)) }
+addParens True it@NameAnnTrailing{} =
+ NameAnn{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)), nann_name = epl 0, nann_trailing = nann_trailing it}
+#else
addParens True it@NameAnn{} =
- it{nann_adornment = NameParens, nann_open = epl 0, nann_close = epl 0 }
+ it{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0 }
addParens True it@NameAnnCommas{} =
- it{nann_adornment = NameParens, nann_open = epl 0, nann_close = epl 0 }
+ it{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0 }
addParens True it@NameAnnOnly{} =
- it{nann_adornment = NameParens, nann_open = epl 0, nann_close = epl 0 }
+ it{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0 }
addParens True NameAnnTrailing{..} =
- NameAnn{nann_adornment = NameParens, nann_open = epl 0, nann_close = epl 0, nann_name = epl 0, ..}
+ NameAnn{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0, nann_name = epl 0, ..}
+#endif
addParens _ it = it
removeTrailingComma :: GenLocated SrcSpanAnnA ast -> GenLocated SrcSpanAnnA ast
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs
index 3252d6b33a..e471d1781a 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs
@@ -12,11 +12,12 @@ module Development.IDE.Plugin.CodeAction
fillHolePluginDescriptor,
extendImportPluginDescriptor,
-- * For testing
- matchRegExMultipleImports
+ matchRegExMultipleImports,
+ extractNotInScopeName,
+ NotInScope(..)
) where
import Control.Applicative ((<|>))
-import Control.Applicative.Combinators.NonEmpty (sepBy1)
import Control.Arrow (second,
(&&&),
(>>>))
@@ -48,7 +49,9 @@ import Development.IDE.Core.Service
import Development.IDE.Core.Shake hiding (Log)
import Development.IDE.GHC.Compat hiding
(ImplicitPrelude)
+#if !MIN_VERSION_ghc(9,11,0)
import Development.IDE.GHC.Compat.Util
+#endif
import Development.IDE.GHC.Error
import Development.IDE.GHC.ExactPrint
import qualified Development.IDE.GHC.ExactPrint as E
@@ -69,16 +72,13 @@ import Development.IDE.Types.Diagnostics
import Development.IDE.Types.Exports
import Development.IDE.Types.Location
import Development.IDE.Types.Options
-import GHC (AddEpAnn (AddEpAnn),
- AnnsModule (am_main),
- DeltaPos (..),
+import GHC (DeltaPos (..),
EpAnn (..),
LEpaComment)
import qualified GHC.LanguageExtensions as Lang
import Ide.Logger hiding
(group)
import Ide.PluginUtils (extendToFullLines,
- extractTextInRange,
subRange)
import Ide.Types
import Language.LSP.Protocol.Message (Method (..),
@@ -98,25 +98,36 @@ import Language.LSP.Protocol.Types (ApplyWorkspa
type (|?) (InL, InR),
uriToFilePath)
import qualified Text.Fuzzy.Parallel as TFP
-import qualified Text.Regex.Applicative as RE
import Text.Regex.TDFA ((=~), (=~~))
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
#if !MIN_VERSION_ghc(9,9,0)
import Development.IDE.GHC.Compat.ExactPrint (makeDeltaAst)
-import GHC (Anchor (anchor_op),
+import GHC (AddEpAnn (AddEpAnn),
+ Anchor (anchor_op),
AnchorOperation (..),
+ AnnsModule (am_main),
EpaLocation (..))
#endif
-#if MIN_VERSION_ghc(9,9,0)
-import GHC (EpaLocation,
+#if MIN_VERSION_ghc(9,9,0) && !MIN_VERSION_ghc(9,11,0)
+import GHC (AddEpAnn (AddEpAnn),
+ AnnsModule (am_main),
+ EpaLocation,
EpaLocation' (..),
HasLoc (..))
-import GHC.Types.SrcLoc (srcSpanToRealSrcSpan)
#endif
+#if MIN_VERSION_ghc(9,11,0)
+import GHC (AnnsModule (am_where),
+ EpToken (..),
+ EpaLocation,
+ EpaLocation' (..),
+ HasLoc (..))
+#endif
+
+
-------------------------------------------------------------------------------------------------
-- | Generate code actions.
@@ -256,19 +267,11 @@ extendImportHandler' ideState ExtendImport {..}
isWantedModule :: ModuleName -> Maybe ModuleName -> GenLocated l (ImportDecl GhcPs) -> Bool
isWantedModule wantedModule Nothing (L _ it@ImportDecl{ ideclName
-#if MIN_VERSION_ghc(9,5,0)
, ideclImportList = Just (Exactly, _)
-#else
- , ideclHiding = Just (False, _)
-#endif
}) =
not (isQualifiedImport it) && unLoc ideclName == wantedModule
isWantedModule wantedModule (Just qual) (L _ ImportDecl{ ideclAs, ideclName
-#if MIN_VERSION_ghc(9,5,0)
, ideclImportList = Just (Exactly, _)
-#else
- , ideclHiding = Just (False, _)
-#endif
}) =
unLoc ideclName == wantedModule && (wantedModule == qual || (unLoc <$> ideclAs) == Just qual)
isWantedModule _ _ _ = False
@@ -339,7 +342,11 @@ findSigOfBinds range = go
case unLoc <$> findDeclContainingLoc (_start range) lsigs of
Just sig' -> Just sig'
Nothing -> do
+#if MIN_VERSION_ghc(9,11,0)
+ lHsBindLR <- findDeclContainingLoc (_start range) binds
+#else
lHsBindLR <- findDeclContainingLoc (_start range) (bagToList binds)
+#endif
findSigOfBind range (unLoc lHsBindLR)
go _ = Nothing
@@ -420,7 +427,11 @@ isUnusedImportedId
modName
importSpan
| occ <- mkVarOcc identifier,
+#if MIN_VERSION_ghc(9,11,0)
+ impModsVals <- importedByUser . concat $ M.elems imp_mods,
+#else
impModsVals <- importedByUser . concat $ moduleEnvElts imp_mods,
+#endif
Just rdrEnv <-
listToMaybe
[ imv_all_exports
@@ -658,10 +669,16 @@ suggestDeleteUnusedBinding
indexedContent
name
(L _ Match{m_grhss=GRHSs{grhssLocalBinds}}) = do
- let go bag lsigs =
- if isEmptyBag bag
- then []
- else concatMap (findRelatedSpanForHsBind indexedContent name lsigs) bag
+ let emptyBag bag =
+#if MIN_VERSION_ghc(9,11,0)
+ null bag
+#else
+ isEmptyBag bag
+#endif
+ go bag lsigs =
+ if emptyBag bag
+ then []
+ else concatMap (findRelatedSpanForHsBind indexedContent name lsigs) bag
case grhssLocalBinds of
(HsValBinds _ (ValBinds _ bag lsigs)) -> go bag lsigs
_ -> []
@@ -832,7 +849,6 @@ suggestAddTypeAnnotationToSatisfyConstraints sourceOpt Diagnostic{_range=_range,
| otherwise = []
where
makeAnnotatedLit ty lit = "(" <> lit <> " :: " <> ty <> ")"
-#if MIN_VERSION_ghc(9,4,0)
pat multiple at inArg inExpr = T.concat [ ".*Defaulting the type variable "
, ".*to type ‘([^ ]+)’ "
, "in the following constraint"
@@ -843,17 +859,6 @@ suggestAddTypeAnnotationToSatisfyConstraints sourceOpt Diagnostic{_range=_range,
, if inExpr then ".+In the expression" else ""
, ".+In the expression"
]
-#else
- pat multiple at inArg inExpr = T.concat [ ".*Defaulting the following constraint"
- , if multiple then "s" else ""
- , " to type ‘([^ ]+)’ "
- , ".*arising from the literal ‘(.+)’"
- , if inArg then ".+In the.+argument" else ""
- , if at then ".+at ([^ ]*)" else ""
- , if inExpr then ".+In the expression" else ""
- , ".+In the expression"
- ]
-#endif
codeEdit range ty lit replacement =
let title = "Add type annotation ‘" <> ty <> "’ to ‘" <> lit <> "’"
edits = [TextEdit range replacement]
@@ -1133,17 +1138,10 @@ occursUnqualified symbol ImportDecl{..}
| isNothing ideclAs = Just False /=
-- I don't find this particularly comprehensible,
-- but HLint suggested me to do so...
-#if MIN_VERSION_ghc(9,5,0)
(ideclImportList <&> \(isHiding, L _ ents) ->
let occurs = any ((symbol `symbolOccursIn`) . unLoc) ents
in (isHiding == EverythingBut) && not occurs || (isHiding == Exactly) && occurs
)
-#else
- (ideclHiding <&> \(isHiding, L _ ents) ->
- let occurs = any ((symbol `symbolOccursIn`) . unLoc) ents
- in isHiding && not occurs || not isHiding && occurs
- )
-#endif
occursUnqualified _ _ = False
symbolOccursIn :: T.Text -> IE GhcPs -> Bool
@@ -1481,11 +1479,6 @@ suggestNewImport df packageExportsMap ps fileContents Diagnostic{..}
>>= (findImportDeclByModuleName hsmodImports . T.unpack)
>>= ideclAs . unLoc
<&> T.pack . moduleNameString . unLoc
- , -- tentative workaround for detecting qualification in GHC 9.4
- -- FIXME: We can delete this after dropping the support for GHC 9.4
- qualGHC94 <-
- guard (ghcVersion == GHC94)
- *> extractQualifiedModuleNameFromMissingName (extractTextInRange _range fileContents)
, Just (range, indent) <- newImportInsertRange ps fileContents
, extendImportSuggestions <- matchRegexUnifySpaces msg
#if MIN_VERSION_ghc(9,7,0)
@@ -1494,82 +1487,32 @@ suggestNewImport df packageExportsMap ps fileContents Diagnostic{..}
"Perhaps you want to add ‘[^’]*’ to the import list in the import of ‘([^’]*)’"
#endif
= let qis = qualifiedImportStyle df
- -- FIXME: we can use thingMissing once the support for GHC 9.4 is dropped.
- -- In what fllows, @missing@ is assumed to be qualified name.
- -- @thingMissing@ is already as desired with GHC != 9.4.
- -- In GHC 9.4, however, GHC drops a module qualifier from a qualified symbol.
- -- Thus we need to explicitly concatenate qualifier explicity in GHC 9.4.
- missing
- | GHC94 <- ghcVersion
- , isNothing (qual <|> qual')
- , Just q <- qualGHC94 =
- qualify q thingMissing
- | otherwise = thingMissing
suggestions = nubSortBy simpleCompareImportSuggestion
- (constructNewImportSuggestions packageExportsMap (qual <|> qual' <|> qualGHC94, missing) extendImportSuggestions qis) in
+ (constructNewImportSuggestions packageExportsMap (qual <|> qual', thingMissing) extendImportSuggestions qis) in
map (\(ImportSuggestion _ kind (unNewImport -> imp)) -> (imp, kind, TextEdit range (imp <> "\n" <> T.replicate indent " "))) suggestions
where
- qualify q (NotInScopeDataConstructor d) = NotInScopeDataConstructor (q <> "." <> d)
- qualify q (NotInScopeTypeConstructorOrClass d) = NotInScopeTypeConstructorOrClass (q <> "." <> d)
- qualify q (NotInScopeThing d) = NotInScopeThing (q <> "." <> d)
-
L _ HsModule {..} = ps
suggestNewImport _ _ _ _ _ = []
-{- |
-Extracts qualifier of the symbol from the missing symbol.
-Input must be either a plain qualified variable or possibly-parenthesized qualified binary operator (though no strict checking is done for symbol part).
-This is only needed to alleviate the issue #3473.
-
-FIXME: We can delete this after dropping the support for GHC 9.4
-
->>> extractQualifiedModuleNameFromMissingName "P.lookup"
-Just "P"
-
->>> extractQualifiedModuleNameFromMissingName "ΣP3_'.σlookup"
-Just "\931P3_'"
-
->>> extractQualifiedModuleNameFromMissingName "ModuleA.Gre_ekσ.goodδ"
-Just "ModuleA.Gre_ek\963"
-
->>> extractQualifiedModuleNameFromMissingName "(ModuleA.Gre_ekσ.+)"
-Just "ModuleA.Gre_ek\963"
-
->>> extractQualifiedModuleNameFromMissingName "(ModuleA.Gre_ekσ..|.)"
-Just "ModuleA.Gre_ek\963"
-
->>> extractQualifiedModuleNameFromMissingName "A.B.|."
-Just "A.B"
--}
-extractQualifiedModuleNameFromMissingName :: T.Text -> Maybe T.Text
-extractQualifiedModuleNameFromMissingName (T.strip -> missing)
- = T.pack <$> (T.unpack missing RE.=~ qualIdentP)
- where
- {-
- NOTE: Haskell 2010 allows /unicode/ upper & lower letters
- as a module name component; otoh, regex-tdfa only allows
- /ASCII/ letters to be matched with @[[:upper:]]@ and/or @[[:lower:]]@.
- Hence we use regex-applicative(-text) for finer-grained predicates.
-
- RULES (from [Section 10 of Haskell 2010 Report](https://www.haskell.org/onlinereport/haskell2010/haskellch10.html)):
- modid → {conid .} conid
- conid → large {small | large | digit | ' }
- small → ascSmall | uniSmall | _
- ascSmall → a | b | … | z
- uniSmall → any Unicode lowercase letter
- large → ascLarge | uniLarge
- ascLarge → A | B | … | Z
- uniLarge → any uppercase or titlecase Unicode letter
- -}
-
- qualIdentP = parensQualOpP <|> qualVarP
- parensQualOpP = RE.sym '(' *> modNameP <* RE.sym '.' <* RE.anySym <* RE.few RE.anySym <* RE.sym ')'
- qualVarP = modNameP <* RE.sym '.' <* RE.some RE.anySym
- conIDP = RE.withMatched $
- RE.psym isUpper
- *> RE.many
- (RE.psym $ \c -> c == '\'' || c == '_' || isUpper c || isLower c || isDigit c)
- modNameP = fmap snd $ RE.withMatched $ conIDP `sepBy1` RE.sym '.'
+-- | A backward-compatible implementation of `lookupOccEnv_AllNameSpaces` for
+-- GHC <= 9.6.
+--
+-- It looks up a symbol name in all known namespaces, including types,
+-- variables, and record fields.
+--
+-- Note that on GHC >= 9.8 record selectors are no longer found via
+-- `mkVarOrDataOcc` but live in their own namespace, see
+-- https://gitlab.haskell.org/ghc/ghc/-/wikis/migration/9.8#new-namespace-for-record-fields,
+-- hence we need this "all namespaces" implementation, otherwise we would miss
+-- them.
+lookupOccEnvAllNamespaces :: ExportsMap -> T.Text -> [IdentInfo]
+#if MIN_VERSION_ghc(9,7,0)
+lookupOccEnvAllNamespaces exportsMap name = Set.toList $ mconcat (lookupOccEnv_AllNameSpaces (getExportsMap exportsMap) (mkTypeOcc name))
+#else
+lookupOccEnvAllNamespaces exportsMap name = maybe [] Set.toList $
+ lookupOccEnv (getExportsMap exportsMap) (mkVarOrDataOcc name)
+ <> lookupOccEnv (getExportsMap exportsMap) (mkTypeOcc name) -- look up the modified unknown name in the export map
+#endif
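As the comment above explains, record selectors moved to their own namespace in GHC 9.8; a hypothetical helper (`candidateModules`, not part of the patch) shows the intended way to consume this lookup:

-- Hypothetical helper, illustration only: the defining modules of every
-- identifier (type, value, or record field) matching a not-in-scope name.
candidateModules :: ExportsMap -> T.Text -> [T.Text]
candidateModules exportsMap name =
  [ moduleNameText identInfo
  | identInfo <- lookupOccEnvAllNamespaces exportsMap name
  ]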
constructNewImportSuggestions
@@ -1577,8 +1520,8 @@ constructNewImportSuggestions
constructNewImportSuggestions exportsMap (qual, thingMissing) notTheseModules qis = nubOrdBy simpleCompareImportSuggestion
[ suggestion
| Just name <- [T.stripPrefix (maybe "" (<> ".") qual) $ notInScope thingMissing] -- strip away qualified module names from the unknown name
- , identInfo <- maybe [] Set.toList $ lookupOccEnv (getExportsMap exportsMap) (mkVarOrDataOcc name)
- <> lookupOccEnv (getExportsMap exportsMap) (mkTypeOcc name) -- look up the modified unknown name in the export map
+
+ , identInfo <- lookupOccEnvAllNamespaces exportsMap name -- look up the modified unknown name in the export map
, canUseIdent thingMissing identInfo -- check if the identifier information retrieved can be used
, moduleNameText identInfo `notElem` fromMaybe [] notTheseModules -- check if the module of the identifier is allowed
, suggestion <- renderNewImport identInfo -- creates a list of import suggestions for the retrieved identifier information
@@ -1693,38 +1636,47 @@ findPositionAfterModuleName ps _hsmodName' = do
-- The relative position of 'where' keyword (in lines, relative to the previous AST node).
-- The exact-print API changed a lot in ghc-9.2, so we need to handle it separately for different compiler versions.
whereKeywordLineOffset :: Maybe Int
-#if MIN_VERSION_ghc(9,5,0)
whereKeywordLineOffset = case hsmodAnn hsmodExt of
-#else
- whereKeywordLineOffset = case hsmodAnn of
-#endif
EpAnn _ annsModule _ -> do
-- Find the first 'where'
+#if MIN_VERSION_ghc(9,11,0)
+ whereLocation <- filterWhere $ am_where annsModule
+#else
whereLocation <- listToMaybe . mapMaybe filterWhere $ am_main annsModule
+#endif
epaLocationToLine whereLocation
#if !MIN_VERSION_ghc(9,9,0)
EpAnnNotUsed -> Nothing
#endif
+#if MIN_VERSION_ghc(9,11,0)
+ filterWhere (EpTok loc) = Just loc
+ filterWhere _ = Nothing
+#else
filterWhere (AddEpAnn AnnWhere loc) = Just loc
filterWhere _ = Nothing
+#endif
epaLocationToLine :: EpaLocation -> Maybe Int
#if MIN_VERSION_ghc(9,9,0)
epaLocationToLine (EpaSpan sp)
= fmap (srcLocLine . realSrcSpanEnd) $ srcSpanToRealSrcSpan sp
-#elif MIN_VERSION_ghc(9,5,0)
- epaLocationToLine (EpaSpan sp _)
- = Just . srcLocLine . realSrcSpanEnd $ sp
#else
- epaLocationToLine (EpaSpan sp)
+ epaLocationToLine (EpaSpan sp _)
= Just . srcLocLine . realSrcSpanEnd $ sp
#endif
+#if MIN_VERSION_ghc(9,11,0)
+ epaLocationToLine (EpaDelta _ (SameLine _) priorComments) = Just $ sumCommentsOffset priorComments
+ -- 'priorComments' contains the comments right before the current EpaLocation
+ -- Summing line offset of priorComments is necessary, as 'line' is the gap between the last comment and
+ -- the current AST node
+ epaLocationToLine (EpaDelta _ (DifferentLine line _) priorComments) = Just (line + sumCommentsOffset priorComments)
+#else
epaLocationToLine (EpaDelta (SameLine _) priorComments) = Just $ sumCommentsOffset priorComments
-- 'priorComments' contains the comments right before the current EpaLocation
-- Summing line offset of priorComments is necessary, as 'line' is the gap between the last comment and
-- the current AST node
epaLocationToLine (EpaDelta (DifferentLine line _) priorComments) = Just (line + sumCommentsOffset priorComments)
-
+#endif
sumCommentsOffset :: [LEpaComment] -> Int
#if MIN_VERSION_ghc(9,9,0)
sumCommentsOffset = sum . fmap (\(L anchor _) -> anchorOpLine anchor)
@@ -1732,7 +1684,12 @@ findPositionAfterModuleName ps _hsmodName' = do
sumCommentsOffset = sum . fmap (\(L anchor _) -> anchorOpLine (anchor_op anchor))
#endif
-#if MIN_VERSION_ghc(9,9,0)
+#if MIN_VERSION_ghc(9,11,0)
+ anchorOpLine :: EpaLocation' a -> Int
+ anchorOpLine EpaSpan{} = 0
+ anchorOpLine (EpaDelta _ (SameLine _) _) = 0
+ anchorOpLine (EpaDelta _ (DifferentLine line _) _) = line
+#elif MIN_VERSION_ghc(9,9,0)
anchorOpLine :: EpaLocation' a -> Int
anchorOpLine EpaSpan{} = 0
anchorOpLine (EpaDelta (SameLine _) _) = 0
@@ -1825,7 +1782,7 @@ data NotInScope
= NotInScopeDataConstructor T.Text
| NotInScopeTypeConstructorOrClass T.Text
| NotInScopeThing T.Text
- deriving Show
+ deriving (Show, Eq)
notInScope :: NotInScope -> T.Text
notInScope (NotInScopeDataConstructor t) = t
@@ -1840,6 +1797,38 @@ extractNotInScopeName x
= Just $ NotInScopeDataConstructor name
| Just [name] <- matchRegexUnifySpaces x "ot in scope: type constructor or class [^‘]*‘([^’]*)’"
= Just $ NotInScopeTypeConstructorOrClass name
+ | Just [name] <- matchRegexUnifySpaces x "The data constructors of ‘([^ ]+)’ are not all in scope"
+ = Just $ NotInScopeDataConstructor name
+ | Just [name] <- matchRegexUnifySpaces x "of newtype ‘([^’]*)’ is not in scope"
+ = Just $ NotInScopeThing name
+ -- Match for HasField "foo" Bar String in the context where, e.g. x.foo is
+ -- used, and x :: Bar.
+ --
+ -- This usually means that the field is not in scope and the correct fix is to
+ -- import (Bar(foo)) or (Bar(..)).
+ --
+ -- However, it is more reliable to match on the type name instead of the field
+ -- name, and most of the time you will want to import the complete type with
+ -- all of its fields rather than the single field.
+ --
+ -- The regex is convoluted because it accounts for:
+ --
+ -- - A qualified (or unqualified) `HasField`
+ -- - The type Bar is usually qualified. If it is unqualified, the parent
+ -- module is already imported and GHC's own hint in the error message already
+ -- applies; accepting both forms costs little and is more robust if that hint
+ -- ever changes.
+ -- - The next regex accounts for polymorphic types, which appear as `HasField
+ -- "foo" (Bar Int) ...`, hence the parentheses.
+ | Just [_module, name] <- matchRegexUnifySpaces x "No instance for [‘(].*HasField \"[^\"]+\" ([^ (.]+\\.)*([^ (.]+).*[’)]"
+ = Just $ NotInScopeThing name
+ | Just [_module, name] <- matchRegexUnifySpaces x "No instance for [‘(].*HasField \"[^\"]+\" \\(([^ .]+\\.)*([^ .]+)[^)]*\\).*[’)]"
+ = Just $ NotInScopeThing name
+ -- The order of these "Not in scope" matchers is important: the more general
+ -- matchers below would otherwise capture the literal word "record" instead of
+ -- the actual field name.
+ | Just [name] <- matchRegexUnifySpaces x "Not in scope: record field ‘([^’]*)’"
+ = Just $ NotInScopeThing name
| Just [name] <- matchRegexUnifySpaces x "ot in scope: \\(([^‘ ]+)\\)"
= Just $ NotInScopeThing name
| Just [name] <- matchRegexUnifySpaces x "ot in scope: ([^‘ ]+)"
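A hedged sketch of what the new HasField matchers above are expected to extract; the message texts are assumed examples rather than verbatim GHC output:

-- Illustration only, not a doctest from the patch.
exampleHasFieldMono, exampleHasFieldPoly :: Maybe NotInScope
exampleHasFieldMono = extractNotInScopeName
  "No instance for (HasField \"foo\" Bar String) arising from selecting the field ‘foo’"
  -- expected: Just (NotInScopeThing "Bar"), i.e. the type name, so that an
  -- import of Bar(..) can be suggested
exampleHasFieldPoly = extractNotInScopeName
  "No instance for (HasField \"foo\" (Bar Int) String) arising from selecting the field ‘foo’"
  -- expected: Just (NotInScopeThing "Bar") as well, via the second regex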
@@ -1881,14 +1870,11 @@ extractQualifiedModuleName x
-- ‘Data.Functor’ nor ‘Data.Text’ exports ‘putStrLn’.
extractDoesNotExportModuleName :: T.Text -> Maybe T.Text
extractDoesNotExportModuleName x
- | Just [m] <-
-#if MIN_VERSION_ghc(9,4,0)
- matchRegexUnifySpaces x "the module ‘([^’]*)’ does not export"
- <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ export"
-#else
- matchRegexUnifySpaces x "Module ‘([^’]*)’ does not export"
- <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ exports"
-#endif
+ | Just [m] <- case ghcVersion of
+ GHC912 -> matchRegexUnifySpaces x "The module ‘([^’]*)’ does not export"
+ <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ export"
+ _ -> matchRegexUnifySpaces x "the module ‘([^’]*)’ does not export"
+ <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ export"
= Just m
| otherwise
= Nothing
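A small illustration of the runtime version split above; the expected value is inferred from the regexes and is not a doctest from the patch:

exampleDoesNotExport :: Maybe T.Text
exampleDoesNotExport =
  extractDoesNotExportModuleName "the module ‘Data.List’ does not export ‘foo’"
-- expected: Just "Data.List" except when ghcVersion is GHC912, whose branch
-- matches the capitalised "The module ... does not export" wording instead.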
@@ -1959,21 +1945,12 @@ textInRange (Range (Position (fromIntegral -> startRow) (fromIntegral -> startCo
-- | Returns the ranges for a binding in an import declaration
rangesForBindingImport :: ImportDecl GhcPs -> String -> [Range]
-#if MIN_VERSION_ghc(9,5,0)
rangesForBindingImport ImportDecl{
ideclImportList = Just (Exactly, L _ lies)
} b =
concatMap (mapMaybe srcSpanToRange . rangesForBinding' b') lies
where
b' = wrapOperatorInParens b
-#else
-rangesForBindingImport ImportDecl{
- ideclHiding = Just (False, L _ lies)
- } b =
- concatMap (mapMaybe srcSpanToRange . rangesForBinding' b') lies
- where
- b' = wrapOperatorInParens b
-#endif
rangesForBindingImport _ _ = []
wrapOperatorInParens :: String -> String
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs
index 53ee5200c0..a4132dd787 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs
@@ -22,11 +22,13 @@ import Data.Either (fromRight,
import Data.Functor ((<&>))
import Data.IORef.Extra
import qualified Data.Map as Map
-import Data.Maybe (fromMaybe)
+import Data.Maybe (fromMaybe,
+ maybeToList)
import qualified Data.Text as T
import qualified Data.Text.Utf16.Rope.Mixed as Rope
import Development.IDE hiding
(pluginHandlers)
+import Development.IDE.Core.PluginUtils (activeDiagnosticsInRange)
import Development.IDE.Core.Shake
import Development.IDE.GHC.Compat
import Development.IDE.GHC.ExactPrint
@@ -53,38 +55,42 @@ type GhcideCodeAction = ExceptT PluginError (ReaderT CodeActionArgs IO) GhcideCo
-------------------------------------------------------------------------------------------------
runGhcideCodeAction :: IdeState -> MessageParams Method_TextDocumentCodeAction -> GhcideCodeAction -> HandlerM Config GhcideCodeActionResult
-runGhcideCodeAction state (CodeActionParams _ _ (TextDocumentIdentifier uri) _range CodeActionContext {_diagnostics = diags}) codeAction = do
- let mbFile = toNormalizedFilePath' <$> uriToFilePath uri
- runRule key = runAction ("GhcideCodeActions." <> show key) state $ runMaybeT $ MaybeT (pure mbFile) >>= MaybeT . use key
- caaGhcSession <- onceIO $ runRule GhcSession
- caaExportsMap <-
- onceIO $
- caaGhcSession >>= \case
- Just env -> do
- pkgExports <- envPackageExports env
- localExports <- readTVarIO (exportsMap $ shakeExtras state)
- pure $ localExports <> pkgExports
- _ -> pure mempty
- caaIdeOptions <- onceIO $ runAction "GhcideCodeActions.getIdeOptions" state getIdeOptions
- caaParsedModule <- onceIO $ runRule GetParsedModuleWithComments
- caaContents <-
- onceIO $
- runRule GetFileContents <&> \case
- Just (_, mbContents) -> fmap Rope.toText mbContents
- Nothing -> Nothing
- caaDf <- onceIO $ fmap (ms_hspp_opts . pm_mod_summary) <$> caaParsedModule
- caaAnnSource <- onceIO $ runRule GetAnnotatedParsedSource
- caaTmr <- onceIO $ runRule TypeCheck
- caaHar <- onceIO $ runRule GetHieAst
- caaBindings <- onceIO $ runRule GetBindings
- caaGblSigs <- onceIO $ runRule GetGlobalBindingTypeSigs
- results <- liftIO $
- sequence
- [ runReaderT (runExceptT codeAction) CodeActionArgs {..}
- | caaDiagnostic <- diags
- ]
- let (_errs, successes) = partitionEithers results
- pure $ concat successes
+runGhcideCodeAction state (CodeActionParams _ _ (TextDocumentIdentifier uri) _range _) codeAction
+ | Just nfp <- toNormalizedFilePath' <$> uriToFilePath uri = do
+ let runRule key = runAction ("GhcideCodeActions." <> show key) state $ runMaybeT $ MaybeT (pure (Just nfp)) >>= MaybeT . use key
+ caaGhcSession <- onceIO $ runRule GhcSession
+ caaExportsMap <-
+ onceIO $
+ caaGhcSession >>= \case
+ Just env -> do
+ pkgExports <- envPackageExports env
+ localExports <- readTVarIO (exportsMap $ shakeExtras state)
+ pure $ localExports <> pkgExports
+ _ -> pure mempty
+ caaIdeOptions <- onceIO $ runAction "GhcideCodeActions.getIdeOptions" state getIdeOptions
+ caaParsedModule <- onceIO $ runRule GetParsedModuleWithComments
+ caaContents <-
+ onceIO $
+ runRule GetFileContents <&> \case
+ Just (_, mbContents) -> fmap Rope.toText mbContents
+ Nothing -> Nothing
+ caaDf <- onceIO $ fmap (ms_hspp_opts . pm_mod_summary) <$> caaParsedModule
+ caaAnnSource <- onceIO $ runRule GetAnnotatedParsedSource
+ caaTmr <- onceIO $ runRule TypeCheck
+ caaHar <- onceIO $ runRule GetHieAst
+ caaBindings <- onceIO $ runRule GetBindings
+ caaGblSigs <- onceIO $ runRule GetGlobalBindingTypeSigs
+ diags <- concat . maybeToList <$> activeDiagnosticsInRange (shakeExtras state) nfp _range
+ results <- liftIO $
+ sequence
+ [
+ runReaderT (runExceptT codeAction) CodeActionArgs {..}
+ | caaDiagnostic <- diags
+ ]
+ let (_errs, successes) = partitionEithers results
+ pure $ concat successes
+ | otherwise = pure []
+
mkCA :: T.Text -> Maybe CodeActionKind -> Maybe Bool -> [Diagnostic] -> WorkspaceEdit -> (Command |? CodeAction)
mkCA title kind isPreferred diags edit =
@@ -145,7 +151,7 @@ data CodeActionArgs = CodeActionArgs
caaHar :: IO (Maybe HieAstResult),
caaBindings :: IO (Maybe Bindings),
caaGblSigs :: IO (Maybe GlobalBindingTypeSigsResult),
- caaDiagnostic :: Diagnostic
+ caaDiagnostic :: FileDiagnostic
}
-- | There's no concurrency in each provider,
@@ -223,6 +229,9 @@ instance ToCodeAction r => ToCodeAction (IdeOptions -> r) where
toCodeAction = toCodeAction3 caaIdeOptions
instance ToCodeAction r => ToCodeAction (Diagnostic -> r) where
+ toCodeAction f = ExceptT . ReaderT $ \caa@CodeActionArgs {caaDiagnostic = x} -> flip runReaderT caa . runExceptT . toCodeAction $ f (fdLspDiagnostic x)
+
+instance ToCodeAction r => ToCodeAction (FileDiagnostic -> r) where
toCodeAction f = ExceptT . ReaderT $ \caa@CodeActionArgs {caaDiagnostic = x} -> flip runReaderT caa . runExceptT . toCodeAction $ f x
instance ToCodeAction r => ToCodeAction (Maybe ParsedModule -> r) where
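The pair of instances above lets providers keep taking a plain LSP `Diagnostic` or opt into the richer `FileDiagnostic`; a hypothetical pair of providers (not part of the patch) illustrates the relationship:

-- Illustration only: the Diagnostic-based shape is recovered from the stored
-- FileDiagnostic via fdLspDiagnostic, exactly as the instance above does.
oldStyleProvider :: Diagnostic -> GhcideCodeActionResult
oldStyleProvider _lspDiag = []

newStyleProvider :: FileDiagnostic -> GhcideCodeActionResult
newStyleProvider fd = oldStyleProvider (fdLspDiagnostic fd)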
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs
index 7326e2d7e2..0f48a3a649 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
module Development.IDE.Plugin.CodeAction.ExactPrint (
@@ -35,10 +36,8 @@ import Control.Lens (_head, _last, over)
import Data.Bifunctor (first)
import Data.Maybe (fromMaybe, mapMaybe)
import Development.IDE.Plugin.CodeAction.Util
-import GHC (AddEpAnn (..),
- AnnContext (..),
+import GHC (AnnContext (..),
AnnList (..),
- AnnParen (..),
DeltaPos (SameLine),
EpAnn (..),
IsUnicodeSyntax (NormalSyntax),
@@ -46,8 +45,17 @@ import GHC (AddEpAnn (..),
TrailingAnn (AddCommaAnn),
emptyComments, reAnnL)
+
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
+#if MIN_VERSION_ghc(9,11,0)
+import GHC (EpToken (..)
+ , AnnListBrackets (..)
+ , EpUniToken (..))
+#else
+import GHC (AddEpAnn (..),
+ AnnParen (..))
+#endif
#if !MIN_VERSION_ghc(9,9,0)
import Data.Default (Default (..))
import GHC (addAnns, ann)
@@ -131,10 +139,8 @@ removeConstraint toRemove = go . traceAst "REMOVE_CONSTRAINT_input"
go :: LHsType GhcPs -> Rewrite
#if MIN_VERSION_ghc(9,9,0)
go lHsType@(makeDeltaAst -> L l it@HsQualTy{hst_ctxt = L l' ctxt, hst_body}) = Rewrite (locA lHsType) $ \_ -> do
-#elif MIN_VERSION_ghc(9,4,0)
- go (L l it@HsQualTy{hst_ctxt = L l' ctxt, hst_body}) = Rewrite (locA l) $ \_ -> do
#else
- go (L l it@HsQualTy{hst_ctxt = Just (L l' ctxt), hst_body}) = Rewrite (locA l) $ \_ -> do
+ go (L l it@HsQualTy{hst_ctxt = L l' ctxt, hst_body}) = Rewrite (locA l) $ \_ -> do
#endif
let ctxt' = filter (not . toRemove) ctxt
removeStuff = (toRemove <$> headMaybe ctxt) == Just True
@@ -143,11 +149,7 @@ removeConstraint toRemove = go . traceAst "REMOVE_CONSTRAINT_input"
[] -> hst_body'
_ -> do
let ctxt'' = over _last (first removeComma) ctxt'
-#if MIN_VERSION_ghc(9,4,0)
L l $ it{ hst_ctxt = L l' ctxt''
-#else
- L l $ it{ hst_ctxt = Just $ L l' ctxt''
-#endif
, hst_body = hst_body'
}
go (L _ (HsParTy _ ty)) = go ty
@@ -164,11 +166,7 @@ appendConstraint ::
Rewrite
appendConstraint constraintT = go . traceAst "appendConstraint"
where
-#if MIN_VERSION_ghc(9,4,0)
go (L l it@HsQualTy{hst_ctxt = L l' ctxt}) = Rewrite (locA l) $ \df -> do
-#else
- go (L l it@HsQualTy{hst_ctxt = Just (L l' ctxt)}) = Rewrite (locA l) $ \df -> do
-#endif
constraint <- liftParseAST df constraintT
constraint <- pure $ setEntryDP constraint (SameLine 1)
#if MIN_VERSION_ghc(9,9,0)
@@ -179,18 +177,16 @@ appendConstraint constraintT = go . traceAst "appendConstraint"
-- For singleton constraints, the close Paren DP is attached to an HsPar wrapping the constraint
-- we have to reposition it manually into the AnnContext
close_dp = case ctxt of
-#if MIN_VERSION_ghc(9,9,0)
+#if MIN_VERSION_ghc(9,11,0)
+ [L _ (HsParTy (_, (EpTok ap_close)) _)] -> Just ap_close
+#elif MIN_VERSION_ghc(9,9,0)
[L _ (HsParTy AnnParen{ap_close} _)] -> Just ap_close
#else
[L _ (HsParTy EpAnn{anns=AnnParen{ap_close}} _)] -> Just ap_close
#endif
_ -> Nothing
ctxt' = over _last (first addComma) $ map dropHsParTy ctxt
-#if MIN_VERSION_ghc(9,4,0)
return $ L l $ it{hst_ctxt = L l'' $ ctxt' ++ [constraint]}
-#else
- return $ L l $ it{hst_ctxt = Just $ L l'' $ ctxt' ++ [constraint]}
-#endif
go (L _ HsForAllTy{hst_body}) = go hst_body
go (L _ (HsParTy _ ty)) = go ty
go ast@(L l _) = Rewrite (locA l) $ \df -> do
@@ -198,12 +194,12 @@ appendConstraint constraintT = go . traceAst "appendConstraint"
constraint <- liftParseAST df constraintT
lContext <- uniqueSrcSpanT
lTop <- uniqueSrcSpanT
-#if MIN_VERSION_ghc(9,4,0)
let context = reAnnL annCtxt emptyComments $ L lContext [resetEntryDP constraint]
+#if MIN_VERSION_ghc(9,11,0)
+ annCtxt = AnnContext (Just (EpUniTok (epl 1) NormalSyntax)) [EpTok (epl 0) | needsParens] [EpTok (epl 0) | needsParens]
#else
- let context = Just $ reAnnL annCtxt emptyComments $ L lContext [resetEntryDP constraint]
-#endif
annCtxt = AnnContext (Just (NormalSyntax, epl 1)) [epl 0 | needsParens] [epl 0 | needsParens]
+#endif
needsParens = hsTypeNeedsParens sigPrec $ unLoc constraint
ast <- pure $ setEntryDP (makeDeltaAst ast) (SameLine 1)
@@ -250,11 +246,7 @@ extendImportTopLevel ::
LImportDecl GhcPs ->
TransformT (Either String) (LImportDecl GhcPs)
extendImportTopLevel thing (L l it@ImportDecl{..})
-#if MIN_VERSION_ghc(9,5,0)
| Just (hide, L l' lies) <- ideclImportList
-#else
- | Just (hide, L l' lies) <- ideclHiding
-#endif
= do
src <- uniqueSrcSpanT
top <- uniqueSrcSpanT
@@ -266,9 +258,7 @@ extendImportTopLevel thing (L l it@ImportDecl{..})
TransformT $ lift (Left $ thing <> " already imported")
let lie = reLocA $ L src $ IEName
-#if MIN_VERSION_ghc(9,5,0)
noExtField
-#endif
rdr
x = reLocA $ L top $ IEVar
#if MIN_VERSION_ghc(9,8,0)
@@ -285,11 +275,7 @@ extendImportTopLevel thing (L l it@ImportDecl{..})
then TransformT $ lift (Left $ thing <> " already imported")
else do
let lies' = addCommaInImportList lies x
-#if MIN_VERSION_ghc(9,5,0)
return $ L l it{ideclImportList = Just (hide, L l' lies')}
-#else
- return $ L l it{ideclHiding = Just (hide, L l' lies')}
-#endif
extendImportTopLevel _ _ = TransformT $ lift $ Left "Unable to extend the import list"
wildCardSymbol :: String
@@ -319,11 +305,7 @@ extendImportViaParent ::
LImportDecl GhcPs ->
TransformT (Either String) (LImportDecl GhcPs)
extendImportViaParent df parent child (L l it@ImportDecl{..})
-#if MIN_VERSION_ghc(9,5,0)
| Just (hide, L l' lies) <- ideclImportList = go hide l' [] lies
-#else
- | Just (hide, L l' lies) <- ideclHiding = go hide l' [] lies
-#endif
where
#if MIN_VERSION_ghc(9,9,0)
go _hide _l' _pre ((L _ll' (IEThingAll _ (L _ ie) _)) : _xs)
@@ -341,12 +323,12 @@ extendImportViaParent df parent child (L l it@ImportDecl{..})
srcChild <- uniqueSrcSpanT
let childRdr = reLocA $ L srcChild $ mkRdrUnqual $ mkVarOcc child
childLIE = reLocA $ L srcChild $ IEName
-#if MIN_VERSION_ghc(9,5,0)
noExtField
-#endif
childRdr
x :: LIE GhcPs = L ll' $ IEThingWith
-#if MIN_VERSION_ghc(9,9,0)
+#if MIN_VERSION_ghc(9,11,0)
+ (Nothing, (EpTok d1, NoEpTok, NoEpTok, EpTok noAnn))
+#elif MIN_VERSION_ghc(9,9,0)
(Nothing, [AddEpAnn AnnOpenP d1, AddEpAnn AnnCloseP noAnn])
#elif MIN_VERSION_ghc(9,7,0)
(Nothing, addAnns mempty [AddEpAnn AnnOpenP d1, AddEpAnn AnnCloseP def] emptyComments)
@@ -358,12 +340,7 @@ extendImportViaParent df parent child (L l it@ImportDecl{..})
docs
#endif
-
-#if MIN_VERSION_ghc(9,5,0)
return $ L l it{ideclImportList = Just (hide, L l' $ reverse pre ++ [x] ++ xs)}
-#else
- return $ L l it{ideclHiding = Just (hide, L l' $ reverse pre ++ [x] ++ xs)}
-#endif
#if MIN_VERSION_ghc(9,9,0)
go hide l' pre ((L l'' (IEThingWith l''' twIE@(L _ ie) _ lies' docs)) : xs)
@@ -373,17 +350,15 @@ extendImportViaParent df parent child (L l it@ImportDecl{..})
-- ThingWith ie lies' => ThingWith ie (lies' ++ [child])
| parent == unIEWrappedName ie
, child == wildCardSymbol = do
-#if MIN_VERSION_ghc(9,5,0)
let it' = it{ideclImportList = Just (hide, lies)}
-#else
- let it' = it{ideclHiding = Just (hide, lies)}
-#endif
thing = IEThingWith newl twIE (IEWildcard 2) []
#if MIN_VERSION_ghc(9,9,0)
docs
#endif
#if MIN_VERSION_ghc(9,7,0) && !MIN_VERSION_ghc(9,9,0)
newl = fmap (\ann -> ann ++ [AddEpAnn AnnDotdot d0]) <$> l'''
+#elif MIN_VERSION_ghc(9,11,0)
+ newl = (\(open, _, comma, close) -> (open, EpTok d0, comma, close)) <$> l'''
#else
newl = (\ann -> ann ++ [AddEpAnn AnnDotdot d0]) <$> l'''
#endif
@@ -401,15 +376,9 @@ extendImportViaParent df parent child (L l it@ImportDecl{..})
TransformT $ lift (Left $ child <> " already included in " <> parent <> " imports")
let childLIE = reLocA $ L srcChild $ IEName
-#if MIN_VERSION_ghc(9,5,0)
noExtField
-#endif
childRdr
-#if MIN_VERSION_ghc(9,5,0)
let it' = it{ideclImportList = Just (hide, lies)}
-#else
- let it' = it{ideclHiding = Just (hide, lies)}
-#endif
lies = L l' $ reverse pre ++
[L l'' (IEThingWith l''' twIE NoIEWildcard (over _last fixLast lies' ++ [childLIE])
#if MIN_VERSION_ghc(9,9,0)
@@ -427,21 +396,27 @@ extendImportViaParent df parent child (L l it@ImportDecl{..})
parentRdr <- liftParseAST df parent
let childRdr = reLocA $ L srcChild $ mkRdrUnqual $ mkVarOcc child
isParentOperator = hasParen parent
+#if MIN_VERSION_ghc(9,11,0)
+ let parentLIE = reLocA $ L srcParent $ if isParentOperator then IEType (EpTok (epl 0)) parentRdr'
+#else
let parentLIE = reLocA $ L srcParent $ if isParentOperator then IEType (epl 0) parentRdr'
+#endif
else IEName
-#if MIN_VERSION_ghc(9,5,0)
noExtField
-#endif
parentRdr'
parentRdr' = modifyAnns parentRdr $ \case
+#if MIN_VERSION_ghc(9,11,0)
+ it@NameAnn{nann_adornment = NameParens _ _} -> it{nann_adornment=NameParens (EpTok (epl 1)) (EpTok (epl 0))}
+#else
it@NameAnn{nann_adornment = NameParens} -> it{nann_open = epl 1, nann_close = epl 0}
+#endif
other -> other
childLIE = reLocA $ L srcChild $ IEName
-#if MIN_VERSION_ghc(9,5,0)
noExtField
-#endif
childRdr
-#if MIN_VERSION_ghc(9,9,0)
+#if MIN_VERSION_ghc(9,11,0)
+ listAnn = (Nothing, (EpTok (epl 1), NoEpTok, NoEpTok, EpTok (epl 0)))
+#elif MIN_VERSION_ghc(9,9,0)
listAnn = (Nothing, [AddEpAnn AnnOpenP (epl 1), AddEpAnn AnnCloseP (epl 0)])
#elif MIN_VERSION_ghc(9,7,0)
listAnn = (Nothing, epAnn srcParent [AddEpAnn AnnOpenP (epl 1), AddEpAnn AnnCloseP (epl 0)])
@@ -454,11 +429,7 @@ extendImportViaParent df parent child (L l it@ImportDecl{..})
#endif
lies' = addCommaInImportList (reverse pre) x
-#if MIN_VERSION_ghc(9,5,0)
return $ L l it{ideclImportList = Just (hide, L l' lies')}
-#else
- return $ L l it{ideclHiding = Just (hide, L l' lies')}
-#endif
extendImportViaParent _ _ _ _ = TransformT $ lift $ Left "Unable to extend the import list via parent"
-- Add an item in an import list, taking care of adding comma if needed.
@@ -499,11 +470,7 @@ addCommaInImportList lies x =
fixLast :: [LocatedAn AnnListItem a] -> [LocatedAn AnnListItem a]
fixLast = over _last (first (if existingTrailingComma then id else addComma))
-#if MIN_VERSION_ghc(9,5,0)
unIEWrappedName :: IEWrappedName GhcPs -> String
-#else
-unIEWrappedName :: IEWrappedName (IdP GhcPs) -> String
-#endif
unIEWrappedName (occName -> occ) = T.unpack $ printOutputable $ parenSymOcc occ (ppr occ)
hasParen :: String -> Bool
@@ -517,17 +484,10 @@ hasParen _ = False
hideSymbol ::
String -> LImportDecl GhcPs -> Rewrite
hideSymbol symbol lidecl@(L loc ImportDecl{..}) =
-#if MIN_VERSION_ghc(9,5,0)
case ideclImportList of
Nothing -> Rewrite (locA loc) $ extendHiding symbol lidecl Nothing
Just (EverythingBut, hides) -> Rewrite (locA loc) $ extendHiding symbol lidecl (Just hides)
Just (Exactly, imports) -> Rewrite (locA loc) $ deleteFromImport symbol lidecl $ setEntryDP (makeDeltaAst imports) (SameLine 1)
-#else
- case ideclHiding of
- Nothing -> Rewrite (locA loc) $ extendHiding symbol lidecl Nothing
- Just (True, hides) -> Rewrite (locA loc) $ extendHiding symbol lidecl (Just hides)
- Just (False, imports) -> Rewrite (locA loc) $ deleteFromImport symbol lidecl imports
-#endif
extendHiding ::
String ->
@@ -538,7 +498,10 @@ extendHiding ::
extendHiding symbol (L l idecls) mlies df = do
L l' lies <- case mlies of
Nothing -> do
-#if MIN_VERSION_ghc(9,9,0)
+#if MIN_VERSION_ghc(9,11,0)
+ let ann :: EpAnn (AnnList (EpToken "hiding", [EpToken ","]))
+ ann = noAnnSrcSpanDP0
+#elif MIN_VERSION_ghc(9,9,0)
let ann = noAnnSrcSpanDP0
#else
src <- uniqueSrcSpanT
@@ -549,9 +512,14 @@ extendHiding symbol (L l idecls) mlies df = do
#else
ann' = flip (fmap.fmap) ann $ \x -> x
#endif
+#if MIN_VERSION_ghc(9,11,0)
+ {al_rest = (EpTok (epl 1), [NoEpTok])
+ ,al_brackets=ListParens (EpTok (epl 1)) (EpTok (epl 0))
+#else
{al_rest = [AddEpAnn AnnHiding (epl 1)]
,al_open = Just $ AddEpAnn AnnOpenP (epl 1)
,al_close = Just $ AddEpAnn AnnCloseP (epl 0)
+#endif
}
return $ L ann' []
Just pr -> pure pr
@@ -561,9 +529,7 @@ extendHiding symbol (L l idecls) mlies df = do
rdr <- liftParseAST df symbol
rdr <- pure $ modifyAnns rdr $ addParens (isOperator $ unLoc rdr)
let lie = reLocA $ L src $ IEName
-#if MIN_VERSION_ghc(9,5,0)
noExtField
-#endif
rdr
x = reLocA $ L top $ IEVar
#if MIN_VERSION_ghc(9,7,0)
@@ -577,11 +543,7 @@ extendHiding symbol (L l idecls) mlies df = do
#endif
x <- pure $ if hasSibling then first addComma x else x
lies <- pure $ over _head (`setEntryDP` SameLine 1) lies
-#if MIN_VERSION_ghc(9,5,0)
return $ L l idecls{ideclImportList = Just (EverythingBut, L l' $ x : lies)}
-#else
- return $ L l idecls{ideclHiding = Just (True, L l' $ x : lies)}
-#endif
where
isOperator = not . all isAlphaNum . occNameString . rdrNameOcc
@@ -596,11 +558,7 @@ deleteFromImport (T.pack -> symbol) (L l idecl) (L lieLoc lies) _ = do
lidecl' =
L l $
idecl
-#if MIN_VERSION_ghc(9,5,0)
{ ideclImportList = Just (Exactly, edited) }
-#else
- { ideclHiding = Just (False, edited) }
-#endif
pure lidecl'
where
deletedLies =
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs
index c338903d35..69f3332dc0 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs
@@ -5,7 +5,6 @@ module Development.IDE.Plugin.CodeAction.RuleTypes
import Control.DeepSeq (NFData)
import Data.Hashable (Hashable)
-import Data.Typeable (Typeable)
import Development.IDE.Graph (RuleResult)
import Development.IDE.Types.Exports
import Development.IDE.Types.HscEnvEq (HscEnvEq)
@@ -15,7 +14,7 @@ import GHC.Generics (Generic)
type instance RuleResult PackageExports = ExportsMap
newtype PackageExports = PackageExports HscEnvEq
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable PackageExports
instance NFData PackageExports
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs
index a7407b6791..aec82cb17f 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs
@@ -24,13 +24,7 @@ import Language.LSP.Protocol.Types
-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
-#if !MIN_VERSION_ghc(9,4,0)
-import GHC.Parser.Annotation (IsUnicodeSyntax (..),
- TrailingAnn (..))
-import Language.Haskell.GHC.ExactPrint (d1)
-#endif
-
-#if MIN_VERSION_ghc(9,4,0) && !MIN_VERSION_ghc(9,9,0)
+#if MIN_VERSION_ghc(9,6,0) && !MIN_VERSION_ghc(9,9,0)
import Development.IDE.GHC.ExactPrint (epl)
import GHC.Parser.Annotation (TokenLocation (..))
#endif
@@ -51,6 +45,9 @@ import GHC (DeltaPos (..),
import Language.Haskell.GHC.ExactPrint (d1, setEntryDP)
#endif
+#if MIN_VERSION_ghc(9,11,0)
+import GHC.Parser.Annotation (EpToken (..))
+#endif
-- When GHC tells us that a variable is not bound, it will tell us either:
-- - there is an unbound variable with a given type
@@ -77,19 +74,34 @@ plugin parsedModule Diagnostic {_message, _range}
-- addArgToMatch "foo" `bar arg1 arg2 = ...`
-- => (`bar arg1 arg2 foo = ...`, 2)
addArgToMatch :: T.Text -> GenLocated l (Match GhcPs (LocatedA (HsExpr GhcPs))) -> (GenLocated l (Match GhcPs (LocatedA (HsExpr GhcPs))), Int)
+
+-- NOTE: The code duplication within CPP clauses avoids a parse error with
+-- `stylish-haskell`.
+#if MIN_VERSION_ghc(9,11,0)
+addArgToMatch name (L locMatch (Match xMatch ctxMatch (L l pats) rhs)) =
+ let unqualName = mkRdrUnqual $ mkVarOcc $ T.unpack name
+ newPat = L noAnnSrcSpanDP1 $ VarPat NoExtField $ L noAnn unqualName
+ -- The intention is to move `= ...` (right-hand side with equals) to the right so there's 1 space between
+ -- the newly added pattern and the rest
+ indentRhs :: GRHSs GhcPs (LocatedA (HsExpr GhcPs)) -> GRHSs GhcPs (LocatedA (HsExpr GhcPs))
+ indentRhs rhs@GRHSs{grhssGRHSs} = rhs {grhssGRHSs = fmap (`setEntryDP` (SameLine 1)) grhssGRHSs }
+ in (L locMatch (Match xMatch ctxMatch (L l (pats <> [newPat])) (indentRhs rhs)), Prelude.length pats)
+#elif MIN_VERSION_ghc(9,9,0)
addArgToMatch name (L locMatch (Match xMatch ctxMatch pats rhs)) =
let unqualName = mkRdrUnqual $ mkVarOcc $ T.unpack name
-#if MIN_VERSION_ghc(9,9,0)
newPat = L noAnnSrcSpanDP1 $ VarPat NoExtField $ L noAnn unqualName
-- The intention is to move `= ...` (right-hand side with equals) to the right so there's 1 space between
-- the newly added pattern and the rest
indentRhs :: GRHSs GhcPs (LocatedA (HsExpr GhcPs)) -> GRHSs GhcPs (LocatedA (HsExpr GhcPs))
indentRhs rhs@GRHSs{grhssGRHSs} = rhs {grhssGRHSs = fmap (`setEntryDP` (SameLine 1)) grhssGRHSs }
+ in (L locMatch (Match xMatch ctxMatch (pats <> [newPat]) (indentRhs rhs)), Prelude.length pats)
#else
+addArgToMatch name (L locMatch (Match xMatch ctxMatch pats rhs)) =
+ let unqualName = mkRdrUnqual $ mkVarOcc $ T.unpack name
newPat = L (noAnnSrcSpanDP1 generatedSrcSpan) $ VarPat NoExtField (noLocA unqualName)
indentRhs = id
+ in (L locMatch (Match xMatch ctxMatch (pats <> [newPat]) (indentRhs rhs)), Prelude.length pats)
#endif
- in (L locMatch (Match xMatch ctxMatch (pats <> [newPat]) (indentRhs rhs)), Prelude.length pats)
-- Attempt to insert a binding pattern into each match for the given LHsDecl; succeeds only if the function is a FunBind.
-- Also return:
@@ -171,9 +183,13 @@ addTyHoleToTySigArg loc (L annHsSig (HsSig xHsSig tyVarBndrs lsigTy)) =
( noAnn
, noExtField
, HsUnrestrictedArrow (EpUniTok d1 NormalSyntax)
+#if MIN_VERSION_ghc(9,11,0)
+ , L wildCardAnn $ HsWildCardTy NoEpTok
+#else
, L wildCardAnn $ HsWildCardTy noExtField
+#endif
)
-#elif MIN_VERSION_ghc(9,4,0)
+#else
wildCardAnn = SrcSpanAnn (EpAnn genAnchor1 (AnnListItem []) emptyComments) generatedSrcSpan
arrowAnn = TokenLoc (epl 1)
newArg =
@@ -182,14 +198,6 @@ addTyHoleToTySigArg loc (L annHsSig (HsSig xHsSig tyVarBndrs lsigTy)) =
, HsUnrestrictedArrow (L arrowAnn HsNormalTok)
, L wildCardAnn $ HsWildCardTy noExtField
)
-#else
- wildCardAnn = SrcSpanAnn (EpAnn genAnchor1 (AnnListItem [AddRarrowAnn d1]) emptyComments) generatedSrcSpan
- newArg =
- ( SrcSpanAnn mempty generatedSrcSpan
- , noAnn
- , HsUnrestrictedArrow NormalSyntax
- , L wildCardAnn $ HsWildCardTy noExtField
- )
#endif
-- NOTE if the location that the argument wants to be placed at is not one more than the number of arguments
-- in the signature, then we return the original type signature.
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs
index d64edbd0e2..7facc8f54c 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs
@@ -21,6 +21,9 @@ matchRegex message regex = case message =~~ regex of
Nothing -> Nothing
-- | 'matchRegex' combined with 'unifySpaces'
+--
+-- >>> matchRegexUnifySpaces "hello I'm a cow" "he(ll)o"
+-- Just ["ll"]
matchRegexUnifySpaces :: T.Text -> T.Text -> Maybe [T.Text]
matchRegexUnifySpaces message = matchRegex (unifySpaces message)
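A further hedged example (not part of the patch), assuming `unifySpaces` collapses whitespace runs as its name suggests; this is what lets the message regexes in CodeAction.hs ignore GHC's line wrapping:

exampleUnified :: Maybe [T.Text]
exampleUnified =
  matchRegexUnifySpaces "The  module\n    ‘Foo’ does not export" "module ‘([^’]*)’"
-- expected: Just ["Foo"]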
diff --git a/plugins/hls-refactor-plugin/test/Main.hs b/plugins/hls-refactor-plugin/test/Main.hs
index 7cb37f2785..da45083a08 100644
--- a/plugins/hls-refactor-plugin/test/Main.hs
+++ b/plugins/hls-refactor-plugin/test/Main.hs
@@ -1,4 +1,5 @@
{-# LANGUAGE AllowAmbiguousTypes #-}
+{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE GADTs #-}
@@ -46,6 +47,7 @@ import Development.IDE.Plugin.CodeAction (matchRegExMultipleImp
import Test.Hls
import qualified Development.IDE.GHC.ExactPrint
+import Development.IDE.Plugin.CodeAction (NotInScope (..))
import qualified Development.IDE.Plugin.CodeAction as Refactor
import qualified Test.AddArgument
@@ -68,6 +70,7 @@ tests =
, codeActionTests
, codeActionHelperFunctionTests
, completionTests
+ , extractNotInScopeNameTests
]
initializeTests :: TestTree
@@ -300,6 +303,8 @@ codeActionTests = testGroup "code actions"
, suggestImportClassMethodTests
, suggestImportTests
, suggestAddRecordFieldImportTests
+ , suggestAddCoerceMissingConstructorImportTests
+ , suggestAddGenericMissingConstructorImportTests
, suggestHideShadowTests
, fixConstructorImportTests
, fixModuleImportTypoTests
@@ -316,6 +321,7 @@ codeActionTests = testGroup "code actions"
, addImplicitParamsConstraintTests
, removeExportTests
, Test.AddArgument.tests
+ , suggestAddRecordFieldUpdateImportTests
]
insertImportTests :: TestTree
@@ -1151,7 +1157,7 @@ extendImportTests = testGroup "extend import actions"
, "x :: (:~:) [] []"
, "x = Refl"
])
- (Range (Position 3 17) (Position 3 18))
+ (Range (Position 3 4) (Position 3 8))
[ "Add (:~:)(..) to the import list of Data.Type.Equality"
, "Add type (:~:)(Refl) to the import list of Data.Type.Equality"]
(T.unlines
@@ -1215,7 +1221,7 @@ extendImportTests = testGroup "extend import actions"
, "import ModuleA as A (stuffB)"
, "main = print (stuffB .* stuffB)"
])
- (Range (Position 2 17) (Position 2 18))
+ (Range (Position 2 22) (Position 2 24))
["Add (.*) to the import list of ModuleA"]
(T.unlines
[ "module ModuleB where"
@@ -1229,7 +1235,7 @@ extendImportTests = testGroup "extend import actions"
, "import Data.List.NonEmpty (fromList)"
, "main = case (fromList []) of _ :| _ -> pure ()"
])
- (Range (Position 2 5) (Position 2 6))
+ (Range (Position 2 31) (Position 2 33))
[ "Add NonEmpty((:|)) to the import list of Data.List.NonEmpty"
, "Add NonEmpty(..) to the import list of Data.List.NonEmpty"
]
@@ -1246,7 +1252,7 @@ extendImportTests = testGroup "extend import actions"
, "import Data.Maybe (catMaybes)"
, "x = Just 10"
])
- (Range (Position 3 5) (Position 2 6))
+ (Range (Position 3 4) (Position 3 8))
[ "Add Maybe(Just) to the import list of Data.Maybe"
, "Add Maybe(..) to the import list of Data.Maybe"
]
@@ -1353,8 +1359,7 @@ extendImportTests = testGroup "extend import actions"
[ "import Data.Monoid (First (..))"
, "f = (First Nothing) <> mempty"
])
- , brokenForGHC94 "On GHC 9.4, the error messages with -fdefer-type-errors don't have necessary imported target srcspan info." $
- testSession "extend single line qualified import with value" $ template
+ , testSession "extend single line qualified import with value" $ template
[("ModuleA.hs", T.unlines
[ "module ModuleA where"
, "stuffA :: Double"
@@ -1479,7 +1484,7 @@ extendImportTests = testGroup "extend import actions"
, "import ModuleA ()"
, "foo = bar"
])
- (Range (Position 3 17) (Position 3 18))
+ (Range (Position 3 6) (Position 3 9))
["Add bar to the import list of ModuleA",
"Add bar to the import list of ModuleB"]
(T.unlines
@@ -1496,7 +1501,7 @@ extendImportTests = testGroup "extend import actions"
, "x :: (:~:) [] []"
, "x = Refl"
])
- (Range (Position 3 17) (Position 3 18))
+ (Range (Position 3 4) (Position 3 8))
[ "Add type (:~:)(Refl) to the import list of Data.Type.Equality"
, "Add (:~:)(..) to the import list of Data.Type.Equality"]
(T.unlines
@@ -1546,8 +1551,7 @@ extendImportTests = testGroup "extend import actions"
)
(Range (Position 2 3) (Position 2 7))
)
- , ignoreForGhcVersions [GHC94] "Diagnostic message has no suggestions" $
- testSession "type constructor name same as data constructor name" $ template
+ , testSession "type constructor name same as data constructor name" $ template
[("ModuleA.hs", T.unlines
[ "module ModuleA where"
, "newtype Foo = Foo Int"
@@ -1849,8 +1853,14 @@ suggestImportTests = testGroup "suggest import actions"
suggestAddRecordFieldImportTests :: TestTree
suggestAddRecordFieldImportTests = testGroup "suggest imports of record fields when using OverloadedRecordDot"
[ testGroup "The field is suggested when an instance resolution failure occurs"
- [ ignoreForGhcVersions [GHC94, GHC96] "Extension not present <9.2, and the assist is derived from the help message in >=9.4" theTest
+ ([ ignoreForGhcVersions [GHC96] "Extension not present <9.2, and the assist is derived from the help message in >=9.4" theTest
]
+ ++ [
+ theTestIndirect qualifiedGhcRecords polymorphicType
+ |
+ qualifiedGhcRecords <- [False, True]
+ , polymorphicType <- [False, True]
+ ])
]
where
theTest = testSessionWithExtraFiles "hover" def $ \dir -> do
@@ -1871,6 +1881,144 @@ suggestAddRecordFieldImportTests = testGroup "suggest imports of record fields w
contentAfterAction <- documentContents doc
liftIO $ after @=? contentAfterAction
+ theTestIndirect qualifiedGhcRecords polymorphicType = testGroup
+ ((if qualifiedGhcRecords then "qualified-" else "unqualified-")
+ <> ("HasField " :: String)
+ <>
+ (if polymorphicType then "polymorphic-" else "monomorphic-")
+ <> "type ")
+ . (\x -> [x]) $ testSessionWithExtraFiles "hover" def $ \dir -> do
+ -- Hopefully enable project indexing?
+ configureCheckProject True
+
+ let
+ before = T.unlines ["{-# LANGUAGE OverloadedRecordDot #-}", "module A where", if qualifiedGhcRecords then "" else "import GHC.Records", "import C (bar)", "spam = bar.foo"]
+ after = T.unlines ["{-# LANGUAGE OverloadedRecordDot #-}", "module A where", if qualifiedGhcRecords then "" else "import GHC.Records", "import C (bar)", "import B (Foo(..))", "spam = bar.foo"]
+ cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A, B, C]}}"
+ liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle
+ liftIO $ writeFileUTF8 (dir </> "B.hs") $ unlines ["module B where", if polymorphicType then "data Foo x = Foo { foo :: x }" else "data Foo = Foo { foo :: Int }"]
+ liftIO $ writeFileUTF8 (dir </> "C.hs") $ unlines ["module C where", "import B", "bar = Foo 10" ]
+ doc <- createDoc "Test.hs" "haskell" before
+ waitForProgressDone
+ _ <- waitForDiagnostics
+ let defLine = 4
+ range = Range (Position defLine 0) (Position defLine maxBound)
+ actions <- getCodeActions doc range
+ action <- pickActionWithTitle "import B (Foo(..))" actions
+ executeCodeAction action
+ contentAfterAction <- documentContents doc
+ liftIO $ after @=? contentAfterAction
+
+suggestAddRecordFieldUpdateImportTests :: TestTree
+suggestAddRecordFieldUpdateImportTests = testGroup "suggest imports of record fields in update"
+ [ testGroup "implicit import of type" [theTest ] ]
+ where
+ theTest = testSessionWithExtraFiles "hover" def $ \dir -> do
+ configureCheckProject True
+
+ let
+ before = T.unlines ["module C where", "import B", "biz = bar { foo = 100 }"]
+ after = T.unlines ["module C where", "import B", "import A (Foo(..))", "biz = bar { foo = 100 }"]
+ cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A, B, C]}}"
+ liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle
+ liftIO $ writeFileUTF8 (dir </> "A.hs") $ unlines ["module A where", "data Foo = Foo { foo :: Int }"]
+ liftIO $ writeFileUTF8 (dir </> "B.hs") $ unlines ["module B where", "import A", "bar = Foo 10" ]
+ doc <- createDoc "Test.hs" "haskell" before
+ waitForProgressDone
+ diags <- waitForDiagnostics
+ liftIO $ print diags
+ let defLine = 2
+ range = Range (Position defLine 0) (Position defLine maxBound)
+ actions <- getCodeActions doc range
+ liftIO $ print actions
+ action <- pickActionWithTitle "import A (Foo(..))" actions
+ executeCodeAction action
+ contentAfterAction <- documentContents doc
+ liftIO $ after @=? contentAfterAction
+
+extractNotInScopeNameTests :: TestTree
+extractNotInScopeNameTests =
+ testGroup "extractNotInScopeName" [
+ testGroup "record field" [
+ testCase ">=ghc 910" $ Refactor.extractNotInScopeName "Not in scope: ‘foo’" @=? Just (NotInScopeThing "foo"),
+ testCase " do
+ configureCheckProject False
+ let before = T.unlines ["module A where", "import Data.Coerce (coerce)", "import Data.Semigroup (Sum)", "bar = coerce (10 :: Int) :: Sum Int"]
+ after = T.unlines ["module A where", "import Data.Coerce (coerce)", "import Data.Semigroup (Sum)", "import Data.Semigroup (Sum(..))", "bar = coerce (10 :: Int) :: Sum Int"]
+ cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A]}}"
+ liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle
+ doc <- createDoc "Test.hs" "haskell" before
+ waitForProgressDone
+ _ <- waitForDiagnostics
+ let defLine = 3
+ range = Range (Position defLine 0) (Position defLine maxBound)
+ actions <- getCodeActions doc range
+ action <- pickActionWithTitle "import Data.Semigroup (Sum(..))" actions
+ executeCodeAction action
+ contentAfterAction <- documentContents doc
+ liftIO $ after @=? contentAfterAction
+
+suggestAddGenericMissingConstructorImportTests :: TestTree
+suggestAddGenericMissingConstructorImportTests = testGroup "suggest imports of type constructors when using generic deriving"
+ [ testGroup "The type constructors are suggested when not in scope"
+ [ theTest
+ ]
+ ]
+ where
+ theTest = testSessionWithExtraFiles "hover" def $ \dir -> do
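+ -- ‘deriving instance Generic (Sum Int)’ needs the ‘Sum’ constructor in scope,
+ -- so the expected fix adds ‘import Data.Semigroup (Sum(..))’.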
+ configureCheckProject False
+ let
+ before = T.unlines ["module A where", "import GHC.Generics", "import Data.Semigroup (Sum)", "deriving instance Generic (Sum Int)"]
+ after = T.unlines ["module A where", "import GHC.Generics", "import Data.Semigroup (Sum)", "import Data.Semigroup (Sum(..))", "deriving instance Generic (Sum Int)"]
+ cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A]}}"
+ liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle
+ doc <- createDoc "Test.hs" "haskell" before
+ waitForProgressDone
+ _ <- waitForDiagnostics
+ let defLine = 3
+ range = Range (Position defLine 0) (Position defLine maxBound)
+ actions <- getCodeActions doc range
+ action <- pickActionWithTitle "import Data.Semigroup (Sum(..))" actions
+ executeCodeAction action
+ contentAfterAction <- documentContents doc
+ liftIO $ after @=? contentAfterAction
+
suggestImportDisambiguationTests :: TestTree
suggestImportDisambiguationTests = testGroup "suggest import disambiguation actions"
@@ -2277,7 +2425,7 @@ insertNewDefinitionTests = testGroup "insert new definition actions"
docB <- createDoc "ModuleB.hs" "haskell" (T.unlines start)
_ <- waitForDiagnostics
action <- pickActionWithTitle "Define select :: Int -> Bool"
- =<< getCodeActions docB (R 1 0 0 50)
+ =<< getCodeActions docB (R 1 8 1 14)
executeCodeAction action
contentAfterAction <- documentContents docB
liftIO $ contentAfterAction @?= T.unlines expected
@@ -2301,7 +2449,7 @@ insertNewDefinitionTests = testGroup "insert new definition actions"
docB <- createDoc "ModuleB.hs" "haskell" (T.unlines start)
_ <- waitForDiagnostics
action <- pickActionWithTitle "Define select :: Int -> Bool"
- =<< getCodeActions docB (R 1 0 0 50)
+ =<< getCodeActions docB (R 1 8 1 14)
executeCodeAction action
contentAfterAction <- documentContents docB
liftIO $ contentAfterAction @?= T.unlines expected
@@ -2469,9 +2617,7 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t
, ""
, "f = 1"
]
- (if ghcVersion >= GHC94
- then [ (DiagnosticSeverity_Warning, (3, 4), "Defaulting the type variable", Nothing) ]
- else [ (DiagnosticSeverity_Warning, (3, 4), "Defaulting the following constraint", Nothing) ])
+ [ (DiagnosticSeverity_Warning, (3, 4), "Defaulting the type variable", Nothing) ]
"Add type annotation ‘Integer’ to ‘1’"
[ "{-# OPTIONS_GHC -Wtype-defaults #-}"
, "module A (f) where"
@@ -2488,9 +2634,7 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t
, " let x = 3"
, " in x"
]
- (if ghcVersion >= GHC94
- then [ (DiagnosticSeverity_Warning, (4, 12), "Defaulting the type variable", Nothing) ]
- else [ (DiagnosticSeverity_Warning, (4, 12), "Defaulting the following constraint", Nothing) ])
+ [ (DiagnosticSeverity_Warning, (4, 12), "Defaulting the type variable", Nothing) ]
"Add type annotation ‘Integer’ to ‘3’"
[ "{-# OPTIONS_GHC -Wtype-defaults #-}"
, "module A where"
@@ -2508,9 +2652,7 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t
, " let x = let y = 5 in y"
, " in x"
]
- (if ghcVersion >= GHC94
- then [ (DiagnosticSeverity_Warning, (4, 20), "Defaulting the type variable", Nothing) ]
- else [ (DiagnosticSeverity_Warning, (4, 20), "Defaulting the following constraint", Nothing) ])
+ [ (DiagnosticSeverity_Warning, (4, 20), "Defaulting the type variable", Nothing) ]
"Add type annotation ‘Integer’ to ‘5’"
[ "{-# OPTIONS_GHC -Wtype-defaults #-}"
, "module A where"
@@ -2529,23 +2671,17 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t
, ""
, "f = seq \"debug\" traceShow \"debug\""
]
- (if ghcVersion >= GHC94
- then
- [ (DiagnosticSeverity_Warning, (6, 8), "Defaulting the type variable", Nothing)
- , (DiagnosticSeverity_Warning, (6, 16), "Defaulting the type variable", Nothing)
- ]
- else
- [ (DiagnosticSeverity_Warning, (6, 8), "Defaulting the following constraint", Nothing)
- , (DiagnosticSeverity_Warning, (6, 16), "Defaulting the following constraint", Nothing)
- ])
- "Add type annotation ‘String’ to ‘\"debug\"’"
+ [ (DiagnosticSeverity_Warning, (6, 8), "Defaulting the type variable", Nothing)
+ , (DiagnosticSeverity_Warning, (6, 16), "Defaulting the type variable", Nothing)
+ ]
+ ("Add type annotation ‘" <> stringLit <> "’ to ‘\"debug\"’")
[ "{-# OPTIONS_GHC -Wtype-defaults #-}"
, "{-# LANGUAGE OverloadedStrings #-}"
, "module A (f) where"
, ""
, "import Debug.Trace"
, ""
- , "f = seq (\"debug\" :: String) traceShow \"debug\""
+ , "f = seq (\"debug\" :: "<> stringLit <> ") traceShow \"debug\""
]
, testSession "add default type to satisfy two constraints" $
testFor
@@ -2557,17 +2693,15 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t
, ""
, "f a = traceShow \"debug\" a"
]
- (if ghcVersion >= GHC94
- then [ (DiagnosticSeverity_Warning, (6, 6), "Defaulting the type variable", Nothing) ]
- else [ (DiagnosticSeverity_Warning, (6, 6), "Defaulting the following constraint", Nothing) ])
- "Add type annotation ‘String’ to ‘\"debug\"’"
+ [ (DiagnosticSeverity_Warning, (6, 6), "Defaulting the type variable", Nothing) ]
+ ("Add type annotation ‘" <> stringLit <> "’ to ‘\"debug\"’")
[ "{-# OPTIONS_GHC -Wtype-defaults #-}"
, "{-# LANGUAGE OverloadedStrings #-}"
, "module A (f) where"
, ""
, "import Debug.Trace"
, ""
- , "f a = traceShow (\"debug\" :: String) a"
+ , "f a = traceShow (\"debug\" :: " <> stringLit <> ") a"
]
, testSession "add default type to satisfy two constraints with duplicate literals" $
testFor
@@ -2579,20 +2713,19 @@ addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals t
, ""
, "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow \"debug\"))"
]
- (if ghcVersion >= GHC94
- then [ (DiagnosticSeverity_Warning, (6, 54), "Defaulting the type variable", Nothing) ]
- else [ (DiagnosticSeverity_Warning, (6, 54), "Defaulting the following constraint", Nothing) ])
- "Add type annotation ‘String’ to ‘\"debug\"’"
+ [ (DiagnosticSeverity_Warning, (6, 54), "Defaulting the type variable", Nothing) ]
+ ("Add type annotation ‘"<> stringLit <>"’ to ‘\"debug\"’")
[ "{-# OPTIONS_GHC -Wtype-defaults #-}"
, "{-# LANGUAGE OverloadedStrings #-}"
, "module A (f) where"
, ""
, "import Debug.Trace"
, ""
- , "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow (\"debug\" :: String)))"
+ , "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow (\"debug\" :: "<> stringLit <> ")))"
]
]
where
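+ -- On GHC >= 9.12 the suggested annotation uses ‘[Char]’ instead of ‘String’,
+ -- so the expected output is version-dependent.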
+ stringLit = if ghcVersion >= GHC912 then "[Char]" else "String"
testFor sourceLines diag expectedTitle expectedLines = do
docId <- createDoc "A.hs" "haskell" $ T.unlines sourceLines
expectDiagnostics [ ("A.hs", diag) ]
@@ -2617,7 +2750,7 @@ fixConstructorImportTests = testGroup "fix import actions"
[ "module ModuleB where"
, "import ModuleA(Constructor)"
])
- (Range (Position 1 10) (Position 1 11))
+ (Range (Position 1 15) (Position 1 26))
"Fix import of A(Constructor)"
(T.unlines
[ "module ModuleB where"
@@ -3208,6 +3341,10 @@ addSigActionTests = let
executeCodeAction chosenAction
modifiedCode <- documentContents doc
liftIO $ expectedCode @=? modifiedCode
+ issue806 = if ghcVersion >= GHC912 then
+ "hello = print" >:: "hello :: GHC.Types.ZonkAny 0 -> IO ()" -- GHC now returns ZonkAny 0 instead of Any. https://gitlab.haskell.org/ghc/ghc/-/issues/25895
+ else
+ "hello = print" >:: "hello :: GHC.Types.Any -> IO ()" -- Documents current behavior outlined in #806
in
testGroup "add signature"
[ "abc = True" >:: "abc :: Bool"
@@ -3216,7 +3353,7 @@ addSigActionTests = let
, "(!!!) a b = a > b" >:: "(!!!) :: Ord a => a -> a -> Bool"
, "a >>>> b = a + b" >:: "(>>>>) :: Num a => a -> a -> a"
, "a `haha` b = a b" >:: "haha :: (t1 -> t2) -> t1 -> t2"
- , "hello = print" >:: "hello :: GHC.Types.Any -> IO ()" -- Documents current behavior outlined in #806
+ , issue806
, "pattern Some a = Just a" >:: "pattern Some :: a -> Maybe a"
, "pattern Some a <- Just a" >:: "pattern Some :: a -> Maybe a"
, "pattern Some a <- Just a\n where Some a = Just a" >:: "pattern Some :: a -> Maybe a"
@@ -3250,8 +3387,7 @@ exportUnusedTests = testGroup "export unused actions"
]
(R 2 0 2 11)
"Export ‘bar’"
- , ignoreForGhcVersions [GHC94] "Diagnostic message has no suggestions" $
- testSession "type is exported but not the constructor of same name" $ templateNoAction
+ , testSession "type is exported but not the constructor of same name" $ templateNoAction
[ "{-# OPTIONS_GHC -Wunused-top-binds #-}"
, "module A (Foo) where"
, "data Foo = Foo"
@@ -3893,8 +4029,4 @@ pattern R x y x' y' = Range (Position x y) (Position x' y')
-- Which we need to do on macOS since the $TMPDIR can be in @/private/var@ or
-- @/var@
withTempDir :: (FilePath -> IO a) -> IO a
-withTempDir f = System.IO.Extra.withTempDir $ \dir ->
- canonicalizePath dir >>= f
-
-brokenForGHC94 :: String -> TestTree -> TestTree
-brokenForGHC94 = knownBrokenForGhcVersions [GHC94]
+withTempDir f = System.IO.Extra.withTempDir $ (canonicalizePath >=> f)
diff --git a/plugins/hls-refactor-plugin/test/Test/AddArgument.hs b/plugins/hls-refactor-plugin/test/Test/AddArgument.hs
index 2f741c0003..a0bf8b004e 100644
--- a/plugins/hls-refactor-plugin/test/Test/AddArgument.hs
+++ b/plugins/hls-refactor-plugin/test/Test/AddArgument.hs
@@ -35,7 +35,7 @@ tests =
mkGoldenAddArgTest "AddArgFromLet" (r 2 0 2 50),
mkGoldenAddArgTest "AddArgFromWhere" (r 3 0 3 50),
-- TODO can we make this work for GHC 9.10?
- knownBrokenForGhcVersions [GHC910] "In GHC 9.10 end-of-line comment annotation is in different place" $
+ knownBrokenForGhcVersions [GHC910, GHC912] "In GHC 9.10 and 9.12 end-of-line comment annotation is in different place" $
mkGoldenAddArgTest "AddArgFromWhereComments" (r 3 0 3 50),
mkGoldenAddArgTest "AddArgWithTypeSynSig" (r 2 0 2 50),
mkGoldenAddArgTest "AddArgWithTypeSynSigContravariant" (r 2 0 2 50),
diff --git a/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs b/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs
index e65eafa52b..2e39ffcd98 100644
--- a/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs
+++ b/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs
@@ -465,11 +465,7 @@ suggestRuleRewrites originatingFile pos ms_mod (L _ HsRules {rds_rules}) =
]
| L (locA -> l) r <- rds_rules,
pos `isInsideSrcSpan` l,
-#if MIN_VERSION_ghc(9,5,0)
let HsRule {rd_name = L _ rn} = r,
-#else
- let HsRule {rd_name = L _ (_, rn)} = r,
-#endif
let ruleName = unpackFS rn
]
where
@@ -502,7 +498,7 @@ data CallRetrieError
| NoParse NormalizedFilePath
| GHCParseError NormalizedFilePath String
| NoTypeCheck NormalizedFilePath
- deriving (Eq, Typeable)
+ deriving (Eq)
instance Show CallRetrieError where
show (CallRetrieInternalError msg f) = msg <> " - " <> fromNormalizedFilePath f
@@ -736,7 +732,6 @@ toImportDecl AddImport {..} = GHC.ImportDecl {ideclSource = ideclSource', ..}
ideclPkgQual = NoRawPkgQual
-#if MIN_VERSION_ghc(9,5,0)
ideclImportList = Nothing
ideclExt = GHCGHC.XImportDeclPass
{ ideclAnn =
@@ -748,11 +743,6 @@ toImportDecl AddImport {..} = GHC.ImportDecl {ideclSource = ideclSource', ..}
, ideclSourceText = ideclSourceSrc
, ideclImplicit = ideclImplicit
}
-#else
- ideclExt = GHCGHC.EpAnnNotUsed
- ideclHiding = Nothing
-#endif
-
reuseParsedModule :: IdeState -> NormalizedFilePath -> IO (FixityEnv, Annotated GHCGHC.ParsedSource)
reuseParsedModule state f = do
diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs
index cda4fda6e6..7f445bf7ac 100644
--- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs
+++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs
@@ -10,7 +10,6 @@ module Ide.Plugin.SemanticTokens.Types where
import Control.DeepSeq (NFData (rnf), rwhnf)
import qualified Data.Array as A
import Data.Default (Default (def))
-import Data.Generics (Typeable)
import Development.IDE (Pretty (pretty), RuleResult)
import qualified Development.IDE.Core.Shake as Shake
import Development.IDE.GHC.Compat hiding (loc)
@@ -108,7 +107,7 @@ instance Show Loc where
show (Loc line startChar len) = show line <> ":" <> show startChar <> "-" <> show (startChar + len)
data GetSemanticTokens = GetSemanticTokens
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetSemanticTokens
diff --git a/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs b/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs
index eacd47e2d2..a0d1648fb3 100644
--- a/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs
+++ b/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs
@@ -10,8 +10,7 @@ import Data.Functor (void)
import qualified Data.List as T
import Data.Map.Strict as Map hiding (map)
import Data.String (fromString)
-import Data.Text hiding (length, map,
- unlines)
+import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.Utf16.Rope.Mixed as Rope
import Data.Version (Version (..))
diff --git a/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs b/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs
index 43bdf5decb..de468e2a87 100644
--- a/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs
+++ b/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs
@@ -38,14 +38,14 @@ import Data.Maybe (fromMaybe, listToMaybe,
mapMaybe)
import qualified Data.Text as T
import Development.IDE
-import Development.IDE.Core.FileStore (getVersionedTextDoc)
+import Development.IDE.Core.FileStore (getVersionedTextDoc)
import Development.IDE.Core.PluginUtils
import Development.IDE.GHC.Compat as Compat
import Development.IDE.GHC.Compat.ExactPrint
import qualified Development.IDE.GHC.Compat.Util as Util
import Development.IDE.GHC.ExactPrint
import GHC.Exts
-import qualified GHC.Runtime.Loader as Loader
+import qualified GHC.Runtime.Loader as Loader
import qualified GHC.Types.Error as Error
import Ide.Plugin.Error (PluginError (PluginInternalError))
import Ide.Plugin.Splice.Types
@@ -58,9 +58,7 @@ import Language.LSP.Protocol.Types
import Data.Foldable (Foldable (foldl'))
#endif
-#if MIN_VERSION_ghc(9,4,1)
import GHC.Data.Bag (Bag)
-#endif
#if MIN_VERSION_ghc(9,9,0)
import GHC.Parser.Annotation (EpAnn (..))
@@ -294,11 +292,9 @@ data SpliceClass where
OneToOneAST :: HasSplice AnnListItem ast => Proxy# ast -> SpliceClass
IsHsDecl :: SpliceClass
-#if MIN_VERSION_ghc(9,5,0)
data HsSpliceCompat pass
= UntypedSplice (HsUntypedSplice pass)
| TypedSplice (LHsExpr pass)
-#endif
class (Outputable (ast GhcRn), ASTElement l (ast GhcPs)) => HasSplice l ast where
@@ -307,43 +303,24 @@ class (Outputable (ast GhcRn), ASTElement l (ast GhcPs)) => HasSplice l ast wher
expandSplice :: Proxy# ast -> SpliceOf ast GhcPs -> RnM (Either (ast GhcPs) (ast GhcRn), FreeVars)
instance HasSplice AnnListItem HsExpr where
-#if MIN_VERSION_ghc(9,5,0)
type SpliceOf HsExpr = HsSpliceCompat
matchSplice _ (HsUntypedSplice _ spl) = Just (UntypedSplice spl)
matchSplice _ (HsTypedSplice _ spl) = Just (TypedSplice spl)
-#else
- type SpliceOf HsExpr = HsSplice
- matchSplice _ (HsSpliceE _ spl) = Just spl
-#endif
- matchSplice _ _ = Nothing
-#if MIN_VERSION_ghc(9,5,0)
+ matchSplice _ _ = Nothing
expandSplice _ (UntypedSplice e) = fmap (first Right) $ rnUntypedSpliceExpr e
expandSplice _ (TypedSplice e) = fmap (first Right) $ rnTypedSplice e
-#else
- expandSplice _ = fmap (first Right) . rnSpliceExpr
-#endif
instance HasSplice AnnListItem Pat where
-#if MIN_VERSION_ghc(9,5,0)
type SpliceOf Pat = HsUntypedSplice
-#else
- type SpliceOf Pat = HsSplice
-#endif
matchSplice _ (SplicePat _ spl) = Just spl
matchSplice _ _ = Nothing
expandSplice _ =
-#if MIN_VERSION_ghc(9,5,0)
fmap (first (Left . unLoc . utsplice_result . snd )) .
-#endif
rnSplicePat
instance HasSplice AnnListItem HsType where
-#if MIN_VERSION_ghc(9,5,0)
type SpliceOf HsType = HsUntypedSplice
-#else
- type SpliceOf HsType = HsSplice
-#endif
matchSplice _ (HsSpliceTy _ spl) = Just spl
matchSplice _ _ = Nothing
expandSplice _ = fmap (first Right) . rnSpliceType
@@ -418,14 +395,8 @@ manualCalcEdit clientCapabilities reportEditor ran ps hscEnv typechkd srcSpan _e
pure resl
where
dflags = hsc_dflags hscEnv
-
-#if MIN_VERSION_ghc(9,4,1)
showErrors = showBag
-#else
- showErrors = show
-#endif
-#if MIN_VERSION_ghc(9,4,1)
showBag :: Error.Diagnostic a => Bag (Error.MsgEnvelope a) -> String
showBag = show . fmap (fmap toDiagnosticMessage)
@@ -433,15 +404,12 @@ toDiagnosticMessage :: forall a. Error.Diagnostic a => a -> Error.DiagnosticMess
toDiagnosticMessage message =
Error.DiagnosticMessage
{ diagMessage = Error.diagnosticMessage
-#if MIN_VERSION_ghc(9,5,0)
(Error.defaultDiagnosticOpts @a)
-#endif
message
, diagReason = Error.diagnosticReason message
, diagHints = Error.diagnosticHints message
}
-#endif
-- | FIXME: Is thereAny "clever" way to do this exploiting TTG?
unRenamedE ::
@@ -458,15 +426,11 @@ unRenamedE dflags expr = do
showSDoc dflags $ ppr expr
pure expr'
where
-#if MIN_VERSION_ghc(9,4,1)
showErrors = showBag . Error.getMessages
-#else
- showErrors = show
-#endif
data SearchResult r =
Continue | Stop | Here r
- deriving (Read, Show, Eq, Ord, Data, Typeable)
+ deriving (Read, Show, Eq, Ord, Data)
fromSearchResult :: SearchResult a -> Maybe a
fromSearchResult (Here r) = Just r
@@ -510,12 +474,8 @@ codeAction state plId (CodeActionParams _ _ docId ran _) = do
(L (AsSrcSpan l@(RealSrcSpan spLoc _)) expr :: LHsExpr GhcPs)
| spanIsRelevant l ->
case expr of
-#if MIN_VERSION_ghc(9,5,0)
HsTypedSplice{} -> Here (spLoc, Expr)
HsUntypedSplice{} -> Here (spLoc, Expr)
-#else
- HsSpliceE {} -> Here (spLoc, Expr)
-#endif
_ -> Continue
_ -> Stop
)
diff --git a/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs
index a862e57fb8..767cc061df 100644
--- a/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs
+++ b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs
@@ -79,10 +79,15 @@ provider recorder ide _token typ contents fp _opts = do
-- If no such file has been found, return default config.
loadConfigFrom :: FilePath -> IO Config
loadConfigFrom file = do
+#if MIN_VERSION_stylish_haskell(0,15,0)
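+ -- stylish-haskell >= 0.15 lets loadConfig search for the config file starting
+ -- from the file's own directory, so we no longer have to temporarily change
+ -- the process-wide working directory (the pre-0.15 path below).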
+ let configSearchStrategy = SearchFromDirectory (takeDirectory file)
+ config <- loadConfig (makeVerbose False) configSearchStrategy
+#else
currDir <- getCurrentDirectory
setCurrentDirectory (takeDirectory file)
config <- loadConfig (makeVerbose False) Nothing
setCurrentDirectory currDir
+#endif
pure config
-- | Run stylish-haskell on the given text with the given configuration.
diff --git a/release/upload.sh b/release/upload.sh
index 29f6849757..22dc6d438d 100755
--- a/release/upload.sh
+++ b/release/upload.sh
@@ -35,7 +35,7 @@ fi
echo HLS version $ver
-host="gitlab-storage.haskell.org"
+host="gitlab.haskell.org:2222"
usage() {
echo "Usage: [rel_name=] SIGNING_KEY= $0 "
diff --git a/scripts/release/create-yaml-snippet.sh b/scripts/release/create-yaml-snippet.sh
index 2fb7413f82..6ee25b01b5 100644
--- a/scripts/release/create-yaml-snippet.sh
+++ b/scripts/release/create-yaml-snippet.sh
@@ -28,56 +28,61 @@ cat <<EOF > /dev/stdout
dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-deb10.tar.xz
dlSubdir: haskell-language-server-$RELEASE
dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb10.tar.xz" | awk '{ print $1 }')
+ '(>= 11 && < 12)': &hls-${RELEASE//./}-64-deb11
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-deb11.tar.xz
+ dlSubdir: haskell-language-server-$RELEASE
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb11.tar.xz" | awk '{ print $1 }')
+ '>= 12': &hls-${RELEASE//./}-64-deb12
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-deb12.tar.xz
+ dlSubdir: haskell-language-server-$RELEASE
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb12.tar.xz" | awk '{ print $1 }')
unknown_versioning: &hls-${RELEASE//./}-64-deb11
dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-deb11.tar.xz
dlSubdir: haskell-language-server-$RELEASE
dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb11.tar.xz" | awk '{ print $1 }')
Linux_Ubuntu:
'( >= 16 && < 19 )': &hls-${RELEASE//./}-64-ubuntu18
- dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu18.04.tar.xz
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu1804.tar.xz
dlSubdir: haskell-language-server-$RELEASE
- dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu18.04.tar.xz" | awk '{ print $1 }')
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu1804.tar.xz" | awk '{ print $1 }')
'( >= 20 && < 22 )': &hls-${RELEASE//./}-64-ubuntu20
- dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu20.04.tar.xz
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu2004.tar.xz
dlSubdir: haskell-language-server-$RELEASE
- dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu20.04.tar.xz" | awk '{ print $1 }')
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu2004.tar.xz" | awk '{ print $1 }')
unknown_versioning: &hls-${RELEASE//./}-64-ubuntu22
- dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu22.04.tar.xz
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu2204.tar.xz
dlSubdir: haskell-language-server-$RELEASE
- dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu22.04.tar.xz" | awk '{ print $1 }')
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu2204.tar.xz" | awk '{ print $1 }')
Linux_Mint:
'< 20':
- dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint19.3.tar.xz
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint193.tar.xz
dlSubdir: haskell-language-server-$RELEASE
- dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint19.3.tar.xz" | awk '{ print $1 }')
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint193.tar.xz" | awk '{ print $1 }')
'(>= 20 && < 21)':
- dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint20.2.tar.xz
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint202.tar.xz
dlSubdir: haskell-language-server-$RELEASE
- dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint20.2.tar.xz" | awk '{ print $1 }')
- '>= 21': *hls-${RELEASE//./}-64-ubuntu22
- Linux_Fedora:
- '< 33': &hls-${RELEASE//./}-64-fedora27
- dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-fedora27.tar.xz
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint202.tar.xz" | awk '{ print $1 }')
+ '>= 21':
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint213.tar.xz
dlSubdir: haskell-language-server-$RELEASE
- dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-fedora27.tar.xz" | awk '{ print $1 }')
- '>= 33': &hls-${RELEASE//./}-64-fedora33
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint213.tar.xz" | awk '{ print $1 }')
+ Linux_Fedora:
+ '(>= 33 && < 40)': &hls-${RELEASE//./}-64-fedora33
dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-fedora33.tar.xz
dlSubdir: haskell-language-server-$RELEASE
dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-fedora33.tar.xz" | awk '{ print $1 }')
- unknown_versioning: *hls-${RELEASE//./}-64-fedora27
- Linux_CentOS:
- '( >= 7 && < 8 )': &hls-${RELEASE//./}-64-centos
- dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-centos7.tar.xz
+ '>= 40': &hls-${RELEASE//./}-64-fedora40
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-fedora40.tar.xz
dlSubdir: haskell-language-server-$RELEASE
- dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-centos7.tar.xz" | awk '{ print $1 }')
- unknown_versioning: *hls-${RELEASE//./}-64-centos
- Linux_RedHat:
- unknown_versioning: *hls-${RELEASE//./}-64-centos
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-fedora40.tar.xz" | awk '{ print $1 }')
+ unknown_versioning: *hls-${RELEASE//./}-64-unknown
Linux_UnknownLinux:
- unknown_versioning:
+ unknown_versioning: &hls-${RELEASE//./}-64-unknown
dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-unknown.tar.xz
dlSubdir: haskell-language-server-$RELEASE
dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-unknown.tar.xz" | awk '{ print $1 }')
+ Linux_RedHat:
+ unknown_versioning: *hls-${RELEASE//./}-64-unknown
Darwin:
unknown_versioning:
dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-apple-darwin.tar.xz
@@ -87,17 +92,12 @@ cat < /dev/stdout
unknown_versioning:
dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-mingw64.zip
dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-mingw64.zip" | awk '{ print $1 }')
- FreeBSD:
- unknown_versioning:
- dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-freebsd.tar.xz
- dlSubdir: haskell-language-server-$RELEASE
- dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-freebsd.tar.xz" | awk '{ print $1 }')
A_ARM64:
Linux_UnknownLinux:
unknown_versioning:
- dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-aarch64-linux-ubuntu20.tar.xz
+ dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-aarch64-linux-ubuntu2004.tar.xz
dlSubdir: haskell-language-server-$RELEASE
- dlHash: $(sha256sum "haskell-language-server-$RELEASE-aarch64-linux-ubuntu20.tar.xz" | awk '{ print $1 }')
+ dlHash: $(sha256sum "haskell-language-server-$RELEASE-aarch64-linux-ubuntu2004.tar.xz" | awk '{ print $1 }')
Darwin:
unknown_versioning:
dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-aarch64-apple-darwin.tar.xz
diff --git a/shake-bench/shake-bench.cabal b/shake-bench/shake-bench.cabal
index d5852a6310..c381089aba 100644
--- a/shake-bench/shake-bench.cabal
+++ b/shake-bench/shake-bench.cabal
@@ -16,7 +16,7 @@ source-repository head
location: https://github.com/haskell/haskell-language-server.git
library
- if impl(ghc >= 9.10)
+ if impl(ghc > 9.11)
buildable: False
exposed-modules: Development.Benchmark.Rules
hs-source-dirs: src
diff --git a/shake-bench/src/Development/Benchmark/Rules.hs b/shake-bench/src/Development/Benchmark/Rules.hs
index 98cfd717d2..8ba2b3f0df 100644
--- a/shake-bench/src/Development/Benchmark/Rules.hs
+++ b/shake-bench/src/Development/Benchmark/Rules.hs
@@ -131,7 +131,7 @@ type RuleResultForExample e =
, IsExample e)
data Configuration = Configuration {confName :: String, confValue :: ByteString}
- deriving (Binary, Eq, Generic, Hashable, NFData, Show, Typeable)
+ deriving (Binary, Eq, Generic, Hashable, NFData, Show)
type instance RuleResult GetConfigurations = [Configuration]
-- | Knowledge needed to run an example
@@ -535,7 +535,7 @@ heapProfileRules build = do
build -/- "*/*/*/*/*.heap.svg" %> \out -> do
let hpFile = dropExtension2 out <.> "hp"
need [hpFile]
- cmd_ ("hp2pretty" :: String) [hpFile]
+ cmd_ ("eventlog2html" :: String) ["--heap-profile", hpFile]
liftIO $ renameFile (dropExtension hpFile <.> "svg") out
dropExtension2 :: FilePath -> FilePath
diff --git a/src/Ide/Arguments.hs b/src/Ide/Arguments.hs
index 733da2e557..be7f35e455 100644
--- a/src/Ide/Arguments.hs
+++ b/src/Ide/Arguments.hs
@@ -33,6 +33,7 @@ data Arguments
| BiosMode BiosAction
| Ghcide GhcideArguments
| VSCodeExtensionSchemaMode
+ | PluginsCustomConfigMarkdownReferenceMode
| DefaultConfigurationMode
| PrintLibDir
@@ -69,6 +70,7 @@ getArguments exeName plugins = execParser opts
<|> hsubparser
( command "vscode-extension-schema" extensionSchemaCommand
<> command "generate-default-config" generateDefaultConfigCommand
+ <> command "plugins-custom-config-markdown-reference" pluginsCustomConfigMarkdownReferenceCommand
)
<|> listPluginsParser
<|> BiosMode <$> biosParser
@@ -86,6 +88,9 @@ getArguments exeName plugins = execParser opts
generateDefaultConfigCommand =
info (pure DefaultConfigurationMode)
(fullDesc <> progDesc "Print config supported by the server with default values")
+ pluginsCustomConfigMarkdownReferenceCommand =
+ info (pure PluginsCustomConfigMarkdownReferenceMode)
+ (fullDesc <> progDesc "Print markdown reference for plugins custom config")
printVersionParser :: String -> Parser PrintVersion
printVersionParser exeName =
diff --git a/src/Ide/Main.hs b/src/Ide/Main.hs
index 33b1d51a11..f122b53fa6 100644
--- a/src/Ide/Main.hs
+++ b/src/Ide/Main.hs
@@ -15,6 +15,7 @@ import Data.Function ((&))
import Data.List (sortOn)
import Data.Text (Text)
import qualified Data.Text as T
+import qualified Data.Text.IO as T (putStrLn)
import Data.Text.Lazy.Encoding (decodeUtf8)
import qualified Data.Text.Lazy.IO as LT
import Development.IDE.Core.Rules hiding (Log)
@@ -28,7 +29,8 @@ import HIE.Bios.Types hiding (Log)
import qualified HIE.Bios.Types as HieBios
import Ide.Arguments
import Ide.Logger as G
-import Ide.Plugin.ConfigUtils (pluginsToDefaultConfig,
+import Ide.Plugin.ConfigUtils (pluginsCustomConfigToMarkdownTables,
+ pluginsToDefaultConfig,
pluginsToVSCodeExtensionSchema)
import Ide.Types (IdePlugins, PluginId (PluginId),
describePlugin, ipMap, pluginId)
@@ -103,6 +105,8 @@ defaultMain recorder args idePlugins = do
VSCodeExtensionSchemaMode -> do
LT.putStrLn $ decodeUtf8 $ encodePrettySorted $ pluginsToVSCodeExtensionSchema idePlugins
+ PluginsCustomConfigMarkdownReferenceMode -> do
+ T.putStrLn $ pluginsCustomConfigToMarkdownTables idePlugins
DefaultConfigurationMode -> do
LT.putStrLn $ decodeUtf8 $ encodePrettySorted $ pluginsToDefaultConfig idePlugins
PrintLibDir -> do
diff --git a/stack-lts22.yaml b/stack-lts22.yaml
index ecd17a99c2..7306295a8a 100644
--- a/stack-lts22.yaml
+++ b/stack-lts22.yaml
@@ -1,4 +1,4 @@
-resolver: lts-22.25 # ghc-9.6.5
+resolver: lts-22.43 # ghc-9.6.6
packages:
- .
@@ -15,12 +15,14 @@ ghc-options:
allow-newer: true
allow-newer-deps:
- extensions
+ # stan dependencies
+ - directory-ospath-streaming
extra-deps:
- Diff-0.5
- floskell-0.11.1
- - hiedb-0.6.0.1
- - hie-bios-0.14.0
+ - hiedb-0.6.0.2
+ - hie-bios-0.15.0
- implicit-hie-0.1.4.0
- lsp-2.7.0.0
- lsp-test-0.17.1.0
@@ -29,7 +31,7 @@ extra-deps:
- retrie-1.2.3
# stan and friends
- - stan-0.1.3.0
+ - stan-0.2.1.0
- dir-traverse-0.2.3.0
- extensions-0.1.0.1
- tomland-1.3.3.2
@@ -39,6 +41,7 @@ extra-deps:
- validation-selective-0.2.0.0
- cabal-add-0.1
- cabal-install-parsers-0.6.1.1
+ - directory-ospath-streaming-0.2.2
configure-options:
@@ -56,6 +59,9 @@ flags:
BuildExecutable: false
cabal-add:
cabal-syntax: true
+ # stan dependencies
+ directory-ospath-streaming:
+ os-string: false
nix:
packages: [icu libcxx zlib]
diff --git a/stack.yaml b/stack.yaml
index 8df29e1b00..ba89370091 100644
--- a/stack.yaml
+++ b/stack.yaml
@@ -1,4 +1,4 @@
-resolver: nightly-2024-06-12 # ghc-9.8.2
+resolver: lts-23.18 # ghc-9.8.4
packages:
- .
@@ -17,29 +17,26 @@ allow-newer-deps:
- extensions
- hw-fingertree
- retrie
+ # stan dependencies
+ - directory-ospath-streaming
extra-deps:
- floskell-0.11.1
- - hiedb-0.6.0.1
- - hie-bios-0.14.0
+ - hiedb-0.6.0.2
- implicit-hie-0.1.4.0
+ - hie-bios-0.15.0
- hw-fingertree-0.1.2.1
- - lsp-2.7.0.0
- - lsp-test-0.17.1.0
- - lsp-types-2.3.0.0
- monad-dijkstra-0.1.1.5
- - stylish-haskell-0.14.6.0
- retrie-1.2.3
# stan dependencies not found in the stackage snapshot
- - stan-0.1.3.0
+ - stan-0.2.1.0
- dir-traverse-0.2.3.0
- extensions-0.1.0.1
- trial-0.0.0.0
- trial-optparse-applicative-0.0.0.0
- trial-tomland-0.0.0.0
- - cabal-add-0.1
- - cabal-install-parsers-0.6.1.1
+ - directory-ospath-streaming-0.2.2
configure-options:
ghcide:
@@ -56,6 +53,9 @@ flags:
BuildExecutable: false
cabal-add:
cabal-syntax: true
+ # stan dependencies
+ directory-ospath-streaming:
+ os-string: false
nix:
packages: [icu libcxx zlib]
diff --git a/test/functional/Config.hs b/test/functional/Config.hs
index 1f91ec4466..874792784f 100644
--- a/test/functional/Config.hs
+++ b/test/functional/Config.hs
@@ -8,7 +8,6 @@ import Control.Monad
import Data.Hashable
import qualified Data.HashMap.Strict as HM
import qualified Data.Map as Map
-import Data.Typeable (Typeable)
import Development.IDE (RuleResult, action, define,
getFilesOfInterestUntracked,
getPluginConfigAction, ideErrorText,
@@ -102,7 +101,7 @@ genericConfigTests = testGroup "generic plugin config"
data GetTestDiagnostics = GetTestDiagnostics
- deriving (Eq, Show, Typeable, Generic)
+ deriving (Eq, Show, Generic)
instance Hashable GetTestDiagnostics
instance NFData GetTestDiagnostics
type instance RuleResult GetTestDiagnostics = ()
diff --git a/test/functional/ConfigSchema.hs b/test/functional/ConfigSchema.hs
index 3dbbe0ce2f..2ece6972e9 100644
--- a/test/functional/ConfigSchema.hs
+++ b/test/functional/ConfigSchema.hs
@@ -31,6 +31,9 @@ tests = testGroup "generate schema"
, goldenGitDiff "generate-default-config" (defaultConfigFp ghcVersion) $ do
stdout <- readProcess hlsExeCommand ["generate-default-config"] ""
pure $ BS.pack stdout
+ , goldenGitDiff "plugins-custom-config-markdown-reference" (markdownReferenceFp ghcVersion) $ do
+ stdout <- readProcess hlsExeCommand ["plugins-custom-config-markdown-reference"] ""
+ pure $ BS.pack stdout
]
vscodeSchemaFp :: GhcVersion -> FilePath
@@ -39,11 +42,17 @@ vscodeSchemaFp ghcVer = "test" </> "testdata" </> "schema" </> prettyGhcVersion
defaultConfigFp :: GhcVersion -> FilePath
defaultConfigFp ghcVer = "test" </> "testdata" </> "schema" </> prettyGhcVersion ghcVer </> generateDefaultConfigJson
+markdownReferenceFp :: GhcVersion -> FilePath
+markdownReferenceFp ghcVer = "test" </> "testdata" </> "schema" </> prettyGhcVersion ghcVer </> markdownReferenceMd
+
vscodeSchemaJson :: FilePath
vscodeSchemaJson = "vscode-extension-schema.golden.json"
generateDefaultConfigJson :: FilePath
generateDefaultConfigJson = "default-config.golden.json"
+markdownReferenceMd :: FilePath
+markdownReferenceMd = "markdown-reference.md"
+
prettyGhcVersion :: GhcVersion -> String
prettyGhcVersion ghcVer = map toLower (show ghcVer)
diff --git a/test/testdata/schema/ghc94/default-config.golden.json b/test/testdata/schema/ghc910/default-config.golden.json
similarity index 96%
rename from test/testdata/schema/ghc94/default-config.golden.json
rename to test/testdata/schema/ghc910/default-config.golden.json
index 751aa6f28e..3b4e687ef9 100644
--- a/test/testdata/schema/ghc94/default-config.golden.json
+++ b/test/testdata/schema/ghc910/default-config.golden.json
@@ -39,11 +39,12 @@
"codeLensOn": true
},
"eval": {
+ "codeActionsOn": true,
+ "codeLensOn": true,
"config": {
"diff": true,
"exception": false
- },
- "globalOn": true
+ }
},
"explicit-fields": {
"codeActionsOn": true,
@@ -131,9 +132,6 @@
},
"globalOn": true
},
- "retrie": {
- "globalOn": true
- },
"semanticTokens": {
"config": {
"classMethodToken": "method",
@@ -152,9 +150,6 @@
},
"globalOn": false
},
- "splice": {
- "globalOn": true
- },
"stan": {
"globalOn": false
}
diff --git a/test/testdata/schema/ghc910/markdown-reference.md b/test/testdata/schema/ghc910/markdown-reference.md
new file mode 100644
index 0000000000..668323ce66
--- /dev/null
+++ b/test/testdata/schema/ghc910/markdown-reference.md
@@ -0,0 +1,66 @@
+## hlint
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `flags` | Flags used by hlint | `TODO: Array values` | |
+
+## cabal-fmt
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `path` | Set path to 'cabal-fmt' executable | `"cabal-fmt"` | |
+
+## ghcide-completions
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `autoExtendOn` | Extends the import list automatically when completing a out-of-scope identifier | `True` | |
+| `snippetsOn` | Inserts snippets when using code completions | `True` | |
+
+## eval
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `exception` | Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi. | `False` | |
+| `diff` | Enable the diff output (WAS/NOW) of eval lenses | `True` | |
+
+## ghcide-type-lenses
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `mode` | Control how type lenses are shown | `Always` | Always <br> Exported <br> Diagnostics <br> |
+
+## ormolu
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `external` | Call out to an external "ormolu" executable, rather than using the bundled library | `False` | |
+
+## rename
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `crossModule` | Enable experimental cross-module renaming | `False` | |
+
+## semanticTokens
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `variableToken` | LSP semantic token type to use for variables | `SemanticTokenTypes_Variable` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `functionToken` | LSP semantic token type to use for functions | `SemanticTokenTypes_Function` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `dataConstructorToken` | LSP semantic token type to use for data constructors | `SemanticTokenTypes_EnumMember` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `typeVariableToken` | LSP semantic token type to use for type variables | `SemanticTokenTypes_TypeParameter` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `classMethodToken` | LSP semantic token type to use for typeclass methods | `SemanticTokenTypes_Method` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `patternSynonymToken` | LSP semantic token type to use for pattern synonyms | `SemanticTokenTypes_Macro` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `typeConstructorToken` | LSP semantic token type to use for type constructors | `SemanticTokenTypes_Enum` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `classToken` | LSP semantic token type to use for typeclasses | `SemanticTokenTypes_Class` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `typeSynonymToken` | LSP semantic token type to use for type synonyms | `SemanticTokenTypes_Type` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `typeFamilyToken` | LSP semantic token type to use for type families | `SemanticTokenTypes_Interface` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `recordFieldToken` | LSP semantic token type to use for record fields | `SemanticTokenTypes_Property` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `operatorToken` | LSP semantic token type to use for operators | `SemanticTokenTypes_Operator` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+| `moduleToken` | LSP semantic token type to use for modules | `SemanticTokenTypes_Namespace` | SemanticTokenTypes_Namespace <br> SemanticTokenTypes_Type <br> SemanticTokenTypes_Class <br> SemanticTokenTypes_Enum <br> SemanticTokenTypes_Interface <br> SemanticTokenTypes_Struct <br> SemanticTokenTypes_TypeParameter <br> SemanticTokenTypes_Parameter <br> SemanticTokenTypes_Variable <br> SemanticTokenTypes_Property <br> SemanticTokenTypes_EnumMember <br> SemanticTokenTypes_Event <br> SemanticTokenTypes_Function <br> SemanticTokenTypes_Method <br> SemanticTokenTypes_Macro <br> SemanticTokenTypes_Keyword <br> SemanticTokenTypes_Modifier <br> SemanticTokenTypes_Comment <br> SemanticTokenTypes_String <br> SemanticTokenTypes_Number <br> SemanticTokenTypes_Regexp <br> SemanticTokenTypes_Operator <br> SemanticTokenTypes_Decorator <br> |
+
+## fourmolu
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `external` | Call out to an external "fourmolu" executable, rather than using the bundled library. | `False` | |
+| `path` | Set path to executable (for "external" mode). | `"fourmolu"` | |
+
+## cabal-gild
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `path` | Set path to 'cabal-gild' executable | `"cabal-gild"` | |
+
+
diff --git a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json b/test/testdata/schema/ghc910/vscode-extension-schema.golden.json
similarity index 99%
rename from test/testdata/schema/ghc94/vscode-extension-schema.golden.json
rename to test/testdata/schema/ghc910/vscode-extension-schema.golden.json
index 938964fc50..4ca08f296c 100644
--- a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json
+++ b/test/testdata/schema/ghc910/vscode-extension-schema.golden.json
@@ -77,6 +77,18 @@
"scope": "resource",
"type": "boolean"
},
+ "haskell.plugin.eval.codeActionsOn": {
+ "default": true,
+ "description": "Enables eval code actions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.eval.codeLensOn": {
+ "default": true,
+ "description": "Enables eval code lenses",
+ "scope": "resource",
+ "type": "boolean"
+ },
"haskell.plugin.eval.config.diff": {
"default": true,
"markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses",
@@ -89,12 +101,6 @@
"scope": "resource",
"type": "boolean"
},
- "haskell.plugin.eval.globalOn": {
- "default": true,
- "description": "Enables eval plugin",
- "scope": "resource",
- "type": "boolean"
- },
"haskell.plugin.explicit-fields.codeActionsOn": {
"default": true,
"description": "Enables explicit-fields code actions",
@@ -297,12 +303,6 @@
"scope": "resource",
"type": "boolean"
},
- "haskell.plugin.retrie.globalOn": {
- "default": true,
- "description": "Enables retrie plugin",
- "scope": "resource",
- "type": "boolean"
- },
"haskell.plugin.semanticTokens.config.classMethodToken": {
"default": "method",
"description": "LSP semantic token type to use for typeclass methods",
@@ -1037,12 +1037,6 @@
"scope": "resource",
"type": "boolean"
},
- "haskell.plugin.splice.globalOn": {
- "default": true,
- "description": "Enables splice plugin",
- "scope": "resource",
- "type": "boolean"
- },
"haskell.plugin.stan.globalOn": {
"default": false,
"description": "Enables stan plugin",
diff --git a/test/testdata/schema/ghc912/default-config.golden.json b/test/testdata/schema/ghc912/default-config.golden.json
new file mode 100644
index 0000000000..0dfbd39df2
--- /dev/null
+++ b/test/testdata/schema/ghc912/default-config.golden.json
@@ -0,0 +1,155 @@
+{
+ "cabalFormattingProvider": "cabal-gild",
+ "checkParents": "CheckOnSave",
+ "checkProject": true,
+ "formattingProvider": "ormolu",
+ "maxCompletions": 40,
+ "plugin": {
+ "alternateNumberFormat": {
+ "globalOn": true
+ },
+ "cabal": {
+ "codeActionsOn": true,
+ "completionOn": true,
+ "diagnosticsOn": true,
+ "hoverOn": true,
+ "symbolsOn": true
+ },
+ "cabal-fmt": {
+ "config": {
+ "path": "cabal-fmt"
+ }
+ },
+ "cabal-gild": {
+ "config": {
+ "path": "cabal-gild"
+ }
+ },
+ "cabalHaskellIntegration": {
+ "globalOn": true
+ },
+ "callHierarchy": {
+ "globalOn": true
+ },
+ "changeTypeSignature": {
+ "globalOn": true
+ },
+ "class": {
+ "codeActionsOn": true,
+ "codeLensOn": true
+ },
+ "eval": {
+ "codeActionsOn": true,
+ "codeLensOn": true,
+ "config": {
+ "diff": true,
+ "exception": false
+ }
+ },
+ "explicit-fields": {
+ "codeActionsOn": true,
+ "inlayHintsOn": true
+ },
+ "explicit-fixity": {
+ "globalOn": true
+ },
+ "fourmolu": {
+ "config": {
+ "external": false,
+ "path": "fourmolu"
+ }
+ },
+ "gadt": {
+ "globalOn": true
+ },
+ "ghcide-code-actions-bindings": {
+ "globalOn": true
+ },
+ "ghcide-code-actions-fill-holes": {
+ "globalOn": true
+ },
+ "ghcide-code-actions-imports-exports": {
+ "globalOn": true
+ },
+ "ghcide-code-actions-type-signatures": {
+ "globalOn": true
+ },
+ "ghcide-completions": {
+ "config": {
+ "autoExtendOn": true,
+ "snippetsOn": true
+ },
+ "globalOn": true
+ },
+ "ghcide-hover-and-symbols": {
+ "hoverOn": true,
+ "symbolsOn": true
+ },
+ "ghcide-type-lenses": {
+ "config": {
+ "mode": "always"
+ },
+ "globalOn": true
+ },
+ "hlint": {
+ "codeActionsOn": true,
+ "config": {
+ "flags": []
+ },
+ "diagnosticsOn": true
+ },
+ "importLens": {
+ "codeActionsOn": true,
+ "codeLensOn": true,
+ "inlayHintsOn": true
+ },
+ "moduleName": {
+ "globalOn": true
+ },
+ "ormolu": {
+ "config": {
+ "external": false
+ }
+ },
+ "overloaded-record-dot": {
+ "globalOn": true
+ },
+ "pragmas-completion": {
+ "globalOn": true
+ },
+ "pragmas-disable": {
+ "globalOn": true
+ },
+ "pragmas-suggest": {
+ "globalOn": true
+ },
+ "qualifyImportedNames": {
+ "globalOn": true
+ },
+ "rename": {
+ "config": {
+ "crossModule": false
+ },
+ "globalOn": true
+ },
+ "semanticTokens": {
+ "config": {
+ "classMethodToken": "method",
+ "classToken": "class",
+ "dataConstructorToken": "enumMember",
+ "functionToken": "function",
+ "moduleToken": "namespace",
+ "operatorToken": "operator",
+ "patternSynonymToken": "macro",
+ "recordFieldToken": "property",
+ "typeConstructorToken": "enum",
+ "typeFamilyToken": "interface",
+ "typeSynonymToken": "type",
+ "typeVariableToken": "typeParameter",
+ "variableToken": "variable"
+ },
+ "globalOn": false
+ }
+ },
+ "sessionLoading": "singleComponent"
+}
diff --git a/test/testdata/schema/ghc912/markdown-reference.md b/test/testdata/schema/ghc912/markdown-reference.md
new file mode 100644
index 0000000000..668323ce66
--- /dev/null
+++ b/test/testdata/schema/ghc912/markdown-reference.md
@@ -0,0 +1,66 @@
+## hlint
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `flags` | Flags used by hlint | `TODO: Array values` | |
+
+## cabal-fmt
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `path` | Set path to 'cabal-fmt' executable | `"cabal-fmt"` | |
+
+## ghcide-completions
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `autoExtendOn` | Extends the import list automatically when completing a out-of-scope identifier | `True` | |
+| `snippetsOn` | Inserts snippets when using code completions | `True` | |
+
+## eval
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `exception` | Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi. | `False` | |
+| `diff` | Enable the diff output (WAS/NOW) of eval lenses | `True` | |
+
+## ghcide-type-lenses
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `mode` | Control how type lenses are shown | `Always` | Always <br> Exported <br> Diagnostics <br> |
+
+## ormolu
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `external` | Call out to an external "ormolu" executable, rather than using the bundled library | `False` | |
+
+## rename
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `crossModule` | Enable experimental cross-module renaming | `False` | |
+
+## semanticTokens
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `variableToken` | LSP semantic token type to use for variables | `SemanticTokenTypes_Variable` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `functionToken` | LSP semantic token type to use for functions | `SemanticTokenTypes_Function` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `dataConstructorToken` | LSP semantic token type to use for data constructors | `SemanticTokenTypes_EnumMember` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `typeVariableToken` | LSP semantic token type to use for type variables | `SemanticTokenTypes_TypeParameter` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `classMethodToken` | LSP semantic token type to use for typeclass methods | `SemanticTokenTypes_Method` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `patternSynonymToken` | LSP semantic token type to use for pattern synonyms | `SemanticTokenTypes_Macro` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `typeConstructorToken` | LSP semantic token type to use for type constructors | `SemanticTokenTypes_Enum` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `classToken` | LSP semantic token type to use for typeclasses | `SemanticTokenTypes_Class` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `typeSynonymToken` | LSP semantic token type to use for type synonyms | `SemanticTokenTypes_Type` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `typeFamilyToken` | LSP semantic token type to use for type families | `SemanticTokenTypes_Interface` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `recordFieldToken` | LSP semantic token type to use for record fields | `SemanticTokenTypes_Property` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `operatorToken` | LSP semantic token type to use for operators | `SemanticTokenTypes_Operator` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `moduleToken` | LSP semantic token type to use for modules | `SemanticTokenTypes_Namespace` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+
+## fourmolu
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `external` | Call out to an external "fourmolu" executable, rather than using the bundled library. | `False` | |
+| `path` | Set path to executable (for "external" mode). | `"fourmolu"` | |
+
+## cabal-gild
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `path` | Set path to 'cabal-gild' executable | `"cabal-gild"` | |
+
+
diff --git a/test/testdata/schema/ghc912/vscode-extension-schema.golden.json b/test/testdata/schema/ghc912/vscode-extension-schema.golden.json
new file mode 100644
index 0000000000..77d398438e
--- /dev/null
+++ b/test/testdata/schema/ghc912/vscode-extension-schema.golden.json
@@ -0,0 +1,1040 @@
+{
+ "haskell.plugin.alternateNumberFormat.globalOn": {
+ "default": true,
+ "description": "Enables alternateNumberFormat plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.cabal-fmt.config.path": {
+ "default": "cabal-fmt",
+ "markdownDescription": "Set path to 'cabal-fmt' executable",
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.cabal-gild.config.path": {
+ "default": "cabal-gild",
+ "markdownDescription": "Set path to 'cabal-gild' executable",
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.cabal.codeActionsOn": {
+ "default": true,
+ "description": "Enables cabal code actions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.cabal.completionOn": {
+ "default": true,
+ "description": "Enables cabal completions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.cabal.diagnosticsOn": {
+ "default": true,
+ "description": "Enables cabal diagnostics",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.cabal.hoverOn": {
+ "default": true,
+ "description": "Enables cabal hover",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.cabal.symbolsOn": {
+ "default": true,
+ "description": "Enables cabal symbols",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.cabalHaskellIntegration.globalOn": {
+ "default": true,
+ "description": "Enables cabalHaskellIntegration plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.callHierarchy.globalOn": {
+ "default": true,
+ "description": "Enables callHierarchy plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.changeTypeSignature.globalOn": {
+ "default": true,
+ "description": "Enables changeTypeSignature plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.class.codeActionsOn": {
+ "default": true,
+ "description": "Enables class code actions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.class.codeLensOn": {
+ "default": true,
+ "description": "Enables class code lenses",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.eval.codeActionsOn": {
+ "default": true,
+ "description": "Enables eval code actions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.eval.codeLensOn": {
+ "default": true,
+ "description": "Enables eval code lenses",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.eval.config.diff": {
+ "default": true,
+ "markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.eval.config.exception": {
+ "default": false,
+ "markdownDescription": "Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi.",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.explicit-fields.codeActionsOn": {
+ "default": true,
+ "description": "Enables explicit-fields code actions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.explicit-fields.inlayHintsOn": {
+ "default": true,
+ "description": "Enables explicit-fields inlay hints",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.explicit-fixity.globalOn": {
+ "default": true,
+ "description": "Enables explicit-fixity plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.fourmolu.config.external": {
+ "default": false,
+ "markdownDescription": "Call out to an external \"fourmolu\" executable, rather than using the bundled library.",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.fourmolu.config.path": {
+ "default": "fourmolu",
+ "markdownDescription": "Set path to executable (for \"external\" mode).",
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.gadt.globalOn": {
+ "default": true,
+ "description": "Enables gadt plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-code-actions-bindings.globalOn": {
+ "default": true,
+ "description": "Enables ghcide-code-actions-bindings plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-code-actions-fill-holes.globalOn": {
+ "default": true,
+ "description": "Enables ghcide-code-actions-fill-holes plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-code-actions-imports-exports.globalOn": {
+ "default": true,
+ "description": "Enables ghcide-code-actions-imports-exports plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-code-actions-type-signatures.globalOn": {
+ "default": true,
+ "description": "Enables ghcide-code-actions-type-signatures plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-completions.config.autoExtendOn": {
+ "default": true,
+ "markdownDescription": "Extends the import list automatically when completing a out-of-scope identifier",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-completions.config.snippetsOn": {
+ "default": true,
+ "markdownDescription": "Inserts snippets when using code completions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-completions.globalOn": {
+ "default": true,
+ "description": "Enables ghcide-completions plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-hover-and-symbols.hoverOn": {
+ "default": true,
+ "description": "Enables ghcide-hover-and-symbols hover",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-hover-and-symbols.symbolsOn": {
+ "default": true,
+ "description": "Enables ghcide-hover-and-symbols symbols",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ghcide-type-lenses.config.mode": {
+ "default": "always",
+ "description": "Control how type lenses are shown",
+ "enum": [
+ "always",
+ "exported",
+ "diagnostics"
+ ],
+ "enumDescriptions": [
+ "Always displays type lenses of global bindings",
+ "Only display type lenses of exported global bindings",
+ "Follows error messages produced by GHC about missing signatures"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.ghcide-type-lenses.globalOn": {
+ "default": true,
+ "description": "Enables ghcide-type-lenses plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.hlint.codeActionsOn": {
+ "default": true,
+ "description": "Enables hlint code actions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.hlint.config.flags": {
+ "default": [],
+ "markdownDescription": "Flags used by hlint",
+ "scope": "resource",
+ "type": "array"
+ },
+ "haskell.plugin.hlint.diagnosticsOn": {
+ "default": true,
+ "description": "Enables hlint diagnostics",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.importLens.codeActionsOn": {
+ "default": true,
+ "description": "Enables importLens code actions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.importLens.codeLensOn": {
+ "default": true,
+ "description": "Enables importLens code lenses",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.importLens.inlayHintsOn": {
+ "default": true,
+ "description": "Enables importLens inlay hints",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.moduleName.globalOn": {
+ "default": true,
+ "description": "Enables moduleName plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.ormolu.config.external": {
+ "default": false,
+ "markdownDescription": "Call out to an external \"ormolu\" executable, rather than using the bundled library",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.overloaded-record-dot.globalOn": {
+ "default": true,
+ "description": "Enables overloaded-record-dot plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.pragmas-completion.globalOn": {
+ "default": true,
+ "description": "Enables pragmas-completion plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.pragmas-disable.globalOn": {
+ "default": true,
+ "description": "Enables pragmas-disable plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.pragmas-suggest.globalOn": {
+ "default": true,
+ "description": "Enables pragmas-suggest plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.qualifyImportedNames.globalOn": {
+ "default": true,
+ "description": "Enables qualifyImportedNames plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.rename.config.crossModule": {
+ "default": false,
+ "markdownDescription": "Enable experimental cross-module renaming",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.rename.globalOn": {
+ "default": true,
+ "description": "Enables rename plugin",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.semanticTokens.config.classMethodToken": {
+ "default": "method",
+ "description": "LSP semantic token type to use for typeclass methods",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.classToken": {
+ "default": "class",
+ "description": "LSP semantic token type to use for typeclasses",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.dataConstructorToken": {
+ "default": "enumMember",
+ "description": "LSP semantic token type to use for data constructors",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.functionToken": {
+ "default": "function",
+ "description": "LSP semantic token type to use for functions",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.moduleToken": {
+ "default": "namespace",
+ "description": "LSP semantic token type to use for modules",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.operatorToken": {
+ "default": "operator",
+ "description": "LSP semantic token type to use for operators",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.patternSynonymToken": {
+ "default": "macro",
+ "description": "LSP semantic token type to use for pattern synonyms",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.recordFieldToken": {
+ "default": "property",
+ "description": "LSP semantic token type to use for record fields",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.typeConstructorToken": {
+ "default": "enum",
+ "description": "LSP semantic token type to use for type constructors",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.typeFamilyToken": {
+ "default": "interface",
+ "description": "LSP semantic token type to use for type families",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.typeSynonymToken": {
+ "default": "type",
+ "description": "LSP semantic token type to use for type synonyms",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.typeVariableToken": {
+ "default": "typeParameter",
+ "description": "LSP semantic token type to use for type variables",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.config.variableToken": {
+ "default": "variable",
+ "description": "LSP semantic token type to use for variables",
+ "enum": [
+ "namespace",
+ "type",
+ "class",
+ "enum",
+ "interface",
+ "struct",
+ "typeParameter",
+ "parameter",
+ "variable",
+ "property",
+ "enumMember",
+ "event",
+ "function",
+ "method",
+ "macro",
+ "keyword",
+ "modifier",
+ "comment",
+ "string",
+ "number",
+ "regexp",
+ "operator",
+ "decorator"
+ ],
+ "enumDescriptions": [
+ "LSP Semantic Token Type: namespace",
+ "LSP Semantic Token Type: type",
+ "LSP Semantic Token Type: class",
+ "LSP Semantic Token Type: enum",
+ "LSP Semantic Token Type: interface",
+ "LSP Semantic Token Type: struct",
+ "LSP Semantic Token Type: typeParameter",
+ "LSP Semantic Token Type: parameter",
+ "LSP Semantic Token Type: variable",
+ "LSP Semantic Token Type: property",
+ "LSP Semantic Token Type: enumMember",
+ "LSP Semantic Token Type: event",
+ "LSP Semantic Token Type: function",
+ "LSP Semantic Token Type: method",
+ "LSP Semantic Token Type: macro",
+ "LSP Semantic Token Type: keyword",
+ "LSP Semantic Token Type: modifier",
+ "LSP Semantic Token Type: comment",
+ "LSP Semantic Token Type: string",
+ "LSP Semantic Token Type: number",
+ "LSP Semantic Token Type: regexp",
+ "LSP Semantic Token Type: operator",
+ "LSP Semantic Token Type: decorator"
+ ],
+ "scope": "resource",
+ "type": "string"
+ },
+ "haskell.plugin.semanticTokens.globalOn": {
+ "default": false,
+ "description": "Enables semanticTokens plugin",
+ "scope": "resource",
+ "type": "boolean"
+ }
+}
diff --git a/test/testdata/schema/ghc96/default-config.golden.json b/test/testdata/schema/ghc96/default-config.golden.json
index 751aa6f28e..8467b451f1 100644
--- a/test/testdata/schema/ghc96/default-config.golden.json
+++ b/test/testdata/schema/ghc96/default-config.golden.json
@@ -39,11 +39,12 @@
"codeLensOn": true
},
"eval": {
+ "codeActionsOn": true,
+ "codeLensOn": true,
"config": {
"diff": true,
"exception": false
- },
- "globalOn": true
+ }
},
"explicit-fields": {
"codeActionsOn": true,
diff --git a/test/testdata/schema/ghc96/markdown-reference.md b/test/testdata/schema/ghc96/markdown-reference.md
new file mode 100644
index 0000000000..668323ce66
--- /dev/null
+++ b/test/testdata/schema/ghc96/markdown-reference.md
@@ -0,0 +1,66 @@
+## hlint
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `flags` | Flags used by hlint | `TODO: Array values` | |
+
+## cabal-fmt
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `path` | Set path to 'cabal-fmt' executable | `"cabal-fmt"` | |
+
+## ghcide-completions
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `autoExtendOn` | Extends the import list automatically when completing a out-of-scope identifier | `True` | |
+| `snippetsOn` | Inserts snippets when using code completions | `True` | |
+
+## eval
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `exception` | Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi. | `False` | |
+| `diff` | Enable the diff output (WAS/NOW) of eval lenses | `True` | |
+
+## ghcide-type-lenses
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `mode` | Control how type lenses are shown | `Always` | Always<br>Exported<br>Diagnostics |
+
+## ormolu
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `external` | Call out to an external "ormolu" executable, rather than using the bundled library | `False` | |
+
+## rename
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `crossModule` | Enable experimental cross-module renaming | `False` | |
+
+## semanticTokens
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `variableToken` | LSP semantic token type to use for variables | `SemanticTokenTypes_Variable` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `functionToken` | LSP semantic token type to use for functions | `SemanticTokenTypes_Function` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `dataConstructorToken` | LSP semantic token type to use for data constructors | `SemanticTokenTypes_EnumMember` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `typeVariableToken` | LSP semantic token type to use for type variables | `SemanticTokenTypes_TypeParameter` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `classMethodToken` | LSP semantic token type to use for typeclass methods | `SemanticTokenTypes_Method` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `patternSynonymToken` | LSP semantic token type to use for pattern synonyms | `SemanticTokenTypes_Macro` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `typeConstructorToken` | LSP semantic token type to use for type constructors | `SemanticTokenTypes_Enum` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `classToken` | LSP semantic token type to use for typeclasses | `SemanticTokenTypes_Class` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `typeSynonymToken` | LSP semantic token type to use for type synonyms | `SemanticTokenTypes_Type` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `typeFamilyToken` | LSP semantic token type to use for type families | `SemanticTokenTypes_Interface` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `recordFieldToken` | LSP semantic token type to use for record fields | `SemanticTokenTypes_Property` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `operatorToken` | LSP semantic token type to use for operators | `SemanticTokenTypes_Operator` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+| `moduleToken` | LSP semantic token type to use for modules | `SemanticTokenTypes_Namespace` | SemanticTokenTypes_Namespace<br>SemanticTokenTypes_Type<br>SemanticTokenTypes_Class<br>SemanticTokenTypes_Enum<br>SemanticTokenTypes_Interface<br>SemanticTokenTypes_Struct<br>SemanticTokenTypes_TypeParameter<br>SemanticTokenTypes_Parameter<br>SemanticTokenTypes_Variable<br>SemanticTokenTypes_Property<br>SemanticTokenTypes_EnumMember<br>SemanticTokenTypes_Event<br>SemanticTokenTypes_Function<br>SemanticTokenTypes_Method<br>SemanticTokenTypes_Macro<br>SemanticTokenTypes_Keyword<br>SemanticTokenTypes_Modifier<br>SemanticTokenTypes_Comment<br>SemanticTokenTypes_String<br>SemanticTokenTypes_Number<br>SemanticTokenTypes_Regexp<br>SemanticTokenTypes_Operator<br>SemanticTokenTypes_Decorator |
+
+## fourmolu
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `external` | Call out to an external "fourmolu" executable, rather than using the bundled library. | `False` | |
+| `path` | Set path to executable (for "external" mode). | `"fourmolu"` | |
+
+## cabal-gild
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `path` | Set path to 'cabal-gild' executable | `"cabal-gild"` | |
+
+
diff --git a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json
index 938964fc50..1c0b19eb27 100644
--- a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json
+++ b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json
@@ -77,6 +77,18 @@
"scope": "resource",
"type": "boolean"
},
+ "haskell.plugin.eval.codeActionsOn": {
+ "default": true,
+ "description": "Enables eval code actions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.eval.codeLensOn": {
+ "default": true,
+ "description": "Enables eval code lenses",
+ "scope": "resource",
+ "type": "boolean"
+ },
"haskell.plugin.eval.config.diff": {
"default": true,
"markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses",
@@ -89,12 +101,6 @@
"scope": "resource",
"type": "boolean"
},
- "haskell.plugin.eval.globalOn": {
- "default": true,
- "description": "Enables eval plugin",
- "scope": "resource",
- "type": "boolean"
- },
"haskell.plugin.explicit-fields.codeActionsOn": {
"default": true,
"description": "Enables explicit-fields code actions",
diff --git a/test/testdata/schema/ghc98/default-config.golden.json b/test/testdata/schema/ghc98/default-config.golden.json
index 751aa6f28e..8467b451f1 100644
--- a/test/testdata/schema/ghc98/default-config.golden.json
+++ b/test/testdata/schema/ghc98/default-config.golden.json
@@ -39,11 +39,12 @@
"codeLensOn": true
},
"eval": {
+ "codeActionsOn": true,
+ "codeLensOn": true,
"config": {
"diff": true,
"exception": false
- },
- "globalOn": true
+ }
},
"explicit-fields": {
"codeActionsOn": true,
diff --git a/test/testdata/schema/ghc98/markdown-reference.md b/test/testdata/schema/ghc98/markdown-reference.md
new file mode 100644
index 0000000000..668323ce66
--- /dev/null
+++ b/test/testdata/schema/ghc98/markdown-reference.md
@@ -0,0 +1,66 @@
+## hlint
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `flags` | Flags used by hlint | `TODO: Array values` | |
+
+## cabal-fmt
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `path` | Set path to 'cabal-fmt' executable | `"cabal-fmt"` | |
+
+## ghcide-completions
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `autoExtendOn` | Extends the import list automatically when completing a out-of-scope identifier | `True` | |
+| `snippetsOn` | Inserts snippets when using code completions | `True` | |
+
+## eval
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `exception` | Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi. | `False` | |
+| `diff` | Enable the diff output (WAS/NOW) of eval lenses | `True` | |
+
+## ghcide-type-lenses
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `mode` | Control how type lenses are shown | `Always` | Always<br>Exported<br>Diagnostics |
+
+## ormolu
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `external` | Call out to an external "ormolu" executable, rather than using the bundled library | `False` | |
+
+## rename
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `crossModule` | Enable experimental cross-module renaming | `False` | |
+
+## semanticTokens
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `variableToken` | LSP semantic token type to use for variables | `SemanticTokenTypes_Variable` | SemanticTokenTypes_Namespace
SemanticTokenTypes_Type
SemanticTokenTypes_Class
SemanticTokenTypes_Enum
SemanticTokenTypes_Interface
SemanticTokenTypes_Struct
SemanticTokenTypes_TypeParameter
SemanticTokenTypes_Parameter
SemanticTokenTypes_Variable
SemanticTokenTypes_Property
SemanticTokenTypes_EnumMember
SemanticTokenTypes_Event
SemanticTokenTypes_Function
SemanticTokenTypes_Method
SemanticTokenTypes_Macro
SemanticTokenTypes_Keyword
SemanticTokenTypes_Modifier
SemanticTokenTypes_Comment
SemanticTokenTypes_String
SemanticTokenTypes_Number
SemanticTokenTypes_Regexp
SemanticTokenTypes_Operator
SemanticTokenTypes_Decorator
|
+| `functionToken` | LSP semantic token type to use for functions | `SemanticTokenTypes_Function` | SemanticTokenTypes_Namespace
SemanticTokenTypes_Type
SemanticTokenTypes_Class
SemanticTokenTypes_Enum
SemanticTokenTypes_Interface
SemanticTokenTypes_Struct
SemanticTokenTypes_TypeParameter
SemanticTokenTypes_Parameter
SemanticTokenTypes_Variable
SemanticTokenTypes_Property
SemanticTokenTypes_EnumMember
SemanticTokenTypes_Event
SemanticTokenTypes_Function
SemanticTokenTypes_Method
SemanticTokenTypes_Macro
SemanticTokenTypes_Keyword
SemanticTokenTypes_Modifier
SemanticTokenTypes_Comment
SemanticTokenTypes_String
SemanticTokenTypes_Number
SemanticTokenTypes_Regexp
SemanticTokenTypes_Operator
SemanticTokenTypes_Decorator
|
+| `dataConstructorToken` | LSP semantic token type to use for data constructors | `SemanticTokenTypes_EnumMember` | SemanticTokenTypes_Namespace
SemanticTokenTypes_Type
SemanticTokenTypes_Class
SemanticTokenTypes_Enum
SemanticTokenTypes_Interface
SemanticTokenTypes_Struct
SemanticTokenTypes_TypeParameter
SemanticTokenTypes_Parameter
SemanticTokenTypes_Variable
SemanticTokenTypes_Property
SemanticTokenTypes_EnumMember
SemanticTokenTypes_Event
SemanticTokenTypes_Function
SemanticTokenTypes_Method
SemanticTokenTypes_Macro
SemanticTokenTypes_Keyword
SemanticTokenTypes_Modifier
SemanticTokenTypes_Comment
SemanticTokenTypes_String
SemanticTokenTypes_Number
SemanticTokenTypes_Regexp
SemanticTokenTypes_Operator
SemanticTokenTypes_Decorator
|
+| `typeVariableToken` | LSP semantic token type to use for type variables | `SemanticTokenTypes_TypeParameter` | SemanticTokenTypes_Namespace
SemanticTokenTypes_Type
SemanticTokenTypes_Class
SemanticTokenTypes_Enum
SemanticTokenTypes_Interface
SemanticTokenTypes_Struct
SemanticTokenTypes_TypeParameter
SemanticTokenTypes_Parameter
SemanticTokenTypes_Variable
SemanticTokenTypes_Property
SemanticTokenTypes_EnumMember
SemanticTokenTypes_Event
SemanticTokenTypes_Function
SemanticTokenTypes_Method
SemanticTokenTypes_Macro
SemanticTokenTypes_Keyword
SemanticTokenTypes_Modifier
SemanticTokenTypes_Comment
SemanticTokenTypes_String
SemanticTokenTypes_Number
SemanticTokenTypes_Regexp
SemanticTokenTypes_Operator
SemanticTokenTypes_Decorator
|
+| `classMethodToken` | LSP semantic token type to use for typeclass methods | `SemanticTokenTypes_Method` | SemanticTokenTypes_Namespace
SemanticTokenTypes_Type
SemanticTokenTypes_Class
SemanticTokenTypes_Enum
SemanticTokenTypes_Interface
SemanticTokenTypes_Struct
SemanticTokenTypes_TypeParameter
SemanticTokenTypes_Parameter
SemanticTokenTypes_Variable
SemanticTokenTypes_Property
SemanticTokenTypes_EnumMember
SemanticTokenTypes_Event
SemanticTokenTypes_Function
SemanticTokenTypes_Method
SemanticTokenTypes_Macro
SemanticTokenTypes_Keyword
SemanticTokenTypes_Modifier
SemanticTokenTypes_Comment
SemanticTokenTypes_String
SemanticTokenTypes_Number
SemanticTokenTypes_Regexp
SemanticTokenTypes_Operator
SemanticTokenTypes_Decorator
|
+| `patternSynonymToken` | LSP semantic token type to use for pattern synonyms | `SemanticTokenTypes_Macro` | SemanticTokenTypes_Namespace
SemanticTokenTypes_Type
SemanticTokenTypes_Class
SemanticTokenTypes_Enum
SemanticTokenTypes_Interface
SemanticTokenTypes_Struct
SemanticTokenTypes_TypeParameter
SemanticTokenTypes_Parameter
SemanticTokenTypes_Variable
SemanticTokenTypes_Property
SemanticTokenTypes_EnumMember
SemanticTokenTypes_Event
SemanticTokenTypes_Function
SemanticTokenTypes_Method
SemanticTokenTypes_Macro
SemanticTokenTypes_Keyword
SemanticTokenTypes_Modifier
SemanticTokenTypes_Comment
SemanticTokenTypes_String
SemanticTokenTypes_Number
SemanticTokenTypes_Regexp
SemanticTokenTypes_Operator
SemanticTokenTypes_Decorator
|
+| `typeConstructorToken` | LSP semantic token type to use for type constructors | `SemanticTokenTypes_Enum` | SemanticTokenTypes_Namespace, SemanticTokenTypes_Type, SemanticTokenTypes_Class, SemanticTokenTypes_Enum, SemanticTokenTypes_Interface, SemanticTokenTypes_Struct, SemanticTokenTypes_TypeParameter, SemanticTokenTypes_Parameter, SemanticTokenTypes_Variable, SemanticTokenTypes_Property, SemanticTokenTypes_EnumMember, SemanticTokenTypes_Event, SemanticTokenTypes_Function, SemanticTokenTypes_Method, SemanticTokenTypes_Macro, SemanticTokenTypes_Keyword, SemanticTokenTypes_Modifier, SemanticTokenTypes_Comment, SemanticTokenTypes_String, SemanticTokenTypes_Number, SemanticTokenTypes_Regexp, SemanticTokenTypes_Operator, SemanticTokenTypes_Decorator |
+| `classToken` | LSP semantic token type to use for typeclasses | `SemanticTokenTypes_Class` | SemanticTokenTypes_Namespace, SemanticTokenTypes_Type, SemanticTokenTypes_Class, SemanticTokenTypes_Enum, SemanticTokenTypes_Interface, SemanticTokenTypes_Struct, SemanticTokenTypes_TypeParameter, SemanticTokenTypes_Parameter, SemanticTokenTypes_Variable, SemanticTokenTypes_Property, SemanticTokenTypes_EnumMember, SemanticTokenTypes_Event, SemanticTokenTypes_Function, SemanticTokenTypes_Method, SemanticTokenTypes_Macro, SemanticTokenTypes_Keyword, SemanticTokenTypes_Modifier, SemanticTokenTypes_Comment, SemanticTokenTypes_String, SemanticTokenTypes_Number, SemanticTokenTypes_Regexp, SemanticTokenTypes_Operator, SemanticTokenTypes_Decorator |
+| `typeSynonymToken` | LSP semantic token type to use for type synonyms | `SemanticTokenTypes_Type` | SemanticTokenTypes_Namespace, SemanticTokenTypes_Type, SemanticTokenTypes_Class, SemanticTokenTypes_Enum, SemanticTokenTypes_Interface, SemanticTokenTypes_Struct, SemanticTokenTypes_TypeParameter, SemanticTokenTypes_Parameter, SemanticTokenTypes_Variable, SemanticTokenTypes_Property, SemanticTokenTypes_EnumMember, SemanticTokenTypes_Event, SemanticTokenTypes_Function, SemanticTokenTypes_Method, SemanticTokenTypes_Macro, SemanticTokenTypes_Keyword, SemanticTokenTypes_Modifier, SemanticTokenTypes_Comment, SemanticTokenTypes_String, SemanticTokenTypes_Number, SemanticTokenTypes_Regexp, SemanticTokenTypes_Operator, SemanticTokenTypes_Decorator |
+| `typeFamilyToken` | LSP semantic token type to use for type families | `SemanticTokenTypes_Interface` | SemanticTokenTypes_Namespace, SemanticTokenTypes_Type, SemanticTokenTypes_Class, SemanticTokenTypes_Enum, SemanticTokenTypes_Interface, SemanticTokenTypes_Struct, SemanticTokenTypes_TypeParameter, SemanticTokenTypes_Parameter, SemanticTokenTypes_Variable, SemanticTokenTypes_Property, SemanticTokenTypes_EnumMember, SemanticTokenTypes_Event, SemanticTokenTypes_Function, SemanticTokenTypes_Method, SemanticTokenTypes_Macro, SemanticTokenTypes_Keyword, SemanticTokenTypes_Modifier, SemanticTokenTypes_Comment, SemanticTokenTypes_String, SemanticTokenTypes_Number, SemanticTokenTypes_Regexp, SemanticTokenTypes_Operator, SemanticTokenTypes_Decorator |
+| `recordFieldToken` | LSP semantic token type to use for record fields | `SemanticTokenTypes_Property` | SemanticTokenTypes_Namespace, SemanticTokenTypes_Type, SemanticTokenTypes_Class, SemanticTokenTypes_Enum, SemanticTokenTypes_Interface, SemanticTokenTypes_Struct, SemanticTokenTypes_TypeParameter, SemanticTokenTypes_Parameter, SemanticTokenTypes_Variable, SemanticTokenTypes_Property, SemanticTokenTypes_EnumMember, SemanticTokenTypes_Event, SemanticTokenTypes_Function, SemanticTokenTypes_Method, SemanticTokenTypes_Macro, SemanticTokenTypes_Keyword, SemanticTokenTypes_Modifier, SemanticTokenTypes_Comment, SemanticTokenTypes_String, SemanticTokenTypes_Number, SemanticTokenTypes_Regexp, SemanticTokenTypes_Operator, SemanticTokenTypes_Decorator |
+| `operatorToken` | LSP semantic token type to use for operators | `SemanticTokenTypes_Operator` | SemanticTokenTypes_Namespace, SemanticTokenTypes_Type, SemanticTokenTypes_Class, SemanticTokenTypes_Enum, SemanticTokenTypes_Interface, SemanticTokenTypes_Struct, SemanticTokenTypes_TypeParameter, SemanticTokenTypes_Parameter, SemanticTokenTypes_Variable, SemanticTokenTypes_Property, SemanticTokenTypes_EnumMember, SemanticTokenTypes_Event, SemanticTokenTypes_Function, SemanticTokenTypes_Method, SemanticTokenTypes_Macro, SemanticTokenTypes_Keyword, SemanticTokenTypes_Modifier, SemanticTokenTypes_Comment, SemanticTokenTypes_String, SemanticTokenTypes_Number, SemanticTokenTypes_Regexp, SemanticTokenTypes_Operator, SemanticTokenTypes_Decorator |
+| `moduleToken` | LSP semantic token type to use for modules | `SemanticTokenTypes_Namespace` | SemanticTokenTypes_Namespace, SemanticTokenTypes_Type, SemanticTokenTypes_Class, SemanticTokenTypes_Enum, SemanticTokenTypes_Interface, SemanticTokenTypes_Struct, SemanticTokenTypes_TypeParameter, SemanticTokenTypes_Parameter, SemanticTokenTypes_Variable, SemanticTokenTypes_Property, SemanticTokenTypes_EnumMember, SemanticTokenTypes_Event, SemanticTokenTypes_Function, SemanticTokenTypes_Method, SemanticTokenTypes_Macro, SemanticTokenTypes_Keyword, SemanticTokenTypes_Modifier, SemanticTokenTypes_Comment, SemanticTokenTypes_String, SemanticTokenTypes_Number, SemanticTokenTypes_Regexp, SemanticTokenTypes_Operator, SemanticTokenTypes_Decorator |
+
+## fourmolu
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `external` | Call out to an external "fourmolu" executable, rather than using the bundled library. | `False` | |
+| `path` | Set path to executable (for "external" mode). | `"fourmolu"` | |
+
+## cabal-gild
+| Property | Description | Default | Allowed values |
+| --- | --- | --- | --- |
+| `path` | Set path to 'cabal-gild' executable | `"cabal-gild"` | |
+
+
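Editor's note, usage sketch (not part of the generated files above or below): the properties documented in the markdown reference are exposed to clients as `haskell.plugin.*` settings, so a VS Code user could pin a few of them in `settings.json` (JSONC). The fourmolu, cabal-gild, and eval keys below mirror entries documented in this diff; the `semanticTokens.config` key prefix and the token-value spelling are assumptions inferred from the generic `haskell.plugin.<plugin>.config.<property>` pattern and should be checked against the generated schema.

```jsonc
{
  // Formatter executables, matching the "fourmolu" and "cabal-gild" tables above.
  "haskell.plugin.fourmolu.config.external": true,
  "haskell.plugin.fourmolu.config.path": "fourmolu",
  "haskell.plugin.cabal-gild.config.path": "cabal-gild",

  // Eval plugin: the schema diff below replaces the single globalOn switch
  // with per-feature toggles, so code actions and code lenses can differ.
  "haskell.plugin.eval.codeActionsOn": true,
  "haskell.plugin.eval.codeLensOn": false,

  // Semantic token remapping (assumed key prefix and value spelling;
  // consult the generated vscode-extension-schema for the exact enum strings).
  "haskell.plugin.semanticTokens.config.typeFamilyToken": "interface"
}
```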
diff --git a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json
index 938964fc50..1c0b19eb27 100644
--- a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json
+++ b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json
@@ -77,6 +77,18 @@
"scope": "resource",
"type": "boolean"
},
+ "haskell.plugin.eval.codeActionsOn": {
+ "default": true,
+ "description": "Enables eval code actions",
+ "scope": "resource",
+ "type": "boolean"
+ },
+ "haskell.plugin.eval.codeLensOn": {
+ "default": true,
+ "description": "Enables eval code lenses",
+ "scope": "resource",
+ "type": "boolean"
+ },
"haskell.plugin.eval.config.diff": {
"default": true,
"markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses",
@@ -89,12 +101,6 @@
"scope": "resource",
"type": "boolean"
},
- "haskell.plugin.eval.globalOn": {
- "default": true,
- "description": "Enables eval plugin",
- "scope": "resource",
- "type": "boolean"
- },
"haskell.plugin.explicit-fields.codeActionsOn": {
"default": true,
"description": "Enables explicit-fields code actions",